diff --git a/datashader/core.py b/datashader/core.py
index e83eae451..a0e746737 100644
--- a/datashader/core.py
+++ b/datashader/core.py
@@ -11,11 +11,14 @@ from xarray import DataArray, Dataset
 from collections import OrderedDict

-from .utils import Dispatcher, ngjit, calc_res, calc_bbox, orient_array, \
-    compute_coords, dshape_from_xarray_dataset
+from .utils import (
+    Dispatcher, ngjit, calc_res, calc_bbox, orient_array,
+    compute_coords, calc_res3d, calc_bbox3d, compute_coords3d,
+    dshape_from_xarray_dataset, orient_array3d
+)
 from .utils import get_indices, dshape_from_pandas, dshape_from_dask
 from .utils import Expr # noqa (API import)
-from .resampling import resample_2d, resample_2d_distributed
+from .resampling import resample_2d, resample_2d_distributed, resample_3d
 from . import reductions as rd

 try:
@@ -149,24 +152,27 @@ class Canvas(object):
     Parameters
     ----------
-    plot_width, plot_height : int, optional
-        Width and height of the output aggregate in pixels.
-    x_range, y_range : tuple, optional
+    plot_width, plot_height, plot_depth : int, optional
+        Width and height (and depth) of the output aggregate in pixels.
+    x_range, y_range, z_range : tuple, optional
         A tuple representing the bounds inclusive space ``[min, max]``
         along the axis.
-    x_axis_type, y_axis_type : str, optional
+    x_axis_type, y_axis_type, z_axis_type : str, optional
         The type of the axis. Valid options are ``'linear'`` [default],
         and ``'log'``.
     """
-    def __init__(self, plot_width=600, plot_height=600,
-                 x_range=None, y_range=None,
-                 x_axis_type='linear', y_axis_type='linear'):
+    def __init__(self, plot_width=600, plot_height=600, plot_depth=None,
+                 x_range=None, y_range=None, z_range=None,
+                 x_axis_type='linear', y_axis_type='linear', z_axis_type='linear'):
         self.plot_width = plot_width
         self.plot_height = plot_height
+        self.plot_depth = plot_depth
         self.x_range = None if x_range is None else tuple(x_range)
         self.y_range = None if y_range is None else tuple(y_range)
+        self.z_range = None if z_range is None else tuple(z_range)
         self.x_axis = _axis_lookup[x_axis_type]
         self.y_axis = _axis_lookup[y_axis_type]
+        self.z_axis = _axis_lookup[z_axis_type]

     def points(self, source, x=None, y=None, agg=None, geometry=None):
         """Compute a reduction by pixel, mapping data to pixels as points.
@@ -888,6 +894,46 @@ def trimesh(self, vertices, simplices, mesh=None, agg=None, interp=True, interpo
         return bypixel(source, self, Triangles(x, y, weights, weight_type=verts_have_weights,
                                                interp=interp), agg)
+
+    def _validate_regrid(self, source, agg, interpolate, upsample_methods, downsample_methods):
+        if interpolate not in upsample_methods:
+            raise ValueError('Invalid interpolate method: options include {}'.format(upsample_methods))
+
+        if not isinstance(source, (DataArray, Dataset)):
+            raise ValueError('Expected xarray DataArray or Dataset as '
+                             'the data source, found %s.'
+                             % type(source).__name__)
+
+        column = None
+        if isinstance(agg, rd.Reduction):
+            agg, column = type(agg), agg.column
+            if (isinstance(source, DataArray) and column is not None
+                    and source.name != column):
+                agg_repr = '%s(%r)' % (agg.__name__, column)
+                raise ValueError('DataArray name %r does not match '
+                                 'supplied reduction %s.' %
+                                 (source.name, agg_repr))
+
+        if isinstance(source, Dataset):
+            data_vars = list(source.data_vars)
+            if column is None:
+                raise ValueError('When supplying a Dataset the agg reduction '
+                                 'must specify the variable to aggregate. '
+                                 'Available data_vars include: %r.'
+                                 % data_vars)
+            elif column not in source.data_vars:
+                raise KeyError('Supplied reduction column %r not found '
+                               'in Dataset, expected one of the following '
+                               'data variables: %r.' % (column, data_vars))
+            source = source[column]
+
+        if agg not in downsample_methods.keys():
+            raise ValueError('Invalid aggregation method: options include {}'.format(list(downsample_methods.keys())))
+
+        if source.ndim not in [2, 3]:
+            raise ValueError('Raster aggregation expects a 2D or 3D '
+                             'DataArray, found %s dimensions' % source.ndim)
+        return source
+
     def raster(self,
                source,
                layer=None,
@@ -964,44 +1010,11 @@ def raster(self,
                               'min':'min', rd.min:'min',
                               'max':'max', rd.max:'max'}
-        if interpolate not in upsample_methods:
-            raise ValueError('Invalid interpolate method: options include {}'.format(upsample_methods))
-
-        if not isinstance(source, (DataArray, Dataset)):
-            raise ValueError('Expected xarray DataArray or Dataset as '
-                             'the data source, found %s.'
-                             % type(source).__name__)
-
-        column = None
-        if isinstance(agg, rd.Reduction):
-            agg, column = type(agg), agg.column
-            if (isinstance(source, DataArray) and column is not None
-                    and source.name != column):
-                agg_repr = '%s(%r)' % (agg.__name__, column)
-                raise ValueError('DataArray name %r does not match '
-                                 'supplied reduction %s.' %
-                                 (source.name, agg_repr))
-
-        if isinstance(source, Dataset):
-            data_vars = list(source.data_vars)
-            if column is None:
-                raise ValueError('When supplying a Dataset the agg reduction '
-                                 'must specify the variable to aggregate. '
-                                 'Available data_vars include: %r.' % data_vars)
-            elif column not in source.data_vars:
-                raise KeyError('Supplied reduction column %r not found '
-                               'in Dataset, expected one of the following '
-                               'data variables: %r.' % (column, data_vars))
-            source = source[column]
-
-        if agg not in downsample_methods.keys():
-            raise ValueError('Invalid aggregation method: options include {}'.format(list(downsample_methods.keys())))
+        source = self._validate_regrid(
+            source, agg, interpolate, upsample_methods, downsample_methods
+        )

         ds_method = downsample_methods[agg]

-        if source.ndim not in [2, 3]:
-            raise ValueError('Raster aggregation expects a 2D or 3D '
-                             'DataArray, found %s dimensions' % source.ndim)
-
         res = calc_res(source)
         ydim, xdim = source.dims[-2:]
         xvals, yvals = source[xdim].values, source[ydim].values
@@ -1031,7 +1044,11 @@ def raster(self,
         height_ratio = min((ymax - ymin) / (self.y_range[1] - self.y_range[0]), 1)

         if np.isclose(width_ratio, 0) or np.isclose(height_ratio, 0):
-            raise ValueError('Canvas x_range or y_range values do not match closely enough with the data source to be able to accurately rasterize. Please provide ranges that are more accurate.')
+            raise ValueError(
+                'Canvas x_range or y_range values do not match closely '
+                'enough with the data source to be able to accurately '
+                'rasterize. Please provide ranges that are more accurate.'
+            )

         w = max(int(round(self.plot_width * width_ratio)), 1)
         h = max(int(round(self.plot_height * height_ratio)), 1)
@@ -1131,10 +1148,173 @@ def raster(self,
             dims = [layer_dim]+dims
         return DataArray(data, coords=coords, dims=dims, attrs=attrs)

+    def volume(self, source, nan_value=None, agg='mean', interpolate='nearest'):
+        """Sample a raster dataset by canvas size and bounds.
+
+        Handles 3D xarray DataArrays.
+
+        Missing values (those having the value indicated by the "nodata"
+        attribute of the raster) are replaced with `NaN` if floats, and
+        0 if int.
+
+        Parameters
+        ----------
+        source : xarray.DataArray or xarray.Dataset
+            3D labelled array (if a Dataset is passed, the agg reduction
+            must specify the data variable to aggregate).
+        nan_value : int or float, optional
+            Optional nan_value which will be masked out when applying
+            the resampling.
+        agg : Reduction, optional default=mean()
+            Resampling mode when downsampling the volume. Currently only
+            ``mean`` is supported; it may be given as a string name or as
+            a reduction object.
+        interpolate : str, optional default=nearest
+            Resampling mode when upsampling the volume. Currently only
+            ``nearest`` is supported.
+
+        Returns
+        -------
+        data : xarray.DataArray
+        """
+        upsample_methods = ['nearest']
+
+        downsample_methods = {'mean': 'mean', rd.mean: 'mean'}
+
+        source = self._validate_regrid(
+            source, agg, interpolate, upsample_methods, downsample_methods
+        )
+
+        ds_method = downsample_methods[agg]
+
+        if self.plot_depth is None:
+            raise ValueError("Supply plot_depth to Canvas to aggregate in 3D.")
+
+        res = calc_res3d(source)
+        zdim, ydim, xdim = source.dims[-3:]
+        xvals, yvals, zvals = (source[dim].values for dim in (xdim, ydim, zdim))
+        left, bottom, back, right, top, front = calc_bbox3d(xvals, yvals, zvals, res)
+        array = orient_array3d(source, res)
+        dtype = array.dtype
+
+        if nan_value is not None:
+            mask = array == nan_value
+            array = np.ma.masked_array(array, mask=mask, fill_value=nan_value)
+            fill_value = nan_value
+        else:
+            fill_value = np.NaN
+
+        if self.x_range is None: self.x_range = (left, right)
+        if self.y_range is None: self.y_range = (bottom, top)
+        if self.z_range is None: self.z_range = (back, front)
+
+        # window coordinates
+        xmin = max(self.x_range[0], left)
+        ymin = max(self.y_range[0], bottom)
+        zmin = max(self.z_range[0], back)
+        xmax = min(self.x_range[1], right)
+        ymax = min(self.y_range[1], top)
+        zmax = min(self.z_range[1], front)
+
+        width_ratio = min((xmax - xmin) / (self.x_range[1] - self.x_range[0]), 1)
+        height_ratio = min((ymax - ymin) / (self.y_range[1] - self.y_range[0]), 1)
+        depth_ratio = min((zmax - zmin) / (self.z_range[1] - self.z_range[0]), 1)
+
+        if np.isclose(width_ratio, 0) or np.isclose(height_ratio, 0) or np.isclose(depth_ratio, 0):
+            raise ValueError('Canvas x_range, y_range or z_range values '
+                             'do not match closely enough with the data '
+                             'source to be able to accurately rasterize. '
+                             'Please provide ranges that are more accurate.')
+
+        w = max(int(round(self.plot_width * width_ratio)), 1)
+        h = max(int(round(self.plot_height * height_ratio)), 1)
+        d = max(int(round(self.plot_depth * depth_ratio)), 1)
+        cmin, cmax = get_indices(xmin, xmax, xvals, res[0])
+        rmin, rmax = get_indices(ymin, ymax, yvals, res[1])
+        dmin, dmax = get_indices(zmin, zmax, zvals, res[2])
+
+        kwargs = dict(w=w, h=h, d=d, fill_value=fill_value)
+        source_window = array[dmin:dmax+1, rmin:rmax+1, cmin:cmax+1]
+        data = resample_3d(source_window, **kwargs)
+
+        if w != self.plot_width or h != self.plot_height or d != self.plot_depth:
+            num_height = self.plot_height - h
+            num_width = self.plot_width - w
+            num_depth = self.plot_depth - d
+
+            lpad = xmin - self.x_range[0]
+            rpad = self.x_range[1] - xmax
+            lpct = lpad / (lpad + rpad) if lpad + rpad > 0 else 0
+            left = max(int(np.ceil(num_width * lpct)), 0)
+            right = max(num_width - left, 0)
+            lshape, rshape = (self.plot_depth, self.plot_height, left), (self.plot_depth, self.plot_height, right)
+            left_pad = np.full(lshape, fill_value, source_window.dtype)
+            right_pad = np.full(rshape, fill_value, source_window.dtype)
+
+            tpad = ymin - self.y_range[0]
+            bpad = self.y_range[1] - ymax
+            tpct = tpad / (tpad + bpad) if tpad + bpad > 0 else 0
+            top = max(int(np.ceil(num_height * tpct)), 0)
+            bottom = max(num_height - top, 0)
+            tshape, bshape = (self.plot_depth, top, w), (self.plot_depth, bottom, w)
+            top_pad = np.full(tshape, fill_value, source_window.dtype)
+            bottom_pad = np.full(bshape, fill_value, source_window.dtype)
+
+            bkpad = zmin - self.z_range[0]
+            frpad = self.z_range[1] - zmax
+            bkpct = bkpad / (bkpad + frpad) if (bkpad + frpad) > 0 else 0
+            back = max(int(np.ceil(num_depth * bkpct)), 0)
+            front = max(num_depth - back, 0)
+            bkshape, frshape = (back, h, w), (front, h, w)
+            back_pad = np.full(bkshape, fill_value, source_window.dtype)
+            front_pad = np.full(frshape, fill_value, source_window.dtype)
+
+            arrays = (back_pad, data) if back_pad.shape[0] > 0 else (data,)
+            if front_pad.shape[0] > 0:
+                arrays += (front_pad,)
+            data = np.concatenate(arrays, axis=0) if len(arrays) > 1 else arrays[0]
+
+            arrays = (top_pad, data) if top_pad.shape[1] > 0 else (data,)
+            if bottom_pad.shape[1] > 0:
+                arrays += (bottom_pad,)
+            data = np.concatenate(arrays, axis=1) if len(arrays) > 1 else arrays[0]
+
+            arrays = (left_pad, data) if left_pad.shape[2] > 0 else (data,)
+            if right_pad.shape[2] > 0:
+                arrays += (right_pad,)
+            data = np.concatenate(arrays, axis=2) if len(arrays) > 1 else arrays[0]
+
+        # Reorient array to original orientation
+        if res[2] < 0: data = data[::-1]
+        if res[1] < 0: data = data[:, ::-1]
+        if res[0] < 0: data = data[:, :, ::-1]
+
+        # Restore nan_value from masked array
+        if nan_value is not None:
+            data = data.filled()
+
+        # Restore original dtype
+        if dtype != data.dtype:
+            data = data.astype(dtype)
+
+        # Compute DataArray metadata
+        xs, ys, zs = compute_coords3d(
+            self.plot_width, self.plot_height, self.plot_depth,
+            self.x_range, self.y_range, self.z_range, res
+        )
+        coords = {xdim: xs, ydim: ys, zdim: zs}
+        dims = [zdim, ydim, xdim]
+        attrs = dict(res=res[0])
+        if source._file_obj is not None and hasattr(source._file_obj, 'nodata'):
+            attrs['nodata'] = source._file_obj.nodata
+        return DataArray(data, coords=coords, dims=dims, attrs=attrs)
+
     def validate(self):
         """Check that parameter settings are valid for this object"""
         self.x_axis.validate(self.x_range)
         self.y_axis.validate(self.y_range)
+        if self.plot_depth is not None:
+            self.z_axis.validate(self.z_range)


 def bypixel(source, canvas, glyph, agg):
diff --git a/datashader/resampling.py b/datashader/resampling.py
index 9c1992ac5..ead0e4d2b 100644
--- a/datashader/resampling.py
+++ b/datashader/resampling.py
@@ -75,6 +75,7 @@
 #: Constant indicating an empty 2-D mask
 _NOMASK2D = np.ma.getmaskarray(np.ma.array([[0]], mask=[[0]]))
+_NOMASK3D = np.ma.getmaskarray(np.ma.array([[[0]]], mask=[[[0]]]))

 _EPS = 1e-10

@@ -334,6 +335,44 @@ def resample_2d(src, w, h, ds_method='mean', us_method='linear',
     return _mask_or_not(resampled, src, fill_value)


+def resample_3d(src, w, h, d, fill_value=None, out=None):
+    """
+    Resample a 3-D grid to a new resolution.
+
+    Parameters
+    ----------
+    src : np.ndarray
+        The source array to resample
+    w : int
+        New grid width
+    h : int
+        New grid height
+    d : int
+        New grid depth
+    fill_value : scalar (optional)
+        If ``None``, it is taken from **src** if it is a masked array,
+        otherwise from *out* if it is a masked array,
+        otherwise numpy's default value is used.
+    out : numpy.ndarray (optional)
+        Alternate output array in which to place the result. The
+        default is *None*; if provided, it must have the same shape as
+        the expected output.
+
+    Returns
+    -------
+    resampled : numpy.ndarray
+        A resampled version of the *src* array.
+    """
+    out = _get_out(out, src, (d, h, w))
+    if out is None:
+        return src
+    mask, use_mask = _get_mask(src)
+    fill_value = _get_fill_value(fill_value, src, out)
+
+    resampled = _resample_3d(src, mask, use_mask, fill_value, out)
+    return _mask_or_not(resampled, src, fill_value)
+
+
 _resample_2d_delayed = delayed(resample_2d)


@@ -443,7 +482,8 @@ def _get_mask(src):
         mask = np.ma.getmask(src)
         if mask is not np.ma.nomask:
             return mask, True
-    return _NOMASK2D, False
+    mask = _NOMASK2D if len(src.shape) == 2 else _NOMASK3D
+    return mask, False


 def _mask_or_not(out, src, fill_value):
@@ -479,6 +519,17 @@ def _get_dimensions(src, out):
     return src_w, src_h, out_w, out_h


+@ngjit
+def _get_dimensions3d(src, out):
+    src_w = src.shape[-1]
+    src_h = src.shape[-2]
+    src_d = src.shape[-3]
+    out_w = out.shape[-1]
+    out_h = out.shape[-2]
+    out_d = out.shape[-3]
+    return src_w, src_h, src_d, out_w, out_h, out_d
+
+
 def _resample_2d(src, mask, use_mask, ds_method, us_method, fill_value,
                  mode_rank, x_offset, y_offset, out):
     src_w, src_h, out_w, out_h = _get_dimensions(src, out)
@@ -533,6 +584,34 @@ def _resample_2d(src, mask, use_mask, ds_method, us_method, fill_value,
     return src


+def _resample_3d(src, mask, use_mask, fill_value, out):
+    src_w, src_h, src_d, out_w, out_h, out_d = _get_dimensions3d(src, out)
+
+    if src_h == 0 or src_w == 0 or src_d == 0 or out_h == 0 or out_w == 0 or out_d == 0:
+        return np.zeros((out_d, out_h, out_w), dtype=src.dtype)
+    elif out_w < src_w and out_h < src_h and out_d < src_d:
+        return _downsample_3d_mean(src, mask, use_mask, fill_value, out)
+    elif out_w > src_w and out_h > src_h and out_d > src_d:
+        return _upsample_3d_nearest(src, mask, use_mask, fill_value, out)
+    elif out_w < src_w:
+        temp = np.zeros((src_d, src_h, out_w), dtype=src.dtype)
+        temp = _downsample_3d_mean(src, mask, use_mask, fill_value, temp)
+    else:
+        temp = np.zeros((src_d, src_h, out_w), dtype=src.dtype)
+        temp = _upsample_3d_nearest(src, mask, use_mask, fill_value, temp)
+
+    temp2 = np.zeros((src_d, out_h, out_w), dtype=src.dtype)
+    if out_h > src_h:
+        temp2 = _upsample_3d_nearest(temp, mask, use_mask, fill_value, temp2)
+    else:
+        temp2 = _downsample_3d_mean(temp, mask, use_mask, fill_value, temp2)
+
+    if out_d > src_d:
+        return _upsample_3d_nearest(temp2, mask, use_mask, fill_value, out)
+    else:
+        return _downsample_3d_mean(temp2, mask, use_mask, fill_value, out)
+
+
 @ngjit_parallel
 def _upsample_2d_nearest(src, mask, use_mask, fill_value, x_offset, y_offset, out):
     src_w, src_h, out_w, out_h = _get_dimensions(src, out)
@@ -562,6 +641,34 @@ def _upsample_2d_nearest(src, mask, use_mask, fill_value, x_offset, y_offset, ou
     return out


+@ngjit
+def _upsample_3d_nearest(src, mask, use_mask, fill_value, out):
+    src_w, src_h, src_d, out_w, out_h, out_d = _get_dimensions3d(src, out)
+
+    if src_w == out_w and src_h == out_h and src_d == out_d:
+        return src
+
+    if out_w < src_w or out_h < src_h or out_d < src_d:
+        raise ValueError("invalid target size")
+
+    scale_x = src_w / out_w
+    scale_y = src_h / out_h
+    scale_z = src_d / out_d
+
+    for out_z in range(out_d):
+        src_z = int(scale_z * out_z)
+        for out_y in range(out_h):
+            src_y = int(scale_y * out_y)
+            for out_x in range(out_w):
+                src_x = int(scale_x * out_x)
+                value = src[src_z, src_y, src_x]
+                if np.isfinite(value) and not (use_mask and mask[src_z, src_y, src_x]):
+                    out[out_z, out_y, out_x] = value
+                else:
+                    out[out_z, out_y, out_x] = fill_value
+    return out
+
+
 @ngjit_parallel
 def _upsample_2d_linear(src, mask, use_mask, fill_value, x_offset, y_offset, out):
     src_w, src_h, out_w, out_h = _get_dimensions(src, out)
@@ -882,6 +989,75 @@ def _downsample_2d_mean(src, mask, use_mask, method, fill_value,
     return out


+@ngjit
+def _downsample_3d_mean(src, mask, use_mask, fill_value, out):
+    src_w, src_h, src_d, out_w, out_h, out_d = _get_dimensions3d(src, out)
+
+    if src_w == out_w and src_h == out_h and src_d == out_d:
+        return src
+
+    if out_w > src_w or out_h > src_h or out_d > src_d:
+        raise ValueError("invalid target size")
+
+    scale_x = src_w / out_w
+    scale_y = src_h / out_h
+    scale_z = src_d / out_d
+
+    for out_z in range(out_d):
+        src_zf0 = (scale_z * out_z)
+        src_zf1 = (src_zf0 + scale_z)
+        src_z0 = int(src_zf0)
+        src_z1 = int(src_zf1)
+
+        wz0 = 1.0 - (src_zf0 - src_z0)
+        wz1 = src_zf1 - src_z1
+        if wz1 < _EPS:
+            wz1 = 1.0
+            if src_z1 > src_z0:
+                src_z1 -= 1
+        for out_y in range(out_h):
+            src_yf0 = (scale_y * out_y)
+            src_yf1 = (src_yf0 + scale_y)
+            src_y0 = int(src_yf0)
+            src_y1 = int(src_yf1)
+
+            wy0 = 1.0 - (src_yf0 - src_y0)
+            wy1 = src_yf1 - src_y1
+            if wy1 < _EPS:
+                wy1 = 1.0
+                if src_y1 > src_y0:
+                    src_y1 -= 1
+            for out_x in range(out_w):
+                src_xf0 = (scale_x * out_x)
+                src_xf1 = src_xf0 + scale_x
+                src_x0 = int(src_xf0)
+                src_x1 = int(src_xf1)
+                wx0 = 1.0 - (src_xf0 - src_x0)
+                wx1 = src_xf1 - src_x1
+                if wx1 < _EPS:
+                    wx1 = 1.0
+                    if src_x1 > src_x0:
+                        src_x1 -= 1
+                v_sum = 0.0
+                w_sum = 0.0
+                for src_z in range(src_z0, src_z1 + 1):
+                    wz = wz0 if (src_z == src_z0) else wz1 if (src_z == src_z1) else 1.0
+                    for src_y in range(src_y0, src_y1 + 1):
+                        wy = wy0 if (src_y == src_y0) else wy1 if (src_y == src_y1) else 1.0
+                        for src_x in range(src_x0, src_x1 + 1):
+                            wx = wx0 if (src_x == src_x0) else wx1 if (src_x == src_x1) else 1.0
+                            v = src[src_z, src_y, src_x]
+                            if np.isfinite(v) and not (use_mask and mask[src_z, src_y, src_x]):
+                                w = wx * wy * wz
+                                v_sum += w * v
+                                w_sum += w
+                if w_sum < _EPS:
+                    out[out_z, out_y, out_x] = fill_value
+                else:
+                    out[out_z, out_y, out_x] = v_sum / w_sum
+    return out
+
+
 @ngjit_parallel
 def _downsample_2d_std_var(src, mask, use_mask, method, fill_value,
                            mode_rank, x_offset, y_offset, out):
diff --git a/datashader/utils.py b/datashader/utils.py
index 07da2823f..18fe099cb 100644
--- a/datashader/utils.py
+++ b/datashader/utils.py
@@ -167,8 +167,8 @@ def nansum_missing(array, axis):
 def calc_res(raster):
-    """Calculate the resolution of xarray.DataArray raster and return it as the
-    two-tuple (xres, yres).
+    """Calculate the resolution of xarray.DataArray raster and return
+    it as the two-tuple (xres, yres).
     """
     h, w = raster.shape[-2:]
     ydim, xdim = raster.dims[-2:]
@@ -219,6 +219,47 @@ def calc_bbox(xs, ys, res):
     return xmin-xpad, ymin+ypad, xmax-xpad, ymax+ypad


+def calc_res3d(volume):
+    """Calculate the resolution of xarray.DataArray volume and return
+    it as the three-tuple (xres, yres, zres).
+    """
+    d, h, w = volume.shape[-3:]
+    zdim, ydim, xdim = volume.dims[-3:]
+    xcoords = volume[xdim].values
+    ycoords = volume[ydim].values
+    zcoords = volume[zdim].values
+    xres = (xcoords[-1] - xcoords[0]) / (w - 1)
+    yres = (ycoords[-1] - ycoords[0]) / (h - 1)
+    zres = (zcoords[-1] - zcoords[0]) / (d - 1)
+    return xres, yres, zres
+
+
+def calc_bbox3d(xs, ys, zs, res):
+    """
+    Extends calc_bbox to 3D.
+    """
+    xbound = xs.max() if res[0] < 0 else xs.min()
+    ybound = ys.max() if res[1] < 0 else ys.min()
+    zbound = zs.max() if res[2] < 0 else zs.min()
+
+    xmin = ymin = zmin = np.inf
+    xmax = ymax = zmax = -np.inf
+    Ab = np.array([[res[0], 0., 0., xbound],
+                   [0., res[1], 0., ybound],
+                   [0., 0., res[2], zbound],
+                   [0., 0., 0., 1.]])
+    for x_, y_, z_ in [(0, 0, 0), (0, 0, len(zs)), (0, len(ys), 0), (len(xs), 0, 0), (len(xs), len(ys), len(zs))]:
+        x, y, z, _ = np.dot(Ab, np.array([x_, y_, z_, 1.]))
+        if x < xmin: xmin = x
+        if x > xmax: xmax = x
+        if y < ymin: ymin = y
+        if y > ymax: ymax = y
+        if z < zmin: zmin = z
+        if z > zmax: zmax = z
+    xpad, ypad, zpad = res[0]/2., res[1]/2., res[2]/2.
+    return xmin-xpad, ymin-ypad, zmin-zpad, xmax-xpad, ymax-ypad, zmax-zpad
+
+
 def get_indices(start, end, coords, res):
     """
     Transform continuous start and end coordinates into array indices.
@@ -280,6 +321,37 @@ def orient_array(raster, res=None, layer=None):
     return array


+def orient_array3d(volume, res=None):
+    """
+    Reorients the array to a canonical orientation depending on
+    whether the x, y and z-resolution values are positive or negative.
+
+    Parameters
+    ----------
+    volume : DataArray
+        xarray DataArray to be reoriented
+    res : tuple
+        Three-tuple (int, int, int) which includes x, y and z resolutions
+        (aka "grid/cell sizes"), respectively.
+
+    Returns
+    -------
+    array : numpy.ndarray
+        Reoriented 3D NumPy ndarray
+    """
+    if res is None:
+        res = calc_res3d(volume)
+    array = volume.data
+    r0zero = np.timedelta64(0, 'ns') if isinstance(res[0], np.timedelta64) else 0
+    r1zero = np.timedelta64(0, 'ns') if isinstance(res[1], np.timedelta64) else 0
+    r2zero = np.timedelta64(0, 'ns') if isinstance(res[2], np.timedelta64) else 0
+    if res[0] < r0zero: array = array[:, :, ::-1]
+    if res[1] < r1zero: array = array[:, ::-1]
+    if res[2] < r2zero: array = array[::-1]
+    return array
+
+
 def compute_coords(width, height, x_range, y_range, res):
     """
     Computes DataArray coordinates at bin centers
@@ -318,6 +390,54 @@ def compute_coords(width, height, x_range, y_range, res):
     return xs, ys


+def compute_coords3d(width, height, depth, x_range, y_range, z_range, res):
+    """
+    Computes DataArray coordinates at bin centers
+
+    Parameters
+    ----------
+    width : int
+        Number of coordinates along the x-axis
+    height : int
+        Number of coordinates along the y-axis
+    depth : int
+        Number of coordinates along the z-axis
+    x_range : tuple
+        Left and right edges of the coordinates
+    y_range : tuple
+        Bottom and top edges of the coordinates
+    z_range : tuple
+        Back and front edges of the coordinates
+    res : tuple
+        Three-tuple (int, int, int) which includes x, y and z resolutions
+        (aka "grid/cell sizes"), respectively. Used to determine coordinate
+        orientation.
+
+    Returns
+    -------
+    xs : numpy.ndarray
+        1D array of x-coordinates
+    ys : numpy.ndarray
+        1D array of y-coordinates
+    zs : numpy.ndarray
+        1D array of z-coordinates
+    """
+    (x0, x1), (y0, y1), (z0, z1) = x_range, y_range, z_range
+    xd = (x1-x0)/float(width)
+    yd = (y1-y0)/float(height)
+    zd = (z1-z0)/float(depth)
+    xpad, ypad, zpad = abs(xd/2.), abs(yd/2.), abs(zd/2.)
+    x0, x1 = x0+xpad, x1-xpad
+    y0, y1 = y0+ypad, y1-ypad
+    z0, z1 = z0+zpad, z1-zpad
+    xs = np.linspace(x0, x1, width)
+    ys = np.linspace(y0, y1, height)
+    zs = np.linspace(z0, z1, depth)
+    if res[0] < 0: xs = xs[::-1]
+    if res[1] < 0: ys = ys[::-1]
+    if res[2] < 0: zs = zs[::-1]
+    return xs, ys, zs
+
+
 def downsample_aggregate(aggregate, factor, how='mean'):
     """Create downsampled aggregate factor in pixels units"""
     ys, xs = aggregate.shape[:2]
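
Below is a quick usage sketch of the API this patch adds, included for reviewers rather than as part of the diff itself. It assumes the patch is applied and that the input is a 3D xarray.DataArray with monotonic, evenly spaced coordinates; the variable, dimension, and size choices ('density', 'z'/'y'/'x', 40x50x60) are illustrative only.

    import numpy as np
    import xarray as xr
    import datashader as ds

    # Synthetic 3D volume with explicit coordinates on every dimension.
    vol = xr.DataArray(
        np.random.random((40, 50, 60)).astype('float32'),
        coords={'z': np.arange(40), 'y': np.arange(50), 'x': np.arange(60)},
        dims=['z', 'y', 'x'], name='density')

    # plot_depth switches on 3D aggregation; x/y/z ranges default to the data bounds.
    cvs = ds.Canvas(plot_width=30, plot_height=25, plot_depth=20)

    # Downsampling uses the mean reduction; upsampling uses nearest-neighbour.
    agg = cvs.volume(vol, agg='mean', interpolate='nearest')
    print(agg.shape)  # expected (20, 25, 30), i.e. (z, y, x)

    # The lower-level resampler can also be applied to a bare ndarray.
    from datashader.resampling import resample_3d
    small = resample_3d(vol.values, w=30, h=25, d=20)

Canvas.volume mirrors Canvas.raster: it windows the source to the canvas ranges, resamples with resample_3d, pads to the requested shape, and returns a DataArray with bin-centre coordinates.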