diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5e371a90722d553de8094c6f61646a534f9fe4a7..cf83c922635ebbd8dee34ba7b6e83ebfe6fbce1a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,6 +9,15 @@
 * `Output.targets` is private, `Output.get_targets()` becomes property `Output.targets` (!273)
 * Composition metadata was restructured to hold components and adapters in separate sub-dictionaries (!274)
 * Time components implement method `_next_time` instead of property `next_time` (!283)
+* `Info` now has properties for `grid`, `time` and `mask` (!286)
+* all init-args of `Info` are now optional (!286)
+* `Info.accepts` has changed signature: renamed `ignore_none` to `incoming_donwstream` (!286)
+* `Info.accepts` now only checks: `grid`, `mask` and `units` (other meta data can differ) (!286)
+* `Grid.to_/from_canonical` now allows additional dimensions (!286)
+* `data_shape` now a property of `GridBase` (!286)
+  * `NoGrid` can be initialized with `dim` or `data_shape` now
+  * `NoGrid.data_shape` can have `-1` entries for variable size dimensions
+  * if only `dim` given to `NoGrid`, all entries in `data_shape` will be `-1`
 
 ### Features
 
@@ -28,6 +37,18 @@
   * added missing casting methods to convert esri to uniform and uniform to rectilinear (when you want to use point data on an esri-grid, you can cast it to uniform first)
   * added `axes_attributes` also to unstructured grids
 * Grid method `compatible_with` now has a `check_location` argument to optionally check data location (!280)
+* added `Mask` enum with two options: (!286)
+  * `Mask.FLEX` for flexible masking
+  * `Mask.NONE` to explicitly use plain numpy arrays
+* added `mask` attribute and init-arg to `Info` : can be a `Mask` value or a valid mask for `numpy.ma.MaskedArray` (!286)
+* `data.tools.prepare` now applies masks to data if set in `Info` object (!286)
+* `ARegridding` now has an `out_mask` arg (!286)
+* `RegridNearest` and `RegridLinear` now support explicitly masked data (input doesn't have `Mask.FLEX`) (!286)
+* adapters now have an `in_info` property (!286)
+
+### Bug fixes
+
+* cells for structured grids in 3D are now created correctly (no negative Volume in VTK/ESMF) (!286)
 
 ### Documentation
 
diff --git a/docs/source/_templates/autosummary/class.rst b/docs/source/_templates/autosummary/class.rst
index 590af265147d79bc2de584b58768489ab1fade8c..a64371c61467d0b7e32c8c52d2fd614bb68b0e2f 100644
--- a/docs/source/_templates/autosummary/class.rst
+++ b/docs/source/_templates/autosummary/class.rst
@@ -7,7 +7,7 @@
    :private-members: _initialize, _connect, _validate, _update, _finalize, _get_data, _get_info, _source_updated, _pulled
    :special-members: __getitem__
    :undoc-members:
-   :inherited-members:
+   :inherited-members: object, int
    :show-inheritance:
 
 .. raw:: latex
diff --git a/docs/source/conf.py b/docs/source/conf.py
index a8dc741ee498d1a6ee76416a1e2ca9e613c3f6d5..3b343032ab93e5fa9062ee33e16c49319fc67f8d 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -1,7 +1,22 @@
 import datetime
+import inspect
+from enum import Enum
 
 from finam import __version__ as finam_version
 
+
+def skip_member(app, what, name, obj, skip, options):
+    # Check if we're dealing with an Enum
+    if inspect.isclass(obj) and issubclass(obj, Enum):
+        if name not in [e.name for e in obj]:
+            return True  # Skip anything that isn’t an enum member
+    return skip
+
+
+def setup(app):
+    app.connect("autodoc-skip-member", skip_member)
+
+
 # Configuration file for the Sphinx documentation builder.
 #
 # For the full list of built-in configuration values, see the documentation:
diff --git a/docs/source/finam-book/development/data_metadata.rst b/docs/source/finam-book/development/data_metadata.rst
index 9c7a46f14262b87726a323e3922e1a138dbc4432..a794b41132b02045a30c58b293b9c5f1d07d5c2d 100644
--- a/docs/source/finam-book/development/data_metadata.rst
+++ b/docs/source/finam-book/development/data_metadata.rst
@@ -42,11 +42,23 @@ Masked arrays
 FINAM uses :class:`numpy.ma.MaskedArray` inside :class:`pint.Quantity` to represent masked data.
 Masked data does not require any special treatment and can be used like usual numpy arrays.
 
+By default FINAM will allow data to have flexible masks, which means they can change over time.
+In the :class:`.Info` object (see below), the mask of the data can be specified:
+
+* :any:`.Mask.FLEX`: data can be masked or unmasked and the mask could change over time (default)
+* :any:`.Mask.NONE`: data is unmasked and exchanged as plain numpy arrays
+* :class:`numpy.ndarray` or :class:`bool`: data is masked with a given mask that is constant over time
+
 Convenience functions for masked arrays are:
 
 * :func:`is_masked_array <.data.is_masked_array>` to check if the given data is a masked array
 * :func:`has_masked_values <.data.has_masked_values>` to check if the given data is a masked array and has some values masked
 * :func:`filled <.data.filled>` to create a copy of the data with masked entries filled with a given value, if it is a masked array
+* :func:`to_masked <.data.to_masked>` to create a masked version of the data
+* :func:`to_compressed <.data.to_compressed>` to create a flattened version of the data only containing the unmasked values
+* :func:`from_compressed <.data.from_compressed>` to create a full mask array from a compressed version of the data
+* :func:`masks_compatible <.data.masks_compatible>` to check if mask settings in info objects are compatible
+* :func:`masks_equal <.data.masks_equal>` to check if masks are equal
 
 .. warning::
     Due to a :mod:`numpy` bug, quantities should not be created from masked data using multiplication syntax (i.e. ``magnitude * units``).
@@ -88,14 +100,16 @@ The :class:`.Info` object
 Objects of type :class:`.Info` represent the metadata associated with an input or output.
 It has the following properties:
 
+* ``time`` - initial time stamp for the associated data
 * ``grid`` - for the `Grid specification`_
 * ``meta`` - a :class:`dict` for all other metadata
+* ``mask`` - the mask specification for the data, either :class:`.Mask`, :class:`numpy.ndarray` or :class:`bool`
 
 For convenience, entries in ``meta`` can be used like normal member variables:
 
 .. testsetup:: create-info
 
-    from finam import Info, NoGrid
+    from finam import Info, NoGrid, Mask
     from datetime import datetime
 
 .. testcode:: create-info
@@ -103,6 +117,7 @@ For convenience, entries in ``meta`` can be used like normal member variables:
     info = Info(
         time=datetime(2000, 1, 1),
         grid=NoGrid(),
+        mask=Mask.NONE,
         units="m",
         foo="bar"
     )
@@ -134,7 +149,7 @@ In component code, these two lines are equivalent:
 Metadata from source or target
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-Any :class:`.Info` attributes initialized with `None` will be filled from the metadata on the other end of the coupling link.
+Any :class:`.Info` attributes initialized with `None` (default for all entries) will be filled from the metadata on the other end of the coupling link.
 E.g. if the grid specification of an input is intended to be taken from the connected output, the input can be initialized like this:
 
 .. testcode:: create-inputs
diff --git a/pyproject.toml b/pyproject.toml
index 71049752160d0f1bdcaf6fd381574043f0cf8587..66e30b06337a471f3f7019bceee8f3cf11caeda0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -147,4 +147,5 @@ multi_line_output = 3
     max-parents = 10
     min-public-methods = 0
     max-public-methods = 25
-    max-positional-arguments=15
+    max-positional-arguments = 15
+    max-returns = 10
diff --git a/src/finam/__init__.py b/src/finam/__init__.py
index 743564dd711f116460ab792745d5e711527cf3aa..2bbc49d7cc16b3e5351cdcee5f0e5d35387ca47b 100644
--- a/src/finam/__init__.py
+++ b/src/finam/__init__.py
@@ -79,6 +79,7 @@ Utilities for data and metadata handling.
    :caption: Data tools
 
     Info
+    Mask
     UNITS
 
 Interfaces
@@ -145,7 +146,7 @@ from .data.grid_spec import (
     UnstructuredPoints,
 )
 from .data.grid_tools import CellType, Location
-from .data.tools import UNITS, Info
+from .data.tools import UNITS, Info, Mask
 from .errors import (
     FinamCircularCouplingError,
     FinamConnectError,
@@ -225,7 +226,7 @@ __all__ += [
     "UnstructuredPoints",
 ]
 __all__ += ["CellType", "Location"]
-__all__ += ["UNITS", "Info"]
+__all__ += ["UNITS", "Info", "Mask"]
 __all__ += [
     "FinamCircularCouplingError",
     "FinamConnectError",
diff --git a/src/finam/adapters/base.py b/src/finam/adapters/base.py
index b8055b7ac6fff8851761ae1dcba3fd860c02cfec..fd4f6fbc32d107b088cc3fe068d992adc5256d9a 100644
--- a/src/finam/adapters/base.py
+++ b/src/finam/adapters/base.py
@@ -1,6 +1,7 @@
 """
 Basic data transformation adapters.
 """
+
 import numpy as np
 
 from ..data.grid_spec import NoGrid
@@ -123,7 +124,6 @@ class ValueToGrid(Adapter):
     def __init__(self, grid):
         super().__init__()
         self.grid = grid
-        self._info = None
 
     def _get_data(self, time, target):
         """Get the output's data-set for the given time.
@@ -139,12 +139,7 @@ class ValueToGrid(Adapter):
             data-set for the requested time.
         """
         value = self.pull_data(time, target)
-        return quantify(
-            np.full(
-                self._info.grid.data_shape, get_magnitude(value), dtype=value.dtype
-            ),
-            get_units(value),
-        )
+        return np.full(self.info.grid_shape, get_magnitude(value))
 
     def _get_info(self, info):
         up_info = info.copy_with(grid=NoGrid())
@@ -157,7 +152,6 @@ class ValueToGrid(Adapter):
                     f"Grid specifications don't match. Target has {info.grid}, expected {out_info.grid}"
                 )
 
-        self._info = out_info
         return out_info
 
 
diff --git a/src/finam/adapters/regrid.py b/src/finam/adapters/regrid.py
index 0fc9fe80aaf329c824aebbf9de99af6313986913..a62ee111105e8128431e2fb18957b2e59ee19724 100644
--- a/src/finam/adapters/regrid.py
+++ b/src/finam/adapters/regrid.py
@@ -3,6 +3,7 @@ Basic linear and nearest neighbour regridding adapters.
 
 See package `finam-regrid <https://finam.pages.ufz.de/finam-regrid/>`_ for more advanced regridding.
 """
+
 from abc import ABC, abstractmethod
 
 import numpy as np
@@ -12,7 +13,7 @@ from scipy.spatial import KDTree
 
 from ..data import tools as dtools
 from ..data.grid_spec import StructuredGrid
-from ..errors import FinamMetaDataError
+from ..errors import FinamDataError, FinamMetaDataError
 from ..sdk import Adapter
 from ..tools.log_helper import ErrorLogger
 
@@ -26,20 +27,26 @@ __all__ = [
 class ARegridding(Adapter, ABC):
     """Abstract regridding class for handling data info"""
 
-    def __init__(self, in_grid=None, out_grid=None):
+    def __init__(self, in_grid=None, out_grid=None, out_mask=None):
         super().__init__()
         self.input_grid = in_grid
         self.output_grid = out_grid
+        if dtools.mask_specified(out_mask) and out_mask is not None:
+            out_mask = np.ma.make_mask(out_mask, shrink=False)
+        self.output_mask = out_mask
+        self.downstream_mask = None
+        self.input_mask = None
         self.input_meta = None
         self.transformer = None
         self._is_initialized = False
+        self._out_mask_checked = False
 
     @abstractmethod
     def _update_grid_specs(self):
         """set up interpolator"""
 
     def _get_info(self, info):
-        request = info.copy_with(grid=self.input_grid)
+        request = info.copy_with(grid=self.input_grid, mask=None)
         in_info = self.exchange_info(request)
 
         if self.output_grid is None and info.grid is None:
@@ -49,17 +56,24 @@ class ARegridding(Adapter, ABC):
             with ErrorLogger(self.logger):
                 raise FinamMetaDataError("Missing source grid specification")
 
+        if self.output_mask is None and info.mask is None:
+            with ErrorLogger(self.logger):
+                raise FinamMetaDataError("Missing target mask specification")
+        if self.input_mask is None and in_info.mask is None:
+            with ErrorLogger(self.logger):
+                raise FinamMetaDataError("Missing source mask specification")
+
         if (
             self.output_grid is not None
             and info.grid is not None
             and self.output_grid != info.grid
         ):
             with ErrorLogger(self.logger):
-                raise FinamMetaDataError(
-                    "Target grid specification is already set, new specs differ"
-                )
+                msg = "Target grid specification is already set, new specs differ"
+                raise FinamMetaDataError(msg)
 
         self.input_grid = self.input_grid or in_info.grid
+        self.input_mask = self.input_mask or in_info.mask
         self.output_grid = self.output_grid or info.grid
 
         if self.input_grid.crs is None and self.output_grid.crs is not None:
@@ -67,21 +81,75 @@ class ARegridding(Adapter, ABC):
         if self.output_grid.crs is None and self.input_grid.crs is not None:
             raise FinamMetaDataError("output grid has a CRS, but input grid does not")
 
-        out_info = in_info.copy_with(grid=self.output_grid)
-
         if not self._is_initialized:
+            self.downstream_mask = info.mask
+            self.transformer = _create_transformer(self.output_grid, self.input_grid)
             self._update_grid_specs()
+            # self.output_mask may be determined by "_update_grid_specs"
+            self._check_and_set_out_mask()
             self._is_initialized = True
 
         self.input_meta = in_info.meta
-
-        return out_info
+        return in_info.copy_with(grid=self.output_grid, mask=self.output_mask)
 
     def _do_transform(self, points):
         if self.transformer is None:
             return points
         return np.asarray(list(self.transformer.itransform(points)))
 
+    def _check_and_set_out_mask(self):
+        if self._out_mask_checked:
+            return  # already done
+        if (
+            self.output_mask is not None
+            and self.downstream_mask is not None
+            and not dtools.masks_compatible(
+                self.output_mask, self.downstream_mask, True
+            )
+        ):
+            with ErrorLogger(self.logger):
+                msg = (
+                    "Regrid: Target mask specification is already set, new specs differ"
+                )
+                raise FinamMetaDataError(msg)
+        self.output_mask = (
+            self.output_mask if self.output_mask is not None else self.downstream_mask
+        )
+        self._out_mask_checked = self.output_mask is not None
+
+    def _need_mask(self, mask):
+        return dtools.mask_specified(mask) and mask is not np.ma.nomask
+
+    def _get_in_coords(self):
+        if self._need_mask(self.input_mask):
+            return self.input_grid.data_points[
+                np.logical_not(self.input_mask.ravel(order=self.input_grid.order))
+            ]
+        return self.input_grid.data_points
+
+    def _get_out_coords(self):
+        if not self._out_mask_checked:
+            with ErrorLogger(self.logger):
+                msg = (
+                    "Regrid: Output coordinates weren't checked for mask compatibility"
+                )
+                raise FinamMetaDataError(msg)
+        if self._need_mask(self.output_mask):
+            out_data_points = self.output_grid.data_points[
+                np.logical_not(self.output_mask.ravel(order=self.output_grid.order))
+            ]
+        else:
+            out_data_points = self.output_grid.data_points
+        return self._do_transform(out_data_points)
+
+    def _check_in_data(self, in_data):
+        if dtools.is_masked_array(in_data) and not dtools.mask_specified(
+            self.input_mask
+        ):
+            with ErrorLogger(self.logger):
+                msg = "For regridding masked input data, you need to explicitly set the mask in the input info."
+                raise FinamDataError(msg)
+
 
 class RegridNearest(ARegridding):
     """Regrid data between two grid specifications with nearest neighbour interpolation.
@@ -112,37 +180,42 @@ class RegridNearest(ARegridding):
         Input grid specification. Will be taken from source component if not specified.
     out_grid : Grid or None (optional)
         Output grid specification. Will be taken from target component if not specified.
+    out_mask : :any:`Mask` value or valid boolean mask for :any:`MaskedArray` or None, optional
+        masking specification of the regridding output. Options:
+            * :any:`Mask.FLEX`: data will be unmasked
+            * :any:`Mask.NONE`: data will be unmasked and given as plain numpy array
+            * valid boolean mask for MaskedArray
+            * None: will be determined by connected target
     tree_options : dict
         kwargs for :class:`scipy.spatial.KDTree`
     """
 
-    def __init__(self, in_grid=None, out_grid=None, tree_options=None):
-        super().__init__(in_grid, out_grid)
+    def __init__(self, in_grid=None, out_grid=None, out_mask=None, tree_options=None):
+        super().__init__(in_grid, out_grid, out_mask)
         self.tree_options = tree_options
         self.ids = None
 
     def _update_grid_specs(self):
-        self.transformer = _create_transformer(self.output_grid, self.input_grid)
-        out_coords = self._do_transform(self.output_grid.data_points)
-
+        if self.input_grid.dim != self.output_grid.dim:
+            msg = "Input grid and output grid have different dimensions"
+            raise FinamMetaDataError(msg)
+        # out mask not restricted by nearest interpolation
+        self._check_and_set_out_mask()
         # generate IDs to select data
         kw = self.tree_options or {}
-        tree = KDTree(self.input_grid.data_points, **kw)
+        tree = KDTree(self._get_in_coords(), **kw)
         # only store IDs, since they will be constant
-        self.ids = tree.query(out_coords)[1]
+        self.ids = tree.query(self._get_out_coords())[1]
 
     def _get_data(self, time, target):
         in_data = self.pull_data(time, target)
-
-        if dtools.is_masked_array(in_data):
-            with ErrorLogger(self.logger):
-                msg = "Regridding is currently not implemented for masked data"
-                raise NotImplementedError(msg)
-
-        res = in_data.reshape(-1, order=self.input_grid.order)[self.ids].reshape(
-            self.output_grid.data_shape, order=self.output_grid.order
+        self._check_in_data(in_data)
+        return dtools.from_compressed(
+            dtools.to_compressed(in_data, order=self.input_grid.order)[self.ids],
+            shape=self.output_grid.data_shape,
+            order=self.output_grid.order,
+            mask=self.output_mask,
         )
-        return res
 
 
 class RegridLinear(ARegridding):
@@ -180,6 +253,12 @@ class RegridLinear(ARegridding):
         Input grid specification. Will be taken from source component if not specified.
     out_grid : Grid or None (optional)
         Output grid specification. Will be taken from target component if not specified.
+    out_mask : :any:`Mask` value or valid boolean mask for :any:`MaskedArray` or None, optional
+        masking specification of the regridding output. Options:
+            * :any:`Mask.FLEX`: data will be unmasked
+            * :any:`Mask.NONE`: data will be unmasked and given as plain numpy array
+            * valid boolean mask for MaskedArray
+            * None: will be determined by connected target
     fill_with_nearest : bool
         Whether out of bounds points should be filled with the nearest value. Default ``False``.
     tree_options : dict
@@ -187,9 +266,14 @@ class RegridLinear(ARegridding):
     """
 
     def __init__(
-        self, in_grid=None, out_grid=None, fill_with_nearest=False, tree_options=None
+        self,
+        in_grid=None,
+        out_grid=None,
+        out_mask=None,
+        fill_with_nearest=False,
+        tree_options=None,
     ):
-        super().__init__(in_grid, out_grid)
+        super().__init__(in_grid, out_grid, out_mask)
         self.tree_options = tree_options
         self.fill_with_nearest = bool(fill_with_nearest)
         self.ids = None
@@ -197,41 +281,70 @@ class RegridLinear(ARegridding):
         self.out_ids = None
         self.fill_ids = None
         self.out_coords = None
+        self.structured = False
 
     def _update_grid_specs(self):
-        self.transformer = _create_transformer(self.output_grid, self.input_grid)
-        self.out_coords = self._do_transform(self.output_grid.data_points)
-
-        if isinstance(self.input_grid, StructuredGrid):
+        if isinstance(self.input_grid, StructuredGrid) and not self._need_mask(
+            self.input_mask
+        ):
+            self.structured = True
             self.inter = RegularGridInterpolator(
                 points=self.input_grid.data_axes,
                 values=np.zeros(self.input_grid.data_shape, dtype=np.double),
                 bounds_error=False,
             )
         else:
+            in_coords = self._get_in_coords()
             self.inter = LinearNDInterpolator(
-                points=self.input_grid.data_points,
-                values=np.zeros(np.prod(self.input_grid.data_shape), dtype=np.double),
+                points=in_coords,
+                values=np.zeros(len(in_coords), dtype=np.double),
             )
         if self.fill_with_nearest:
+            # out mask not restricted when filled with nearest
+            self._check_and_set_out_mask()
+            self.out_coords = self._get_out_coords()
             # check for outliers once
-            points = self.out_coords
-            res = self.inter(points)
+            res = self.inter(self.out_coords)
             self.out_ids = np.isnan(res)
-            out_points = points[self.out_ids]
+            out_points = self.out_coords[self.out_ids]
             kw = self.tree_options or {}
-            tree = KDTree(self.input_grid.data_points, **kw)
+            tree = KDTree(self._get_in_coords(), **kw)
             self.fill_ids = tree.query(out_points)[1]
+        else:
+            mask_save = self.output_mask
+            # temporarily unmask
+            self._out_mask_checked = True
+            self.output_mask = np.ma.nomask
+            # check for outliers once
+            res = self.inter(self._get_out_coords())
+            # create mask from outliers
+            outlier_mask = np.ma.make_mask(
+                dtools.from_compressed(
+                    np.isnan(res), self.output_grid.data_shape, self.output_grid.order
+                )
+            )
+            # determine mask from outliers
+            if mask_save is None or mask_save is dtools.Mask.FLEX:
+                self.output_mask = outlier_mask
+            elif mask_save is dtools.Mask.NONE:
+                if np.any(outlier_mask):
+                    msg = "RegridLinear: interpolation is not covering desired domain."
+                    raise FinamDataError(msg)
+                self.output_mask = mask_save
+            else:
+                if not dtools.is_sub_mask(outlier_mask, mask_save):
+                    msg = "RegridLinear: interpolation is not covering desired masked domain."
+                    raise FinamDataError(msg)
+                self.output_mask = mask_save
+            self._out_mask_checked = False
+            self._check_and_set_out_mask()
+            self.out_coords = self._get_out_coords()
 
     def _get_data(self, time, target):
         in_data = self.pull_data(time, target)
+        self._check_in_data(in_data)
 
-        if dtools.is_masked_array(in_data):
-            with ErrorLogger(self.logger):
-                msg = "Regridding is currently not implemented for masked data"
-                raise NotImplementedError(msg)
-
-        if isinstance(self.input_grid, StructuredGrid):
+        if self.structured:
             self.inter.values = in_data[0, ...].magnitude
             res = self.inter(self.out_coords)
             if self.fill_with_nearest:
@@ -239,15 +352,22 @@ class RegridLinear(ARegridding):
                     order=self.input_grid.order
                 )[self.fill_ids]
         else:
+            in_data = dtools.to_compressed(
+                in_data[0, ...].magnitude, order=self.input_grid.order
+            )
             self.inter.values = np.ascontiguousarray(
-                in_data[0, ...].magnitude.reshape((-1, 1), order=self.input_grid.order),
+                in_data.reshape((-1, 1)),
                 dtype=np.double,
             )
             res = self.inter(self.out_coords)
             if self.fill_with_nearest:
                 res[self.out_ids] = self.inter.values[self.fill_ids, 0]
-
-        return dtools.quantify(res, dtools.get_units(in_data))
+        return dtools.from_compressed(
+            res,
+            shape=self.output_grid.data_shape,
+            order=self.output_grid.order,
+            mask=self.output_mask,
+        )
 
 
 def _create_transformer(input_grid, output_grid):
diff --git a/src/finam/adapters/time_integration.py b/src/finam/adapters/time_integration.py
index 5d18cd5957766f41b0d315a042b7349331df1a61..19435c053f0966fb237acfc68f63c8599cf28da7 100644
--- a/src/finam/adapters/time_integration.py
+++ b/src/finam/adapters/time_integration.py
@@ -1,4 +1,5 @@
 """Adapters for time integration"""
+
 from abc import ABC
 from datetime import timedelta
 
@@ -14,7 +15,6 @@ class TimeIntegrationAdapter(TimeCachingAdapter, ABC):
     def __init__(self):
         super().__init__()
         self._prev_time = None
-        self._info = None
 
     def _source_updated(self, time):
         """Informs the input that a new output is available.
@@ -107,7 +107,6 @@ class AvgOverTime(TimeIntegrationAdapter):
         super().__init__()
         self._prev_time = None
         self._step = step
-        self._info = None
 
     def _interpolate(self, time):
         if len(self.data) == 1:
@@ -238,8 +237,6 @@ class SumOverTime(TimeIntegrationAdapter):
         self._per_time = per_time
         self._initial_interval = initial_interval
 
-        self._info = None
-
     def _interpolate(self, time):
         if len(self.data) == 1 or time <= self.data[0][0]:
             if self._per_time:
@@ -307,5 +304,4 @@ class SumOverTime(TimeIntegrationAdapter):
         else:
             out_info = in_info.copy_with()
 
-        self._info = out_info
         return out_info
diff --git a/src/finam/data/__init__.py b/src/finam/data/__init__.py
index c67f24b1b287606dd256ad13191b628d1331db91..f11437cf22f02e0f4078b84f8da795235d5d0f02 100644
--- a/src/finam/data/__init__.py
+++ b/src/finam/data/__init__.py
@@ -45,23 +45,40 @@ Data tools
 .. autosummary::
    :toctree: generated
 
-    :noindex: UNITS
     :noindex: Info
-    assert_type
+    prepare
+    strip_time
     check
-    check_quantified
     full
     full_like
-    get_dimensionality
-    get_magnitude
-    get_units
     has_time_axis
-    is_quantified
-    prepare
-    quantify
-    strip_time
     to_datetime
+    assert_type
+
+
+Unit tools
+==========
+
+.. autosummary::
+   :toctree: generated
+
+    :noindex: UNITS
+    quantify
     to_units
+    is_quantified
+    check_quantified
+    get_dimensionality
+    get_magnitude
+    get_units
+
+
+Mask tools
+==========
+
+.. autosummary::
+   :toctree: generated
+
+    :noindex: Mask
     is_masked_array
     has_masked_values
     filled
@@ -69,6 +86,9 @@ Data tools
     to_compressed
     from_compressed
     check_data_covers_domain
+    masks_compatible
+    masks_equal
+    mask_specified
 """
 
 from ..errors import FinamDataError
@@ -92,6 +112,7 @@ from .grid_tools import (
 from .tools import (
     UNITS,
     Info,
+    Mask,
     assert_type,
     check,
     check_data_covers_domain,
@@ -107,6 +128,9 @@ from .tools import (
     has_time_axis,
     is_masked_array,
     is_quantified,
+    mask_specified,
+    masks_compatible,
+    masks_equal,
     prepare,
     quantify,
     strip_time,
@@ -139,6 +163,7 @@ __all__ += [
     "UNITS",
     "FinamDataError",
     "Info",
+    "Mask",
     "assert_type",
     "check",
     "check_quantified",
@@ -163,4 +188,7 @@ __all__ += [
     "to_compressed",
     "from_compressed",
     "check_data_covers_domain",
+    "masks_compatible",
+    "masks_equal",
+    "mask_specified",
 ]
diff --git a/src/finam/data/esri_tools.py b/src/finam/data/esri_tools.py
index d6f36bccdb822dadcc003bf76e33e50041412570..9e8e3457432c958a150784565ac0c9735f7c22b2 100644
--- a/src/finam/data/esri_tools.py
+++ b/src/finam/data/esri_tools.py
@@ -1,4 +1,5 @@
 """Common ESRI ASCII grid routines."""
+
 import warnings
 
 import numpy as np
diff --git a/src/finam/data/grid_base.py b/src/finam/data/grid_base.py
index 0ca332a4041bc4f877d970881c9853170e64d886..e74f697a3879e9472aeae05a6a86ad858dfaef03 100644
--- a/src/finam/data/grid_base.py
+++ b/src/finam/data/grid_base.py
@@ -36,6 +36,11 @@ class GridBase(ABC):
     def dim(self):
         """int: Dimension of the grid or data."""
 
+    @property
+    @abstractmethod
+    def data_shape(self):
+        """tuple: Shape of the associated data."""
+
     def copy(self, deep=False):
         """
         Copy of this grid.
@@ -65,6 +70,9 @@ class GridBase(ABC):
         """Transformation between compatible grids."""
         return None
 
+    def __repr__(self):
+        return f"{self.name} ({self.dim}D) {self.data_shape}"
+
 
 class Grid(GridBase):
     """Abstract grid specification."""
@@ -157,11 +165,6 @@ class Grid(GridBase):
             return self.points
         return self.cell_centers
 
-    @property
-    @abstractmethod
-    def data_shape(self):
-        """tuple: Shape of the associated data."""
-
     @property
     def data_size(self):
         """int: Size of the associated data."""
@@ -187,9 +190,6 @@ class Grid(GridBase):
         """list of str: Axes names of the data."""
         return ["id"]
 
-    def __repr__(self):
-        return f"{self.__class__.__name__} ({self.dim}D) {self.data_shape}"
-
     def compatible_with(self, other, check_location=True):
         """
         Check for compatibility with other Grid.
@@ -519,7 +519,9 @@ class StructuredGrid(Grid):
         ValueError
             When data has wrong shape.
         """
-        if not np.array_equal(np.shape(data), self.data_shape):
+        rev = -1 if self.axes_reversed else 1
+        d_shp, in_shp, shp_len = self.data_shape, np.shape(data), len(self.data_shape)
+        if not np.array_equal(d_shp[::rev], in_shp[::rev][:shp_len]):
             msg = "to_canonical: data has wrong shape."
             raise ValueError(msg)
         if self.axes_reversed and np.ndim(data) > 1:
@@ -552,7 +554,8 @@ class StructuredGrid(Grid):
             When data has wrong shape.
         """
         rev = -1 if self.axes_reversed else 1
-        if not np.array_equal(np.shape(data)[::rev], self.data_shape):
+        d_shp, in_shp, shp_len = self.data_shape, np.shape(data), len(self.data_shape)
+        if not np.array_equal(d_shp[::rev], in_shp[:shp_len]):
             msg = "from_canonical: data has wrong shape."
             raise ValueError(msg)
         for i, inc in enumerate(self.axes_increase):
diff --git a/src/finam/data/grid_spec.py b/src/finam/data/grid_spec.py
index 6ef840cdccb6069bd3d613a67bd83d532e53d42f..fdebfd221c2759b119d1376defe8d7a60f70dfef 100644
--- a/src/finam/data/grid_spec.py
+++ b/src/finam/data/grid_spec.py
@@ -28,18 +28,44 @@ def _check_location(grid, data_location):
 
 
 class NoGrid(GridBase):
-    """Indicator for data without a spatial grid."""
+    """
+    Indicator for data without a spatial grid.
+
+    Parameters
+    ----------
+    dim : int or None, optional
+        Data dimensionality. Must match the length of data_shape if both are given.
+    data_shape : tuple of int or None, optional
+        Data shape. Can contain -1 to indicate a flexible axis of variable size.
+
+    Raises
+    ------
+    ValueError
+        If dim does not match the length of data_shape.
+    """
 
-    def __init__(self, dim=0):
+    def __init__(self, dim=None, data_shape=None):
+        if dim is None and data_shape is None:
+            dim, data_shape = 0, tuple()
+        if data_shape is None:
+            data_shape = (-1,) * dim
+        if dim is None:
+            dim = len(data_shape)
+        if dim != len(data_shape):
+            msg = "NoGrid: dim needs to match the length of data_shape."
+            raise ValueError(msg)
         self._dim = dim
+        self._data_shape = data_shape
 
     @property
     def dim(self):
         """int: Dimension of the grid or data."""
         return self._dim
 
-    def __repr__(self):
-        return f"{self.__class__.__name__} ({self.dim}D)"
+    @property
+    def data_shape(self):
+        """tuple: Shape of the associated data."""
+        return self._data_shape
 
     # pylint: disable-next=unused-argument
     def compatible_with(self, other, check_location=True):
@@ -58,7 +84,7 @@ class NoGrid(GridBase):
         bool
             compatibility
         """
-        return isinstance(other, NoGrid) and self.dim == other.dim
+        return isinstance(other, NoGrid) and self.data_shape == other.data_shape
 
     def __eq__(self, other):
         return self.compatible_with(other)
diff --git a/src/finam/data/grid_tools.py b/src/finam/data/grid_tools.py
index ff4f890c7a940d1f11a197e4497cb146572834db..b345f511afb90368dd54c253e87757cc24ff69c0 100644
--- a/src/finam/data/grid_tools.py
+++ b/src/finam/data/grid_tools.py
@@ -1,4 +1,5 @@
 """Grid tools for FINAM."""
+
 from enum import Enum, IntEnum
 from math import isclose, nan
 
@@ -195,17 +196,17 @@ def gen_cells(dims, order="F"):
         c = np.empty((c_cnt, 8), dtype=int)
         # ? should upper and lower layer be swapped?
         # upper layer
-        c[:, 3] = c_rng
-        c[:, 3] += (c_dim[0] + c_dim[1] + 1) * (c_rng // (c_dim[0] * c_dim[1]))
-        c[:, 3] += (c_rng % (c_dim[0] * c_dim[1])) // c_dim[0]
-        c[:, 2] = c[:, 3] + 1
-        c[:, 1] = c[:, 3] + 2 + c_dim[0]
-        c[:, 0] = c[:, 1] - 1
-        # lower layer
-        c[:, 7] = c[:, 3] + (1 + c_dim[0]) * (1 + c_dim[1])
+        c[:, 7] = c_rng
+        c[:, 7] += (c_dim[0] + c_dim[1] + 1) * (c_rng // (c_dim[0] * c_dim[1]))
+        c[:, 7] += (c_rng % (c_dim[0] * c_dim[1])) // c_dim[0]
         c[:, 6] = c[:, 7] + 1
         c[:, 5] = c[:, 7] + 2 + c_dim[0]
         c[:, 4] = c[:, 5] - 1
+        # lower layer
+        c[:, 3] = c[:, 7] + (1 + c_dim[0]) * (1 + c_dim[1])
+        c[:, 2] = c[:, 3] + 1
+        c[:, 1] = c[:, 3] + 2 + c_dim[0]
+        c[:, 0] = c[:, 1] - 1
     if order == "C" and mesh_dim > 1:
         # inverse reorder point ids
         c = order_map(dims, of="C", to="F")[c]
@@ -395,8 +396,9 @@ def flatten_cells(cells):
     """
     if cells.ndim == 1:
         return cells
+    data = cells.ravel()
     # unused entries in "cells" marked with "-1"
-    return np.ma.masked_values(cells, -1).compressed()
+    return data.compress(data != -1)
 
 
 def get_cells_matrix(cell_types, cells, connectivity=False):
diff --git a/src/finam/data/tools.py b/src/finam/data/tools.py
deleted file mode 100644
index 19fda3d2cef873dbb1ba8c880bb7173eaa5ee8bd..0000000000000000000000000000000000000000
--- a/src/finam/data/tools.py
+++ /dev/null
@@ -1,941 +0,0 @@
-"""Data tools for FINAM."""
-import copy
-import datetime
-
-import numpy as np
-import pandas as pd
-import pint
-
-from ..errors import FinamDataError, FinamMetaDataError
-
-# pylint: disable-next=unused-import
-from . import cf_units, grid_spec
-from .grid_base import Grid, GridBase
-
-# set default format to cf-convention for pint.dequantify
-# some problems with degree_Celsius and similar here
-pint.application_registry.default_format = "cf"
-UNITS = pint.application_registry
-
-_UNIT_PAIRS_CACHE = {}
-
-_MASK_INDICATORS = ["_FillValue", "missing_value"]
-
-
-def prepare(data, info, time_entries=1, force_copy=False, report_conversion=False):
-    """
-    Prepares data in FINAM's internal transmission format.
-
-    Checks tha shape of the data.
-    Checks or adds units and time dimension.
-
-    Parameters
-    ----------
-    data : arraylike
-        The input data.
-    info : Info
-        Info associated with the data.
-    time_entries : int, optional
-        Number of time slices in the data. Default 1.
-    force_copy : bool, optional
-        Forces the result to be a copy of the passed data. Default ``False``.
-
-        If not used, the result is a view of the data if no units conversion needs to be done.
-    report_conversion : bool, optional
-        If true, returns a tuple with the second element indicating the unit conversion if it was required.
-
-    Returns
-    -------
-    pint.Quantity or tuple(pint.Quantity, tuple(pint.Unit, pint.Unit) or None)
-        The prepared data as a numpy array, wrapped into a :class:`pint.Quantity`.
-
-        If ``report_conversion`` is ``True``, a tuple is returned with the second element
-        indicating the unit conversion if it was required.
-
-        The second element is ``None`` if no conversion was required,
-        and a tuple of two :class:`pint.Unit` objects otherwise.
-
-    Raises
-    ------
-    FinamDataError
-        If the data doesn't match its info.
-    """
-    units_converted = None
-    units = info.units
-    if is_quantified(data):
-        if not compatible_units(data.units, units):
-            raise FinamDataError(
-                f"Given data has incompatible units. "
-                f"Got {data.units}, expected {units}."
-            )
-        if not equivalent_units(data.units, units):
-            units_converted = data.units, units
-            data = data.to(units)
-        elif force_copy:
-            data = data.copy()
-    else:
-        # this covers masked arrays as well
-        if isinstance(data, np.ndarray):
-            if force_copy:
-                data = data.copy()
-            data = UNITS.Quantity(data, units)
-        else:
-            if force_copy:
-                data = copy.copy(data)
-            data = UNITS.Quantity(np.asarray(data), units)
-
-    data = _check_input_shape(data, info, time_entries)
-
-    if report_conversion:
-        return data, units_converted
-    return data
-
-
-def _check_input_shape(data, info, time_entries):
-    # check correct data size
-    if isinstance(info.grid, Grid):
-        time_entries = (
-            data.shape[0]
-            if len(data.shape) == len(info.grid.data_shape) + 1
-            else time_entries
-        )
-        data_size = data.size / time_entries
-        if data_size != info.grid.data_size:
-            raise FinamDataError(
-                f"quantify: data size doesn't match grid size. "
-                f"Got {data_size}, expected {info.grid.data_size}"
-            )
-        # check shape of non-flat arrays
-        if len(data.shape) != 1:
-            if data.shape[1:] != info.grid.data_shape:
-                if data.shape == info.grid.data_shape:
-                    data = np.expand_dims(data, 0)
-                else:
-                    raise FinamDataError(
-                        f"quantify: data shape doesn't match grid shape. "
-                        f"Got {data.shape}, expected {info.grid.data_shape}"
-                    )
-        else:
-            # reshape arrays
-            if time_entries <= 1:
-                data = data.reshape(
-                    [1] + list(info.grid.data_shape), order=info.grid.order
-                )
-            else:
-                data = data.reshape(
-                    [time_entries] + list(info.grid.data_shape), order=info.grid.order
-                )
-    elif isinstance(info.grid, grid_spec.NoGrid):
-        data = _check_input_shape_no_grid(data, info, time_entries)
-    return data
-
-
-def _check_input_shape_no_grid(data, info, time_entries):
-    if len(data.shape) != info.grid.dim + 1:
-        if len(data.shape) == info.grid.dim:
-            data = np.expand_dims(data, 0)
-        else:
-            raise FinamDataError(
-                f"quantify: number of dimensions in data doesn't match expected number. "
-                f"Got {len(data.shape)}, expected {info.grid.dim}"
-            )
-    else:
-        if data.shape[0] != time_entries:
-            raise FinamDataError(
-                f"quantify: number of time entries in data doesn't match expected number. "
-                f"Got {data.shape[0]}, expected {time_entries}"
-            )
-    return data
-
-
-def has_time_axis(xdata, grid):
-    """
-    Check if the data array has a time axis.
-
-    Parameters
-    ----------
-    xdata : numpy.ndarray
-        The given data array.
-    grid : GridBase
-        The associated grid specification
-    Returns
-    -------
-    bool
-        Whether the data has a time axis.
-    """
-    grid_dim = None
-
-    if isinstance(grid, Grid):
-        grid_dim = len(grid.data_shape)
-    elif isinstance(grid, grid_spec.NoGrid):
-        grid_dim = grid.dim
-    else:
-        raise ValueError(
-            f"Expected type Grid or NoGrid, got {grid.__class__.__name__}."
-        )
-
-    if xdata.ndim == grid_dim:
-        return False
-
-    if xdata.ndim == grid_dim + 1:
-        return True
-
-    raise FinamDataError("Data dimension must be grid dimension or grid dimension + 1.")
-
-
-_BASE_DATETIME = datetime.datetime(1970, 1, 1)
-_BASE_TIME = np.datetime64("1970-01-01T00:00:00")
-_BASE_DELTA = np.timedelta64(1, "s")
-
-
-def to_datetime(date):
-    """Converts a numpy datetime64 object to a python datetime object"""
-    if np.isnan(date):
-        return pd.NaT
-
-    timestamp = (date - _BASE_TIME) / _BASE_DELTA
-
-    if timestamp < 0:
-        return _BASE_DATETIME + datetime.timedelta(seconds=timestamp)
-
-    return datetime.datetime.utcfromtimestamp(timestamp)
-
-
-def strip_time(xdata, grid):
-    """Returns a view of the data with the time dimension squeezed if there is only a single entry
-
-    Parameters
-    ----------
-    xdata : arraylike
-        Data to strip time dimension from
-    grid : GridBase
-        The associated grid specification
-
-    Returns
-    -------
-    arraylike
-        Stripped data
-
-    Raises
-    ------
-    FinamDataError
-        If the data has multiple time entries.
-    """
-    if has_time_axis(xdata, grid):
-        if xdata.shape[0] > 1:
-            raise FinamDataError(
-                "Can't strip time of a data array with multiple time entries"
-            )
-        return xdata[0, ...]
-
-    return xdata
-
-
-def get_magnitude(xdata):
-    """
-    Get magnitude of given data.
-
-    Parameters
-    ----------
-    xdata : pint.Quantity
-        The given data array.
-
-    Returns
-    -------
-    numpy.ndarray
-        Magnitude of given data.
-    """
-    check_quantified(xdata, "get_magnitude")
-    return xdata.magnitude
-
-
-def get_units(xdata):
-    """
-    Get units of the data.
-
-    Parameters
-    ----------
-    xdata : DataArray
-        The given data array.
-
-    Returns
-    -------
-    pint.Unit
-        Units of the data.
-    """
-    check_quantified(xdata, "get_units")
-    return xdata.units
-
-
-def get_dimensionality(xdata):
-    """
-    Get dimensionality of the data.
-
-    Parameters
-    ----------
-    xdata : pint.Quantity
-        The given data array.
-
-    Returns
-    -------
-    pint.UnitsContainer
-        Dimensionality of the data.
-    """
-    check_quantified(xdata, "get_dimensionality")
-    return xdata.dimensionality
-
-
-def to_units(xdata, units, check_equivalent=False, report_conversion=False):
-    """
-    Convert data to given units.
-
-    Parameters
-    ----------
-    xdata : pint.Quantity
-        The given data array.
-    units : str or pint.Unit
-        Desired units.
-    check_equivalent : bool, optional
-        Checks for equivalent units and simply re-assigns if possible.
-    report_conversion : bool, optional
-        If true, returns a tuple with the second element indicating the unit conversion if it was required.
-
-    Returns
-    -------
-    pint.Quantity or tuple(pint.Quantity, tuple(pint.Unit, pint.Unit) or None)
-        The converted data.
-
-        If ``report_conversion`` is ``True``, a tuple is returned with the second element
-        indicating the unit conversion if it was required.
-
-        The second element is ``None`` if no conversion was required,
-        and a tuple of two :class:`pint.Unit` objects otherwise.
-    """
-    check_quantified(xdata, "to_units")
-    units = _get_pint_units(units)
-    units2 = xdata.units
-    conversion = None
-    if units != units2:
-        if check_equivalent and equivalent_units(units, units2):
-            xdata = UNITS.Quantity(xdata.magnitude, units)
-        else:
-            xdata = xdata.to(units)
-            conversion = units2, units
-
-    if report_conversion:
-        return xdata, conversion
-    return xdata
-
-
-def full_like(xdata, value):
-    """
-    Return a new data array with the same shape, type and units as a given object.
-
-    Parameters
-    ----------
-    xdata : :class:`pint.Quantity` or :class:`numpy.ndarray`
-        The reference object input.
-    value : scalar
-        Value to fill the new object with before returning it.
-
-    Returns
-    -------
-    pint.Quantity or numpy.ndarray
-        New object with the same shape and type as other,
-        with the data filled with fill_value.
-        Units will be taken from the input if present.
-    """
-    data = np.full_like(xdata, value)
-    if is_quantified(xdata):
-        return UNITS.Quantity(data, xdata.units)
-    return data
-
-
-def full(value, info):
-    """
-    Return a new data array with units according to the given info, filled with given value.
-
-    Parameters
-    ----------
-    value : scalar
-        Value to fill the new object with before returning it.
-    info : Info
-        Info associated with the data.
-
-    Returns
-    -------
-    pint.Quantity
-        The converted data.
-    """
-    shape = info.grid.data_shape if isinstance(info.grid, Grid) else tuple()
-    return prepare(np.full([1] + list(shape), value), info)
-
-
-def check(xdata, info):
-    """
-    Check if data matches given info.
-
-    Parameters
-    ----------
-    xdata : numpy.ndarray
-        The given data array.
-    info : Info
-        Info associated with the data.
-
-    Raises
-    ------
-    FinamDataError
-        If data doesn't match given info.
-    """
-    check_quantified(xdata, "check")
-
-    if not has_time_axis(xdata, info.grid):
-        raise FinamDataError("check: given data should have a time dimension.")
-
-    _check_shape(xdata.shape[1:], info.grid)
-
-    # check units
-    if not compatible_units(info.units, xdata):
-        raise FinamDataError(
-            f"check: given data has incompatible units. "
-            f"Got {get_units(xdata)}, expected {info.units}."
-        )
-
-
-def _check_shape(shape, grid):
-    if isinstance(grid, Grid) and shape != grid.data_shape:
-        raise FinamDataError(
-            f"check: given data has wrong shape. "
-            f"Got {shape}, expected {grid.data_shape}"
-        )
-    if isinstance(grid, grid_spec.NoGrid) and len(shape) != grid.dim:
-        raise FinamDataError(
-            f"check: given data has wrong number of dimensions. "
-            f"Got {len(shape)}, expected {grid.dim}"
-        )
-
-
-def is_quantified(xdata):
-    """
-    Check if data is a quantified DataArray.
-
-    Parameters
-    ----------
-    xdata : Any
-        The given data array.
-
-    Returns
-    -------
-    bool
-        Whether the data is a quantified DataArray.
-    """
-    return isinstance(xdata, pint.Quantity)
-
-
-def is_masked_array(data):
-    """
-    Check if data is a masked array.
-
-    Parameters
-    ----------
-    data : Any
-        The given data array.
-
-    Returns
-    -------
-    bool
-        Whether the data is a MaskedArray.
-    """
-    if is_quantified(data):
-        return np.ma.isMaskedArray(data.magnitude)
-    return np.ma.isMaskedArray(data)
-
-
-def has_masked_values(data):
-    """
-    Determine whether the data has masked values.
-
-    Parameters
-    ----------
-    data : Any
-        The given data array.
-
-    Returns
-    -------
-    bool
-        Whether the data is a MaskedArray and has any masked values.
-    """
-    return np.ma.is_masked(data)
-
-
-def filled(data, fill_value=None):
-    """
-    Return input as an array with masked data replaced by a fill value.
-
-    This routine respects quantified and un-quantified data.
-
-    Parameters
-    ----------
-    data : :class:`pint.Quantity` or :class:`numpy.ndarray` or :class:`numpy.ma.MaskedArray`
-        The reference object input.
-    fill_value : array_like, optional
-        The value to use for invalid entries. Can be scalar or non-scalar.
-        If non-scalar, the resulting ndarray must be broadcastable over
-        input array. Default is None, in which case, the `fill_value`
-        attribute of the array is used instead.
-
-    Returns
-    -------
-    pint.Quantity or numpy.ndarray
-        New object with the same shape and type as other,
-        with the data filled with fill_value.
-        Units will be taken from the input if present.
-
-    See also
-    --------
-    :func:`numpy.ma.filled`:
-        Numpy routine doing the same.
-    """
-    if not is_masked_array(data):
-        return data
-    if is_quantified(data):
-        return UNITS.Quantity(data.magnitude.filled(fill_value), data.units)
-    return data.filled(fill_value)
-
-
-def to_masked(data, **kwargs):
-    """
-    Return a masked version of the data.
-
-    Parameters
-    ----------
-    data : :class:`pint.Quantity` or :class:`numpy.ndarray` or :class:`numpy.ma.MaskedArray`
-        The reference object input.
-    **kwargs
-        keyword arguments forwarded to :any:`numpy.ma.array`
-
-    Returns
-    -------
-    pint.Quantity or numpy.ma.MaskedArray
-        New object with the same shape and type but as a masked array.
-        Units will be taken from the input if present.
-    """
-    if is_masked_array(data) and not kwargs:
-        return data
-    if is_quantified(data):
-        return UNITS.Quantity(np.ma.array(data.magnitude, **kwargs), data.units)
-    return np.ma.array(data, **kwargs)
-
-
-def to_compressed(xdata, order="C"):
-    """
-    Return all the non-masked data as a 1-D array respecting the given array order.
-
-    Parameters
-    ----------
-    data : :class:`pint.Quantity` or :class:`numpy.ndarray` or :class:`numpy.ma.MaskedArray`
-        The reference object input.
-    order : str
-        order argument for :any:`numpy.ravel`
-    **kwargs
-        keyword arguments forwarded to :any:`numpy.ma.array`
-
-    Returns
-    -------
-    :class:`pint.Quantity` or :class:`numpy.ndarray` or :class:`numpy.ma.MaskedArray`
-        New object with the flat shape and only unmasked data but and same type as input.
-        Units will be taken from the input if present.
-
-    See also
-    --------
-    :func:`numpy.ma.compressed`:
-        Numpy routine doing the same but only for C-order.
-    """
-    if is_masked_array(xdata):
-        data = np.ravel(xdata.data, order)
-        if xdata.mask is not np.ma.nomask:
-            data = data.compress(np.logical_not(np.ravel(xdata.mask, order)))
-        return quantify(data, xdata.units) if is_quantified(xdata) else data
-    return np.reshape(xdata, -1, order=order)
-
-
-def from_compressed(xdata, shape, order="C", **kwargs):
-    """
-    Fill a (masked) array following a given mask or shape with the provided data.
-
-    This will only create a masked array if kwargs are given (especially a mask).
-    Otherwise this is simply reshaping the given data.
-    Filling is performed in the given array order.
-
-    Parameters
-    ----------
-    data : :class:`pint.Quantity` or :class:`numpy.ndarray` or :class:`numpy.ma.MaskedArray`
-        The reference object input.
-    shape : str
-        shape argument for :any:`numpy.reshape`
-    order : str
-        order argument for :any:`numpy.reshape`
-    **kwargs
-        keyword arguments forwarded to :any:`numpy.ma.array`
-
-    Returns
-    -------
-    :class:`pint.Quantity` or :class:`numpy.ndarray` or :class:`numpy.ma.MaskedArray`
-        New object with the desired shape and same type as input.
-        Units will be taken from the input if present.
-        Will only be a masked array if kwargs are given.
-
-    See also
-    --------
-    to_compressed:
-        Inverse operation.
-    :any:`numpy.ma.array`:
-        Routine consuming kwargs to create a masked array.
-    :any:`numpy.reshape`:
-        Equivalent routine if no mask is provided.
-
-    Notes
-    -----
-    If both `mask` and `shape` are given, they need to match in size.
-    """
-    if kwargs:
-        if "mask" in kwargs:
-            mask = np.reshape(kwargs["mask"], -1, order=order)
-            if is_quantified(xdata):
-                # pylint: disable-next=unexpected-keyword-arg
-                data = quantify(np.empty_like(xdata, shape=np.size(mask)), xdata.units)
-            else:
-                # pylint: disable-next=unexpected-keyword-arg
-                data = np.empty_like(xdata, shape=np.size(mask))
-            data[~mask] = xdata
-            data = np.reshape(data, shape, order=order)
-        else:
-            data = np.reshape(xdata, shape, order=order)
-        return to_masked(data, **kwargs)
-    return np.reshape(xdata, shape, order=order)
-
-
-def check_data_covers_domain(data, mask=None):
-    """
-    Check if the given data covers a domain defined by a mask on the same grid.
-
-    Parameters
-    ----------
-    data : Any
-        The given data array for a single time-step.
-    mask : None or bool or array of bool, optional
-        Mask defining the target domain on the same grid as the data,
-        by default None
-
-    Returns
-    -------
-    bool
-        Whether the data covers the desired domain.
-
-    Raises
-    ------
-    ValueError
-        When mask is given and mask and data don't share the same shape.
-    """
-    if not _is_single_mask_value(mask) and np.shape(mask) != np.shape(data):
-        raise ValueError("check_data_covers_domain: mask and data shape differ.")
-    if not has_masked_values(data):
-        return True
-    if _is_single_mask_value(mask):
-        return bool(mask)
-    return np.all(mask[data.mask])
-
-
-def _is_single_mask_value(mask):
-    return mask is None or mask is np.ma.nomask or mask is False or mask is True
-
-
-def quantify(xdata, units=None):
-    """
-    Quantifies data.
-
-    Parameters
-    ----------
-    xdata : Any
-        The given data array.
-    units :
-
-    Returns
-    -------
-    pint.Quantity
-        The quantified array.
-    """
-    if is_quantified(xdata):
-        raise FinamDataError(f"Data is already quantified with units '{xdata.units}'")
-    return UNITS.Quantity(xdata, _get_pint_units(units or UNITS.dimensionless))
-
-
-def check_quantified(xdata, routine="check_quantified"):
-    """
-    Check if data is a quantified DataArray.
-
-    Parameters
-    ----------
-    xdata : numpy.ndarray
-        The given data array.
-    routine : str, optional
-        Name of the routine to show in the Error, by default "check_quantified"
-
-    Raises
-    ------
-    FinamDataError
-        If the array is not a quantified DataArray.
-    """
-    if not is_quantified(xdata):
-        raise FinamDataError(f"{routine}: given data is not quantified.")
-
-
-def _get_pint_units(var):
-    if var is None:
-        raise FinamDataError("Can't extract units from 'None'.")
-
-    if isinstance(var, pint.Unit):
-        return var
-
-    if isinstance(var, pint.Quantity):
-        return var.units or UNITS.dimensionless
-
-    return UNITS.Unit(var)
-
-
-def compatible_units(unit1, unit2):
-    """
-    Checks if two units are compatible/convertible.
-
-    Parameters
-    ----------
-    unit1 : UnitLike or Quantified
-        First unit to compare.
-    unit2 : UnitLike or Quantified
-        Second unit to compare.
-
-    Returns
-    -------
-    bool
-        Unit compatibility.
-    """
-    unit1, unit2 = _get_pint_units(unit1), _get_pint_units(unit2)
-    comp_equiv = _UNIT_PAIRS_CACHE.get((unit1, unit2))
-    if comp_equiv is None:
-        comp_equiv = _cache_units(unit1, unit2)
-
-    return comp_equiv[0]
-
-
-def equivalent_units(unit1, unit2):
-    """
-    Check if two given units are equivalent.
-
-    Parameters
-    ----------
-    unit1 : UnitLike or Quantified
-        First unit to compare.
-    unit2 : UnitLike or Quantified
-        Second unit to compare.
-
-    Returns
-    -------
-    bool
-        Unit equivalence.
-    """
-    unit1, unit2 = _get_pint_units(unit1), _get_pint_units(unit2)
-    comp_equiv = _UNIT_PAIRS_CACHE.get((unit1, unit2))
-    if comp_equiv is None:
-        comp_equiv = _cache_units(unit1, unit2)
-
-    return comp_equiv[1]
-
-
-def _cache_units(unit1, unit2):
-    equiv = False
-    compat = False
-    try:
-        equiv = np.isclose((1.0 * unit1).to(unit2).magnitude, 1.0)
-        compat = True
-    except pint.errors.DimensionalityError:
-        pass
-
-    _UNIT_PAIRS_CACHE[(unit1, unit2)] = compat, equiv
-    return compat, equiv
-
-
-def clear_units_cache():
-    """Clears the units cache"""
-    _UNIT_PAIRS_CACHE.clear()
-
-
-def assert_type(cls, slot, obj, types):
-    """Type assertion."""
-    for t in types:
-        if isinstance(obj, t):
-            return
-    raise TypeError(
-        f"Unsupported data type for {slot} in "
-        f"{cls.__class__.__name__}: {obj.__class__.__name__}. "
-        f"Expected one of [{', '.join([tp.__name__ for tp in types])}]"
-    )
-
-
-class Info:
-    """Data info containing grid specification and metadata
-
-    Parameters
-    ----------
-    grid : Grid or NoGrid or None
-        grid specification
-    meta : dict
-        dictionary of metadata
-    **meta_kwargs
-        additional metadata by name, will overwrite entries in ``meta``
-
-    Attributes
-    ----------
-    grid : Grid or NoGrid or None
-        grid specification
-    meta : dict
-        dictionary of metadata
-
-    """
-
-    def __init__(self, time, grid, meta=None, **meta_kwargs):
-        if time is not None and not isinstance(time, datetime.datetime):
-            raise FinamMetaDataError("Time in Info must be either None or a datetime")
-        if grid is not None and not isinstance(grid, GridBase):
-            raise FinamMetaDataError(
-                "Grid in Info must be either None or of a sub-class of GridBase"
-            )
-
-        self.time = time
-        self.grid = grid
-        self.meta = meta or {}
-        self.meta.update(meta_kwargs)
-
-        units = self.meta.get("units", "")
-        units = None if units is None else UNITS.Unit(units)
-        self.meta["units"] = units
-
-    @property
-    def is_masked(self):
-        """bool: whether info indicates masked data ("_FillValue" or "missing_value" in meta)."""
-        return any(v in self.meta for v in _MASK_INDICATORS)
-
-    def copy(self):
-        """Copies the info object"""
-        return copy.copy(self)
-
-    def copy_with(self, use_none=True, **kwargs):
-        """Copies the info object and sets variables and meta values according to the kwargs
-
-        Parameters
-        ----------
-        use_none : bool
-            whether properties with None value should also be transferred
-        **kwargs
-            key values pairs for properties to change
-        """
-        other = Info(time=self.time, grid=self.grid, meta=copy.copy(self.meta))
-        for k, v in kwargs.items():
-            if k == "time":
-                if v is not None or use_none:
-                    other.time = v
-            elif k == "grid":
-                if v is not None or use_none:
-                    other.grid = v
-            elif k == "units":
-                if v is not None or use_none:
-                    other.meta[k] = v if v is None else UNITS.Unit(v)
-            else:
-                if v is not None or use_none:
-                    other.meta[k] = v
-
-        return other
-
-    def accepts(self, incoming, fail_info, ignore_none=False):
-        """Tests whether this info can accept/is compatible with an incoming info
-
-        Parameters
-        ----------
-        incoming : Info
-            Incoming/source info to check. This is the info from upstream.
-        fail_info : dict
-            Dictionary that will be filled with failed properties; name: (source, target).
-        ignore_none : bool
-            Ignores ``None`` values in the incoming info.
-
-        Returns
-        -------
-        bool
-            Whether the incoming info is accepted
-        """
-        if not isinstance(incoming, Info):
-            fail_info["type"] = (incoming.__class__, self.__class__)
-            return False
-
-        success = True
-        if self.grid is not None and not self.grid.compatible_with(incoming.grid):
-            if not (ignore_none and incoming.grid is None):
-                fail_info["grid"] = (incoming.grid, self.grid)
-                success = False
-
-        for k, v in self.meta.items():
-            if v is not None and k in incoming.meta:
-                in_value = incoming.meta[k]
-                if k == "units":
-                    if not (ignore_none and in_value is None) and not compatible_units(
-                        v, in_value
-                    ):
-                        fail_info["meta." + k] = (in_value, v)
-                        success = False
-                else:
-                    if not (ignore_none and in_value is None) and in_value != v:
-                        fail_info["meta." + k] = (in_value, v)
-                        success = False
-
-        return success
-
-    def __copy__(self):
-        """Shallow copy of the info"""
-        return Info(time=self.time, grid=self.grid, meta=self.meta)
-
-    def __eq__(self, other):
-        """Equality check for two infos
-
-        Ignores time.
-        """
-        if not isinstance(other, Info):
-            return False
-        return self.grid == other.grid and self.meta == other.meta
-
-    def __getattr__(self, name):
-        # only called if attribute is not present in class
-        if "meta" in self.__dict__ and name in self.meta:
-            return self.meta[name]
-        raise AttributeError(f"'Info' object has no attribute '{name}'")
-
-    def __setattr__(self, name, value):
-        # first check if attribute present or meta not yet present (e.g. grid)
-        if name in self.__dir__() or "meta" not in self.__dict__:
-            super().__setattr__(name, value)
-        else:
-            self.__dict__["meta"][name] = value
-
-    def __repr__(self):
-        grid = self.grid.name if self.grid is not None else "None"
-        meta = ", " * bool(self.meta)
-        meta += ", ".join(
-            f"{k}=" + ("None" if v is None else f"'{v}'") for k, v in self.meta.items()
-        )
-        return f"Info(grid={grid}{meta})"
-
-    def as_dict(self):
-        """Returns a ``dict`` containing all metadata in this Info."""
-        return {
-            **self.meta,
-            "grid": f"{self.grid}",
-            "units": f"{self.units:~}",
-        }
diff --git a/src/finam/data/tools/__init__.py b/src/finam/data/tools/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..7a6ea7fd09011990cf71196e98b047183a45e06f
--- /dev/null
+++ b/src/finam/data/tools/__init__.py
@@ -0,0 +1,75 @@
+"""Data tools for FINAM."""
+
+from .core import (
+    assert_type,
+    check,
+    full,
+    full_like,
+    has_time_axis,
+    prepare,
+    strip_time,
+    to_datetime,
+)
+from .info import Info
+from .mask import (
+    Mask,
+    check_data_covers_domain,
+    filled,
+    from_compressed,
+    has_masked_values,
+    is_masked_array,
+    is_sub_mask,
+    mask_specified,
+    masks_compatible,
+    masks_equal,
+    to_compressed,
+    to_masked,
+)
+from .units import (
+    UNITS,
+    check_quantified,
+    clear_units_cache,
+    compatible_units,
+    equivalent_units,
+    get_dimensionality,
+    get_magnitude,
+    get_units,
+    is_quantified,
+    quantify,
+    to_units,
+)
+
+__all__ = [
+    "UNITS",
+    "Info",
+    "Mask",
+    "assert_type",
+    "check",
+    "check_data_covers_domain",
+    "check_quantified",
+    "clear_units_cache",
+    "compatible_units",
+    "equivalent_units",
+    "filled",
+    "from_compressed",
+    "full",
+    "full_like",
+    "get_dimensionality",
+    "get_magnitude",
+    "get_units",
+    "has_masked_values",
+    "has_time_axis",
+    "is_masked_array",
+    "is_quantified",
+    "is_sub_mask",
+    "mask_specified",
+    "masks_compatible",
+    "masks_equal",
+    "prepare",
+    "quantify",
+    "strip_time",
+    "to_compressed",
+    "to_datetime",
+    "to_masked",
+    "to_units",
+]
diff --git a/src/finam/data/cf_units.py b/src/finam/data/tools/cf_units.py
similarity index 100%
rename from src/finam/data/cf_units.py
rename to src/finam/data/tools/cf_units.py
diff --git a/src/finam/data/tools/core.py b/src/finam/data/tools/core.py
new file mode 100644
index 0000000000000000000000000000000000000000..b628ae6077700f514794b942ba03263e672b97a5
--- /dev/null
+++ b/src/finam/data/tools/core.py
@@ -0,0 +1,363 @@
+"""Core data tools for FINAM."""
+
+import copy
+import datetime
+
+import numpy as np
+import pandas as pd
+
+from ...errors import FinamDataError
+from .. import grid_spec
+from ..grid_base import Grid
+from .units import (
+    UNITS,
+    check_quantified,
+    compatible_units,
+    equivalent_units,
+    get_units,
+    is_quantified,
+)
+
+_BASE_DATETIME = datetime.datetime(1970, 1, 1)
+_BASE_TIME = np.datetime64("1970-01-01T00:00:00")
+_BASE_DELTA = np.timedelta64(1, "s")
+
+
+def prepare(data, info, time_entries=1, force_copy=False, report_conversion=False):
+    """
+    Prepares data in FINAM's internal transmission format.
+
+    Checks the shape of the data.
+    Checks or adds units and time dimension.
+
+    Parameters
+    ----------
+    data : arraylike
+        The input data.
+    info : Info
+        Info associated with the data.
+    time_entries : int, optional
+        Number of time slices in the data. Default 1.
+    force_copy : bool, optional
+        Forces the result to be a copy of the passed data. Default ``False``.
+
+        If not used, the result is a view of the data if no units conversion needs to be done.
+    report_conversion : bool, optional
+        If true, returns a tuple with the second element indicating the unit conversion if it was required.
+
+    Returns
+    -------
+    pint.Quantity or tuple(pint.Quantity, tuple(pint.Unit, pint.Unit) or None)
+        The prepared data as a numpy array, wrapped into a :class:`pint.Quantity`.
+
+        If ``report_conversion`` is ``True``, a tuple is returned with the second element
+        indicating the unit conversion if it was required.
+
+        The second element is ``None`` if no conversion was required,
+        and a tuple of two :class:`pint.Unit` objects otherwise.
+
+    Raises
+    ------
+    FinamDataError
+        If the data doesn't match its info.
+    """
+    units_converted = None
+    units = info.units
+    if is_quantified(data):
+        if not compatible_units(data.units, units):
+            raise FinamDataError(
+                f"Given data has incompatible units. "
+                f"Got {data.units}, expected {units}."
+            )
+        if info.is_masked and not np.ma.isarray(data.magnitude):
+            data = UNITS.Quantity(
+                np.ma.array(
+                    data=data.magnitude,
+                    mask=info.mask,
+                    shrink=False,
+                    fill_value=info.fill_value,
+                ),
+                data.units,
+            )
+        if not equivalent_units(data.units, units):
+            units_converted = data.units, units
+            data = data.to(units)
+        elif force_copy:
+            data = data.copy()
+    else:
+        if info.is_masked and not np.ma.isarray(data):
+            data = UNITS.Quantity(
+                np.ma.array(
+                    data=data,
+                    mask=info.mask,
+                    shrink=False,
+                    fill_value=info.fill_value,
+                    copy=force_copy,
+                ),
+                units,
+            )
+        # this covers masked arrays as well
+        elif isinstance(data, np.ndarray):
+            if force_copy:
+                data = data.copy()
+            data = UNITS.Quantity(data, units)
+        else:
+            if force_copy:
+                data = copy.copy(data)
+            data = UNITS.Quantity(np.asarray(data), units)
+
+    data = _check_input_shape(data, info, time_entries)
+
+    if report_conversion:
+        return data, units_converted
+    return data
+
+
+def _check_input_shape(data, info, time_entries):
+    # check correct data size
+    if isinstance(info.grid, Grid):
+        time_entries = (
+            data.shape[0]
+            if len(data.shape) == len(info.grid.data_shape) + 1
+            else time_entries
+        )
+        data_size = data.size / time_entries
+        if data_size != info.grid.data_size:
+            raise FinamDataError(
+                f"quantify: data size doesn't match grid size. "
+                f"Got {data_size}, expected {info.grid.data_size}"
+            )
+        # check shape of non-flat arrays
+        if len(data.shape) != 1:
+            if data.shape[1:] != info.grid.data_shape:
+                if data.shape == info.grid.data_shape:
+                    data = np.expand_dims(data, 0)
+                else:
+                    raise FinamDataError(
+                        f"quantify: data shape doesn't match grid shape. "
+                        f"Got {data.shape}, expected {info.grid.data_shape}"
+                    )
+        else:
+            # reshape arrays
+            if time_entries <= 1:
+                data = data.reshape(
+                    [1] + list(info.grid.data_shape), order=info.grid.order
+                )
+            else:
+                data = data.reshape(
+                    [time_entries] + list(info.grid.data_shape), order=info.grid.order
+                )
+    elif isinstance(info.grid, grid_spec.NoGrid):
+        data = _check_input_shape_no_grid(data, info, time_entries)
+    return data
+
+
+def _check_input_shape_no_grid(data, info, time_entries):
+    if len(data.shape) != info.grid.dim + 1:
+        if _no_grid_shape_valid(data.shape, info.grid):
+            data = np.expand_dims(data, 0)
+        else:
+            raise FinamDataError(
+                f"Data shape not valid. "
+                f"Got {data.shape}, expected {info.grid.data_shape}"
+            )
+    else:
+        if not _no_grid_shape_valid(data.shape[1:], info.grid):
+            raise FinamDataError(
+                f"Data shape not valid. "
+                f"Got {data.shape[1:]}, expected {info.grid.data_shape}"
+            )
+        if data.shape[0] != time_entries:
+            raise FinamDataError(
+                f"Number of time entries in data doesn't match expected number. "
+                f"Got {data.shape[0]}, expected {time_entries}"
+            )
+    return data
+
+
+def _no_grid_shape_valid(data_shape, grid):
+    if len(data_shape) != grid.dim:
+        return False
+    dshp = np.array(data_shape)
+    gshp = np.array(grid.data_shape)
+    fix_dims = gshp != -1
+    return np.all(dshp[fix_dims] == gshp[fix_dims])
+
+
+def has_time_axis(xdata, grid):
+    """
+    Check if the data array has a time axis.
+
+    Parameters
+    ----------
+    xdata : numpy.ndarray
+        The given data array.
+    grid : GridBase
+        The associated grid specification
+    Returns
+    -------
+    bool
+        Whether the data has a time axis.
+    """
+    grid_dim = None
+
+    if isinstance(grid, Grid):
+        grid_dim = len(grid.data_shape)
+    elif isinstance(grid, grid_spec.NoGrid):
+        grid_dim = grid.dim
+    else:
+        raise ValueError(
+            f"Expected type Grid or NoGrid, got {grid.__class__.__name__}."
+        )
+
+    if xdata.ndim == grid_dim:
+        return False
+
+    if xdata.ndim == grid_dim + 1:
+        return True
+
+    raise FinamDataError("Data dimension must be grid dimension or grid dimension + 1.")
+
+
+def to_datetime(date):
+    """Converts a numpy datetime64 object to a python datetime object"""
+    if np.isnan(date):
+        return pd.NaT
+
+    timestamp = (date - _BASE_TIME) / _BASE_DELTA
+
+    if timestamp < 0:
+        return _BASE_DATETIME + datetime.timedelta(seconds=timestamp)
+
+    tz = datetime.timezone.utc
+    return datetime.datetime.fromtimestamp(timestamp, tz).replace(tzinfo=None)
+
+
+def strip_time(xdata, grid):
+    """Returns a view of the data with the time dimension squeezed if there is only a single entry
+
+    Parameters
+    ----------
+    xdata : arraylike
+        Data to strip time dimension from
+    grid : GridBase
+        The associated grid specification
+
+    Returns
+    -------
+    arraylike
+        Stripped data
+
+    Raises
+    ------
+    FinamDataError
+        If the data has multiple time entries.
+    """
+    if has_time_axis(xdata, grid):
+        if xdata.shape[0] > 1:
+            raise FinamDataError(
+                "Can't strip time of a data array with multiple time entries"
+            )
+        return xdata[0, ...]
+
+    return xdata
+
+
+def full_like(xdata, value):
+    """
+    Return a new data array with the same shape, type and units as a given object.
+
+    Parameters
+    ----------
+    xdata : :class:`pint.Quantity` or :class:`numpy.ndarray`
+        The reference object input.
+    value : scalar
+        Value to fill the new object with before returning it.
+
+    Returns
+    -------
+    pint.Quantity or numpy.ndarray
+        New object with the same shape and type as other,
+        with the data filled with fill_value.
+        Units will be taken from the input if present.
+    """
+    data = np.full_like(xdata, value)
+    if is_quantified(xdata):
+        return UNITS.Quantity(data, xdata.units)
+    return data
+
+
+def full(value, info):
+    """
+    Return a new data array with units according to the given info, filled with given value.
+
+    Parameters
+    ----------
+    value : scalar
+        Value to fill the new object with before returning it.
+    info : Info
+        Info associated with the data.
+
+    Returns
+    -------
+    pint.Quantity
+        The converted data.
+    """
+    shape = info.grid.data_shape if isinstance(info.grid, Grid) else tuple()
+    return prepare(np.full([1] + list(shape), value), info)
+
+
+def check(xdata, info):
+    """
+    Check if data matches given info.
+
+    Parameters
+    ----------
+    xdata : numpy.ndarray
+        The given data array.
+    info : Info
+        Info associated with the data.
+
+    Raises
+    ------
+    FinamDataError
+        If data doesn't match given info.
+    """
+    check_quantified(xdata, "check")
+
+    if not has_time_axis(xdata, info.grid):
+        raise FinamDataError("check: given data should have a time dimension.")
+
+    _check_shape(xdata.shape[1:], info.grid)
+
+    # check units
+    if not compatible_units(info.units, xdata):
+        raise FinamDataError(
+            f"check: given data has incompatible units. "
+            f"Got {get_units(xdata)}, expected {info.units}."
+        )
+
+
+def _check_shape(shape, grid):
+    if isinstance(grid, Grid) and shape != grid.data_shape:
+        raise FinamDataError(
+            f"check: given data has wrong shape. "
+            f"Got {shape}, expected {grid.data_shape}"
+        )
+    if isinstance(grid, grid_spec.NoGrid) and len(shape) != grid.dim:
+        raise FinamDataError(
+            f"check: given data has wrong number of dimensions. "
+            f"Got {len(shape)}, expected {grid.dim}"
+        )
+
+
+def assert_type(cls, slot, obj, types):
+    """Type assertion."""
+    for t in types:
+        if isinstance(obj, t):
+            return
+    raise TypeError(
+        f"Unsupported data type for {slot} in "
+        f"{cls.__class__.__name__}: {obj.__class__.__name__}. "
+        f"Expected one of [{', '.join([tp.__name__ for tp in types])}]"
+    )
diff --git a/src/finam/data/tools/info.py b/src/finam/data/tools/info.py
new file mode 100644
index 0000000000000000000000000000000000000000..e609b4ec0b4eb7b0ec54c06fb400b468ea05db1c
--- /dev/null
+++ b/src/finam/data/tools/info.py
@@ -0,0 +1,248 @@
+"""Data info tools for FINAM."""
+
+import copy
+import datetime
+
+import numpy as np
+
+from ...errors import FinamMetaDataError
+from ..grid_base import GridBase
+from .mask import MASK_INDICATORS, Mask, mask_specified, masks_compatible, masks_equal
+from .units import UNITS, compatible_units
+
+
+def _format_mask(mask):
+    if mask_specified(mask) and mask is not np.ma.nomask:
+        return "<ndarray>"
+    if mask is np.ma.nomask:
+        return "nomask"
+    return str(mask)
+
+
+class Info:
+    """Data info containing grid specification and metadata
+
+    Parameters
+    ----------
+    time : datetime or None, optional
+        time specification, default: None
+    grid : Grid or NoGrid or None, optional
+        grid specification, default: None
+    meta : dict, optional
+        dictionary of metadata, default: None
+    mask : :any:`Mask` value or valid boolean mask for :any:`MaskedArray`, optional
+        masking specification of the data. Options:
+            * :any:`Mask.FLEX`: data can be masked or unmasked (default)
+            * :any:`Mask.NONE`: data is unmasked and given as plain numpy array
+            * valid boolean mask for MaskedArray
+    **meta_kwargs
+        additional metadata by name, will overwrite entries in ``meta``
+
+    Attributes
+    ----------
+    grid : Grid or NoGrid or None
+        grid specification
+    meta : dict
+        dictionary of metadata
+    """
+
+    def __init__(self, time=None, grid=None, meta=None, mask=Mask.FLEX, **meta_kwargs):
+        self._time = self._grid = self._mask = None
+        self.time = time
+        self.grid = grid
+        self.mask = mask
+        # set meta last (see __setattr__)
+        self.meta = meta or {}
+        self.meta.update(meta_kwargs)
+        # handle units
+        units = self.meta.get("units", "")
+        units = None if units is None else UNITS.Unit(units)
+        self.meta["units"] = units
+
+    @property
+    def time(self):
+        """datetime: current time."""
+        return self._time
+
+    @time.setter
+    def time(self, time):
+        if time is not None and not isinstance(time, datetime.datetime):
+            msg = "Time in Info must be either None or a datetime"
+            raise FinamMetaDataError(msg)
+        self._time = time
+
+    @property
+    def grid(self):
+        """Grid: data grid."""
+        return self._grid
+
+    @grid.setter
+    def grid(self, grid):
+        if grid is not None and not isinstance(grid, GridBase):
+            msg = "Grid in Info must be either None or of a sub-class of GridBase"
+            raise FinamMetaDataError(msg)
+        self._grid = grid
+
+    @property
+    def mask(self):
+        """Mask or ndarray: data mask."""
+        return self._mask
+
+    @mask.setter
+    def mask(self, mask):
+        if mask_specified(mask) and mask is not None:
+            mask = np.ma.make_mask(mask, shrink=False)
+            if (
+                self.grid is not None
+                and mask is not np.ma.nomask
+                and not np.array_equal(self.grid_shape, np.shape(mask))
+            ):
+                msg = "Mask in Info not compatible with given grid."
+                raise FinamMetaDataError(msg)
+        self._mask = mask
+
+    @property
+    def grid_shape(self):
+        """tuple: shape of the data grid."""
+        return None if self.grid is None else self.grid.data_shape
+
+    @property
+    def is_masked(self):
+        """bool: whether data is set to be masked."""
+        return mask_specified(self.mask)
+
+    @property
+    def fill_value(self):
+        """Fill value for masked data."""
+        return self.meta.get(
+            MASK_INDICATORS[0], self.meta.get(MASK_INDICATORS[1], None)
+        )
+
+    def copy(self):
+        """Copies the info object"""
+        return copy.copy(self)
+
+    def copy_with(self, use_none=True, **kwargs):
+        """Copies the info object and sets variables and meta values according to the kwargs
+
+        Parameters
+        ----------
+        use_none : bool
+            whether properties with None value should also be transferred
+        **kwargs
+            key values pairs for properties to change
+        """
+        other = Info(
+            time=self.time, grid=self.grid, meta=copy.copy(self.meta), mask=self.mask
+        )
+        for k, v in kwargs.items():
+            if k == "time":
+                if v is not None or use_none:
+                    other.time = v
+            elif k == "grid":
+                if v is not None or use_none:
+                    other.grid = v
+            elif k == "mask":
+                if v is not None or use_none:
+                    other.mask = v
+            elif k == "units":
+                if v is not None or use_none:
+                    other.meta[k] = v if v is None else UNITS.Unit(v)
+            else:
+                if v is not None or use_none:
+                    other.meta[k] = v
+
+        return other
+
+    def accepts(self, incoming, fail_info, incoming_donwstream=False):
+        """
+        Tests whether this info can accept/is compatible with an incoming info.
+
+        Tested attributes are: "grid", "mask" and "units"
+
+        Parameters
+        ----------
+        incoming : Info
+            Incoming/source info to check. This is the info from upstream.
+        fail_info : dict
+            Dictionary that will be filled with failed properties; name: (source, target).
+        incoming_donwstream : bool, optional
+            Whether the incoming info is from downstream data. Default: False
+
+        Returns
+        -------
+        bool
+            Whether the incoming info is accepted
+        """
+        if not isinstance(incoming, Info):
+            fail_info["type"] = (incoming.__class__, self.__class__)
+            return False
+
+        success = True
+        if self.grid is not None and not self.grid.compatible_with(incoming.grid):
+            if not (incoming_donwstream and incoming.grid is None):
+                fail_info["grid"] = (incoming.grid, self.grid)
+                success = False
+
+        if self.mask is not None and not masks_compatible(
+            self.mask, incoming.mask, incoming_donwstream, self.grid, incoming.grid
+        ):
+            if not (incoming_donwstream and incoming.mask is None):
+                fail_info["mask"] = (incoming.mask, self.mask)
+                success = False
+
+        u1_none = (u1 := self.units) is None
+        u2_none = (u2 := incoming.units) is None
+        if not u1_none and (u2_none or not compatible_units(u1, u2)):
+            if not (incoming_donwstream and u2_none):
+                fail_info["units"] = (u2, u1)
+                success = False
+
+        return success
+
+    def __copy__(self):
+        """Shallow copy of the info"""
+        return Info(time=self.time, grid=self.grid, meta=self.meta, mask=self.mask)
+
+    def __eq__(self, other):
+        """Equality check for two infos
+
+        Ignores time.
+        """
+        if not isinstance(other, Info):
+            return False
+        return (
+            self.grid == other.grid
+            and self.meta == other.meta
+            and masks_equal(self.mask, other.mask, self.grid, other.grid)
+        )
+
+    def __getattr__(self, name):
+        # only called if attribute is not present in class
+        if "meta" in self.__dict__ and name in self.meta:
+            return self.meta[name]
+        raise AttributeError(f"'Info' object has no attribute '{name}'")
+
+    def __setattr__(self, name, value):
+        # first check if attribute present or meta not yet present (e.g. grid)
+        if name in self.__dir__() or "meta" not in self.__dict__:
+            super().__setattr__(name, value)
+        else:
+            self.__dict__["meta"][name] = value
+
+    def __repr__(self):
+        grid = self.grid.name if self.grid is not None else "None"
+        meta = ", " * bool(self.meta)
+        meta += ", ".join(
+            f"{k}=" + ("None" if v is None else f"'{v}'") for k, v in self.meta.items()
+        )
+        return f"Info(grid={grid}, mask={_format_mask(self.mask)}{meta})"
+
+    def as_dict(self):
+        """Returns a ``dict`` containing all metadata in this Info."""
+        return {
+            **self.meta,
+            "mask": _format_mask(self.mask),
+            "grid": f"{self.grid}",
+            "units": f"{self.units:~}",
+        }
diff --git a/src/finam/data/tools/mask.py b/src/finam/data/tools/mask.py
new file mode 100644
index 0000000000000000000000000000000000000000..f7eb3663cdbd650edef477264d52f9ae9fe9b2cf
--- /dev/null
+++ b/src/finam/data/tools/mask.py
@@ -0,0 +1,378 @@
+"""Mask tools for FINAM."""
+
+from enum import Enum
+
+import numpy as np
+
+from ...errors import FinamDataError
+from .units import UNITS, is_quantified, quantify
+
+MASK_INDICATORS = ["_FillValue", "missing_value"]
+
+
+class Mask(Enum):
+    """Mask settings for Info."""
+
+    FLEX = 0
+    """Data can be masked or unmasked."""
+    NONE = 1
+    """Data is expected to be unmasked and given as plain numpy arrays."""
+
+
+def is_masked_array(data):
+    """
+    Check if data is a masked array.
+
+    Parameters
+    ----------
+    data : Any
+        The given data array.
+
+    Returns
+    -------
+    bool
+        Whether the data is a MaskedArray.
+    """
+    if is_quantified(data):
+        return np.ma.isMaskedArray(data.magnitude)
+    return np.ma.isMaskedArray(data)
+
+
+def has_masked_values(data):
+    """
+    Determine whether the data has masked values.
+
+    Parameters
+    ----------
+    data : Any
+        The given data array.
+
+    Returns
+    -------
+    bool
+        Whether the data is a MaskedArray and has any masked values.
+    """
+    return np.ma.is_masked(data)
+
+
+def filled(data, fill_value=None):
+    """
+    Return input as an array with masked data replaced by a fill value.
+
+    This routine respects quantified and un-quantified data.
+
+    Parameters
+    ----------
+    data : :class:`pint.Quantity` or :class:`numpy.ndarray` or :class:`numpy.ma.MaskedArray`
+        The reference object input.
+    fill_value : array_like, optional
+        The value to use for invalid entries. Can be scalar or non-scalar.
+        If non-scalar, the resulting ndarray must be broadcastable over
+        input array. Default is None, in which case, the `fill_value`
+        attribute of the array is used instead.
+
+    Returns
+    -------
+    pint.Quantity or numpy.ndarray
+        New object with the same shape and type as other,
+        with the data filled with fill_value.
+        Units will be taken from the input if present.
+
+    See also
+    --------
+    :func:`numpy.ma.filled`:
+        Numpy routine doing the same.
+    """
+    if not is_masked_array(data):
+        return data
+    if is_quantified(data):
+        return UNITS.Quantity(data.magnitude.filled(fill_value), data.units)
+    return data.filled(fill_value)
+
+
+def to_masked(data, **kwargs):
+    """
+    Return a masked version of the data.
+
+    Parameters
+    ----------
+    data : :class:`pint.Quantity` or :class:`numpy.ndarray` or :class:`numpy.ma.MaskedArray`
+        The reference object input.
+    **kwargs
+        keyword arguments forwarded to :any:`numpy.ma.array`
+
+    Returns
+    -------
+    pint.Quantity or numpy.ma.MaskedArray
+        New object with the same shape and type but as a masked array.
+        Units will be taken from the input if present.
+    """
+    if is_masked_array(data) and not kwargs:
+        return data
+    if is_quantified(data):
+        return UNITS.Quantity(np.ma.array(data.magnitude, **kwargs), data.units)
+    return np.ma.array(data, **kwargs)
+
+
+def to_compressed(xdata, order="C", mask=None):
+    """
+    Return all the non-masked data as a 1-D array respecting the given array order.
+
+    Parameters
+    ----------
+    xdata : :class:`pint.Quantity` or :class:`numpy.ndarray` or :class:`numpy.ma.MaskedArray`
+        The reference object input.
+    order : str
+        order argument for :any:`numpy.ravel`
+    mask : :any:`Mask` value or valid boolean mask for :any:`MaskedArray`, optional
+        mask to use when data is not masked already
+
+    Returns
+    -------
+    :class:`pint.Quantity` or :class:`numpy.ndarray` or :class:`numpy.ma.MaskedArray`
+        New object with the flat shape and only unmasked data but the same type as input.
+        Units will be taken from the input if present.
+
+    See also
+    --------
+    :func:`numpy.ma.compressed`:
+        Numpy routine doing the same but only for C-order.
+    """
+    is_masked = is_masked_array(xdata)
+    if is_masked or (mask is not None and mask_specified(mask)):
+        data = np.ravel(xdata.data if is_masked else xdata, order)
+        mask = xdata.mask if is_masked else mask
+        if mask is not np.ma.nomask:
+            data = data.compress(np.logical_not(np.ravel(mask, order)))
+        return quantify(data, xdata.units) if is_quantified(xdata) else data
+    return np.reshape(xdata, -1, order=order)
+
+
+def from_compressed(xdata, shape, order="C", mask=None, **kwargs):
+    """
+    Fill a (masked) array following a given mask or shape with the provided data.
+
+    This will only create a masked array if kwargs are given (especially a mask).
+    Otherwise this is simply reshaping the given data.
+    Filling is performed in the given array order.
+
+    Parameters
+    ----------
+    xdata : :class:`pint.Quantity` or :class:`numpy.ndarray` or :class:`numpy.ma.MaskedArray`
+        The reference object input.
+    shape : int or tuple of int
+        shape argument for :any:`numpy.reshape`
+    order : str
+        order argument for :any:`numpy.reshape`
+    mask : :any:`Mask` value or valid boolean mask for :any:`MaskedArray`
+        mask to use
+    **kwargs
+        keyword arguments forwarded to :any:`numpy.ma.array`
+
+    Returns
+    -------
+    :class:`pint.Quantity` or :class:`numpy.ndarray` or :class:`numpy.ma.MaskedArray`
+        New object with the desired shape and same type as input.
+        Units will be taken from the input if present.
+        Will only be a masked array if kwargs are given.
+
+    See also
+    --------
+    to_compressed:
+        Inverse operation.
+    :any:`numpy.ma.array`:
+        Routine consuming kwargs to create a masked array.
+    :any:`numpy.reshape`:
+        Equivalent routine if no mask is provided.
+
+    Notes
+    -----
+    If both `mask` and `shape` are given, they need to match in size.
+    """
+    if mask is None or mask is np.ma.nomask or not mask_specified(mask):
+        if kwargs and mask is Mask.NONE:
+            msg = "from_compressed: Can't create masked array with mask=Mask.NONE"
+            raise FinamDataError(msg)
+        data = np.reshape(xdata, shape, order=order)
+        return to_masked(data, **kwargs) if kwargs or mask is np.ma.nomask else data
+    if is_quantified(xdata):
+        # pylint: disable-next=unexpected-keyword-arg
+        data = quantify(np.empty_like(xdata, shape=np.prod(shape)), xdata.units)
+    else:
+        # pylint: disable-next=unexpected-keyword-arg
+        data = np.empty_like(xdata, shape=np.prod(shape))
+    data[np.logical_not(np.ravel(mask, order=order))] = xdata
+    return to_masked(np.reshape(data, shape, order=order), mask=mask, **kwargs)
+
+
+def check_data_covers_domain(data, mask=None):
+    """
+    Check if the given data covers a domain defined by a mask on the same grid.
+
+    Parameters
+    ----------
+    data : Any
+        The given data array for a single time-step.
+    mask : None or bool or array of bool, optional
+        Mask defining the target domain on the same grid as the data,
+        by default None
+
+    Returns
+    -------
+    bool
+        Whether the data covers the desired domain.
+
+    Raises
+    ------
+    ValueError
+        When mask is given and mask and data don't share the same shape.
+    """
+    if not _is_single_mask_value(mask) and np.shape(mask) != np.shape(data):
+        raise ValueError("check_data_covers_domain: mask and data shape differ.")
+    if not has_masked_values(data):
+        return True
+    if _is_single_mask_value(mask):
+        return bool(mask)
+    return np.all(mask[data.mask])
+
+
+def _is_single_mask_value(mask):
+    return mask is None or mask is np.ma.nomask or mask is False or mask is True
+
+
+def masks_compatible(
+    this, incoming, incoming_donwstream, this_grid=None, incoming_grid=None
+):
+    """
+    Check if an incoming mask is compatible with a given mask.
+
+    Parameters
+    ----------
+    this : :any:`Mask` value or valid boolean mask for :any:`MaskedArray` or None
+        mask specification to check against
+    incoming : :any:`Mask` value or valid boolean mask for :any:`MaskedArray` or None
+        incoming mask to check for compatibility
+    incoming_donwstream : bool
+        Whether the incoming mask is from downstream data
+    this_grid : Grid or NoGrid or None, optional
+        grid for first mask (to check shape and value equality)
+    incoming_grid : Grid or NoGrid or None, optional
+        grid for second mask (to check shape and value equality)
+
+    Returns
+    -------
+    bool
+        mask compatibility
+    """
+    if incoming_donwstream:
+        upstream, downstream = this, incoming
+        up_grid, down_grid = this_grid, incoming_grid
+    else:
+        upstream, downstream = incoming, this
+        up_grid, down_grid = incoming_grid, this_grid
+    # None is incompatible
+    if upstream is None:
+        return False
+    # Mask.FLEX accepts anything, Mask.NONE only Mask.NONE
+    if not mask_specified(downstream):
+        if not mask_specified(upstream):
+            return downstream == Mask.FLEX or upstream == Mask.NONE
+        return downstream == Mask.FLEX
+    # if mask is specified, upstream mask must also be specified
+    if not mask_specified(upstream):
+        return False
+    # if both mask given, compare them
+    return masks_equal(downstream, upstream, down_grid, up_grid)
+
+
+def masks_equal(this, other, this_grid=None, other_grid=None):
+    """
+    Check two masks for equality.
+
+    Parameters
+    ----------
+    this : :any:`Mask` value or valid boolean mask for :any:`MaskedArray` or None
+        first mask
+    other : :any:`Mask` value or valid boolean mask for :any:`MaskedArray` or None
+        second mask
+    this_grid : Grid or NoGrid or None, optional
+        grid for first mask (to check shape and value equality)
+    other_grid : Grid or NoGrid or None, optional
+        grid for second mask (to check shape and value equality)
+
+    Returns
+    -------
+    bool
+        mask equality
+    """
+    if this is None and other is None:
+        return True
+    if not mask_specified(this) and not mask_specified(other):
+        return this == other
+    # need a valid mask at this point
+    if not np.ma.is_mask(this) or not np.ma.is_mask(other):
+        return False
+    # special treatment of "nomask"
+    if this is np.ma.nomask:
+        if other is np.ma.nomask:
+            return True
+        return not np.any(other)
+    if other is np.ma.nomask:
+        return not np.any(this)
+    # compare masks
+    if not np.ndim(this) == np.ndim(other):
+        return False
+    # mask shape is grid specific (reversed axes, decreasing axis)
+    if this_grid is None or other_grid is None:
+        return True
+    this = this_grid.to_canonical(this)
+    other = other_grid.to_canonical(other)
+    if not np.all(np.shape(this) == np.shape(other)):
+        return False
+    return np.all(this == other)
+
+
+def is_sub_mask(mask, submask):
+    """
+    Check for a sub-mask.
+
+    Parameters
+    ----------
+    mask : arraylike
+        The original mask.
+    submask : arraylike
+        The potential submask.
+
+    Returns
+    -------
+    bool
+        Whether 'submask' is a sub-mask of 'mask'.
+    """
+    if not np.ma.is_mask(mask) or not np.ma.is_mask(submask):
+        return False
+    if mask is np.ma.nomask:
+        return True
+    if submask is np.ma.nomask:
+        return not np.any(mask)
+    if not np.ndim(mask) == np.ndim(submask):
+        return False
+    if not np.all(np.shape(mask) == np.shape(submask)):
+        return False
+    return np.all(submask[mask])
+
+
+def mask_specified(mask):
+    """
+    Determine whether given mask selection indicates a masked array.
+
+    Parameters
+    ----------
+    mask : :any:`Mask` value or valid boolean mask for :any:`MaskedArray`
+        mask to check
+
+    Returns
+    -------
+    bool
+        False if mask is Mask.FLEX or Mask.NONE, True otherwise
+    """
+    return not any(mask is val for val in list(Mask))
diff --git a/src/finam/data/tools/units.py b/src/finam/data/tools/units.py
new file mode 100644
index 0000000000000000000000000000000000000000..9cbd00e55f23a90a00ccc779b1e9cc24ce4e92a0
--- /dev/null
+++ b/src/finam/data/tools/units.py
@@ -0,0 +1,249 @@
+"""Units tools for FINAM."""
+
+import numpy as np
+import pint
+
+from ...errors import FinamDataError
+
+# pylint: disable-next=unused-import
+from . import cf_units
+
+# set default format to cf-convention for pint.dequantify
+# some problems with degree_Celsius and similar here
+pint.application_registry.default_format = "cf"
+UNITS = pint.application_registry
+
+_UNIT_PAIRS_CACHE = {}
+
+
+def get_magnitude(xdata):
+    """
+    Get magnitude of given data.
+
+    Parameters
+    ----------
+    xdata : pint.Quantity
+        The given data array.
+
+    Returns
+    -------
+    numpy.ndarray
+        Magnitude of given data.
+    """
+    check_quantified(xdata, "get_magnitude")
+    return xdata.magnitude
+
+
+def get_units(xdata):
+    """
+    Get units of the data.
+
+    Parameters
+    ----------
+    xdata : DataArray
+        The given data array.
+
+    Returns
+    -------
+    pint.Unit
+        Units of the data.
+    """
+    check_quantified(xdata, "get_units")
+    return xdata.units
+
+
+def get_dimensionality(xdata):
+    """
+    Get dimensionality of the data.
+
+    Parameters
+    ----------
+    xdata : pint.Quantity
+        The given data array.
+
+    Returns
+    -------
+    pint.UnitsContainer
+        Dimensionality of the data.
+    """
+    check_quantified(xdata, "get_dimensionality")
+    return xdata.dimensionality
+
+
+def to_units(xdata, units, check_equivalent=False, report_conversion=False):
+    """
+    Convert data to given units.
+
+    Parameters
+    ----------
+    xdata : pint.Quantity
+        The given data array.
+    units : str or pint.Unit
+        Desired units.
+    check_equivalent : bool, optional
+        Checks for equivalent units and simply re-assigns if possible.
+    report_conversion : bool, optional
+        If true, returns a tuple with the second element indicating the unit conversion if it was required.
+
+    Returns
+    -------
+    pint.Quantity or tuple(pint.Quantity, tuple(pint.Unit, pint.Unit) or None)
+        The converted data.
+
+        If ``report_conversion`` is ``True``, a tuple is returned with the second element
+        indicating the unit conversion if it was required.
+
+        The second element is ``None`` if no conversion was required,
+        and a tuple of two :class:`pint.Unit` objects otherwise.
+    """
+    check_quantified(xdata, "to_units")
+    units = _get_pint_units(units)
+    units2 = xdata.units
+    conversion = None
+    if units != units2:
+        if check_equivalent and equivalent_units(units, units2):
+            xdata = UNITS.Quantity(xdata.magnitude, units)
+        else:
+            xdata = xdata.to(units)
+            conversion = units2, units
+
+    if report_conversion:
+        return xdata, conversion
+    return xdata
+
+
+def is_quantified(xdata):
+    """
+    Check if data is a quantified DataArray.
+
+    Parameters
+    ----------
+    xdata : Any
+        The given data array.
+
+    Returns
+    -------
+    bool
+        Whether the data is a quantified DataArray.
+    """
+    return isinstance(xdata, pint.Quantity)
+
+
+def quantify(xdata, units=None):
+    """
+    Quantifies data.
+
+    Parameters
+    ----------
+    xdata : Any
+        The given data array.
+    units : UnitLike or Quantified or None, optional
+        units to use, dimensionless by default
+
+    Returns
+    -------
+    pint.Quantity
+        The quantified array.
+    """
+    if is_quantified(xdata):
+        raise FinamDataError(f"Data is already quantified with units '{xdata.units}'")
+    return UNITS.Quantity(xdata, _get_pint_units(units or UNITS.dimensionless))
+
+
+def check_quantified(xdata, routine="check_quantified"):
+    """
+    Check if data is a quantified DataArray.
+
+    Parameters
+    ----------
+    xdata : numpy.ndarray
+        The given data array.
+    routine : str, optional
+        Name of the routine to show in the Error, by default "check_quantified"
+
+    Raises
+    ------
+    FinamDataError
+        If the array is not a quantified DataArray.
+    """
+    if not is_quantified(xdata):
+        raise FinamDataError(f"{routine}: given data is not quantified.")
+
+
+def _get_pint_units(var):
+    if var is None:
+        raise FinamDataError("Can't extract units from 'None'.")
+
+    if isinstance(var, pint.Unit):
+        return var
+
+    if isinstance(var, pint.Quantity):
+        return var.units or UNITS.dimensionless
+
+    return UNITS.Unit(var)
+
+
+def compatible_units(unit1, unit2):
+    """
+    Checks if two units are compatible/convertible.
+
+    Parameters
+    ----------
+    unit1 : UnitLike or Quantified
+        First unit to compare.
+    unit2 : UnitLike or Quantified
+        Second unit to compare.
+
+    Returns
+    -------
+    bool
+        Unit compatibility.
+    """
+    unit1, unit2 = _get_pint_units(unit1), _get_pint_units(unit2)
+    comp_equiv = _UNIT_PAIRS_CACHE.get((unit1, unit2))
+    if comp_equiv is None:
+        comp_equiv = _cache_units(unit1, unit2)
+
+    return comp_equiv[0]
+
+
+def equivalent_units(unit1, unit2):
+    """
+    Check if two given units are equivalent.
+
+    Parameters
+    ----------
+    unit1 : UnitLike or Quantified
+        First unit to compare.
+    unit2 : UnitLike or Quantified
+        Second unit to compare.
+
+    Returns
+    -------
+    bool
+        Unit equivalence.
+    """
+    unit1, unit2 = _get_pint_units(unit1), _get_pint_units(unit2)
+    comp_equiv = _UNIT_PAIRS_CACHE.get((unit1, unit2))
+    if comp_equiv is None:
+        comp_equiv = _cache_units(unit1, unit2)
+
+    return comp_equiv[1]
+
+
+def _cache_units(unit1, unit2):
+    equiv = False
+    compat = False
+    try:
+        equiv = np.isclose((1.0 * unit1).to(unit2).magnitude, 1.0)
+        compat = True
+    except pint.errors.DimensionalityError:
+        pass
+
+    _UNIT_PAIRS_CACHE[(unit1, unit2)] = compat, equiv
+    return compat, equiv
+
+
+def clear_units_cache():
+    """Clears the units cache"""
+    _UNIT_PAIRS_CACHE.clear()
diff --git a/src/finam/sdk/adapter.py b/src/finam/sdk/adapter.py
index a3f6e988e766b9667ee86daf945567594a44ddfa..0d535dc4d4f3ed0752b87a5369493ce7d40479a6 100644
--- a/src/finam/sdk/adapter.py
+++ b/src/finam/sdk/adapter.py
@@ -1,6 +1,7 @@
 """
 Abstract base implementation for adapters.
 """
+
 import logging
 from abc import ABC
 from datetime import datetime
@@ -57,6 +58,12 @@ class Adapter(IAdapter, Input, Output, ABC):
     def info(self):
         return self._output_info
 
+    @final
+    @property
+    def in_info(self):
+        """Info from connected source."""
+        return self._input_info
+
     @property
     def needs_pull(self):
         """bool: if the adapter needs pull."""
diff --git a/src/finam/sdk/output.py b/src/finam/sdk/output.py
index 92658a0cfdc3dfba96d5180e252a98e833ffd532..4c10705f2c9dbb67404dce9bde94626ebbdaccc3 100644
--- a/src/finam/sdk/output.py
+++ b/src/finam/sdk/output.py
@@ -1,6 +1,7 @@
 """
 Implementations of IOutput
 """
+
 import logging
 import os
 from datetime import datetime
@@ -382,7 +383,7 @@ class Output(IOutput, Loggable):
             raise FinamNoDataError("No data info available")
 
         fail_info = {}
-        if not self._output_info.accepts(info, fail_info, ignore_none=True):
+        if not self._output_info.accepts(info, fail_info, incoming_donwstream=True):
             fail_info = "\n".join(
                 [
                     f"{name} - got {got}, expected {exp}"
diff --git a/tests/adapters/square_5x4/cells.txt b/tests/adapters/square_5x4/cells.txt
new file mode 100644
index 0000000000000000000000000000000000000000..68fac1a8ff8065978e47891f3ae18582b5cc55d9
--- /dev/null
+++ b/tests/adapters/square_5x4/cells.txt
@@ -0,0 +1,198 @@
+90 85 66
+89 39 77
+106 39 89
+97 94 44
+108 44 94
+87 63 66
+103 62 72
+87 69 63
+102 66 85
+100 85 90
+76 14 13
+104 72 62
+103 72 71
+112 76 13
+102 87 66
+97 79 94
+96 93 46
+108 68 91
+117 93 96
+104 89 77
+101 98 84
+109 91 68
+108 91 44
+101 69 87
+116 73 88
+105 77 39
+116 88 58
+95 67 94
+48 42 33
+74 47 11
+75 47 74
+75 74 72
+73 71 70
+73 70 36
+78 43 17
+76 47 75
+36 8 7
+74 11 10
+70 8 36
+77 76 75
+70 40 9
+79 34 42
+71 40 70
+47 12 11
+40 10 9
+42 34 33
+70 9 8
+79 35 34
+91 7 6
+41 26 25
+80 24 37
+83 56 53
+74 10 40
+82 26 41
+84 18 43
+80 41 25
+43 18 17
+85 80 37
+72 40 71
+81 41 80
+95 94 42
+84 19 18
+48 33 32
+59 58 57
+85 81 80
+61 59 60
+78 17 16
+82 27 26
+57 55 56
+95 42 48
+80 25 24
+91 36 7
+60 59 57
+37 24 23
+62 59 61
+58 55 57
+67 52 54
+89 62 61
+88 54 55
+68 67 54
+88 68 54
+88 55 58
+86 37 23
+56 55 53
+63 61 60
+55 54 53
+86 23 22
+83 53 51
+90 60 57
+74 40 72
+39 16 15
+90 57 56
+89 61 65
+54 52 53
+87 86 45
+90 66 60
+45 22 21
+66 63 60
+65 61 63
+53 52 51
+86 22 45
+83 51 64
+38 32 31
+91 6 44
+64 51 50
+52 49 51
+69 65 63
+44 6 5
+92 31 30
+46 30 29
+50 49 38
+78 16 39
+92 38 31
+51 49 50
+95 52 67
+92 30 46
+93 92 46
+92 50 38
+48 32 38
+49 48 38
+93 50 92
+94 79 42
+95 49 52
+93 64 50
+95 48 49
+115 101 84
+100 81 85
+96 28 27
+97 5 4
+98 21 20
+99 41 81
+100 56 83
+101 87 45
+102 85 37
+102 86 87
+103 58 59
+96 27 82
+97 44 5
+98 45 21
+104 75 72
+105 39 15
+108 94 67
+109 36 91
+99 82 41
+104 62 89
+110 29 3
+111 4 0
+111 0 35
+112 13 1
+112 1 12
+112 47 76
+113 2 19
+113 20 2
+110 3 28
+100 90 56
+102 37 86
+103 59 62
+117 64 93
+114 100 83
+117 96 82
+105 14 76
+109 88 73
+101 45 98
+115 84 43
+104 77 75
+109 68 88
+106 89 65
+107 65 69
+107 106 65
+105 15 14
+107 43 78
+105 76 77
+106 78 39
+107 78 106
+117 99 64
+108 67 68
+114 83 64
+114 81 100
+109 73 36
+110 28 96
+113 98 20
+111 97 4
+111 35 79
+112 12 47
+113 19 84
+110 46 29
+116 71 73
+116 58 103
+114 99 81
+114 64 99
+110 96 46
+113 84 98
+111 79 97
+117 82 99
+116 103 71
+115 69 101
+115 107 69
+115 43 107
diff --git a/tests/adapters/square_5x4/points.txt b/tests/adapters/square_5x4/points.txt
new file mode 100644
index 0000000000000000000000000000000000000000..5d49f1035f263ddd2a80406f6824ca3fe547649d
--- /dev/null
+++ b/tests/adapters/square_5x4/points.txt
@@ -0,0 +1,118 @@
+0.000000000000000000e+00 0.000000000000000000e+00
+5.000000000000000000e+00 0.000000000000000000e+00
+5.000000000000000000e+00 4.000000000000000000e+00
+0.000000000000000000e+00 4.000000000000000000e+00
+4.999999999995947131e-01 0.000000000000000000e+00
+9.999999999984133803e-01 0.000000000000000000e+00
+1.499999999996955102e+00 0.000000000000000000e+00
+1.999999999995512923e+00 0.000000000000000000e+00
+2.499999999996204814e+00 0.000000000000000000e+00
+2.999999999996965983e+00 0.000000000000000000e+00
+3.499999999997728040e+00 0.000000000000000000e+00
+3.999999999998489209e+00 0.000000000000000000e+00
+4.499999999999245048e+00 0.000000000000000000e+00
+5.000000000000000000e+00 4.999999999990952237e-01
+5.000000000000000000e+00 9.999999999976482146e-01
+5.000000000000000000e+00 1.499999999996189937e+00
+5.000000000000000000e+00 1.999999999994775957e+00
+5.000000000000000000e+00 2.499999999996049826e+00
+5.000000000000000000e+00 2.999999999997366995e+00
+5.000000000000000000e+00 3.499999999998682831e+00
+4.499999999999999112e+00 4.000000000000000000e+00
+3.999999999999997780e+00 4.000000000000000000e+00
+3.499999999999996891e+00 4.000000000000000000e+00
+2.999999999999996003e+00 4.000000000000000000e+00
+2.499999999999995115e+00 4.000000000000000000e+00
+1.999999999999994005e+00 4.000000000000000000e+00
+1.499999999999992006e+00 4.000000000000000000e+00
+9.999999999999911182e-01 4.000000000000000000e+00
+4.999999999999956146e-01 4.000000000000000000e+00
+0.000000000000000000e+00 3.499999999998003819e+00
+0.000000000000000000e+00 3.000000000001386002e+00
+0.000000000000000000e+00 2.500000000004854783e+00
+0.000000000000000000e+00 2.000000000008236967e+00
+0.000000000000000000e+00 1.500000000006241896e+00
+0.000000000000000000e+00 1.000000000004162004e+00
+0.000000000000000000e+00 5.000000000020810020e-01
+2.265798546853971196e+00 4.238914066109839007e-01
+2.749999999999935163e+00 3.577350269190534782e+00
+4.330127018896737789e-01 2.250000000006775913e+00
+4.515887990308685040e+00 1.721291157952514972e+00
+3.249999999997145839e+00 4.330127018927403815e-01
+1.749999999999994005e+00 3.573329484275529211e+00
+4.330127018913362269e-01 1.250000000006348033e+00
+4.599679368559095316e+00 2.734037287364882829e+00
+1.206637172643693967e+00 4.481480624790066258e-01
+3.745775399958263030e+00 3.578703610545325819e+00
+4.233860225254563736e-01 3.247881315359677057e+00
+4.230662432701146614e+00 4.218481522081570234e-01
+4.330127018898574098e-01 1.750000000008703038e+00
+8.660254037804282490e-01 2.000000000008117063e+00
+8.660254037794199444e-01 2.500000000006437961e+00
+1.299038105671252108e+00 2.250000000007863044e+00
+1.299038105671842080e+00 1.750000000008790080e+00
+1.732050807563251027e+00 2.000000000008578915e+00
+1.732050807563606964e+00 1.500000000008993917e+00
+2.165063509455331214e+00 1.750000000009032108e+00
+2.168763921158598951e+00 2.243590698928989102e+00
+2.598692946631000034e+00 1.998931783162479991e+00
+2.625210852851048937e+00 1.535103350557095903e+00
+3.033703670897030857e+00 1.753920133713432028e+00
+3.031627495395729177e+00 2.250475319485361059e+00
+3.464627171766605951e+00 2.000732575539676095e+00
+3.464101615131112144e+00 1.500000000010194068e+00
+3.470262035740892870e+00 2.522530415211392985e+00
+1.305354910972956084e+00 2.788748402221191025e+00
+3.897114317022456031e+00 2.250000000009737100e+00
+3.031088913238387139e+00 2.750000000008742784e+00
+1.307411517935652912e+00 1.264503175480730990e+00
+1.733099282557412968e+00 1.015972093721750902e+00
+3.897114317022187802e+00 2.750000000009475087e+00
+2.752633091139478960e+00 4.314924860121975847e-01
+2.965664888242407038e+00 8.430407192711438791e-01
+3.473466298134646912e+00 9.485033022443511985e-01
+2.495283483432777949e+00 8.191813640038545508e-01
+3.749999999997362998e+00 4.330127018933099259e-01
+3.999999999998494982e+00 8.660254037854584475e-01
+4.539957219767416952e+00 8.147052950646799241e-01
+4.249999999998625100e+00 1.299038105677188915e+00
+4.566987298108965199e+00 2.249999999994134026e+00
+4.233860225288407220e-01 7.521186846436920259e-01
+2.249999999999953815e+00 3.569771490982977191e+00
+1.986017662442111087e+00 3.172602692073807962e+00
+1.278337285646486077e+00 3.591790500907492856e+00
+1.736885620003159048e+00 2.478141994818935157e+00
+4.527193881727244573e+00 3.220499339533433147e+00
+2.543206966671962999e+00 3.068203009786670155e+00
+3.249295899993029924e+00 3.581030149777149951e+00
+3.484672574077193108e+00 3.081218998192642999e+00
+2.224749828680767205e+00 1.226702911488599002e+00
+3.949238975830291043e+00 1.718453006844590014e+00
+2.598076211346683095e+00 2.500000000008701928e+00
+1.735657837491481947e+00 5.051897496787564057e-01
+4.314082553290368161e-01 2.749646885897047088e+00
+8.243663844143013231e-01 3.015277068572050023e+00
+9.086100644094758927e-01 9.243779422283027181e-01
+8.745184159297654247e-01 1.489813519623498062e+00
+7.910885367730781104e-01 3.530452272734196928e+00
+7.456552845443471522e-01 4.161816644597777226e-01
+4.234352555081294156e+00 3.603882322562967921e+00
+1.532342419179892001e+00 3.204280363629865125e+00
+2.126965062766455805e+00 2.722917844972307044e+00
+3.999171452587594988e+00 3.190116389084741044e+00
+3.008323479140071033e+00 3.207158181768441807e+00
+3.063606654562874176e+00 1.306322380129556038e+00
+3.820890929013136983e+00 1.258109208728056982e+00
+4.665063509458925672e+00 1.250000000000385914e+00
+4.232125541608215968e+00 2.032051323961315159e+00
+4.232050807571533291e+00 2.499999999993876898e+00
+1.378283175007543049e+00 8.316382047177097725e-01
+2.094791281142887041e+00 8.112976320968601218e-01
+3.660254037837317176e-01 3.633974596215732156e+00
+3.660254037858404197e-01 3.660254037865067200e-01
+4.633974596214726738e+00 3.660254037848266750e-01
+4.633974596214662789e+00 3.633974596214311958e+00
+1.728839994976222982e+00 2.874968674216736186e+00
+4.251041965493531194e+00 2.878930603197281890e+00
+2.690002125042068037e+00 1.148726086499350929e+00
+1.146297907397342941e+00 3.226109721612958836e+00
diff --git a/tests/adapters/square_5x4/types.txt b/tests/adapters/square_5x4/types.txt
new file mode 100644
index 0000000000000000000000000000000000000000..553cd6b0ade2d360961a624f8fae5b4df3f191b1
--- /dev/null
+++ b/tests/adapters/square_5x4/types.txt
@@ -0,0 +1,198 @@
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
diff --git a/tests/adapters/test_regrid.py b/tests/adapters/test_regrid.py
index 54d05726dd66bcfc5eba2c086bb1d989e942df4e..0854ca880b969cecd25ebd635d119087d536136c 100644
--- a/tests/adapters/test_regrid.py
+++ b/tests/adapters/test_regrid.py
@@ -1,6 +1,7 @@
 """
 Unit tests for data info propagation.
 """
+
 import unittest
 from datetime import datetime, timedelta
 
@@ -10,6 +11,7 @@ from finam import (
     UNITS,
     CellType,
     Composition,
+    FinamDataError,
     FinamMetaDataError,
     Info,
     Location,
@@ -110,7 +112,7 @@ class TestRegrid(unittest.TestCase):
 
         (source.outputs["Output"] >> RegridNearest() >> sink.inputs["Input"])
 
-        with self.assertRaises(NotImplementedError):
+        with self.assertRaises(FinamDataError):
             composition.connect()
 
     def test_regrid_nearest_crs(self):
@@ -248,7 +250,7 @@ class TestRegrid(unittest.TestCase):
             >> sink.inputs["Input"]
         )
 
-        with self.assertRaises(NotImplementedError):
+        with self.assertRaises(FinamDataError):
             composition.connect()
 
     def test_regrid_linear_crs(self):
@@ -533,3 +535,7 @@ class TestRegrid(unittest.TestCase):
         self.assertEqual(sink.data["Input"][0, 0], 1.0 * UNITS.meter)
         self.assertEqual(sink.data["Input"][0, 1], 0.5 * UNITS.meter)
         self.assertEqual(sink.data["Input"][0, 9], 0.5 * UNITS.meter)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/adapters/test_regrid_mask.py b/tests/adapters/test_regrid_mask.py
new file mode 100644
index 0000000000000000000000000000000000000000..42eedc1b27088d7ec2d4c8a131c12acfec8a33e9
--- /dev/null
+++ b/tests/adapters/test_regrid_mask.py
@@ -0,0 +1,154 @@
+"""
+Unit tests for regridding with masked data.
+"""
+
+import unittest
+from pathlib import Path
+
+import numpy as np
+from numpy.testing import assert_array_equal
+
+from finam import Composition, FinamDataError, Info, Mask, UniformGrid, UnstructuredGrid
+from finam import data as fdata
+from finam.adapters.regrid import RegridLinear, RegridNearest
+from finam.modules import StaticSimplexNoise, debug
+
+
+def get_mask(points, rad=1.5):
+    return (points[:, 0] - 2.5) ** 2 + (points[:, 1] - 2) ** 2 > rad**2
+
+
+class TestRegridMask(unittest.TestCase):
+    @classmethod
+    def setUpClass(self):
+        here = Path(__file__).parent
+        points = np.loadtxt(here / "square_5x4" / "points.txt", dtype=float)
+        cells = np.loadtxt(here / "square_5x4" / "cells.txt", dtype=int)
+        types = np.loadtxt(here / "square_5x4" / "types.txt", dtype=int)
+
+        self.in_grid = UnstructuredGrid(points, cells, types)
+        self.out_grid = UniformGrid((25, 20), spacing=(0.2, 0.2))
+
+        self.in_mask = get_mask(self.in_grid.cell_centers, rad=1.5)
+        omask = get_mask(self.out_grid.cell_centers, rad=2.5)
+        self.out_mask = fdata.from_compressed(
+            omask, self.out_grid.data_shape, self.out_grid.order
+        )
+
+    def test_regrid_nearest_out_mask(self):
+        in_info = Info(grid=self.in_grid, units="", mask=self.in_mask)
+        source = StaticSimplexNoise(in_info, 0.15, 3, 0.5)
+        sink = debug.DebugPushConsumer({"Input": Info()})
+        regrid = RegridNearest(out_grid=self.out_grid, out_mask=self.out_mask)
+        composition = Composition([source, sink])
+
+        (source.outputs["Noise"] >> regrid >> sink.inputs["Input"])
+        composition.connect()
+
+        info = sink.inputs["Input"].info
+        data = sink.data["Input"][0, ...]
+        assert_array_equal(info.mask, data.mask)
+        assert_array_equal(info.mask, self.out_mask)
+
+        i_data = source.outputs["Noise"].data[0][1].magnitude.compressed()
+        o_data = data.magnitude.compressed()
+        self.assertAlmostEqual(i_data.mean(), o_data.mean(), 2)
+
+    def test_regrid_nearest_filled(self):
+        in_info = Info(grid=self.in_grid, units="", mask=self.in_mask)
+        source = StaticSimplexNoise(in_info, 0.15, 3, 0.5)
+        sink = debug.DebugPushConsumer({"Input": Info()})
+        regrid = RegridNearest(out_grid=self.out_grid, out_mask=Mask.NONE)
+        composition = Composition([source, sink])
+
+        (source.outputs["Noise"] >> regrid >> sink.inputs["Input"])
+        composition.connect()
+
+        info = sink.inputs["Input"].info
+        data = sink.data["Input"][0, ...].magnitude
+        self.assertEqual(info.mask, Mask.NONE)
+        self.assertFalse(fdata.is_masked_array(data))
+
+        i_data = source.outputs["Noise"].data[0][1].magnitude.compressed()
+        self.assertAlmostEqual(i_data.mean(), data.mean(), 1)
+
+    def test_regrid_linear_determine_mask(self):
+        in_info = Info(grid=self.in_grid, units="", mask=self.in_mask)
+        source = StaticSimplexNoise(in_info, 0.15, 3, 0.5)
+        sink = debug.DebugPushConsumer({"Input": Info()})
+        regrid = RegridLinear(
+            out_grid=self.out_grid, fill_with_nearest=False, out_mask=None
+        )
+        composition = Composition([source, sink])
+
+        (source.outputs["Noise"] >> regrid >> sink.inputs["Input"])
+        composition.connect()
+
+        info = sink.inputs["Input"].info
+        data = sink.data["Input"][0, ...]
+        assert_array_equal(info.mask, data.mask)
+        self.assertEqual(np.sum(data.mask), 306)
+
+        i_data = source.outputs["Noise"].data[0][1].magnitude.compressed()
+        o_data = data.magnitude.compressed()
+        self.assertAlmostEqual(i_data.mean(), o_data.mean(), 2)
+
+    def test_regrid_linear_error_domain(self):
+        in_info = Info(grid=self.in_grid, units="", mask=self.in_mask)
+        source = StaticSimplexNoise(in_info, 0.15, 3, 0.5)
+        sink = debug.DebugPushConsumer({"Input": Info()})
+        regrid = RegridLinear(
+            out_grid=self.out_grid, fill_with_nearest=False, out_mask=Mask.NONE
+        )
+        composition = Composition([source, sink])
+
+        (source.outputs["Noise"] >> regrid >> sink.inputs["Input"])
+
+        # not covering domain without fill
+        with self.assertRaises(FinamDataError):
+            composition.connect()
+
+    def test_regrid_linear_filled(self):
+        in_info = Info(grid=self.in_grid, units="", mask=self.in_mask)
+        source = StaticSimplexNoise(in_info, 0.15, 3, 0.5)
+        sink = debug.DebugPushConsumer({"Input": Info()})
+        regrid = RegridLinear(
+            out_grid=self.out_grid, fill_with_nearest=True, out_mask=Mask.NONE
+        )
+        composition = Composition([source, sink])
+
+        (source.outputs["Noise"] >> regrid >> sink.inputs["Input"])
+        composition.connect()
+
+        info = sink.inputs["Input"].info
+        data = sink.data["Input"][0, ...].magnitude
+        self.assertEqual(info.mask, Mask.NONE)
+        self.assertFalse(fdata.is_masked_array(data))
+
+        i_data = source.outputs["Noise"].data[0][1].magnitude.compressed()
+        self.assertAlmostEqual(i_data.mean(), data.mean(), 1)
+
+    def test_regrid_linear_filled_mask(self):
+        in_info = Info(grid=self.in_grid, units="", mask=self.in_mask)
+        source = StaticSimplexNoise(in_info, 0.15, 3, 0.5)
+        sink = debug.DebugPushConsumer({"Input": Info()})
+        regrid = RegridLinear(
+            out_grid=self.out_grid, fill_with_nearest=True, out_mask=self.out_mask
+        )
+        composition = Composition([source, sink])
+
+        (source.outputs["Noise"] >> regrid >> sink.inputs["Input"])
+        composition.connect()
+
+        info = sink.inputs["Input"].info
+        data = sink.data["Input"][0, ...]
+        assert_array_equal(info.mask, data.mask)
+        assert_array_equal(info.mask, self.out_mask)
+
+        i_data = source.outputs["Noise"].data[0][1].magnitude.compressed()
+        o_data = data.magnitude.compressed()
+        self.assertAlmostEqual(i_data.mean(), o_data.mean(), 2)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/adapters/test_stats.py b/tests/adapters/test_stats.py
index 1c9e417e49fee664c0c1eba0d75ae7008d0a623d..c3c30a3c3ada8cb3b059bfacf75438f6fa8c2329 100644
--- a/tests/adapters/test_stats.py
+++ b/tests/adapters/test_stats.py
@@ -39,3 +39,7 @@ class TestHistogram(unittest.TestCase):
         self.assertEqual(fm.data.get_units(data), fm.UNITS.dimensionless)
 
         composition.run(end_time=datetime(2000, 1, 10))
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/adapters/test_time.py b/tests/adapters/test_time.py
index 41068b761910639c3ecf1a5bebb3a6115c6c0e02..0946b13a4b88ee1de37ec35cc5a97255f5e1e5f6 100644
--- a/tests/adapters/test_time.py
+++ b/tests/adapters/test_time.py
@@ -1,6 +1,7 @@
 """
 Unit tests for the adapters.time module.
 """
+
 import unittest
 from datetime import datetime, timedelta
 
diff --git a/tests/adapters/test_time_integration.py b/tests/adapters/test_time_integration.py
index f708ae88b8916cc1c7253c628e2d1deeccfcd277..e8142ad12a77242195084808adf164b427b7161a 100644
--- a/tests/adapters/test_time_integration.py
+++ b/tests/adapters/test_time_integration.py
@@ -1,6 +1,7 @@
 """
 Unit tests for the adapters.time module.
 """
+
 import unittest
 from datetime import datetime, timedelta
 
diff --git a/tests/components/test_callback.py b/tests/components/test_callback.py
index 261618bb3ae169ce9317158963e4a905d38be8b0..443f588aba894cdbc454e44b748a50ce15a45eb4 100644
--- a/tests/components/test_callback.py
+++ b/tests/components/test_callback.py
@@ -62,3 +62,7 @@ class TestCallback(unittest.TestCase):
         out_data = consumer.data["In1"]
         self.assertGreaterEqual(out_data, 2.0)
         self.assertLessEqual(out_data, 4.0)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/components/test_control.py b/tests/components/test_control.py
index d656d442211a1f633e22da34da555773dadd68c3..77e504899584746a71fe2d621d9e312075bfdd84 100644
--- a/tests/components/test_control.py
+++ b/tests/components/test_control.py
@@ -208,3 +208,7 @@ class TestTimeTrigger(unittest.TestCase):
         )
         with self.assertRaises(fm.FinamMetaDataError):
             trigger.initialize()
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/components/test_csv_writer.py b/tests/components/test_csv_writer.py
index f41aa86cb8b46fdf2beeea767f8d97d4cdc9e181..87f91a867d4cd590e73c8dbac42a1620ae1b69eb 100644
--- a/tests/components/test_csv_writer.py
+++ b/tests/components/test_csv_writer.py
@@ -68,3 +68,7 @@ class TestCsvWriter(unittest.TestCase):
                 step=1,
                 separator=",",
             )
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/components/test_debug.py b/tests/components/test_debug.py
index bf90ce8811ec0bd4250d465af0e6f6185fe4803f..c926a50459297d6a364f7a200b4edbb4918c1d95 100644
--- a/tests/components/test_debug.py
+++ b/tests/components/test_debug.py
@@ -90,3 +90,7 @@ class TestPushDebugConsumer(unittest.TestCase):
         composition.run(start_time=start, end_time=datetime(2000, 1, 10))
 
         self.assertEqual(consumer.data["In"][0, ...], 11)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/components/test_generator.py b/tests/components/test_generator.py
index 44cdea0e434a9687efdcbf2108f7a2a2537f5908..a082503efddb1ee2c5ee5b0a780efc90aec000f1 100644
--- a/tests/components/test_generator.py
+++ b/tests/components/test_generator.py
@@ -42,3 +42,7 @@ class TestStaticCallbackGenerator(unittest.TestCase):
 
         out_data_2 = consumer.data["In1"]
         self.assertEqual(out_data_1, out_data_2)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/components/test_noise.py b/tests/components/test_noise.py
index 1a6a8577a841e6fa0db5cd21c5c205938c78c569..987b2941d0ba1078103f8ba5b78466ca5a3be4be 100644
--- a/tests/components/test_noise.py
+++ b/tests/components/test_noise.py
@@ -298,3 +298,7 @@ class TestStaticNoise(unittest.TestCase):
         data_2 = sink.data["Input"][0, ...]
         self.assertEqual(data_1, data_2)
         self.assertEqual(data_2.shape, ())
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/components/test_parametric.py b/tests/components/test_parametric.py
index ff67e40965fcec51d0302606ccf92abeec240e72..c5e932231e2560613bf487127e34aa3f74c38071 100644
--- a/tests/components/test_parametric.py
+++ b/tests/components/test_parametric.py
@@ -286,3 +286,7 @@ class TestStaticParametricGrid(unittest.TestCase):
         data_2 = sink.data["Input"][0, ...]
         assert_allclose(data_1.magnitude, data_2.magnitude)
         self.assertEqual(data_2.shape, (19, 14))
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/components/test_simplex_noise.py b/tests/components/test_simplex_noise.py
index d81259812dbd99af93c365c3be37a3e74f2bb145..7581d17311a4ce64b975852d09248e5984a70978 100644
--- a/tests/components/test_simplex_noise.py
+++ b/tests/components/test_simplex_noise.py
@@ -49,3 +49,7 @@ class TestSimplexNoise(unittest.TestCase):
         composition.run(end_time=datetime(2000, 3, 1))
 
         print(consumer.data["Noise"])
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/components/test_weighted_sum.py b/tests/components/test_weighted_sum.py
index 19158c02665ffe43d635b4a0f96d44aa891a696e..8098d7f5577a562da3e6c53e422b5eeca87f4938 100644
--- a/tests/components/test_weighted_sum.py
+++ b/tests/components/test_weighted_sum.py
@@ -342,3 +342,7 @@ class TestWeightedSum(unittest.TestCase):
 
         with self.assertRaises(finam.errors.FinamMetaDataError):
             composition.run(start_time=start, end_time=start + timedelta(days=30))
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/core/test_propagate_info.py b/tests/core/test_propagate_info.py
index df11848e8c772d7fe1950c6e314e51b02bdfa27d..f5863a0c44a03b605050918a43c383a8c55d2ef6 100644
--- a/tests/core/test_propagate_info.py
+++ b/tests/core/test_propagate_info.py
@@ -1,6 +1,7 @@
 """
 Unit tests for data info propagation.
 """
+
 import unittest
 from datetime import datetime, timedelta
 
@@ -199,3 +200,7 @@ class TestPropagate(unittest.TestCase):
             source.out_info.time,
             time,
         )
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/core/test_pull_based_component.py b/tests/core/test_pull_based_component.py
index 1c288cd031217b2095393f0255cba96108149429..2bdb48f128053a410eef1ac8e90f228f09705889 100644
--- a/tests/core/test_pull_based_component.py
+++ b/tests/core/test_pull_based_component.py
@@ -55,3 +55,7 @@ class TestPullBasedComponent(unittest.TestCase):
         composition.run(start_time=time, end_time=datetime(2000, 1, 12))
 
         self.assertEqual(consumer.data, {"In": 12})
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/core/test_schedule.py b/tests/core/test_schedule.py
index 9eedec5ec251fda6b5b71f6ddad82fac271a2eb2..126d6e4d5268726aba7e80d666f51bf9221874df 100644
--- a/tests/core/test_schedule.py
+++ b/tests/core/test_schedule.py
@@ -1,6 +1,7 @@
 """
 Unit tests for the driver/scheduler.
 """
+
 import logging
 import os
 import pprint
diff --git a/tests/core/test_sdk.py b/tests/core/test_sdk.py
index 007a0d0353cd7a4aa12771b9bf3f60f858141730..0c1d50f92aa474f9c0a6ddf470cee4f854135729 100644
--- a/tests/core/test_sdk.py
+++ b/tests/core/test_sdk.py
@@ -1,6 +1,7 @@
 """
 Unit tests for the sdk implementations.
 """
+
 import logging
 import os.path
 import tempfile
@@ -16,6 +17,7 @@ from finam import (
     CallbackOutput,
     ComponentStatus,
     Composition,
+    EsriGrid,
     FinamDataError,
     FinamLogError,
     FinamMetaDataError,
@@ -162,7 +164,7 @@ class TestOutput(unittest.TestCase):
         counter = 0
         t = datetime(2000, 1, 1)
         info = Info(time=t, grid=NoGrid(), meta={"test": 0})
-        wrong_info = Info(time=t, grid=NoGrid(), meta={"test": 5})
+        wrong_info = Info(time=t, grid=EsriGrid(2, 2), meta={"test": 5})
 
         def callback(_clr, _time):
             nonlocal counter
diff --git a/tests/core/test_units.py b/tests/core/test_units.py
index 9c5e881336989a82906888db790677516697a16c..6b374a545774ecbb28dc84bbfee03ea9948fbece 100644
--- a/tests/core/test_units.py
+++ b/tests/core/test_units.py
@@ -1,6 +1,7 @@
 """
 Unit tests for data info propagation.
 """
+
 import unittest
 from datetime import datetime, timedelta
 
@@ -112,3 +113,7 @@ class TestUnits(unittest.TestCase):
 
         with self.assertRaises(FinamMetaDataError):
             composition.run(start_time=time, end_time=datetime(2000, 1, 2))
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/data/test_cf_units.py b/tests/data/test_cf_units.py
index 9a54c65bb207253740222f885eff2a293b9d0247..c6e5404f4c62edfe17e19496bc73bfb9cf35d660 100644
--- a/tests/data/test_cf_units.py
+++ b/tests/data/test_cf_units.py
@@ -29,3 +29,7 @@ class TestCfUnits(unittest.TestCase):
 
         self.assertEqual(u("m/s"), "m s-1")
         self.assertEqual(u("m s-1"), "m s-1")
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/data/test_grid_spec.py b/tests/data/test_grid_spec.py
index fe97d0b513c06832d03ee3890ec7887f939da3bd..e5086539176196a5804bec351aa43aa7d00ff9f2 100644
--- a/tests/data/test_grid_spec.py
+++ b/tests/data/test_grid_spec.py
@@ -345,3 +345,7 @@ class TestGridSpec(unittest.TestCase):
         assert_array_equal(grid.cell_types, [CellType.HEX])
         grid = UniformGrid((2, 2, 3))
         assert_array_equal(grid.cell_types, [CellType.HEX, CellType.HEX])
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/data/test_grid_tools.py b/tests/data/test_grid_tools.py
index 78e516e4190ef3886b32547f0f73ec700e65e1b3..e81913dab6aadcfda5b3ef1ce48860f841faf281 100644
--- a/tests/data/test_grid_tools.py
+++ b/tests/data/test_grid_tools.py
@@ -89,7 +89,7 @@ class TestGridTools(unittest.TestCase):
         assert_array_equal(gen_cells((1,), order="F"), [[0]])
         assert_array_equal(gen_cells((2,), order="F"), [[0, 1]])
 
-        assert_array_equal(gen_cells((2, 2, 2), order="F"), [[2, 3, 1, 0, 6, 7, 5, 4]])
+        assert_array_equal(gen_cells((2, 2, 2), order="F"), [[6, 7, 5, 4, 2, 3, 1, 0]])
 
         # 0---1---2
         # |   |   |
@@ -255,3 +255,7 @@ class TestGridTools(unittest.TestCase):
 
         test_type = INV_VTK_TYPE_MAP[VTK_TYPE_MAP]
         assert_array_equal(test_type, range(6))
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/data/test_masked.py b/tests/data/test_masked.py
index 90c1868e29a7bd62e0046b4a23a22dbf60069ea7..0f6865edc99694493001917bce0245be7b67b502 100644
--- a/tests/data/test_masked.py
+++ b/tests/data/test_masked.py
@@ -1,6 +1,7 @@
 """
 Unit tests for masked data.
 """
+
 import unittest
 from datetime import datetime, timedelta
 
@@ -120,6 +121,60 @@ class TestMasked(unittest.TestCase):
         np.testing.assert_allclose(data[mask], ucdata_c[mask])
         np.testing.assert_allclose(data[mask], ucdata_f[mask])
 
+        # more specific routines
+        grid1 = fm.RectilinearGrid([(1.0, 2.0, 3.0, 4.0)])
+        grid2 = fm.RectilinearGrid([(1.0, 2.0, 3.0)])
+        grid3 = fm.RectilinearGrid([(1.0, 2.0, 3.0), (1.0, 2.0, 3.0)])
+        mask1 = np.array((1, 0, 0), dtype=bool)
+        mask2 = np.array((0, 0, 1), dtype=bool)
+        mask3 = np.array((1, 0, 1), dtype=bool)
+        mask4 = np.array((1, 1, 1), dtype=bool)
+        mask5 = np.array((0, 0, 0), dtype=bool)
+        mask6 = np.array((1, 0), dtype=bool)
+        mask7 = np.array(((0, 1), (0, 1)), dtype=bool)
+        data = np.ma.masked_array((10.0, 20.0, 30.0), mask1, fill_value=np.nan)
+
+        # submask check
+        self.assertFalse(fm.data.tools.is_sub_mask(mask1, mask2))
+        self.assertFalse(fm.data.tools.is_sub_mask(mask1, np.ma.nomask))
+        self.assertTrue(fm.data.tools.is_sub_mask(mask1, mask3))
+        self.assertTrue(fm.data.tools.is_sub_mask(mask2, mask3))
+        self.assertTrue(fm.data.tools.is_sub_mask(np.ma.nomask, mask1))
+        self.assertTrue(fm.data.tools.is_sub_mask(np.ma.nomask, mask2))
+        self.assertTrue(fm.data.tools.is_sub_mask(np.ma.nomask, mask3))
+        self.assertTrue(fm.data.tools.is_sub_mask(np.ma.nomask, mask4))
+        self.assertTrue(fm.data.tools.is_sub_mask(np.ma.nomask, mask5))
+        self.assertTrue(fm.data.tools.is_sub_mask(np.ma.nomask, np.ma.nomask))
+        self.assertTrue(fm.data.tools.is_sub_mask(mask5, np.ma.nomask))
+        self.assertFalse(fm.data.tools.is_sub_mask(mask1, mask6))
+        self.assertFalse(fm.data.tools.is_sub_mask(mask1, mask7))
+
+        # equal mask
+        self.assertTrue(fm.data.tools.masks_equal(None, None))
+        self.assertTrue(fm.data.tools.masks_equal(fm.Mask.NONE, fm.Mask.NONE))
+        self.assertTrue(fm.data.tools.masks_equal(fm.Mask.FLEX, fm.Mask.FLEX))
+        self.assertFalse(fm.data.tools.masks_equal(fm.Mask.FLEX, fm.Mask.NONE))
+        self.assertTrue(fm.data.tools.masks_equal(np.ma.nomask, np.ma.nomask))
+        self.assertTrue(fm.data.tools.masks_equal(np.ma.nomask, mask5))
+        self.assertTrue(fm.data.tools.masks_equal(mask5, np.ma.nomask))
+        self.assertFalse(fm.data.tools.masks_equal(mask1, mask6, grid1, grid2))
+        self.assertFalse(fm.data.tools.masks_equal(mask1, mask7, grid1, grid3))
+        self.assertFalse(fm.data.tools.masks_equal(mask1, mask2, grid1, grid1))
+
+        # cover domain
+        self.assertTrue(fm.data.tools.check_data_covers_domain(data, mask3))
+        self.assertFalse(fm.data.tools.check_data_covers_domain(data, mask2))
+        self.assertFalse(fm.data.tools.check_data_covers_domain(data, np.ma.nomask))
+
+        np.testing.assert_array_almost_equal(data, fm.data.tools.to_masked(data))
+        np.testing.assert_array_almost_equal((1, 2, 3), fm.data.tools.filled((1, 2, 3)))
+
+    def test_info_mask(self):
+        grid = fm.RectilinearGrid([(1.0, 2.0, 3.0)])
+        mask = np.array((1, 0, 0), dtype=bool)
+        with self.assertRaises(fm.FinamMetaDataError):
+            fm.Info(grid=grid, mask=mask)
+
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/tests/data/test_tools.py b/tests/data/test_tools.py
index 4f1c1b42c155cf8cb5f0b99df03f6ed0860dcde9..5e772b6587ddae7354eedb596077127f2cf804df 100644
--- a/tests/data/test_tools.py
+++ b/tests/data/test_tools.py
@@ -262,7 +262,7 @@ class TestDataTools(unittest.TestCase):
             info.accepts(finam.Info(time, grid=finam.NoGrid(), units="s"), {})
         )
 
-        self.assertFalse(
+        self.assertTrue(
             info.accepts(
                 finam.Info(time, grid=finam.NoGrid(), units="m", foo="baz"), {}
             )
@@ -284,10 +284,10 @@ class TestDataTools(unittest.TestCase):
             finam.Info(time, grid=finam.NoGrid()),
         )
 
-        finam.data.tools._check_shape(xdata.shape[1:], finam.NoGrid())
+        finam.data.tools.core._check_shape(xdata.shape[1:], finam.NoGrid())
 
         with self.assertRaises(finam.errors.FinamDataError):
-            finam.data.tools._check_shape(xdata.shape[1:], finam.NoGrid(dim=1))
+            finam.data.tools.core._check_shape(xdata.shape[1:], finam.NoGrid(dim=1))
 
     def test_quantify(self):
         xdata = np.asarray([1.0])
@@ -312,15 +312,19 @@ class TestDataTools(unittest.TestCase):
     def test_cache_units(self):
         finam.data.tools.clear_units_cache()
 
-        self.assertEqual({}, finam.data.tools._UNIT_PAIRS_CACHE)
+        self.assertEqual({}, finam.data.tools.units._UNIT_PAIRS_CACHE)
 
         eqiv = finam.data.tools.equivalent_units("mm", "L/m^2")
         self.assertTrue(eqiv)
         self.assertEqual(
             {(finam.UNITS.Unit("mm"), finam.UNITS.Unit("L/m^2")): (True, True)},
-            finam.data.tools._UNIT_PAIRS_CACHE,
+            finam.data.tools.units._UNIT_PAIRS_CACHE,
         )
 
         finam.data.tools.clear_units_cache()
 
-        self.assertEqual({}, finam.data.tools._UNIT_PAIRS_CACHE)
+        self.assertEqual({}, finam.data.tools.units._UNIT_PAIRS_CACHE)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/tools/test_connect.py b/tests/tools/test_connect.py
index 56a7a7ee68c553bccfbad546c8a3a7d319c36187..6ba4c10b46107d5c6a766dd52412cc26846afbdd 100644
--- a/tests/tools/test_connect.py
+++ b/tests/tools/test_connect.py
@@ -414,3 +414,7 @@ class TestConnectHelper(unittest.TestCase):
             connector.connect(
                 start_time=None, push_infos={"Out1": Info(time=None, grid=NoGrid())}
             )
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/tools/test_cwd.py b/tests/tools/test_cwd.py
index 2756f06644eb948ea1ae13389c1372a6a042239a..0ce763feab08ea5a00269abdf16b9a5f5f1f89be 100644
--- a/tests/tools/test_cwd.py
+++ b/tests/tools/test_cwd.py
@@ -28,3 +28,7 @@ class TestCWD(unittest.TestCase):
         self.assertEqual(Path(test2.cwd).resolve(), Path(test2.new_cwd).resolve())
         self.assertEqual(Path(".").resolve(), Path(cwd).resolve())
         self.assertNotEqual(Path(".").resolve(), Path(test2.cwd).resolve())
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/tools/test_inspect.py b/tests/tools/test_inspect.py
index aff0f36d4af90fa46243cb183f15e698762189d1..495bfd3df62a399581e12320b23b81a91bf4a309 100644
--- a/tests/tools/test_inspect.py
+++ b/tests/tools/test_inspect.py
@@ -28,3 +28,7 @@ class TestInspect(unittest.TestCase):
 
         _s1 = fm.tools.inspect(comp1)
         _s2 = fm.tools.inspect(comp2)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/tools/test_log.py b/tests/tools/test_log.py
index 0e5701610adbe5f614b80d98226deb86615c4f8f..1663cd0c8c82c90c90f5919da409e05df8880d0b 100644
--- a/tests/tools/test_log.py
+++ b/tests/tools/test_log.py
@@ -58,3 +58,7 @@ class TestLog(unittest.TestCase):
         self.assertEqual(len(captured.records), 1)
         self.assertEqual(captured.records[0].levelno, logging.INFO)
         self.assertEqual(captured.records[0].message, "Hi from C")
+
+
+if __name__ == "__main__":
+    unittest.main()