From c58af748b3bc440407490f0ada4278172ee0708b Mon Sep 17 00:00:00 2001
From: Bert Palm <bert.palm@ufz.de>
Date: Fri, 26 Mar 2021 01:23:59 +0100
Subject: [PATCH] the word 'Flagger' now is History (except for plotting, which
 is old and deaf and will probably die soon anyway)

---
 saqc/core/core.py             |  17 +-
 saqc/core/flags.py            |   2 +-
 saqc/core/register.py         |  12 +-
 saqc/funcs/breaks.py          |  21 +-
 saqc/funcs/changepoints.py    |   8 +-
 saqc/funcs/constants.py       |  20 +-
 saqc/funcs/curvefit.py        |   9 +-
 saqc/funcs/drift.py           |  75 ++--
 saqc/funcs/flagtools.py       |  54 +--
 saqc/funcs/generic.py         |  26 +-
 saqc/funcs/interpolation.py   |  28 +-
 saqc/funcs/outliers.py        |  77 ++--
 saqc/funcs/pattern.py         |  20 +-
 saqc/funcs/resampling.py      |  70 ++--
 saqc/funcs/residues.py        |  10 +-
 saqc/funcs/rolling.py         |   9 +-
 saqc/funcs/scores.py          |   4 +-
 saqc/funcs/tools.py           |  46 +--
 saqc/funcs/transformation.py  |   8 +-
 sphinx-doc/make_doc_module.py |   2 +-
 tests/common.py               |  17 +-
 tests/core/test_flagger.py    | 756 ----------------------------------
 tests/fuzzy/test_masking.py   |  36 +-
 23 files changed, 282 insertions(+), 1045 deletions(-)
 delete mode 100644 tests/core/test_flagger.py

diff --git a/saqc/core/core.py b/saqc/core/core.py
index 8041f6f4f..03cb06577 100644
--- a/saqc/core/core.py
+++ b/saqc/core/core.py
@@ -4,7 +4,7 @@ from __future__ import annotations
 
 # TODO:
 #  - integrate plotting into the api
-#  - `data` and `flagger` as arguments to `getResult`
+#  - `data` and `flags` as arguments to `getResult`
 
 import logging
 import copy as stdcopy
@@ -114,15 +114,16 @@ class SaQC(FuncModules):
         self._to_call: List[Tuple[ColumnSelector, APIController, SaQCFunction]] = []
 
     def _initFlags(self, data, flags: Union[Flags, None]):
-        """ Init the internal flagger object.
+        """ Init the internal Flags-object.
 
-        Ensures that all data columns are present and user passed flags from
-        a flags frame or an already initialised flagger are used.
+        Ensures that all data columns are present and user passed
+        flags from a frame or an already initialised Flags-object
+        are used.
         """
         if flags is None:
             return initFlagsLike(data)
 
-        # add columns that are present in data but not in flagger
+        # add columns that are present in data but not in flags
         for c in data.columns.difference(flags.columns):
             flags[c] = pd.Series(UNFLAGGED, index=data[c].index, dtype=float)
 
@@ -202,7 +203,7 @@ class SaQC(FuncModules):
             plotAllHook(data, flags)
 
         # This is way faster for big datasets, than to throw everything in the constructor.
-        # Simply because of _initFlagger -> merge() -> mergeDios() over all columns.
+        # Simply because of _initFlags -> merge() -> mergeDios() over all columns.
         new = self._constructSimple()
         new._flags, new._data = flags, data
         return new
@@ -213,7 +214,7 @@ class SaQC(FuncModules):
 
         Returns
         -------
-        data, flagger: (DictOfSeries, DictOfSeries)
+        data, flags: (DictOfSeries, DictOfSeries)
         """
 
         realization = self.evaluate()
@@ -270,7 +271,7 @@ class SaQC(FuncModules):
 def _saqcCallFunc(locator, controller, function, data, flags):
     # NOTE:
     # We assure that all columns in data have an equivalent column in flags,
-    # we might have more flagger columns though
+    # we might have more flags columns though
     assert data.columns.difference(flags.columns).empty
 
     field = locator.field
diff --git a/saqc/core/flags.py b/saqc/core/flags.py
index 1d95adf31..d698cf1d0 100644
--- a/saqc/core/flags.py
+++ b/saqc/core/flags.py
@@ -202,7 +202,7 @@ class Flags:
         # technically it would be possible to select a field and set
         # the entire column to a scalar flag value (float), but it has
         # a high potential, that this is not intended by the user.
-        # if desired use ``flagger[:, field] = flag``
+        # if desired use ``flags[:, field] = flag``
         if not isinstance(value, pd.Series):
             raise ValueError("must pass value of type pd.Series")
 
diff --git a/saqc/core/register.py b/saqc/core/register.py
index b00f353d0..256b3228a 100644
--- a/saqc/core/register.py
+++ b/saqc/core/register.py
@@ -131,14 +131,14 @@ def _postCall(result, old_state: CallState) -> FuncReturnT:
     Parameters
     ----------
     result : tuple
-        the result from the called function, namely: data and flagger
+        the result from the called function, namely: data and flags
 
     old_state : dict
         control keywords from `_preCall`
 
     Returns
     -------
-    data, flagger : dios.DictOfSeries, saqc.flagger.Flagger
+    data, flags : dios.DictOfSeries, saqc.Flags
     """
     data, flags = result
     flags = _restoreFlags(flags, old_state)
@@ -264,7 +264,7 @@ def _prepareFlags(flags: Flags, masking) -> Flags:
     Prepare flags before each call. Always returns a copy.
 
     Currently this only clears the flags, but in future,
-    this should be sliced the flagger to the columns, that
+    this should slice the flags to the columns, that
     the saqc-function needs.
     """
     # Either the index or the columns itself changed
@@ -286,9 +286,9 @@ def _restoreFlags(flags: Flags, old_state: CallState):
 
     out = old_state.flags.copy()
     for c in columns:
-        # this implicitly squash the new-flagger history (RHS) to a single column, which than is appended to
-        # the old history (LHS). The new-flagger history possibly consist of multiple columns, one for each
-        # time flags was set to the flagger.
+        # this implicitly squashes the new flags history (RHS) to a single column, which then is appended to
+        # the old history (LHS). The new flags history possibly consists of multiple columns, one for each
+        # time a series or scalar was passed to the flags.
         out[c] = flags[c]
 
     return out
diff --git a/saqc/funcs/breaks.py b/saqc/funcs/breaks.py
index f3ad9eeb2..b6dd00834 100644
--- a/saqc/funcs/breaks.py
+++ b/saqc/funcs/breaks.py
@@ -41,8 +41,8 @@ def flagMissing(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store quality flags to data.
     nodata : any, default np.nan
         A value that defines missing data.
     flag : float, default BAD
@@ -52,9 +52,8 @@ def flagMissing(
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
     """
     datacol = data[field]
     if np.isnan(nodata):
@@ -88,8 +87,8 @@ def flagIsolated(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.Flagger
-        A flagger object
+    flags : saqc.Flags
+        A flags object
     gap_window : str
         The minimum size of the gap before and after a group of valid values, making this group considered an
         isolated group. See condition (2) and (3)
@@ -103,8 +102,8 @@ def flagIsolated(
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional information related to `data`.
+    flags : saqc.Flags
+        The flags object, holding flags and additional information related to `data`.
 
     Notes
     -----
@@ -160,8 +159,8 @@ def flagJumps(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The reference variable, the deviation from wich determines the flagging.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional informations related to `data`.
+    flags : saqc.Flags
+        A flags object, holding flags and additional informations related to `data`.
     thresh : float
         The threshold, the mean of the values have to change by, to trigger flagging.
     winsz : str
diff --git a/saqc/funcs/changepoints.py b/saqc/funcs/changepoints.py
index 83439157e..450a77602 100644
--- a/saqc/funcs/changepoints.py
+++ b/saqc/funcs/changepoints.py
@@ -47,8 +47,8 @@ def flagChangePoints(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The reference variable, the deviation from wich determines the flagging.
-    flagger : saqc.flagger
-        A flagger object, holding flags and additional informations related to `data`.
+    flags : saqc.Flags
+        A flags object, holding flags and additional informations related to `data`.
     stat_func : Callable[numpy.array, numpy.array]
          A function that assigns a value to every twin window. Left window content will be passed to first variable,
         right window content will be passed to the second.
@@ -140,8 +140,8 @@ def assignChangePointCluster(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The reference variable, the deviation from wich determines the flagging.
-    flagger : saqc.flagger
-        A flagger object, holding flags and additional informations related to `data`.
+    flags : saqc.Flags
+        A flags object, holding flags and additional informations related to `data`.
     stat_func : Callable[[numpy.array, numpy.array], float]
         A function that assigns a value to every twin window. Left window content will be passed to first variable,
         right window content will be passed to the second.
diff --git a/saqc/funcs/constants.py b/saqc/funcs/constants.py
index 3791eaabf..6f8d29828 100644
--- a/saqc/funcs/constants.py
+++ b/saqc/funcs/constants.py
@@ -43,8 +43,8 @@ def flagConstants(
         A dictionary of pandas.Series, holding all the data.
     field : str
         Name of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store quality flags to data.
     thresh : float
         Upper bound for the maximum total change of an interval to be flagged constant.
     window : str
@@ -56,9 +56,9 @@ def flagConstants(
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional informations related to `data`.
-        Flags values may have changed, relatively to the flagger input.
+    flags : saqc.Flags
+        The flags object, holding flags and additional informations related to `data`.
+        Flags values may have changed, relatively to the flags input.
     """
     if not isinstance(window, str):
         raise TypeError('window must be offset string.')
@@ -104,8 +104,8 @@ def flagByVariance(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store quality flags to data.
     window : str
         Only intervals of minimum size "window" have the chance to get flagged as constant intervals
     thresh : float
@@ -125,9 +125,9 @@ def flagByVariance(
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional informations related to `data`.
-        Flags values may have changed, relatively to the flagger input.
+    flags : saqc.Flags
+        The flags object, holding flags and additional informations related to `data`.
+        Flags values may have changed, relatively to the flags input.
     """
     dataseries = data[field]
 
diff --git a/saqc/funcs/curvefit.py b/saqc/funcs/curvefit.py
index 3465e07d1..4b50693c6 100644
--- a/saqc/funcs/curvefit.py
+++ b/saqc/funcs/curvefit.py
@@ -74,8 +74,8 @@ def fitPolynomial(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-modelled.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store quality flags to data.
     winsz : {str, int}
         The size of the window you want to use for fitting. If an integer is passed, the size
         refers to the number of periods for every fitting window. If an offset string is passed,
@@ -106,9 +106,8 @@ def fitPolynomial(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         Data values may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
     """
     # TODO: some (rater large) parts are functional similar to saqc.funcs.rolling.roll
     if data[field].empty:
diff --git a/saqc/funcs/drift.py b/saqc/funcs/drift.py
index 65f5b043d..f47183c6f 100644
--- a/saqc/funcs/drift.py
+++ b/saqc/funcs/drift.py
@@ -55,8 +55,8 @@ def flagDriftFromNorm(
         A dictionary of pandas.Series, holding all the data.
     field : str
         A dummy parameter.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional informations related to `data`.
+    flags : saqc.Flags
+        A flags object, holding flags and additional informations related to `data`.
     fields : str
         List of fieldnames in data, determining which variables are to be included into the flagging process.
     segment_freq : str
@@ -86,9 +86,9 @@ def flagDriftFromNorm(
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the input flagger.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed relatively to the input flags.
 
     Notes
     -----
@@ -166,8 +166,8 @@ def flagDriftFromReference(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The reference variable, the deviation from wich determines the flagging.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional informations related to `data`.
+    flags : saqc.Flags
+        A flags object, holding flags and additional informations related to `data`.
     fields : str
         List of fieldnames in data, determining wich variables are to be included into the flagging process.
     segment_freq : str
@@ -186,9 +186,9 @@ def flagDriftFromReference(
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the input flagger.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed relatively to the input flags.
 
     Notes
     -----
@@ -255,8 +255,8 @@ def flagDriftFromScaledNorm(
         A dictionary of pandas.Series, holding all the data.
     field : str
         A dummy parameter.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional informations related to `data`.
+    flags : saqc.Flags
+        A flags object, holding flags and additional informations related to `data`.
     fields_scale1 : str
         List of fieldnames in data to be included into the flagging process which are scaled according to scaling
         scheme 1.
@@ -290,9 +290,9 @@ def flagDriftFromScaledNorm(
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the input flagger.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed relatively to the input flags.
 
     References
     ----------
@@ -386,8 +386,8 @@ def correctExponentialDrift(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the data column, you want to correct.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store quality flags to data.
     maint_data_field : str
         The fieldname of the datacolumn holding the maintenance information.
         The maint data is to expected to have following form:
@@ -406,9 +406,8 @@ def correctExponentialDrift(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         Data values may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
     """
     # 1: extract fit intervals:
     if data[maint_data_field].empty:
@@ -480,8 +479,8 @@ def correctRegimeAnomaly(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the data column, you want to correct.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store flags of the data.
     cluster_field : str
         A string denoting the field in data, holding the cluster label for the data you want to correct.
     model : Callable
@@ -502,8 +501,8 @@ def correctRegimeAnomaly(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         Data values may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        The quality flags of data
     """
     cluster_ser = data[cluster_field]
     unique_successive = pd.unique(cluster_ser.values)
@@ -583,8 +582,8 @@ def correctOffset(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the data column, you want to correct.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store flags of the data.
     max_mean_jump : float
         when searching for changepoints in mean - this is the threshold a mean difference in the
         sliding window search must exceed to trigger changepoint detection.
@@ -606,8 +605,8 @@ def correctOffset(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         Data values may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        The quality flags of data
     """
     data, flags = copy(data, field, flags, field + '_CPcluster')
     data, flags = assignChangePointCluster(
@@ -689,8 +688,8 @@ def flagRegimeAnomaly(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store flags of the data.
     cluster_field : str
         The name of the column in data, holding the cluster labels for the samples in field. (has to be indexed
         equal to field)
@@ -711,9 +710,9 @@ def flagRegimeAnomaly(
 
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional informations related to `data`.
-        Flags values may have changed, relatively to the flagger input.
+    flags : saqc.Flags
+        The flags object, holding flags and additional informations related to `data`.
+        Flags values may have changed, relatively to the flags input.
     """
     return assignRegimeAnomaly(
         data, field, flags,
@@ -767,8 +766,8 @@ def assignRegimeAnomaly(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store flags of the data.
     cluster_field : str
         The name of the column in data, holding the cluster labels for the samples in field. (has to be indexed
         equal to field)
@@ -794,9 +793,9 @@ def assignRegimeAnomaly(
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional informations related to `data`.
-        Flags values may have changed, relatively to the flagger input.
+    flags : saqc.Flags
+        The flags object, holding flags and additional informations related to `data`.
+        Flags values may have changed, relatively to the flags input.
     """
     series = data[cluster_field]
     cluster = np.unique(series)
diff --git a/saqc/funcs/flagtools.py b/saqc/funcs/flagtools.py
index 94b04da10..5cb907781 100644
--- a/saqc/funcs/flagtools.py
+++ b/saqc/funcs/flagtools.py
@@ -24,8 +24,8 @@ def forceFlags(
         data container
     field : str
         columns name that holds the data
-    flagger : Flagger
-        flagger object
+    flags : saqc.Flags
+        flags object
     flag : float, default BAD
         flag to set
     kwargs : dict
@@ -34,7 +34,7 @@ def forceFlags(
     Returns
     -------
     data : DictOfSeries
-    flagger : Flagger
+    flags : saqc.Flags
 
     See Also
     --------
@@ -57,15 +57,15 @@ def clearFlags(data: DictOfSeries, field: ColumnName, flags: Flags, **kwargs) ->
         data container
     field : str
         columns name that holds the data
-    flagger : Flagger
-        flagger object
+    flags : saqc.Flags
+        flags object
     kwargs : dict
         unused
 
     Returns
     -------
     data : DictOfSeries
-    flagger : Flagger
+    flags : saqc.Flags
 
     See Also
     --------
@@ -93,8 +93,8 @@ def flagUnflagged(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional informations related to `data`.
+    flags : saqc.Flags
+        A flags object, holding flags and additional informations related to `data`.
     flag : float, default BAD
         flag value to set
     kwargs : Dict
@@ -104,8 +104,8 @@ def flagUnflagged(
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        The quality flags of data
 
     See Also
     --------
@@ -128,15 +128,15 @@ def flagGood(data: DictOfSeries, field: ColumnName, flags: Flags, flag=BAD, **kw
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional informations related to `data`.
+    flags : saqc.Flags
+        A flags object, holding flags and additional informations related to `data`.
 
     Returns
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        The quality flags of data
     """
     warnings.warn("'flagGood' is deprecated and does nothing, use 'flagUnflagged' instead", DeprecationWarning)
     return data, flags
@@ -165,8 +165,8 @@ def flagManual(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional informations related to `data`.
+    flags : saqc.Flags
+        A flags object, holding flags and additional informations related to `data`.
     mdata : {pd.Series, pd.Dataframe, DictOfSeries}
         The "manually generated" data
     mflag : scalar
@@ -189,7 +189,7 @@ def flagManual(
     Returns
     -------
     data : original data
-    flagger : modified flagger
+    flags : modified flags
 
     Examples
     --------
@@ -204,7 +204,7 @@ def flagManual(
     On *dayly* data, with the 'ontime' method, only the provided timestamnps are used.
     Bear in mind that only exact timestamps apply, any offset will result in ignoring
     the timestamp.
-    >>> _, fl = flagManual(data, field, flagger, mdata, mflag=1, method='ontime')
+    >>> _, fl = flagManual(data, field, flags, mdata, mflag=1, method='ontime')
     >>> fl[field] > UNFLAGGED
     2000-01-31    False
     2000-02-01    True
@@ -217,7 +217,7 @@ def flagManual(
     Freq: D, dtype: bool
 
     With the 'right-open' method, the mdata is forward fill:
-    >>> _, fl = flagManual(data, field, flagger, mdata, mflag=1, method='right-open')
+    >>> _, fl = flagManual(data, field, flags, mdata, mflag=1, method='right-open')
     >>> fl[field] > UNFLAGGED
     2000-01-31    False
     2000-02-01    True
@@ -229,7 +229,7 @@ def flagManual(
     Freq: D, dtype: bool
 
     With the 'left-open' method, backward filling is used:
-    >>> _, fl = flagManual(data, field, flagger, mdata, mflag=1, method='left-open')
+    >>> _, fl = flagManual(data, field, flags, mdata, mflag=1, method='left-open')
     >>> fl[field] > UNFLAGGED
     2000-01-31    False
     2000-02-01    False
@@ -287,7 +287,7 @@ def flagManual(
 @register(masking='none', module="flagtools")
 def flagDummy(data: DictOfSeries, field: ColumnName, flags: Flags,  **kwargs) -> Tuple[DictOfSeries, Flags]:
     """
-    Function does nothing but returning data and flagger.
+    Function does nothing but returning data and flags.
 
     Parameters
     ----------
@@ -295,15 +295,15 @@ def flagDummy(data: DictOfSeries, field: ColumnName, flags: Flags,  **kwargs) ->
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional informations related to `data`.
+    flags : saqc.Flags
+        A flags object, holding flags and additional informations related to `data`.
 
     Returns
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        The quality flags of data
     """
     return data, flags
 
@@ -319,8 +319,8 @@ def flagForceFail(data: DictOfSeries, field: ColumnName, flags: Flags, **kwargs)
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional informations related to `data`.
+    flags : saqc.Flags
+        A flags object, holding flags and additional informations related to `data`.
 
     Raises
     ------
diff --git a/saqc/funcs/generic.py b/saqc/funcs/generic.py
index 329514fcf..93a7eec9b 100644
--- a/saqc/funcs/generic.py
+++ b/saqc/funcs/generic.py
@@ -108,8 +108,8 @@ def process(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, where you want the result from the generic expressions processing to be written to.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store quality flags to data.
     func : Callable
         The data processing function with parameter names that will be
         interpreted as data column entries.
@@ -122,9 +122,9 @@ def process(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         The shape of the data may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        The flags shape may have changed relatively to the input flagger.
+    flags : saqc.Flags
+        The quality flags of data
+        The flags shape may have changed relatively to the input flags.
 
     Examples
     --------
@@ -184,8 +184,8 @@ def flag(
     field : str
         The fieldname of the column, where you want the result from the generic expressions evaluation to be projected
         to.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store flags of the data.
     func : Callable
         The expression that is to be evaluated is passed in form of a callable, with parameter names that will be
         interpreted as data column entries. The Callable must return an boolen array like.
@@ -199,9 +199,9 @@ def flag(
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed relatively to the flags input.
 
     Examples
     --------
@@ -247,9 +247,9 @@ def flag(
     if field not in flags.columns:
         flags[field] = pd.Series(UNFLAGGED, index=mask.index, name=field)
 
-    # if flagger.getFlags(field).empty:
-    #     flagger = flagger.merge(
-    #         flagger.initFlags(
+    # if flags.getFlags(field).empty:
+    #     flags = flags.merge(
+    #         flags.initFlags(
     #             data=pd.Series(name=field, index=mask.index, dtype=np.float64)))
     flags[mask, field] = flag
     return data, flags
diff --git a/saqc/funcs/interpolation.py b/saqc/funcs/interpolation.py
index 32fcd2ba4..1c5131257 100644
--- a/saqc/funcs/interpolation.py
+++ b/saqc/funcs/interpolation.py
@@ -39,8 +39,8 @@ def interpolateByRolling(
     field : str
         Name of the column, holding the data-to-be-interpolated.
 
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Information related to `data`.
+    flags : saqc.Flags
+        A flags object, holding flags and additional Information related to `data`.
 
     winsz : int, str
         The size of the window, the aggregation is computed from. An integer define the number of periods to be used,
@@ -65,9 +65,8 @@ def interpolateByRolling(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         Data values may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
     """
 
     data = data.copy()
@@ -118,8 +117,8 @@ def interpolateInvalid(
     field : str
         Name of the column, holding the data-to-be-interpolated.
 
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Information related to `data`.
+    flags : saqc.Flags
+        A flags object, holding flags and additional Information related to `data`.
 
     method : {"linear", "time", "nearest", "zero", "slinear", "quadratic", "cubic", "spline", "barycentric",
         "polynomial", "krogh", "piecewise_polynomial", "spline", "pchip", "akima"}
@@ -146,9 +145,8 @@ def interpolateInvalid(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         Data values may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
     """
     inter_data = interpolateNANs(
         data[field],
@@ -203,8 +201,8 @@ def interpolateIndex(
     field : str
         Name of the column, holding the data-to-be-interpolated.
 
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Information related to `data`.
+    flags : saqc.Flags
+        A flags object, holding flags and additional Information related to `data`.
 
     freq : str
         An Offset String, interpreted as the frequency of
@@ -233,9 +231,9 @@ def interpolateIndex(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         Data values and shape may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values and shape may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values and shape may have changed relatively to the flags input.
     """
     if data[field].empty:
         return data, flags
diff --git a/saqc/funcs/outliers.py b/saqc/funcs/outliers.py
index 844643f61..b16e93d99 100644
--- a/saqc/funcs/outliers.py
+++ b/saqc/funcs/outliers.py
@@ -42,8 +42,8 @@ def flagByStray(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store quality flags to data.
 
     partition_freq : str, int, or None, default None
         Determines the segmentation of the data into partitions, the kNN algorithm is
@@ -146,8 +146,8 @@ def _evalStrayLabels(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the labels to be evaluated.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store quality flags to data.
     fields : list[str]
         A list of strings, holding the column names of the variables, the stray labels shall be
         projected onto.
@@ -383,8 +383,8 @@ def flagMVScores(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-flagged. (Here a dummy, for structural reasons)
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store quality flags to data.
     fields : List[str]
         List of fieldnames, corresponding to the variables that are to be included into the flagging process.
     trafo : callable, default lambda x:x
@@ -437,9 +437,9 @@ def flagMVScores(
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed, relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed, relatively to the flags input.
 
     Notes
     -----
@@ -540,8 +540,8 @@ def flagRaise(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store flags of the data.
     thresh : float
         The threshold, for the total rise (thresh > 0), or total drop (thresh < 0), value courses must
         not exceed within a timespan of length `raise_window`.
@@ -568,9 +568,9 @@ def flagRaise(
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed, relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed, relatively to the flags input.
 
     Notes
     -----
@@ -700,8 +700,8 @@ def flagMAD(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-flagged. (Here a dummy, for structural reasons)
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store flags of the data.
     window : str
        Offset string. Denoting the windows size that the "Z-scored" values have to lie in.
     z: float, default 3.5
@@ -713,9 +713,9 @@ def flagMAD(
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed, relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed, relatively to the flags input.
 
     References
     ----------
@@ -780,8 +780,8 @@ def flagOffset(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-flagged. (Here a dummy, for structural reasons)
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store flags of the data.
     thresh : float
         Minimum difference between to values, to consider the latter one as a spike. See condition (1)
     tolerance : float
@@ -802,9 +802,9 @@ def flagOffset(
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed, relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed, relatively to the flags input.
 
     References
     ----------
@@ -933,8 +933,8 @@ def flagByGrubbs(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store flags of the data.
     winsz : {int, str}
         The size of the window you want to use for outlier testing. If an integer is passed, the size
         refers to the number of periods of every testing window. If a string is passed, it has to be an offset string,
@@ -955,9 +955,9 @@ def flagByGrubbs(
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed relatively to the flags input.
 
     References
     ----------
@@ -1029,8 +1029,8 @@ def flagRange(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store flags of the data.
     min : float
         Lower bound for valid data.
     max : float
@@ -1042,9 +1042,8 @@ def flagRange(
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
     """
 
     # using .values is much faster
@@ -1085,8 +1084,8 @@ def flagCrossStatistic(
         A dictionary of pandas.Series, holding all the data.
     field : str
         A dummy parameter.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional informations related to `data`.
+    flags : saqc.Flags
+        A flags object, holding flags and additional information related to `data`.
     fields : str
         List of fieldnames in data, determining wich variables are to be included into the flagging process.
     thresh : float
@@ -1105,9 +1104,9 @@ def flagCrossStatistic(
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the input flagger.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed relatively to the input flags.
 
     References
     ----------
diff --git a/saqc/funcs/pattern.py b/saqc/funcs/pattern.py
index 5f4829e9c..564247488 100644
--- a/saqc/funcs/pattern.py
+++ b/saqc/funcs/pattern.py
@@ -39,8 +39,8 @@ def flagPatternByDTW(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the data column, you want to correct.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store quality flags to data.
     ref_field: str
         The fieldname in `data' which holds the pattern.
     widths: tuple of int
@@ -57,9 +57,9 @@ def flagPatternByDTW(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         Data values may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed relatively to the flags input.
 
 
     References
@@ -126,8 +126,8 @@ def flagPatternByWavelet(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the data column, you want to correct.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store quality flags to data.
     ref_field: str
         The fieldname in `data` which holds the pattern.
     max_distance: float
@@ -142,9 +142,9 @@ def flagPatternByWavelet(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         Data values may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed relatively to the flags input.
 
 
     References
diff --git a/saqc/funcs/resampling.py b/saqc/funcs/resampling.py
index 967966c9c..8bb871cdd 100644
--- a/saqc/funcs/resampling.py
+++ b/saqc/funcs/resampling.py
@@ -75,8 +75,8 @@ def aggregate(
     field : str
         The fieldname of the column, holding the data-to-be-regularized.
 
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.freq
+    flags : saqc.Flags
+        Container to store quality flags to data.
 
     freq : str
         The sampling frequency the data is to be aggregated (resampled) at.
@@ -101,9 +101,9 @@ def aggregate(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         Data values and shape may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values and shape may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values and shape may have changed relatively to the flags input.
     """
 
     data, flags = copy(data, field, flags, field + '_original')
@@ -149,8 +149,8 @@ def linear(
     field : str
         The fieldname of the column, holding the data-to-be-regularized.
 
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.freq
+    flags : saqc.Flags
+        Container to store flags of the data.
 
     freq : str
         An offset string. The frequency of the grid you want to interpolate your data at.
@@ -160,9 +160,9 @@ def linear(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         Data values and shape may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values and shape may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values and shape may have changed relatively to the flags input.
     """
 
     data, flags = copy(data, field, flags, field + '_original')
@@ -208,8 +208,8 @@ def interpolate(
     field : str
         The fieldname of the column, holding the data-to-be-regularized.
 
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.freq
+    flags : saqc.Flags
+        Container to store flags of the data.
 
     freq : str
         An offset string. The frequency of the grid you want to interpolate your data at.
@@ -227,9 +227,9 @@ def interpolate(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         Data values and shape may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values and shape may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values and shape may have changed relatively to the flags input.
     """
 
     data, flags = copy(data, field, flags, field + '_original')
@@ -294,8 +294,8 @@ def mapToOriginal(
     field : str
         The fieldname of the column, holding the data-to-be-deharmonized.
 
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.freq
+    flags : saqc.Flags
+        Container to store flags of the data.
 
     method : {'inverse_fagg', 'inverse_bagg', 'inverse_nagg', 'inverse_fshift', 'inverse_bshift', 'inverse_nshift',
             'inverse_interpolation'}
@@ -307,9 +307,9 @@ def mapToOriginal(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         Data values and shape may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values and shape may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values and shape may have changed relatively to the flags input.
     """
     newfield = str(field) + '_original'
     data, flags = reindexFlags(data, newfield, flags, method, source=field, to_mask=False)
@@ -338,8 +338,8 @@ def shift(
     field : str
         The fieldname of the column, holding the data-to-be-shifted.
 
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store flags of the data.
 
     freq : str
         An frequency Offset String that will be interpreted as the sampling rate you want the data to be shifted to.
@@ -366,9 +366,9 @@ def shift(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         Data values and shape may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values and shape may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values and shape may have changed relatively to the flags input.
     """
     data, flags = copy(data, field, flags, field + '_original')
     return _shift(data, field, flags, freq, method=method, freq_check=freq_check, **kwargs)
@@ -459,8 +459,8 @@ def resample(
     field : str
         The fieldname of the column, holding the data-to-be-resampled.
 
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store flags of the data.
 
     freq : str
         An Offset String, that will be interpreted as the frequency you want to resample your data with.
@@ -509,9 +509,9 @@ def resample(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         Data values and shape may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values and shape may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values and shape may have changed relatively to the flags input.
     """
     flagged = _isflagged(flags[field], kwargs['to_mask'])
     datcol = data[field]
@@ -651,8 +651,8 @@ def reindexFlags(
     field : str
         The fieldname of the data column, you want to project the source-flags onto.
 
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store flags of the data.
 
     method : {'inverse_fagg', 'inverse_bagg', 'inverse_nagg', 'inverse_fshift', 'inverse_bshift', 'inverse_nshift'}
         The method used for projection of source flags onto field flags. See description above for more details.
@@ -668,9 +668,9 @@ def reindexFlags(
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values and shape may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values and shape may have changed relatively to the flags input.
     """
     flagscol = flags[source]
 
diff --git a/saqc/funcs/residues.py b/saqc/funcs/residues.py
index ad7b88a64..28a62acd1 100644
--- a/saqc/funcs/residues.py
+++ b/saqc/funcs/residues.py
@@ -65,8 +65,8 @@ def calculatePolynomialResidues(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-modelled.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store quality flags to data.
     winsz : {str, int}
         The size of the window you want to use for fitting. If an integer is passed, the size
         refers to the number of periods for every fitting window. If an offset string is passed,
@@ -95,9 +95,9 @@ def calculatePolynomialResidues(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         Data values may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed relatively to the flags input.
 
     """
     return fitPolynomial(
diff --git a/saqc/funcs/rolling.py b/saqc/funcs/rolling.py
index 4b8a5f64e..db9b026fa 100644
--- a/saqc/funcs/rolling.py
+++ b/saqc/funcs/rolling.py
@@ -37,8 +37,8 @@ def roll(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-modelled.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store quality flags to data.
     winsz : {int, str}
         The size of the window you want to roll with. If an integer is passed, the size
         refers to the number of periods for every fitting window. If an offset string is passed,
@@ -66,9 +66,8 @@ def roll(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         Data values may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
     """
     data = data.copy()
     to_fit = data[field]
diff --git a/saqc/funcs/scores.py b/saqc/funcs/scores.py
index d5b192aa6..02812f44a 100644
--- a/saqc/funcs/scores.py
+++ b/saqc/funcs/scores.py
@@ -57,8 +57,8 @@ def assignKNNScore(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The reference variable, the deviation from wich determines the flagging.
-    flagger : saqc.flagger
-        A flagger object, holding flags and additional informations related to `data`.fields
+    flags : saqc.Flags
+        A flags object, holding flags and additional information related to `data`.fields
     n_neighbors : int, default 10
         The number of nearest neighbors to which the distance is comprised in every datapoints scoring calculation.
     trafo : Callable[np.array, np.array], default lambda x: x
diff --git a/saqc/funcs/tools.py b/saqc/funcs/tools.py
index 90db87055..f8950debe 100644
--- a/saqc/funcs/tools.py
+++ b/saqc/funcs/tools.py
@@ -23,8 +23,8 @@ def copy(data: DictOfSeries, field: str, flags: Flags, new_field: str, **kwargs)
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the data column, you want to fork (copy).
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store quality flags to data.
     new_field: str
         Target name.
 
@@ -32,10 +32,10 @@ def copy(data: DictOfSeries, field: str, flags: Flags, new_field: str, **kwargs)
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-        data shape may have changed relatively to the flagger input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags shape may have changed relatively to the flagger input.
+        data shape may have changed relatively to the flags input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags shape may have changed relatively to the flags input.
     """
     if new_field in flags.columns.union(data.columns):
         raise ValueError(f"{field}: field already exist")
@@ -49,7 +49,7 @@ def copy(data: DictOfSeries, field: str, flags: Flags, new_field: str, **kwargs)
 @register(masking='none', module="tools")
 def drop(data: DictOfSeries, field: str, flags: Flags, **kwargs) -> Tuple[DictOfSeries, Flags]:
     """
-    The function drops field from the data dios and the flagger.
+    The function drops field from the data dios and the flags.
 
     Parameters
     ----------
@@ -57,17 +57,17 @@ def drop(data: DictOfSeries, field: str, flags: Flags, **kwargs) -> Tuple[DictOf
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the data column, you want to drop.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store quality flags to data.
 
     Returns
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-        data shape may have changed relatively to the flagger input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags shape may have changed relatively to the flagger input.
+        data shape may have changed relatively to the flags input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags shape may have changed relatively to the flags input.
     """
     del data[field]
     del flags[field]
@@ -77,7 +77,7 @@ def drop(data: DictOfSeries, field: str, flags: Flags, **kwargs) -> Tuple[DictOf
 @register(masking='none', module="tools")
 def rename(data: DictOfSeries, field: str, flags: Flags, new_name: str, **kwargs) -> Tuple[DictOfSeries, Flags]:
     """
-    The function renames field to new name (in both, the flagger and the data).
+    The function renames field to new name (in both, the flags and the data).
 
     Parameters
     ----------
@@ -85,8 +85,8 @@ def rename(data: DictOfSeries, field: str, flags: Flags, new_name: str, **kwargs
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the data column, you want to rename.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store flags of the data.
     new_name : str
         String, field is to be replaced with.
 
@@ -94,8 +94,8 @@ def rename(data: DictOfSeries, field: str, flags: Flags, new_name: str, **kwargs
     -------
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        The quality flags of data
     """
     data[new_name] = data[field]
     flags.history[new_name] = flags.history[field]
@@ -141,8 +141,8 @@ def mask(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-masked.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store flags of the data.
     mode : {"periodic", "mask_var"}
         The masking mode.
         - "periodic": parameters "period_start", "period_end" are evaluated to generate a periodical mask
@@ -170,9 +170,9 @@ def mask(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         Data values may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed relatively to the flags input.
 
 
     Examples
diff --git a/saqc/funcs/transformation.py b/saqc/funcs/transformation.py
index 48a072909..91952d0f1 100644
--- a/saqc/funcs/transformation.py
+++ b/saqc/funcs/transformation.py
@@ -29,8 +29,8 @@ def transform(
         A dictionary of pandas.Series, holding all the data.
     field : str
         The fieldname of the column, holding the data-to-be-transformed.
-    flagger : saqc.flagger.Flagger
-        A flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        Container to store quality flags to data.
     func : Callable[{pd.Series, np.array}, np.array]
         Function to transform data[field] with.
     partition_freq : {None, float, str}, default None
@@ -46,8 +46,8 @@ def transform(
     data : dios.DictOfSeries
         A dictionary of pandas.Series, holding all the data.
         Data values may have changed relatively to the data input.
-    flagger : saqc.flagger.Flagger
-        The flagger object, holding flags and additional Informations related to `data`.
+    flags : saqc.Flags
+        The quality flags of data
     """
 
     data = data.copy()
diff --git a/sphinx-doc/make_doc_module.py b/sphinx-doc/make_doc_module.py
index b2f8ff484..1e508a253 100644
--- a/sphinx-doc/make_doc_module.py
+++ b/sphinx-doc/make_doc_module.py
@@ -83,7 +83,7 @@ def parse_func_dcstrings(m_paths):
                 continue
             dcstr = rm_section(dcstr, 'Returns')
             dcstr = rm_parameter(dcstr, 'data')
-            dcstr = rm_parameter(dcstr, 'flagger')
+            dcstr = rm_parameter(dcstr, 'flags')
             parameters = get_parameter(dcstr)
             parameters = [f"{p[0]}={p[1]}" if p[1] else p[0] for p in parameters]
             signature = f"def {func.name}({', '.join(parameters)}):"
diff --git a/tests/common.py b/tests/common.py
index 1a3f501a7..21fc6c9c2 100644
--- a/tests/common.py
+++ b/tests/common.py
@@ -11,7 +11,6 @@ from saqc.core import initFlagsLike, Flags
 
 
 TESTNODATA = (np.nan, -9999)
-TESTFLAGGER = (Flags(),)
 
 
 def flagAll(data, field, flags, **kwargs):
@@ -47,30 +46,30 @@ def checkDataFlagsInvariants(data, flags, field, identical=True):
     Check all invariants that must hold at any point for
         * field
         * data
-        * flagger
+        * flags
         * data[field]
-        * flagger[field]
+        * flags[field]
         * data[field].index
-        * flagger[field].index
-        * between data and flagger
-        * between data[field] and flagger[field]
+        * flags[field].index
+        * between data and flags
+        * between data[field] and flags[field]
 
     Parameters
     ----------
     data : dios.DictOfSeries
         data container
-    flagger : Flags
+    flags : Flags
         flags container
     field : str
         the field in question
     identical : bool, default True
-        whether to check indexes of data and flagger to be
+        whether to check indexes of data and flags to be
         identical (True, default) of just for equality.
     """
     assert isinstance(data, dios.DictOfSeries)
     assert isinstance(flags, Flags)
 
-    # all columns in data are in flagger
+    # all columns in data are in flags
     assert data.columns.difference(flags.columns).empty
 
     # ------------------------------------------------------------------------
diff --git a/tests/core/test_flagger.py b/tests/core/test_flagger.py
deleted file mode 100644
index 1af9f4710..000000000
--- a/tests/core/test_flagger.py
+++ /dev/null
@@ -1,756 +0,0 @@
-#!/usr/bin/env python
-
-import pytest
-import numpy as np
-import pandas as pd
-from pandas.api.types import is_bool_dtype
-
-import dios
-
-from tests.common import TESTFLAGGER, initData
-
-
-pytestmark = pytest.mark.skip('old flagger tests - rewrite needed')
-
-
-def _getDataset(rows, cols):
-    return initData(cols=cols, rows=rows, start_date="2011-01-01", end_date="2011-01-10")
-
-
-DATASETS = [
-    _getDataset(0, 1),
-    _getDataset(1, 1),
-    _getDataset(100, 1),
-    # _getDataset(1000, 1),
-    _getDataset(0, 4),
-    _getDataset(1, 4),
-    # _getDataset(100, 4),
-    # _getDataset(1000, 4),
-    # _getDataset(10000, 40),
-    _getDataset(20, 4),
-]
-
-
-def check_all_dios_index_length(tocheck, expected):
-    for c in tocheck:
-        if len(tocheck[c]) != len(expected[c]):
-            return False
-    return True
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_initFlags(data, flagger):
-    """
-    test before:
-    - None
-    """
-
-    newflagger = flagger.initFlags(data)
-    assert isinstance(newflagger, type(flagger))
-    assert newflagger is not flagger
-
-    flags = newflagger.getFlags()
-    assert isinstance(flags, dios.DictOfSeries)
-
-    assert len(flags.columns) >= len(data.columns)
-    assert check_all_dios_index_length(flags, data)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_initFlagsWithFlags(data, flagger):
-    flags = dios.DictOfSeries(pd.Series(data=flagger.BAD))
-    flagger = flagger.initFlags(flags=flags)
-    assert (flagger.flags == flags).all(axis=None)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_getFlags(data, flagger):
-    """
-    test before:
-    - initFlags()
-
-    we need to check:
-    - access all flags -> get a dios
-    - access some columns of flags -> get a dios
-    - access one column of flags -> get a series
-    """
-
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    # all - dios
-    flags0 = flagger.getFlags()
-    assert isinstance(flags0, dios.DictOfSeries)
-    assert (flags0.columns == data.columns).all()
-    assert check_all_dios_index_length(flags0, data)
-    for dt in flags0.dtypes:
-        assert dt == flagger.dtype
-
-    # some - dios
-    if len(data.columns) >= 2:
-        cols = data.columns[:2].to_list()
-        flags1 = flagger.getFlags(cols)
-        assert isinstance(flags1, dios.DictOfSeries)
-        assert (flags1.columns == data.columns[:2]).all()
-        assert check_all_dios_index_length(flags1, data[cols])
-        for dt in flags1.dtypes:
-            assert dt == flagger.dtype
-
-    # series
-    flags2 = flagger.getFlags(field)
-    assert isinstance(flags2, pd.Series)
-    assert flags2.dtype == flagger.dtype
-    assert flags2.shape[0] == data[field].shape[0]
-    # NOTE: need fix in dios see issue #16 (has very low priority)
-    # assert flags2.name in data.columns
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_setFlags(data, flagger):
-    """
-    test before:
-    - initFlags()
-    - getFlags()
-    """
-    flagger = flagger.initFlags(data)
-    sl = slice("2011-01-02", "2011-01-05")
-    field, *_ = data.columns
-
-    base = flagger.getFlags()
-
-    flagger_good = flagger.setFlags(field, flag=flagger.GOOD, loc=sl)
-    assert isinstance(flagger_good, type(flagger))
-    assert flagger_good is not flagger
-
-    flags_good = flagger_good.getFlags()
-    assert len(flags_good[field]) <= len(base[field])
-    assert (flags_good.columns == base.columns).all()
-    assert (flags_good.loc[sl, field] == flagger.GOOD).all()
-
-    # overflag works BAD > GOOD
-    flagger_bad = flagger_good.setFlags(field, flag=flagger.BAD)
-    assert (flagger_bad.getFlags(field) == flagger.BAD).all()
-
-    # overflag doesn't work GOOD < BAD
-    flagger_still_bad = flagger_bad.setFlags(field, flag=flagger.GOOD)
-    assert (flagger_still_bad.getFlags(field) == flagger.BAD).all()
-
-    # overflag does work with force
-    flagger_forced_good = flagger_bad.setFlags(field, flag=flagger.GOOD, force=True)
-    assert (flagger_forced_good.getFlags(field) == flagger.GOOD).all()
-
-    with pytest.raises(ValueError):
-        flagger.setFlags(field=None, flag=flagger.BAD)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_sliceFlagger(data, flagger):
-    """
-    test before:
-    - initFlags()
-    - getFlags() inside slice()
-    """
-    sl = slice(None, None, 3)
-
-    flagger = flagger.initFlags(data)
-    newflagger = flagger.slice(loc=sl)
-    assert isinstance(newflagger, type(flagger))
-
-    newflags = newflagger.getFlags()
-    assert (newflags.columns == data.columns).all()
-    assert check_all_dios_index_length(newflags, data[sl])
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_sliceFlaggerDrop(data, flagger):
-    flagger = flagger.initFlags(data)
-    with pytest.raises(TypeError):
-        flagger.getFlags(field=data.columns, drop="var")
-
-    field = data.columns[0]
-    expected = data.columns.drop(field)
-
-    filtered = flagger.slice(drop=field)
-    assert (filtered.getFlags().columns == expected).all(axis=None)
-    assert (filtered.getFlags().to_df().index == data[expected].to_df().index).all(axis=None)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_mergeFlagger(data, flagger):
-    """
-    test before:
-    - initFlags()
-    - getFlags()
-    - setFlags()
-    - slice()
-    """
-    field, *_ = data.columns
-    sl = slice(None, None, 3)
-
-    this_flagger = flagger.initFlags(data)
-    other_flagger = this_flagger.slice(loc=sl).setFlags(field)
-    result_flagger = this_flagger.merge(other_flagger)
-
-    result_flags = result_flagger.getFlags()
-    other_flags = other_flagger.getFlags()
-
-    # check flags that was set
-    check = result_flags.loc[sl, field] == other_flags[field]
-    assert check.all(None)
-    # check flags that was not set
-    mask = ~result_flags[field].index.isin(other_flags[field].index)
-    check = result_flags.loc[mask, field] == result_flagger.UNFLAGGED
-    assert check.all(None)
-
-    # check unchanged columns
-    cols = data.columns.to_list()
-    cols.remove(field)
-    check = result_flags[cols] == result_flagger.UNFLAGGED
-    assert check.all(None)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_mergeFlaggerColumnsDiff(data, flagger):
-    """
-    test before:
-    - initFlags()
-    - getFlags()
-    - setFlags()
-    - slice()
-    - merge()
-    """
-    field, *_ = data.columns
-    new_field = field + "_new"
-    sl = slice(None, None, 2)
-
-    other_data = data.loc[sl]
-    other_data.columns = [new_field] + data.columns[1:].to_list()
-    other_flagger = flagger.initFlags(other_data)
-
-    this_flagger = flagger.initFlags(data).setFlags(field, flag=flagger.BAD)
-    result_flagger = this_flagger.merge(other_flagger)
-
-    result_flags = result_flagger.getFlags()
-    other_flags = other_flagger.getFlags()
-
-    # we need to check if
-    # - the new column is present
-    # - the new column is identical to the original
-    # - the other column are unchanged
-    #   - field-column is BAD
-    #   - other columns are UNFLAGGED
-
-    assert new_field in result_flags
-
-    check = result_flags[new_field] == other_flags[new_field]
-    assert check.all(None)
-
-    check = result_flags[field] == result_flagger.BAD
-    assert check.all(None)
-
-    cols = data.columns.to_list()
-    cols.remove(field)
-    check = result_flags[cols] == result_flagger.UNFLAGGED
-    assert check.all(None)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_mergeFlaggerIndexDiff(data, flagger):
-    """
-    test before:
-    - initFlags()
-    - getFlags()
-    - setFlags()
-    - slice()
-    - merge()
-
-    we need to check:
-    - index is union of this and other's index
-    - indices + values that only in this, should be present
-    - indices + values that only in other, should be present
-    - indices that in this and other, have values from other
-    """
-    field, *_ = data.columns
-    sl = slice(None, None, 2)
-
-    def shiftindex(s):
-        s.index = s.index + pd.Timedelta(minutes=2, seconds=25)
-        return s
-
-    # create a sliced time-shifted version of data
-    other_data = data.loc[sl].apply(shiftindex)
-    if isinstance(other_data, pd.Series):
-        pass
-
-    this_flagger = flagger.initFlags(data).setFlags(field, flag=flagger.BAD)
-    other_flagger = flagger.initFlags(other_data)
-    result_flagger = this_flagger.merge(other_flagger)
-
-    result_flags = result_flagger.getFlags()
-    this_flags = this_flagger.getFlags()
-    other_flags = other_flagger.getFlags()
-
-    for c in result_flags:
-        t, o, r = this_flags[c], other_flags[c], result_flags[c]
-        assert (r.index == t.index.union(o.index)).all()
-
-        only_this = t.index.difference(o.index)
-        only_other = o.index.difference(t.index)
-        both = t.index.intersection(o.index)
-
-        # nothing is missing
-        assert (r.index == only_this.union(only_other).union(both)).all()
-
-        assert (r[only_this] == t[only_this]).all()
-        assert (r[only_other] == o[only_other]).all()
-        assert (r[both] == o[both]).all()
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_mergeFlaggerOuter(data, flagger):
-
-    field = data.columns[0]
-
-    data_left = data
-    data_right = data.iloc[::2]
-
-    left = flagger.initFlags(data=data_left).setFlags(field=field, flag=flagger.BAD)
-
-    right = flagger.initFlags(data=data_right).setFlags(field, flag=flagger.GOOD)
-
-    merged = left.merge(right, join="outer")
-
-    loc = data_right[field].index.difference(data_left[field].index)
-    assert (merged.getFlags(field, loc=loc) == flagger.GOOD).all(axis=None)
-    assert (merged.getFlags(field, loc=data_left[field].index) == flagger.BAD).all(axis=None)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_mergeFlaggerInner(data, flagger):
-
-    field = data.columns[0]
-
-    data_left = data
-    data_right = data.iloc[::2]
-
-    left = flagger.initFlags(data=data_left).setFlags(field=field, flag=flagger.BAD)
-
-    right = flagger.initFlags(data=data_right).setFlags(field, flag=flagger.GOOD)
-
-    merged = left.merge(right, join="inner")
-
-    assert (merged.getFlags(field).index == data_right[field].index).all()
-    assert (merged.getFlags(field) == flagger.BAD).all()
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_mergeFlaggerMerge(data, flagger):
-
-    field = data.columns[0]
-    data_left = data
-    data_right = data.iloc[::2]
-
-    left = flagger.initFlags(data=data_left).setFlags(field=field, flag=flagger.BAD)
-
-    right = flagger.initFlags(data=data_right).setFlags(field, flag=flagger.GOOD)
-
-    merged = left.merge(right, join="merge")
-
-    loc = data_left[field].index.difference(data_right[field].index)
-    assert (merged.getFlags(field, loc=data_right[field].index) == flagger.GOOD).all(axis=None)
-    assert (merged.getFlags(field, loc=loc) == flagger.BAD).all(axis=None)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_isFlaggedDios(data, flagger):
-    """
-    test before:
-    - initFlags()
-    - setFlags()
-    """
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    mask = np.zeros(len(data[field]), dtype=bool)
-
-    df_tests = [
-        (flagger.isFlagged(), mask),
-        (flagger.setFlags(field).isFlagged(), ~mask),
-        (flagger.setFlags(field, flag=flagger.GOOD).isFlagged(flag=flagger.GOOD, comparator=">"), mask,),
-        (flagger.setFlags(field, flag=flagger.GOOD).isFlagged(flag=flagger.GOOD, comparator="<"), mask,),
-        (flagger.setFlags(field, flag=flagger.GOOD).isFlagged(flag=flagger.GOOD, comparator="=="), ~mask,),
-    ]
-    for flags, expected in df_tests:
-        assert np.all(flags[field] == expected)
-        assert isinstance(flags, dios.DictOfSeries)
-        assert check_all_dios_index_length(flags, data)
-        assert (flags.columns == data.columns).all()
-        for dt in flags.dtypes:
-            assert is_bool_dtype(dt)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_isFlaggedSeries(data, flagger):
-    """
-    test before:
-    - initFlags()
-    - setFlags()
-    """
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    mask = np.zeros(len(data[field]), dtype=bool)
-
-    series_tests = [
-        (flagger.isFlagged(field), mask),
-        (flagger.setFlags(field).isFlagged(field), ~mask),
-        (flagger.setFlags(field, flag=flagger.GOOD).isFlagged(field, flag=flagger.GOOD, comparator=">"), mask,),
-        (flagger.setFlags(field, flag=flagger.GOOD).isFlagged(field, flag=flagger.GOOD, comparator="<"), mask,),
-        (flagger.setFlags(field, flag=flagger.GOOD).isFlagged(field, flag=flagger.GOOD, comparator="=="), ~mask,),
-    ]
-    for flags, expected in series_tests:
-        assert np.all(flags == expected)
-        assert isinstance(flags, pd.Series)
-        assert flags.dtype == bool
-        assert flags.shape[0] == data[field].shape[0]
-        # NOTE: need fix in dios see issue #16 (has very low priority)
-        # assert flags.name in data.columns
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_isFlaggedSeries_fail(data, flagger):
-    """
-    test before:
-    - initFlags()
-    """
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    fail_tests = [
-        {"flag": pd.Series(index=data[field].index, data=flagger.BAD).astype(flagger.dtype)},
-        # NOTE: allowed since use of dios
-        # {"field": ["var1", "var2"]},
-    ]
-    for args in fail_tests:
-        with pytest.raises(TypeError):
-            flagger.isFlagged(**args)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_clearFlags(data, flagger):
-    """
-    test before:
-    - initFlags()
-    - getFlags()
-    - setFlags()
-    - isFlagged()
-    """
-    flagger = flagger.initFlags(data)
-    sl = slice("2011-01-02", "2011-01-05")
-    field, *_ = data.columns
-
-    base = flagger.getFlags(field)
-
-    flagger = flagger.setFlags(field=field, flag=flagger.BAD)
-    assert np.sum(flagger.isFlagged(field)) == len(base)
-
-    flaggernew = flagger.clearFlags(field)
-    assert isinstance(flaggernew, type(flagger))
-    assert flaggernew is not flagger
-    assert len(flagger.getFlags(field)) == len(data[field])
-
-    flagger = flagger.clearFlags(field)
-    assert np.sum(flagger.isFlagged(field)) == 0
-    assert len(flagger.getFlags(field)) == len(data[field])
-
-    flagger = flagger.setFlags(field=field, flag=flagger.BAD)
-    assert np.sum(flagger.isFlagged(field)) == len(base)
-    assert len(flagger.getFlags(field)) == len(data[field])
-
-    flagger = flagger.clearFlags(field, loc=sl)
-    assert len(flagger.getFlags(field)) == len(data[field])
-    unflagged = flagger.isFlagged(field, loc=sl)
-    assert np.sum(unflagged) == 0
-    assert np.sum(flagger.isFlagged(field)) == len(data[field]) - len(unflagged)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_dtype(data, flagger):
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    tests = (
-        flagger.getFlags(field).astype(str),
-        "TEST",
-        55,
-    )
-
-    for test in tests:
-        with pytest.raises(TypeError):
-            flagger = flagger.setFlags(field, flag=test)
-        assert flagger.getFlags(field).dtype == flagger.dtype
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER[-1:])
-def test_returnCopy(data, flagger):
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    base = flagger.getFlags()
-
-    assert flagger.getFlags() is not base
-    assert flagger.isFlagged() is not base
-    assert flagger.setFlags(field) is not flagger
-    assert flagger.clearFlags(field) is not flagger
-
-
-LOC_ILOC_FUNCS = ["isFlagged", "getFlags"]
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-@pytest.mark.parametrize("flaggerfunc", LOC_ILOC_FUNCS)
-def test_loc(data, flagger, flaggerfunc):
-    flagger = flagger.initFlags(data)
-    sl = slice("2011-01-02", "2011-01-05")
-    field, *_ = data.columns
-
-    chunk = data.loc[sl, field]
-    d = data.loc[sl]
-    if d.empty:
-        return
-
-    m = data[field].index.get_loc(d[field].index[0])
-    M = data[field].index.get_loc(d[field].index[-1])
-    mask = np.full(len(data[field]), False)
-    mask[m:M] = True
-
-    flagger_func = getattr(flagger, flaggerfunc)
-
-    # masked
-    mflags0 = flagger_func(field, loc=mask)
-    mflags1 = flagger_func().loc[mask, field]
-    mflags2 = flagger_func(field).loc[mask]
-    mflags3 = flagger_func(loc=mask)[field]
-
-    assert (mflags0 == mflags1).all()
-    assert (mflags0 == mflags2).all()
-    assert (mflags0 == mflags3).all()
-
-    # indexed
-    iflags0 = flagger_func(field, loc=chunk.index)
-    iflags1 = flagger_func().loc[chunk.index, field]
-    iflags2 = flagger_func(field).loc[chunk.index]
-    iflags3 = flagger_func(loc=chunk.index)[field]
-    assert (iflags0 == iflags1).all()
-    assert (iflags0 == iflags2).all()
-    assert (iflags0 == iflags3).all()
-
-    # sliced
-    sflags0 = flagger_func(field, loc=sl)
-    sflags1 = flagger_func().loc[sl, field]
-    sflags2 = flagger_func(field).loc[sl]
-    sflags3 = flagger_func(loc=sl)[field]
-    assert (sflags0 == sflags1).all()
-    assert (sflags0 == sflags2).all()
-    assert (sflags0 == sflags3).all()
-
-    assert (sflags0 == iflags0).all()
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_classicUseCases(data, flagger):
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    flagger = flagger.clearFlags(field)
-
-    # data-mask, same length than flags
-    d = data[field]
-    mask = d < (d.max() - d.min()) // 2
-    flagged = flagger.setFlags(field, loc=mask, flag=flagger.BAD).isFlagged(field)
-    assert (flagged == mask).all()
-
-    flagger = flagger.clearFlags(field)
-
-    indices = np.arange(0, len(data[field]))
-    mask = indices % 3 == 0
-    indices = indices[mask]
-    # we had some fun with numpy and end up with
-    # numpy indices (positional), but with different length..
-    # make dt-index with iloc, then pass to loc
-    dt_idx = data[field].iloc[indices].index
-    flagged = flagger.setFlags(field, loc=dt_idx, flag=flagger.BAD).isFlagged(field)
-    assert (flagged.iloc[indices] == flagged[flagged]).all()
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_getFlagsWithExtras(data, flagger):
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    flags, extra = flagger.getFlags(field, full=True)
-    assert isinstance(flags, pd.Series)
-    assert isinstance(extra, dict)
-    for k, v in extra.items():
-        assert isinstance(v, pd.Series)
-        assert flags.index.equals(v.index)
-
-    flags, extra = flagger.getFlags(full=True)
-    assert isinstance(flags, dios.DictOfSeries)
-    assert isinstance(extra, dict)
-    for k, v in extra.items():
-        assert isinstance(v, dios.DictOfSeries)
-        assert flags.columns.equals(v.columns)
-        for c in flags:
-            assert flags[c].index.equals(v[c].index)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_replace_delete(data, flagger):
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-    newflagger = flagger.replaceField(field=field, flags=None)
-
-    new, newextra = newflagger.getFlags(full=True)
-    assert field not in newflagger.flags
-    for k in newextra:
-        assert field not in newextra[k]
-
-    with pytest.raises(ValueError):
-        flagger.replaceField(field="i_dont_exist", flags=None)
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_replace_insert(data, flagger):
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-    newfield = 'fooo'
-    flags, extra = flagger.getFlags(field, full=True)
-    newflagger = flagger.replaceField(field=newfield, flags=flags, **extra)
-    old, oldextra = flagger.getFlags(full=True)
-    new, newextra = newflagger.getFlags(full=True)
-    assert newfield in newflagger.flags
-    assert (newflagger._flags[newfield] == flagger._flags[field]).all()
-    assert newflagger._flags[newfield] is not flagger._flags[field]  # not a copy
-    for k in newextra:
-        assert newfield in newextra[k]
-        assert (newextra[k][newfield] == oldextra[k][field]).all()
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_replace_replace(data, flagger):
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-    flags, extra = flagger.getFlags(field, full=True)
-
-    # set everything to DOUBTFUL
-    flags[:] = flagger.BAD
-    for k, v in extra.items():
-        v[:] = flagger.BAD
-        extra[k] = v
-
-    newflagger = flagger.replaceField(field=field, flags=flags, **extra)
-
-    old, oldextra = flagger.getFlags(full=True)
-    new, newextra = newflagger.getFlags(full=True)
-    assert old.columns.equals(new.columns)
-    assert (new[field] == flagger.BAD).all()
-
-    assert oldextra.keys() == newextra.keys()
-    for k in newextra:
-        o, n = oldextra[k], newextra[k]
-        assert n.columns.equals(o.columns)
-        assert (n[field] == flagger.BAD).all()
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_flagAfter(flagger):
-    idx = pd.date_range("2000", "2001", freq='1M')
-    s = pd.Series(0, index=idx)
-    data = dios.DictOfSeries(s, columns=['a'])
-    exp_base = pd.Series(flagger.UNFLAGGED, index=idx)
-
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    flags = flagger.setFlags(field, loc=s.index[3], flag_after=5).getFlags(field)
-    exp = exp_base.copy()
-    exp.iloc[3: 3+5+1] = flagger.BAD
-    assert (flags == exp).all()
-
-    flags = flagger.setFlags(field, loc=s.index[3], flag_after=5, win_flag=flagger.GOOD).getFlags(field)
-    exp = exp_base.copy()
-    exp.iloc[3: 3+5+1] = flagger.GOOD
-    exp[3] = flagger.BAD
-    assert (flags == exp).all()
-
-    # 3 month < 99 days < 4 month
-    flags = flagger.setFlags(field, loc=s.index[3], flag_after="99d").getFlags(field)
-    exp = exp_base.copy()
-    exp.iloc[3: 3+3+1] = flagger.BAD
-    assert (flags == exp).all()
-
-    # 3 month < 99 days < 4 month
-    flags = flagger.setFlags(field, loc=s.index[3], flag_after="99d", win_flag=flagger.GOOD).getFlags(field)
-    exp = exp_base.copy()
-    exp.iloc[3: 3+3+1] = flagger.GOOD
-    exp[3] = flagger.BAD
-    assert (flags == exp).all()
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_flagBefore(flagger):
-    idx = pd.date_range("2000", "2001", freq='1M')
-    s = pd.Series(0, index=idx)
-    data = dios.DictOfSeries(s, columns=['a'])
-    exp_base = pd.Series(flagger.UNFLAGGED, index=idx)
-
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    flags = flagger.setFlags(field, loc=s.index[8], flag_before=5).getFlags(field)
-    exp = exp_base.copy()
-    exp.iloc[8-5: 8+1] = flagger.BAD
-    assert (flags == exp).all()
-
-    flags = flagger.setFlags(field, loc=s.index[8], flag_before=5, win_flag=flagger.GOOD).getFlags(field)
-    exp = exp_base.copy()
-    exp.iloc[8-5: 8+1] = flagger.GOOD
-    exp[8] = flagger.BAD
-    assert (flags == exp).all()
-
-    # 3 month < 99 days < 4 month
-    flags = flagger.setFlags(field, loc=s.index[8], flag_before="99d").getFlags(field)
-    exp = exp_base.copy()
-    exp.iloc[8-3: 8+1] = flagger.BAD
-    assert (flags == exp).all()
-
-    # 3 month < 99 days < 4 month
-    flags = flagger.setFlags(field, loc=s.index[8], flag_before="99d", win_flag=flagger.GOOD).getFlags(field)
-    exp = exp_base.copy()
-    exp.iloc[8-3: 8+1] = flagger.GOOD
-    exp[8] = flagger.BAD
-    assert (flags == exp).all()
diff --git a/tests/fuzzy/test_masking.py b/tests/fuzzy/test_masking.py
index be77e9872..cf00f1d98 100644
--- a/tests/fuzzy/test_masking.py
+++ b/tests/fuzzy/test_masking.py
@@ -48,9 +48,9 @@ def test_dataMutationPreventsUnmasking(data_field_flags):
 @settings(max_examples=MAX_EXAMPLES, deadline=None)
 @given(data_field_flags=dataFieldFlags())
 def test_flagsMutationPreventsUnmasking(data_field_flags):
-    """ test if (un)masking works as expected on flagger-changes.
+    """ test if (un)masking works as expected on flags-changes.
 
-    if `flagger` is mutated after `_maskData`, `_unmaskData` should be a no-op
+    if `flags` is mutated after `_maskData`, `_unmaskData` should be a no-op
     """
     data_in, field, flags = data_field_flags
     data_masked, mask = _maskData(data_in, flags, columns=[field], to_mask=BAD)
@@ -73,7 +73,7 @@ def test_reshapingPreventsUnmasking(data_field_flags):
     data_in, field, flags = data_field_flags
     data_masked, mask = _maskData(data_in, flags, columns=[field], to_mask=BAD)
 
-    # mutate indexes of `data` and `flagger`
+    # mutate indexes of `data` and `flags`
     index = data_masked[field].index.to_series()
     index.iloc[-len(data_masked[field])//2:] += pd.Timedelta("7.5Min")
     data_masked[field] = pd.Series(data=filler, index=index)
@@ -98,8 +98,8 @@ def test_unmaskingInvertsMasking(data_field_flags):
 
 
 # @settings(max_examples=MAX_EXAMPLES, deadline=None)
-# @given(data_field_flagger=dataFieldFlagger(), func_kwargs=flagFuncsKwargs())
-# def test_maskingPreservesData(data_field_flagger, func_kwargs):
+# @given(data_field_flags=dataFieldFlags(), func_kwargs=flagFuncsKwargs())
+# def test_maskingPreservesData(data_field_flags, func_kwargs):
 #     """
 #     no mutations on pre-flagged data
 
@@ -108,20 +108,20 @@ def test_unmaskingInvertsMasking(data_field_flags):
 #     are removed
 #     """
 
-#     data_in, field, flagger = data_field_flagger
+#     data_in, field, flags = data_field_flags
 
-#     data_masked, mask = _maskData(data_in, flagger, columns=[field], to_mask=flagger.BAD)
+#     data_masked, mask = _maskData(data_in, flags, columns=[field], to_mask=flags.BAD)
 #     func, kwargs = func_kwargs
-#     data_masked, _ = func(data_masked, field, flagger, **kwargs)
-#     data_out = _unmaskData(data_in, mask, data_masked, flagger, to_mask=flagger.BAD)
+#     data_masked, _ = func(data_masked, field, flags, **kwargs)
+#     data_out = _unmaskData(data_in, mask, data_masked, flags, to_mask=flags.BAD)
 
-#     flags_in = flagger.isFlagged(flag=flagger.BAD)
+#     flags_in = flags.isFlagged(flag=flags.BAD)
 #     assert data_in.aloc[flags_in].equals(data_out.aloc[flags_in])
 
 
 # @settings(max_examples=MAX_EXAMPLES, deadline=None)
-# @given(data_field_flagger=dataFieldFlagger(), func_kwargs=flagFuncsKwargs())
-# def test_maskingEqualsRemoval(data_field_flagger, func_kwargs):
+# @given(data_field_flags=dataFieldFlags(), func_kwargs=flagFuncsKwargs())
+# def test_maskingEqualsRemoval(data_field_flags, func_kwargs):
 #     """
 #     calling a function on pre-flagged data should yield the same
 #     results as calling this function on data where the flagged values
@@ -129,17 +129,17 @@ def test_unmaskingInvertsMasking(data_field_flags):
 #     """
 #     func, kwargs = func_kwargs
 
-#     data, field, flagger = data_field_flagger
-#     flagged_in = flagger.isFlagged(flag=flagger.BAD, comparator=">=")
+#     data, field, flags = data_field_flags
+#     flagged_in = flags.isFlagged(flag=flags.BAD, comparator=">=")
 
 #     # mask and call
-#     data_left, _ = _maskData(data, flagger, columns=[field], to_mask=flagger.BAD)
-#     data_left, _ = func(data_left, field, flagger, **kwargs)
+#     data_left, _ = _maskData(data, flags, columns=[field], to_mask=flags.BAD)
+#     data_left, _ = func(data_left, field, flags, **kwargs)
 
 #     # remove and call
 #     data_right = data.aloc[~flagged_in]
-#     flagger_right = flagger.initFlags(flagger.getFlags().aloc[~flagged_in])
-#     data_right, _ = func(data_right, field, flagger_right, **kwargs)
+#     flags_right = flags.initFlags(flags.getFlags().aloc[~flagged_in])
+#     data_right, _ = func(data_right, field, flags_right, **kwargs)
 
 #     # NOTE: we need to handle the implicit type conversion in `_maskData`
 #     data_left_compare = data_left.aloc[~flagged_in]
-- 
GitLab