diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 7a8fcc4e8109c933d04b672129aeb0b826c6eced..2f4d6604a73532a9f3df497e53424e34a08defcc 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,52 +1,118 @@
+# ===========================================================
+# preparation
+# ===========================================================
+
 variables:
   GIT_SUBMODULE_STRATEGY: recursive
 
-before_script:
-  - export DEBIAN_FRONTEND=noninteractive
-  - apt-get -qq update
-  - apt-get -qq install -y make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev python-openssl git > /dev/null
-  - export DEBIAN_FRONTEND=dialog
-  - export LC_ALL=C.UTF-8
-  - export LANG=C.UTF-8
-  - git clone https://github.com/pyenv/pyenv.git ~/.pyenv
-  - export PYENV_ROOT="$HOME/.pyenv"
-  - export PATH="$PYENV_ROOT/bin:$PATH"
-  - eval "$(pyenv init -)"
+default:
+  image: python:3.8
+  before_script:
+    - pip install --upgrade pip
+    - pip install pytest
+    - pip install -r requirements.txt
 
 
-test:python37:
+# ===========================================================
+# normal jobs (non-scheduled)
+# ===========================================================
+
+# test saqc with python 3.7
+python37:
+  stage: test
+  except:
+    - schedules
+  image: python:3.7
   script:
-    - pyenv install 3.7.5
-    - pyenv shell 3.7.5
-    - pip install --upgrade pip
-    - pip install -r requirements.txt
-    - python -m pytest --ignore test/lib test
+    - pytest tests/core tests/funcs tests/integration dios/test
     - python -m saqc --config ressources/data/config_ci.csv --data ressources/data/data.csv --outfile /tmp/test.csv
 
 
-test:python38:
+# test saqc with python 3.8
+python38:
+  stage: test
+  except:
+    - schedules
   script:
-    - pyenv install 3.8.0
-    - pyenv shell 3.8.0
-    - pip install --upgrade pip
-    - pip install -r requirements.txt
-    - python -m pytest --ignore test/lib test
+    - pytest tests/core tests/funcs tests/integration dios/test
     - python -m saqc --config ressources/data/config_ci.csv --data ressources/data/data.csv --outfile /tmp/test.csv
 
-# Make html docu with sphinx
+
+# test saqc with python 3.9
+python39:
+  stage: test
+  except:
+    - schedules
+  image: python:3.9
+  script:
+    - pytest tests/core tests/funcs tests/integration
+    - python -m saqc --config ressources/data/config_ci.csv --data ressources/data/data.csv --outfile /tmp/test.csv
+
+
+# check if everything is properly formatted
+black:
+  stage: test
+  script:
+    - pip install black
+    - black --check .
+
+
+# make (visual) coverage reports for gitlab merge request diffs
+coverage:
+  stage: test
+  except:
+    - schedules
+  allow_failure: true
+  script:
+    - pip install pytest-cov coverage
+    - pytest --cov=saqc tests/core tests/funcs
+  after_script:
+    - coverage xml
+
+  # regex to find the coverage percentage in the job output
+  coverage: '/^TOTAL.+?(\d+\%)$/'
+
+  artifacts:
+    when: always
+    reports:
+      cobertura: coverage.xml
+
+
+# make html documentation with sphinx
 pages:
   stage: deploy
+  only:
+    - cookBux
+  except:
+    - schedules
   script:
-    - pyenv install 3.8.0
-    - pyenv shell 3.8.0
-    - pip install --upgrade pip
-    - pip install -r requirements.txt
     - cd sphinx-doc/
     - pip install -r requirements_sphinx.txt
-    - make html
+    - make doc
     - cp -r _build/html ../public
   artifacts:
     paths:
       - public
+
+
+# ===========================================================
+# scheduled jobs
+# ===========================================================
+
+# fuzzy testing saqc
+fuzzy:
+  stage: test
+  only:
+    - schedules
+  script:
+    - pytest tests/fuzzy
+
+
+# test lib saqc
+testLib:
+  stage: test
   only:
-    - develop
+    - schedules
+  script:
+    - pytest tests/lib
+
diff --git a/.gitmodules b/.gitmodules
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 10d3465f6d14b3a3ded89c4039ee39363a41a284..da88ffd8683a4b990ccc82e228ef41dc5e913d75 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -79,10 +79,9 @@
 ## Breaking Changes
 - register is now a decorator instead of a wrapper
 
-
 # 1.5
 
-coming soon...
+coming soon ...
 
 ## Features
 
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 90a3e1f9826b61dc44c39346dafa71898b19dec4..4cd12ace56e9e2fe51bd4530adef4c0eab10f4b4 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -3,29 +3,105 @@ We recommend a virtual python environment for development. The setup process is
 
 # Testing
 SaQC comes with an extensive test suite based on [pytest](https://docs.pytest.org/en/latest/).
-In order to run all tests execute:
-```sh
-python -m pytest .
-```
+In order to run all tests, execute `python -m pytest .`; for faster iteration, a test run with
+`python -m pytest --ignore test/lib test` is usually enough.
 
 # Coding conventions
 
 ## Naming
 
 ### Code
-We follow the follwing naming conventions:
+We implement the following naming conventions (see the short sketch after the list):
 - Classes: CamelCase
 - Functions: camelCase
 - Variables/Arguments: snake_case
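+
+A minimal sketch of these conventions (all names are hypothetical and only serve as illustration):
+
+```python
+class OutlierDetector:                     # classes: CamelCase
+    def flagOutliers(self, max_gap):       # functions/methods: camelCase
+        flag_count = 0                     # variables/arguments: snake_case
+        return flag_count
+```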
 
+### Argument names in public function signatures
+
+First, in contrast to variable names in code, it is not necessary to have *talking*
+argument names, because one always has to read the documentation anyway. Using and
+parameterizing a function just by guessing the meaning of the argument names, without
+reading the docs, will almost never work. That is why we are not obliged to make
+names (very) talkative.
+
+Second, because of the nature of a function (to provide a *simple* way to use complex code),
+it is common to use simple and short names. This means omitting any *irrelevant* information.
+
+For example, consider a function that fits a polynomial to some data and takes three arguments.
+Let's say we have:
+ - the data input,
+ - a threshold that defines a cutoff point for a calculation on the polynomial and
+ - a third argument.
+
+One could name the arguments `data, poly_cutoff_threshold, ...`, but much better names would
+be `data, thresh, ...`, because a caller does not need the extra information
+stuffed into the name.
+If the third argument is also some kind of threshold,
+one can use `data, cutoff, thresh`, because the *thresh-* information of the `cutoff`
+parameter is not crucial and the caller learns from the docstring that it is a threshold.
+
+Third, underscores give good feedback on whether a name is wrong or overly complex.
+No underscore is fine; one underscore is okay if the information is *really necessary* (see above);
+but with two or more underscores, one should think of a better name
+or omit some information.
+Sure, it is seldom but sometimes necessary to use two underscores, but we consider it bad style.
+Using three or more underscores is not allowed unless one writes down a reasoning and gets it
+signed by at least as many core developers as underscores one wants to use.
+
+
+In short, the name should *give a very, very rough idea* of the purpose of the argument,
+but not *explain* the usage or the purpose.
+It is not a shame to name a parameter just `n` or `alpha` etc. if, for example, the algorithm
+(from the paper etc.) names it alike.
+
 ### Test Functions
 - testnames: [testmodule_]flagTestName
  
 ## Formatting
-We use (black)[https://black.readthedocs.io/en/stable/] with a line length if 120 characters.
-Within the `SaQC` root directory run `black -l 120`.
+We use [black](https://black.readthedocs.io/en/stable/) in its default settings.
+Within the `saqc` root directory run `black .`.
 
 ## Imports
 Only absolute imports are accepted.
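+
+For example (the module path is hypothetical and only serves as illustration):
+
+```python
+# accepted: absolute import (hypothetical module path)
+from saqc.funcs.outliers import flagOutliers
+
+# not accepted: relative import
+# from .outliers import flagOutliers
+```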
 
 
+# Development Workflow
+## Repository Structure
+
+- `master` - branch:
+  + Stable and usually protected.
+  + Regular merges from `develop` according to the [release cycle](#release-cycle). These merges get a tag, increasing at least the minor version.
+  + Irregular merges from `develop` in case of critical bugs. Such merges increase at least the patch level.
+  + Merges into `master` usually lead to a PyPI release.
+- `develop` - branch:
+  + The main development branch; no hard stability requirements/guarantees.
+  + Merges into `develop` should mostly follow a Merge Request Workflow; minor changes can, however, be committed directly. Such minor changes include:
+    * Typos and white space changes
+    * Obvious bug fixes in features implemented by the committing developer
+    
+    
+## Merge Request Workflow
+- Most changes to `saqc` are integrated by merge requests from a feature branch into `develop`
+- All merge requests need to be reviewed by at least one other core developer (currently @palmb, @luenensc and @schaefed).
+- We implement the following Gitlab based review process:
+  + The author assigns the Merge Request to one of the core developers. The reviewer should review the request within one week;
+    large requests may, of course, lead to longer review times.
+  + Reviewer and Author discuss any issues using the Gitlab code review facilities:
+    * In case all concerns are resolved, the reviewer approves the Merge Request and assigns it back to the author.
+    * In case reviewer and author can't resolve their discussion, the Merge Request should be assigned to another reviewer.
+      The new reviewer is now in charge of coming to a decision, either by approving, closing or going into another review iteration.
+  + The author of an approved Merge Request:
+    * has the right and the duty to merge into the `develop` branch; any occurring conflicts need to be addressed by the author,
+    * is always highly encouraged to provide a summary of the changes introduced with the Merge Request in its description upon integration. This recommendation becomes an obligation in case of interface modifications or changes to supported and/or documented workflows.
+
+
+## Release Cycle
+- We employ a release cycle of roughly 4 weeks.
+- To avoid the integration of untested and/or broken changes, the merge window closes one week before the intended
+  release date. Commits to `develop` after the merge window of a release closes need to be integrated during the subsequent release
+  cycle.
+- The release cycle is organized through Gitlab Milestones; the expiration date of a certain milestone indicates the end of the
+  related merge window. The actual merge into `master` and the accompanying release are scheduled for the week after the
+  milestone's expiration date.
+- Issues and Merge Requests can and should be associated with these milestones, as this helps in the organization of review activities.
diff --git a/LICENSE.md b/LICENSE.md
new file mode 100644
index 0000000000000000000000000000000000000000..804727ae457a5b291e0792b53bb9a211c2a836eb
--- /dev/null
+++ b/LICENSE.md
@@ -0,0 +1,746 @@
+# SOFTWARE LICENCE
+
+This file is part of the "System for Automated Quality Control" developed by the Research Data Management Team
+of the Helmholtz-Centre for Environmental Research Leipzig.
+
+# Copyright Notice
+
+Copyright(c) 2021, *Helmholtz-Zentrum für Umweltforschung GmbH -- UFZ*. All rights reserved.
+
+
+**The code is a property of**
+
+*Helmholtz-Zentrum für Umweltforschung GmbH -- UFZ*\
+Registered Office: Leipzig\
+Registration Office: Amtsgericht Leipzig\
+Trade Register Nr. B 4703\
+Chairman of the Supervisory Board: MinDirig\'in Oda Keppler\
+Scientific Director: Prof. Dr. Georg Teutsch\
+Administrative Director: Dr. Sabine König\
+
+**Contact**
+
+David Schäfer\
+Research Data Management (RDM)\
+Permoserstr. 15\
+04318 Leipzig\
+david.schaefer\@ufz.de
+
+This program is free software; you can redistribute it and/or modify it
+under the terms of the GNU General Public License as published by the
+Free Software Foundation; either version 3 of the License, or (at your
+option) any later version.
+
+This program is distributed in the hope that it will be useful, but
+WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
+Public License for more details.
+
+You should have received a copy of the GNU General Public License along
+with this program; if not, it can be found at the end of this document
+or see at https://www.gnu.org/licenses/gpl-3.0.de.html.
+
+**Redistribution**
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+- Redistributions of source code must retain the above copyright notice,
+the list of conditions for redistribution and modification as well as the
+following GNU General Public License.
+
+- Redistributions in binary form must reproduce the above copyright
+notice, this list of conditions, the following GNU General Public License
+and the modification conditions in the documentation and/or other materials
+provided with the distribution.
+
+- Neither the name of *Helmholtz-Zentrum für Umweltforschung GmbH -- UFZ*,
+nor the names of its contributors may be used to endorse or promote products
+derived from this software without specific prior written permission.
+
+**Modification**
+
+If software is modified to produce derivative works, such modified
+software should be clearly marked, so as not to confuse it with the
+version available from *Helmholtz-Zentrum für Umweltforschung GmbH --
+UFZ*.
+
+
+# GNU GENERAL PUBLIC LICENSE
+
+Version 3, 29 June 2007
+
+Copyright (C) 2007 Free Software Foundation, Inc.
+<https://fsf.org/>
+
+Everyone is permitted to copy and distribute verbatim copies of this
+license document, but changing it is not allowed.
+
+# Preamble
+
+The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom
+to share and change all versions of a program--to make sure it remains
+free software for all its users. We, the Free Software Foundation, use
+the GNU General Public License for most of our software; it applies
+also to any other work released this way by its authors. You can apply
+it to your programs, too.
+
+When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you
+have certain responsibilities if you distribute copies of the
+software, or if you modify it: responsibilities to respect the freedom
+of others.
+
+For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the
+manufacturer can do so. This is fundamentally incompatible with the
+aim of protecting users' freedom to change the software. The
+systematic pattern of such abuse occurs in the area of products for
+individuals to use, which is precisely where it is most unacceptable.
+Therefore, we have designed this version of the GPL to prohibit the
+practice for those products. If such problems arise substantially in
+other domains, we stand ready to extend this provision to those
+domains in future versions of the GPL, as needed to protect the
+freedom of users.
+
+Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish
+to avoid the special danger that patents applied to a free program
+could make it effectively proprietary. To prevent this, the GPL
+assures that patents cannot be used to render the program non-free.
+
+The precise terms and conditions for copying, distribution and
+modification follow.
+
+# TERMS AND CONDITIONS
+
+## 0. Definitions.
+
+"This License" refers to version 3 of the GNU General Public License.
+
+"Copyright" also means copyright-like laws that apply to other kinds
+of works, such as semiconductor masks.
+
+"The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of
+an exact copy. The resulting work is called a "modified version" of
+the earlier work or a work "based on" the earlier work.
+
+A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user
+through a computer network, with no transfer of a copy, is not
+conveying.
+
+An interactive user interface displays "Appropriate Legal Notices" to
+the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+## 1. Source Code.
+
+The "source code" for a work means the preferred form of the work for
+making modifications to it. "Object code" means any non-source form of
+a work.
+
+A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+The Corresponding Source need not include anything that users can
+regenerate automatically from other parts of the Corresponding Source.
+
+The Corresponding Source for a work in source code form is that same
+work.
+
+## 2. Basic Permissions.
+
+All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+You may make, run and propagate covered works that you do not convey,
+without conditions so long as your license otherwise remains in force.
+You may convey covered works to others for the sole purpose of having
+them make modifications exclusively for you, or provide you with
+facilities for running those works, provided that you comply with the
+terms of this License in conveying all material for which you do not
+control copyright. Those thus making or running the covered works for
+you must do so exclusively on your behalf, under your direction and
+control, on terms that prohibit them from making any copies of your
+copyrighted material outside their relationship with you.
+
+Conveying under any other circumstances is permitted solely under the
+conditions stated below. Sublicensing is not allowed; section 10 makes
+it unnecessary.
+
+## 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such
+circumvention is effected by exercising rights under this License with
+respect to the covered work, and you disclaim any intention to limit
+operation or modification of the work as a means of enforcing, against
+the work's users, your or third parties' legal rights to forbid
+circumvention of technological measures.
+
+## 4. Conveying Verbatim Copies.
+
+You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+## 5. Conveying Modified Source Versions.
+
+You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these
+conditions:
+
+-   a) The work must carry prominent notices stating that you modified
+    it, and giving a relevant date.
+-   b) The work must carry prominent notices stating that it is
+    released under this License and any conditions added under
+    section 7. This requirement modifies the requirement in section 4
+    to "keep intact all notices".
+-   c) You must license the entire work, as a whole, under this
+    License to anyone who comes into possession of a copy. This
+    License will therefore apply, along with any applicable section 7
+    additional terms, to the whole of the work, and all its parts,
+    regardless of how they are packaged. This License gives no
+    permission to license the work in any other way, but it does not
+    invalidate such permission if you have separately received it.
+-   d) If the work has interactive user interfaces, each must display
+    Appropriate Legal Notices; however, if the Program has interactive
+    interfaces that do not display Appropriate Legal Notices, your
+    work need not make them do so.
+
+A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+## 6. Conveying Non-Source Forms.
+
+You may convey a covered work in object code form under the terms of
+sections 4 and 5, provided that you also convey the machine-readable
+Corresponding Source under the terms of this License, in one of these
+ways:
+
+-   a) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by the
+    Corresponding Source fixed on a durable physical medium
+    customarily used for software interchange.
+-   b) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by a
+    written offer, valid for at least three years and valid for as
+    long as you offer spare parts or customer support for that product
+    model, to give anyone who possesses the object code either (1) a
+    copy of the Corresponding Source for all the software in the
+    product that is covered by this License, on a durable physical
+    medium customarily used for software interchange, for a price no
+    more than your reasonable cost of physically performing this
+    conveying of source, or (2) access to copy the Corresponding
+    Source from a network server at no charge.
+-   c) Convey individual copies of the object code with a copy of the
+    written offer to provide the Corresponding Source. This
+    alternative is allowed only occasionally and noncommercially, and
+    only if you received the object code with such an offer, in accord
+    with subsection 6b.
+-   d) Convey the object code by offering access from a designated
+    place (gratis or for a charge), and offer equivalent access to the
+    Corresponding Source in the same way through the same place at no
+    further charge. You need not require recipients to copy the
+    Corresponding Source along with the object code. If the place to
+    copy the object code is a network server, the Corresponding Source
+    may be on a different server (operated by you or a third party)
+    that supports equivalent copying facilities, provided you maintain
+    clear directions next to the object code saying where to find the
+    Corresponding Source. Regardless of what server hosts the
+    Corresponding Source, you remain obligated to ensure that it is
+    available for as long as needed to satisfy these requirements.
+-   e) Convey the object code using peer-to-peer transmission,
+    provided you inform other peers where the object code and
+    Corresponding Source of the work are being offered to the general
+    public at no charge under subsection 6d.
+
+A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal,
+family, or household purposes, or (2) anything designed or sold for
+incorporation into a dwelling. In determining whether a product is a
+consumer product, doubtful cases shall be resolved in favor of
+coverage. For a particular product received by a particular user,
+"normally used" refers to a typical or common use of that class of
+product, regardless of the status of the particular user or of the way
+in which the particular user actually uses, or expects or is expected
+to use, the product. A product is a consumer product regardless of
+whether the product has substantial commercial, industrial or
+non-consumer uses, unless such uses represent the only significant
+mode of use of the product.
+
+"Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to
+install and execute modified versions of a covered work in that User
+Product from a modified version of its Corresponding Source. The
+information must suffice to ensure that the continued functioning of
+the modified object code is in no case prevented or interfered with
+solely because modification has been made.
+
+If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or
+updates for a work that has been modified or installed by the
+recipient, or for the User Product in which it has been modified or
+installed. Access to a network may be denied when the modification
+itself materially and adversely affects the operation of the network
+or violates the rules and protocols for communication across the
+network.
+
+Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+## 7. Additional Terms.
+
+"Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders
+of that material) supplement the terms of this License with terms:
+
+-   a) Disclaiming warranty or limiting liability differently from the
+    terms of sections 15 and 16 of this License; or
+-   b) Requiring preservation of specified reasonable legal notices or
+    author attributions in that material or in the Appropriate Legal
+    Notices displayed by works containing it; or
+-   c) Prohibiting misrepresentation of the origin of that material,
+    or requiring that modified versions of such material be marked in
+    reasonable ways as different from the original version; or
+-   d) Limiting the use for publicity purposes of names of licensors
+    or authors of the material; or
+-   e) Declining to grant rights under trademark law for use of some
+    trade names, trademarks, or service marks; or
+-   f) Requiring indemnification of licensors and authors of that
+    material by anyone who conveys the material (or modified versions
+    of it) with contractual assumptions of liability to the recipient,
+    for any liability that these contractual assumptions directly
+    impose on those licensors and authors.
+
+All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions; the
+above requirements apply either way.
+
+## 8. Termination.
+
+You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+However, if you cease all violation of this License, then your license
+from a particular copyright holder is reinstated (a) provisionally,
+unless and until the copyright holder explicitly and finally
+terminates your license, and (b) permanently, if the copyright holder
+fails to notify you of the violation by some reasonable means prior to
+60 days after the cessation.
+
+Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+## 9. Acceptance Not Required for Having Copies.
+
+You are not required to accept this License in order to receive or run
+a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+## 10. Automatic Licensing of Downstream Recipients.
+
+Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+## 11. Patents.
+
+A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+A contributor's "essential patent claims" are all patent claims owned
+or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+A patent license is "discriminatory" if it does not include within the
+scope of its coverage, prohibits the exercise of, or is conditioned on
+the non-exercise of one or more of the rights that are specifically
+granted under this License. You may not convey a covered work if you
+are a party to an arrangement with a third party that is in the
+business of distributing software, under which you make payment to the
+third party based on the extent of your activity of conveying the
+work, and under which the third party grants, to any of the parties
+who would receive the covered work from you, a discriminatory patent
+license (a) in connection with copies of the covered work conveyed by
+you (or copies made from those copies), or (b) primarily for and in
+connection with specific products or compilations that contain the
+covered work, unless you entered into that arrangement, or that patent
+license was granted, prior to 28 March 2007.
+
+Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+## 12. No Surrender of Others' Freedom.
+
+If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under
+this License and any other pertinent obligations, then as a
+consequence you may not convey it at all. For example, if you agree to
+terms that obligate you to collect a royalty for further conveying
+from those to whom you convey the Program, the only way you could
+satisfy both those terms and this License would be to refrain entirely
+from conveying the Program.
+
+## 13. Use with the GNU Affero General Public License.
+
+Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+## 14. Revised Versions of this License.
+
+The Free Software Foundation may publish revised and/or new versions
+of the GNU General Public License from time to time. Such new versions
+will be similar in spirit to the present version, but may differ in
+detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program
+specifies that a certain numbered version of the GNU General Public
+License "or any later version" applies to it, you have the option of
+following the terms and conditions either of that numbered version or
+of any later version published by the Free Software Foundation. If the
+Program does not specify a version number of the GNU General Public
+License, you may choose any version ever published by the Free
+Software Foundation.
+
+If the Program specifies that a proxy can decide which future versions
+of the GNU General Public License can be used, that proxy's public
+statement of acceptance of a version permanently authorizes you to
+choose that version for the Program.
+
+Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+## 15. Disclaimer of Warranty.
+
+THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT
+WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND
+PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE
+DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR
+CORRECTION.
+
+## 16. Limitation of Liability.
+
+IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR
+CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES
+ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT
+NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR
+LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM
+TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER
+PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+## 17. Interpretation of Sections 15 and 16.
+
+If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+END OF TERMS AND CONDITIONS
+
+# How to Apply These Terms to Your New Programs
+
+If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these
+terms.
+
+To do so, attach the following notices to the program. It is safest to
+attach them to the start of each source file to most effectively state
+the exclusion of warranty; and each file should have at least the
+"copyright" line and a pointer to where the full notice is found.
+
+        <one line to give the program's name and a brief idea of what it does.>
+        Copyright (C) <year>  <name of author>
+
+        This program is free software: you can redistribute it and/or modify
+        it under the terms of the GNU General Public License as published by
+        the Free Software Foundation, either version 3 of the License, or
+        (at your option) any later version.
+
+        This program is distributed in the hope that it will be useful,
+        but WITHOUT ANY WARRANTY; without even the implied warranty of
+        MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+        GNU General Public License for more details.
+
+        You should have received a copy of the GNU General Public License
+        along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper
+mail.
+
+If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+        <program>  Copyright (C) <year>  <name of author>
+        This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+        This is free software, and you are welcome to redistribute it
+        under certain conditions; type `show c' for details.
+
+The hypothetical commands \`show w' and \`show c' should show the
+appropriate parts of the General Public License. Of course, your
+program's commands might be different; for a GUI interface, you would
+use an "about box".
+
+You should also get your employer (if you work as a programmer) or
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary. For more information on this, and how to apply and follow
+the GNU GPL, see <https://www.gnu.org/licenses/>.
+
+The GNU General Public License does not permit incorporating your
+program into proprietary programs. If your program is a subroutine
+library, you may consider it more useful to permit linking proprietary
+applications with the library. If this is what you want to do, use the
+GNU Lesser General Public License instead of this License. But first,
+please read <https://www.gnu.org/licenses/why-not-lgpl.html>.
+
+END OF TERMS AND CONDITIONS
diff --git a/LICENSE.txt b/LICENSE.txt
deleted file mode 100644
index 298ce84a44edb02b31953b36123ae9d15df7bcf2..0000000000000000000000000000000000000000
--- a/LICENSE.txt
+++ /dev/null
@@ -1,737 +0,0 @@
-==============
-SOFTWARE LICENCE
-==============
-
-This file is part of the "System for Automated Quality Control" developed by the Research Data Management Team
-of the Helmholtz-Centre for Environmental Research Leipzig.
-
-==============
-COPYRIGHT NOTICE
-==============
-
-Copyright(c) 2019, 
-Helmholtz-Zentrum fuer Umweltforschung GmbH - UFZ. 
-All rights reserved.
-
-The code is a property of:
-
-Helmholtz Centre for Environmental Research - UFZ 
-Registered Office: Leipzig 
-Registration Office: Amtsgericht Leipzig 
-Trade Register Nr. B 4703 
-Chairman of the Supervisory Board: MinDirig'in Oda Keppler 
-Scientific Director: Prof. Dr. Georg Teutsch 
-Administrative Director: Dr. Sabine König
-
-
-The "System for Automated Quality Control" is free software. You can 
-redistribute it and/or modify it under the terms of the GNU General 
-Public License as published by the free Software Foundation either 
-version 3 of the License, or (at your option) any later version. 
-
-
-This program is distributed in the hope that it will be useful, but 
-WITHOUT ANY WARRANTY; without even the implied warranty of 
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
-See the GNU General Public License for more details.
-
-
-You received a copy of the GNU General Public License along 
-with the "System for Automated Quality Control". It can be found
-in the section "GNU General Public License" in this document. The
-complete GNU license text can also be found at 
-<http://www.gnu.org/licenses/>.
-
-==============
-Contact
-==============
-
-David Schäfer
-Dr. Robert Günther
-Research Data Management Team
-
-Mail: Permoserstrasse 15, 04318 Leipzig, Germany
-E-mail: david.schaefer@ufz.de
-E-mail: rdm-contact@ufz.de
-
-
-==============
-Redistribution
-==============
-
-Redistribution and use in source and binary forms, with or without 
-modification, are permitted provided that the following conditions 
-are met:
-
-*  Redistributions of source code must retain the above 
-   copyright notice, the list of conditions for redistribution and
-   modification as well as the following disclaimers of warranty and liability.
-*  Redistributions in binary form must reproduce the above copyright 
-   notice, this list of conditions, the following disclaimer and the 
-   modification conditions in the documentation and/or other materials 
-   provided with the distribution.
-*  Neither the name of Helmholtz-Zentrum fuer Umweltforschung GmbH - 
-   UFZ, nor the names of its contributors may be used to endorse or 
-   promote products derived from this software without specific prior 
-   written permission.
-
-
-==============
-Modification
-==============
-
-If software is modified to produce derivative works, such modified 
-software should be clearly marked, so as not to confuse it with the 
-version available from UFZ.
-
-==============
-Disclaimer of Warranty 
-==============
-
-THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
-APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE 
-HELMHOLTZ-ZENTRUM FUER UMWELTFORSCHUNG GMBH - UFZ AND CONTRIBUTORS
-AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
-OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
-IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
-ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
-==============
-Limitation of Liability
-==============
-
-IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL THE HELMHOLTZ-ZENTRUM FUER UMWELTFORSCHUNG GMBH - UFZ AND 
-CONTRIBUTORS OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
-THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
-GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
-USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
-DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
-PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
-EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGES.
-
-==============
-GNU General Public License
-==============
-                    GNU GENERAL PUBLIC LICENSE
-                       Version 3, 29 June 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
-                            Preamble
-
-  The GNU General Public License is a free, copyleft license for
-software and other kinds of works.
-
-  The licenses for most software and other practical works are designed
-to take away your freedom to share and change the works.  By contrast,
-the GNU General Public License is intended to guarantee your freedom to
-share and change all versions of a program--to make sure it remains free
-software for all its users.  We, the Free Software Foundation, use the
-GNU General Public License for most of our software; it applies also to
-any other work released this way by its authors.  You can apply it to
-your programs, too.
-
-  When we speak of free software, we are referring to freedom, not
-price.  Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-them if you wish), that you receive source code or can get it if you
-want it, that you can change the software or use pieces of it in new
-free programs, and that you know you can do these things.
-
-  To protect your rights, we need to prevent others from denying you
-these rights or asking you to surrender the rights.  Therefore, you have
-certain responsibilities if you distribute copies of the software, or if
-you modify it: responsibilities to respect the freedom of others.
-
-  For example, if you distribute copies of such a program, whether
-gratis or for a fee, you must pass on to the recipients the same
-freedoms that you received.  You must make sure that they, too, receive
-or can get the source code.  And you must show them these terms so they
-know their rights.
-
-  Developers that use the GNU GPL protect your rights with two steps:
-(1) assert copyright on the software, and (2) offer you this License
-giving you legal permission to copy, distribute and/or modify it.
-
-  For the developers' and authors' protection, the GPL clearly explains
-that there is no warranty for this free software.  For both users' and
-authors' sake, the GPL requires that modified versions be marked as
-changed, so that their problems will not be attributed erroneously to
-authors of previous versions.
-
-  Some devices are designed to deny users access to install or run
-modified versions of the software inside them, although the manufacturer
-can do so.  This is fundamentally incompatible with the aim of
-protecting users' freedom to change the software.  The systematic
-pattern of such abuse occurs in the area of products for individuals to
-use, which is precisely where it is most unacceptable.  Therefore, we
-have designed this version of the GPL to prohibit the practice for those
-products.  If such problems arise substantially in other domains, we
-stand ready to extend this provision to those domains in future versions
-of the GPL, as needed to protect the freedom of users.
-
-  Finally, every program is threatened constantly by software patents.
-States should not allow patents to restrict development and use of
-software on general-purpose computers, but in those that do, we wish to
-avoid the special danger that patents applied to a free program could
-make it effectively proprietary.  To prevent this, the GPL assures that
-patents cannot be used to render the program non-free.
-
-  The precise terms and conditions for copying, distribution and
-modification follow.
-
-                       TERMS AND CONDITIONS
-
-  0. Definitions.
-
-  "This License" refers to version 3 of the GNU General Public License.
-
-  "Copyright" also means copyright-like laws that apply to other kinds of
-works, such as semiconductor masks.
-
-  "The Program" refers to any copyrightable work licensed under this
-License.  Each licensee is addressed as "you".  "Licensees" and
-"recipients" may be individuals or organizations.
-
-  To "modify" a work means to copy from or adapt all or part of the work
-in a fashion requiring copyright permission, other than the making of an
-exact copy.  The resulting work is called a "modified version" of the
-earlier work or a work "based on" the earlier work.
-
-  A "covered work" means either the unmodified Program or a work based
-on the Program.
-
-  To "propagate" a work means to do anything with it that, without
-permission, would make you directly or secondarily liable for
-infringement under applicable copyright law, except executing it on a
-computer or modifying a private copy.  Propagation includes copying,
-distribution (with or without modification), making available to the
-public, and in some countries other activities as well.
-
-  To "convey" a work means any kind of propagation that enables other
-parties to make or receive copies.  Mere interaction with a user through
-a computer network, with no transfer of a copy, is not conveying.
-
-  An interactive user interface displays "Appropriate Legal Notices"
-to the extent that it includes a convenient and prominently visible
-feature that (1) displays an appropriate copyright notice, and (2)
-tells the user that there is no warranty for the work (except to the
-extent that warranties are provided), that licensees may convey the
-work under this License, and how to view a copy of this License.  If
-the interface presents a list of user commands or options, such as a
-menu, a prominent item in the list meets this criterion.
-
-  1. Source Code.
-
-  The "source code" for a work means the preferred form of the work
-for making modifications to it.  "Object code" means any non-source
-form of a work.
-
-  A "Standard Interface" means an interface that either is an official
-standard defined by a recognized standards body, or, in the case of
-interfaces specified for a particular programming language, one that
-is widely used among developers working in that language.
-
-  The "System Libraries" of an executable work include anything, other
-than the work as a whole, that (a) is included in the normal form of
-packaging a Major Component, but which is not part of that Major
-Component, and (b) serves only to enable use of the work with that
-Major Component, or to implement a Standard Interface for which an
-implementation is available to the public in source code form.  A
-"Major Component", in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system
-(if any) on which the executable work runs, or a compiler used to
-produce the work, or an object code interpreter used to run it.
-
-  The "Corresponding Source" for a work in object code form means all
-the source code needed to generate, install, and (for an executable
-work) run the object code and to modify the work, including scripts to
-control those activities.  However, it does not include the work's
-System Libraries, or general-purpose tools or generally available free
-programs which are used unmodified in performing those activities but
-which are not part of the work.  For example, Corresponding Source
-includes interface definition files associated with source files for
-the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require,
-such as by intimate data communication or control flow between those
-subprograms and other parts of the work.
-
-  The Corresponding Source need not include anything that users
-can regenerate automatically from other parts of the Corresponding
-Source.
-
-  The Corresponding Source for a work in source code form is that
-same work.
-
-  2. Basic Permissions.
-
-  All rights granted under this License are granted for the term of
-copyright on the Program, and are irrevocable provided the stated
-conditions are met.  This License explicitly affirms your unlimited
-permission to run the unmodified Program.  The output from running a
-covered work is covered by this License only if the output, given its
-content, constitutes a covered work.  This License acknowledges your
-rights of fair use or other equivalent, as provided by copyright law.
-
-  You may make, run and propagate covered works that you do not
-convey, without conditions so long as your license otherwise remains
-in force.  You may convey covered works to others for the sole purpose
-of having them make modifications exclusively for you, or provide you
-with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do
-not control copyright.  Those thus making or running the covered works
-for you must do so exclusively on your behalf, under your direction
-and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
-  Conveying under any other circumstances is permitted solely under
-the conditions stated below.  Sublicensing is not allowed; section 10
-makes it unnecessary.
-
-  3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
-  No covered work shall be deemed part of an effective technological
-measure under any applicable law fulfilling obligations under article
-11 of the WIPO copyright treaty adopted on 20 December 1996, or
-similar laws prohibiting or restricting circumvention of such
-measures.
-
-  When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention
-is effected by exercising rights under this License with respect to
-the covered work, and you disclaim any intention to limit operation or
-modification of the work as a means of enforcing, against the work's
-users, your or third parties' legal rights to forbid circumvention of
-technological measures.
-
-  4. Conveying Verbatim Copies.
-
-  You may convey verbatim copies of the Program's source code as you
-receive it, in any medium, provided that you conspicuously and
-appropriately publish on each copy an appropriate copyright notice;
-keep intact all notices stating that this License and any
-non-permissive terms added in accord with section 7 apply to the code;
-keep intact all notices of the absence of any warranty; and give all
-recipients a copy of this License along with the Program.
-
-  You may charge any price or no price for each copy that you convey,
-and you may offer support or warranty protection for a fee.
-
-  5. Conveying Modified Source Versions.
-
-  You may convey a work based on the Program, or the modifications to
-produce it from the Program, in the form of source code under the
-terms of section 4, provided that you also meet all of these conditions:
-
-    a) The work must carry prominent notices stating that you modified
-    it, and giving a relevant date.
-
-    b) The work must carry prominent notices stating that it is
-    released under this License and any conditions added under section
-    7.  This requirement modifies the requirement in section 4 to
-    "keep intact all notices".
-
-    c) You must license the entire work, as a whole, under this
-    License to anyone who comes into possession of a copy.  This
-    License will therefore apply, along with any applicable section 7
-    additional terms, to the whole of the work, and all its parts,
-    regardless of how they are packaged.  This License gives no
-    permission to license the work in any other way, but it does not
-    invalidate such permission if you have separately received it.
-
-    d) If the work has interactive user interfaces, each must display
-    Appropriate Legal Notices; however, if the Program has interactive
-    interfaces that do not display Appropriate Legal Notices, your
-    work need not make them do so.
-
-  A compilation of a covered work with other separate and independent
-works, which are not by their nature extensions of the covered work,
-and which are not combined with it such as to form a larger program,
-in or on a volume of a storage or distribution medium, is called an
-"aggregate" if the compilation and its resulting copyright are not
-used to limit the access or legal rights of the compilation's users
-beyond what the individual works permit.  Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
-  6. Conveying Non-Source Forms.
-
-  You may convey a covered work in object code form under the terms
-of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License,
-in one of these ways:
-
-    a) Convey the object code in, or embodied in, a physical product
-    (including a physical distribution medium), accompanied by the
-    Corresponding Source fixed on a durable physical medium
-    customarily used for software interchange.
-
-    b) Convey the object code in, or embodied in, a physical product
-    (including a physical distribution medium), accompanied by a
-    written offer, valid for at least three years and valid for as
-    long as you offer spare parts or customer support for that product
-    model, to give anyone who possesses the object code either (1) a
-    copy of the Corresponding Source for all the software in the
-    product that is covered by this License, on a durable physical
-    medium customarily used for software interchange, for a price no
-    more than your reasonable cost of physically performing this
-    conveying of source, or (2) access to copy the
-    Corresponding Source from a network server at no charge.
-
-    c) Convey individual copies of the object code with a copy of the
-    written offer to provide the Corresponding Source.  This
-    alternative is allowed only occasionally and noncommercially, and
-    only if you received the object code with such an offer, in accord
-    with subsection 6b.
-
-    d) Convey the object code by offering access from a designated
-    place (gratis or for a charge), and offer equivalent access to the
-    Corresponding Source in the same way through the same place at no
-    further charge.  You need not require recipients to copy the
-    Corresponding Source along with the object code.  If the place to
-    copy the object code is a network server, the Corresponding Source
-    may be on a different server (operated by you or a third party)
-    that supports equivalent copying facilities, provided you maintain
-    clear directions next to the object code saying where to find the
-    Corresponding Source.  Regardless of what server hosts the
-    Corresponding Source, you remain obligated to ensure that it is
-    available for as long as needed to satisfy these requirements.
-
-    e) Convey the object code using peer-to-peer transmission, provided
-    you inform other peers where the object code and Corresponding
-    Source of the work are being offered to the general public at no
-    charge under subsection 6d.
-
-  A separable portion of the object code, whose source code is excluded
-from the Corresponding Source as a System Library, need not be
-included in conveying the object code work.
-
-  A "User Product" is either (1) a "consumer product", which means any
-tangible personal property which is normally used for personal, family,
-or household purposes, or (2) anything designed or sold for incorporation
-into a dwelling.  In determining whether a product is a consumer product,
-doubtful cases shall be resolved in favor of coverage.  For a particular
-product received by a particular user, "normally used" refers to a
-typical or common use of that class of product, regardless of the status
-of the particular user or of the way in which the particular user
-actually uses, or expects or is expected to use, the product.  A product
-is a consumer product regardless of whether the product has substantial
-commercial, industrial or non-consumer uses, unless such uses represent
-the only significant mode of use of the product.
-
-  "Installation Information" for a User Product means any methods,
-procedures, authorization keys, or other information required to install
-and execute modified versions of a covered work in that User Product from
-a modified version of its Corresponding Source.  The information must
-suffice to ensure that the continued functioning of the modified object
-code is in no case prevented or interfered with solely because
-modification has been made.
-
-  If you convey an object code work under this section in, or with, or
-specifically for use in, a User Product, and the conveying occurs as
-part of a transaction in which the right of possession and use of the
-User Product is transferred to the recipient in perpetuity or for a
-fixed term (regardless of how the transaction is characterized), the
-Corresponding Source conveyed under this section must be accompanied
-by the Installation Information.  But this requirement does not apply
-if neither you nor any third party retains the ability to install
-modified object code on the User Product (for example, the work has
-been installed in ROM).
-
-  The requirement to provide Installation Information does not include a
-requirement to continue to provide support service, warranty, or updates
-for a work that has been modified or installed by the recipient, or for
-the User Product in which it has been modified or installed.  Access to a
-network may be denied when the modification itself materially and
-adversely affects the operation of the network or violates the rules and
-protocols for communication across the network.
-
-  Corresponding Source conveyed, and Installation Information provided,
-in accord with this section must be in a format that is publicly
-documented (and with an implementation available to the public in
-source code form), and must require no special password or key for
-unpacking, reading or copying.
-
-  7. Additional Terms.
-
-  "Additional permissions" are terms that supplement the terms of this
-License by making exceptions from one or more of its conditions.
-Additional permissions that are applicable to the entire Program shall
-be treated as though they were included in this License, to the extent
-that they are valid under applicable law.  If additional permissions
-apply only to part of the Program, that part may be used separately
-under those permissions, but the entire Program remains governed by
-this License without regard to the additional permissions.
-
-  When you convey a copy of a covered work, you may at your option
-remove any additional permissions from that copy, or from any part of
-it.  (Additional permissions may be written to require their own
-removal in certain cases when you modify the work.)  You may place
-additional permissions on material, added by you to a covered work,
-for which you have or can give appropriate copyright permission.
-
-  Notwithstanding any other provision of this License, for material you
-add to a covered work, you may (if authorized by the copyright holders of
-that material) supplement the terms of this License with terms:
-
-    a) Disclaiming warranty or limiting liability differently from the
-    terms of sections 15 and 16 of this License; or
-
-    b) Requiring preservation of specified reasonable legal notices or
-    author attributions in that material or in the Appropriate Legal
-    Notices displayed by works containing it; or
-
-    c) Prohibiting misrepresentation of the origin of that material, or
-    requiring that modified versions of such material be marked in
-    reasonable ways as different from the original version; or
-
-    d) Limiting the use for publicity purposes of names of licensors or
-    authors of the material; or
-
-    e) Declining to grant rights under trademark law for use of some
-    trade names, trademarks, or service marks; or
-
-    f) Requiring indemnification of licensors and authors of that
-    material by anyone who conveys the material (or modified versions of
-    it) with contractual assumptions of liability to the recipient, for
-    any liability that these contractual assumptions directly impose on
-    those licensors and authors.
-
-  All other non-permissive additional terms are considered "further
-restrictions" within the meaning of section 10.  If the Program as you
-received it, or any part of it, contains a notice stating that it is
-governed by this License along with a term that is a further
-restriction, you may remove that term.  If a license document contains
-a further restriction but permits relicensing or conveying under this
-License, you may add to a covered work material governed by the terms
-of that license document, provided that the further restriction does
-not survive such relicensing or conveying.
-
-  If you add terms to a covered work in accord with this section, you
-must place, in the relevant source files, a statement of the
-additional terms that apply to those files, or a notice indicating
-where to find the applicable terms.
-
-  Additional terms, permissive or non-permissive, may be stated in the
-form of a separately written license, or stated as exceptions;
-the above requirements apply either way.
-
-  8. Termination.
-
-  You may not propagate or modify a covered work except as expressly
-provided under this License.  Any attempt otherwise to propagate or
-modify it is void, and will automatically terminate your rights under
-this License (including any patent licenses granted under the third
-paragraph of section 11).
-
-  However, if you cease all violation of this License, then your
-license from a particular copyright holder is reinstated (a)
-provisionally, unless and until the copyright holder explicitly and
-finally terminates your license, and (b) permanently, if the copyright
-holder fails to notify you of the violation by some reasonable means
-prior to 60 days after the cessation.
-
-  Moreover, your license from a particular copyright holder is
-reinstated permanently if the copyright holder notifies you of the
-violation by some reasonable means, this is the first time you have
-received notice of violation of this License (for any work) from that
-copyright holder, and you cure the violation prior to 30 days after
-your receipt of the notice.
-
-  Termination of your rights under this section does not terminate the
-licenses of parties who have received copies or rights from you under
-this License.  If your rights have been terminated and not permanently
-reinstated, you do not qualify to receive new licenses for the same
-material under section 10.
-
-  9. Acceptance Not Required for Having Copies.
-
-  You are not required to accept this License in order to receive or
-run a copy of the Program.  Ancillary propagation of a covered work
-occurring solely as a consequence of using peer-to-peer transmission
-to receive a copy likewise does not require acceptance.  However,
-nothing other than this License grants you permission to propagate or
-modify any covered work.  These actions infringe copyright if you do
-not accept this License.  Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
-  10. Automatic Licensing of Downstream Recipients.
-
-  Each time you convey a covered work, the recipient automatically
-receives a license from the original licensors, to run, modify and
-propagate that work, subject to this License.  You are not responsible
-for enforcing compliance by third parties with this License.
-
-  An "entity transaction" is a transaction transferring control of an
-organization, or substantially all assets of one, or subdividing an
-organization, or merging organizations.  If propagation of a covered
-work results from an entity transaction, each party to that
-transaction who receives a copy of the work also receives whatever
-licenses to the work the party's predecessor in interest had or could
-give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if
-the predecessor has it or can get it with reasonable efforts.
-
-  You may not impose any further restrictions on the exercise of the
-rights granted or affirmed under this License.  For example, you may
-not impose a license fee, royalty, or other charge for exercise of
-rights granted under this License, and you may not initiate litigation
-(including a cross-claim or counterclaim in a lawsuit) alleging that
-any patent claim is infringed by making, using, selling, offering for
-sale, or importing the Program or any portion of it.
-
-  11. Patents.
-
-  A "contributor" is a copyright holder who authorizes use under this
-License of the Program or a work on which the Program is based.  The
-work thus licensed is called the contributor's "contributor version".
-
-  A contributor's "essential patent claims" are all patent claims
-owned or controlled by the contributor, whether already acquired or
-hereafter acquired, that would be infringed by some manner, permitted
-by this License, of making, using, or selling its contributor version,
-but do not include claims that would be infringed only as a
-consequence of further modification of the contributor version.  For
-purposes of this definition, "control" includes the right to grant
-patent sublicenses in a manner consistent with the requirements of
-this License.
-
-  Each contributor grants you a non-exclusive, worldwide, royalty-free
-patent license under the contributor's essential patent claims, to
-make, use, sell, offer for sale, import and otherwise run, modify and
-propagate the contents of its contributor version.
-
-  In the following three paragraphs, a "patent license" is any express
-agreement or commitment, however denominated, not to enforce a patent
-(such as an express permission to practice a patent or covenant not to
-sue for patent infringement).  To "grant" such a patent license to a
-party means to make such an agreement or commitment not to enforce a
-patent against the party.
-
-  If you convey a covered work, knowingly relying on a patent license,
-and the Corresponding Source of the work is not available for anyone
-to copy, free of charge and under the terms of this License, through a
-publicly available network server or other readily accessible means,
-then you must either (1) cause the Corresponding Source to be so
-available, or (2) arrange to deprive yourself of the benefit of the
-patent license for this particular work, or (3) arrange, in a manner
-consistent with the requirements of this License, to extend the patent
-license to downstream recipients.  "Knowingly relying" means you have
-actual knowledge that, but for the patent license, your conveying the
-covered work in a country, or your recipient's use of the covered work
-in a country, would infringe one or more identifiable patents in that
-country that you have reason to believe are valid.
-
-  If, pursuant to or in connection with a single transaction or
-arrangement, you convey, or propagate by procuring conveyance of, a
-covered work, and grant a patent license to some of the parties
-receiving the covered work authorizing them to use, propagate, modify
-or convey a specific copy of the covered work, then the patent license
-you grant is automatically extended to all recipients of the covered
-work and works based on it.
-
-  A patent license is "discriminatory" if it does not include within
-the scope of its coverage, prohibits the exercise of, or is
-conditioned on the non-exercise of one or more of the rights that are
-specifically granted under this License.  You may not convey a covered
-work if you are a party to an arrangement with a third party that is
-in the business of distributing software, under which you make payment
-to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the
-parties who would receive the covered work from you, a discriminatory
-patent license (a) in connection with copies of the covered work
-conveyed by you (or copies made from those copies), or (b) primarily
-for and in connection with specific products or compilations that
-contain the covered work, unless you entered into that arrangement,
-or that patent license was granted, prior to 28 March 2007.
-
-  Nothing in this License shall be construed as excluding or limiting
-any implied license or other defenses to infringement that may
-otherwise be available to you under applicable patent law.
-
-  12. No Surrender of Others' Freedom.
-
-  If conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License.  If you cannot convey a
-covered work so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you may
-not convey it at all.  For example, if you agree to terms that obligate you
-to collect a royalty for further conveying from those to whom you convey
-the Program, the only way you could satisfy both those terms and this
-License would be to refrain entirely from conveying the Program.
-
-  13. Use with the GNU Affero General Public License.
-
-  Notwithstanding any other provision of this License, you have
-permission to link or combine any covered work with a work licensed
-under version 3 of the GNU Affero General Public License into a single
-combined work, and to convey the resulting work.  The terms of this
-License will continue to apply to the part which is the covered work,
-but the special requirements of the GNU Affero General Public License,
-section 13, concerning interaction through a network will apply to the
-combination as such.
-
-  14. Revised Versions of this License.
-
-  The Free Software Foundation may publish revised and/or new versions of
-the GNU General Public License from time to time.  Such new versions will
-be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
-  Each version is given a distinguishing version number.  If the
-Program specifies that a certain numbered version of the GNU General
-Public License "or any later version" applies to it, you have the
-option of following the terms and conditions either of that numbered
-version or of any later version published by the Free Software
-Foundation.  If the Program does not specify a version number of the
-GNU General Public License, you may choose any version ever published
-by the Free Software Foundation.
-
-  If the Program specifies that a proxy can decide which future
-versions of the GNU General Public License can be used, that proxy's
-public statement of acceptance of a version permanently authorizes you
-to choose that version for the Program.
-
-  Later license versions may give you additional or different
-permissions.  However, no additional obligations are imposed on any
-author or copyright holder as a result of your choosing to follow a
-later version.
-
-  15. Disclaimer of Warranty.
-
-  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
-APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
-HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
-OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
-IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
-ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
-  16. Limitation of Liability.
-
-  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
-THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
-GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
-USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
-DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
-PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
-EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGES.
-
-  17. Interpretation of Sections 15 and 16.
-
-  If the disclaimer of warranty and limitation of liability provided
-above cannot be given local legal effect according to their terms,
-reviewing courts shall apply local law that most closely approximates
-an absolute waiver of all civil liability in connection with the
-Program, unless a warranty or assumption of liability accompanies a
-copy of the Program in return for a fee.
diff --git a/README.md b/README.md
index 35c04b031c760a99ab2068823cf0d97a89129622..02262fd5cad0080554a16c15d59c9d418ca7216c 100644
--- a/README.md
+++ b/README.md
@@ -58,12 +58,12 @@ dataset and the routines to inspect, quality control and/or process them.
 The content of such a configuration could look like this:
 
 ```
-varname    ; test                                
+varname    ; test
 #----------;------------------------------------
-SM2        ; harm_shift2Grid(freq="15Min")       
-SM2        ; flagMissing(nodata=NAN)             
-'SM(1|2)+' ; flagRange(min=10, max=60)           
-SM2        ; spikes_flagMad(window="30d", z=3.5)
+SM2        ; shiftToFreq(freq="15Min")
+SM2        ; flagMissing()
+'SM(1|2)+' ; flagRange(min=10, max=60)
+SM2        ; flagMad(window="30d", z=3.5)
 ```
 
 As soon as the basic inputs, a dataset and the configuration file are
@@ -81,15 +81,16 @@ The following snippet implements the same configuration given above through
 the Python-API:
 
 ```python
-from saqc import SaQC, SimpleFlagger
+import numpy as np
+from saqc import SaQC
 
-saqc = (SaQC(SimpleFlagger(), data)
-        .harm_shift2Grid("SM2", freq="15Min")
-        .flagMissing("SM2", nodata=np.nan)
+saqc = (SaQC(data)
+        .shiftToFreq("SM2", freq="15Min")
+        .flagMissing("SM2")
         .flagRange("SM(1|2)+", regex=True, min=10, max=60)
-        .spikes_flagMad("SM2", window="30d", z=3.5))
-        
-data, flagger = saqc.getResult()
+        .flagMad("SM2", window="30d", z=3.5))
+
+data, flags = saqc.getResult()
 ```
 
 ## Installation
diff --git a/dios/.gitignore b/dios/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..1b0574052da476dbbda5f136edecf7199a34628f
--- /dev/null
+++ b/dios/.gitignore
@@ -0,0 +1,2 @@
+
+__pycache__/
diff --git a/dios/Readme.md b/dios/Readme.md
new file mode 100644
index 0000000000000000000000000000000000000000..2a399caba9826c8636ecc8042bdf7b16802fc3bf
--- /dev/null
+++ b/dios/Readme.md
@@ -0,0 +1,102 @@
+DictOfSeries 
+============
+
+DictOfSeries is a pandas.Series of pandas.Series objects that aims to behave as similarly as possible to pandas.DataFrame.
+
+
+Nomenclature
+------------
+- series/ser: instance of pandas.Series
+- dios: instance of dios.DictOfSeries
+- df: instance of pandas.DataFrame
+- dios-like: a *dios* or a *df*
+- alignable object: a *dios*, *df* or a *series*
+
+
+Features
+--------
+* every *column* has its own index
+* uses much less memory than a misaligned pandas.DataFrame
+* behaves quite like a pandas.DataFrame
+* additional align locator (`.aloc[]`)
+
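+A minimal, illustrative sketch of what the per-column indices mean in practice
+(the column names and values below are made up; outputs are omitted):
+
+```
+import pandas as pd
+from dios import DictOfSeries
+
+# two columns with entirely different indices
+di = DictOfSeries({
+    "a": pd.Series([1, 2, 3], index=[0, 1, 2]),
+    "b": pd.Series([10.0, 20.0], index=[100, 200]),
+})
+
+di["a"].index   # each column keeps its own index: [0, 1, 2]
+di["b"].index   # ... here: [100, 200]
+di[di > 1]      # boolean dios indexing: un-selected rows are dropped per column
+```
+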
+Install
+-------
+
+todo: PyPi
+
+``` 
+import dios
+
+# Have fun :)
+```
+
+Documentation
+-------------
+
+The main documentation is on ReadTheDocs at:
+
+* [dios.rtfd.io](https://dios.rtfd.io)
+
+but some docs are also available locally:
+* [Indexing](/docs/doc_indexing.md)
+* [Cookbook](/docs/doc_cookbook.md)
+* [Itype](/docs/doc_itype.md)
+
+TL;DR
+-----
+**get it**
+```
+>>> from dios import DictOfSeries
+```
+**empty**
+```
+>>> DictOfSeries()
+Empty DictOfSeries
+Columns: []
+
+>>> DictOfSeries(columns=['x', 'y'])
+Empty DictOfSeries
+Columns: ['x', 'y']
+
+>>> DictOfSeries(columns=['x', 'y'], index=[3,4,5])
+     x |      y | 
+====== | ====== | 
+3  NaN | 3  NaN | 
+4  NaN | 4  NaN | 
+5  NaN | 5  NaN | 
+```
+**with data**
+```
+>>> DictOfSeries([range(4), range(2), range(3)])
+   0 |    1 |    2 | 
+==== | ==== | ==== | 
+0  0 | 0  0 | 0  0 | 
+1  1 | 1  1 | 1  1 | 
+2  2 |      | 2  2 | 
+3  3 |      |      | 
+
+>>> import numpy as np
+>>> DictOfSeries(np.random.random([2,4]))
+          0 |           1 | 
+=========== | =========== | 
+0  0.112020 | 0  0.509881 | 
+1  0.108070 | 1  0.285779 | 
+2  0.851453 | 2  0.805933 | 
+3  0.138352 | 3  0.812339 | 
+
+>>> DictOfSeries(np.random.random([2,4]), columns=['a','b'], index=[11,12,13,14])
+           a |            b | 
+============ | ============ | 
+11  0.394304 | 11  0.356206 | 
+12  0.943689 | 12  0.735356 | 
+13  0.791820 | 13  0.066947 | 
+14  0.759802 | 14  0.496321 | 
+
+>>> DictOfSeries(dict(today=['spam']*3, tomorrow=['spam']*2))
+  today |   tomorrow | 
+======= | ========== | 
+0  spam | 0     spam | 
+1  spam | 1     spam | 
+2  spam |            | 
+```
+
diff --git a/dios/__init__.py b/dios/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..944c1c95ec6997eba2e4b6b2e2b6ffddc75bd886
--- /dev/null
+++ b/dios/__init__.py
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+
+from .dios import *
diff --git a/dios/dios/__init__.py b/dios/dios/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..f5317eb22de1212b9fd6922a90ad67ec1b1387f6
--- /dev/null
+++ b/dios/dios/__init__.py
@@ -0,0 +1,27 @@
+from .lib import *
+from .dios import *
+
+__all__ = [
+    "DictOfSeries",
+    "to_dios",
+    "pprint_dios",
+    "IntItype",
+    "FloatItype",
+    "NumItype",
+    "DtItype",
+    "ObjItype",
+    "ItypeWarning",
+    "ItypeCastWarning",
+    "ItypeCastError",
+    "is_itype",
+    "is_itype_subtype",
+    "is_itype_like",
+    "get_itype",
+    "cast_to_itype",
+    "CastPolicy",
+    "Opts",
+    "OptsFields",
+    "OptsFields",
+    "dios_options",
+    "example_DictOfSeries",
+]
diff --git a/dios/dios/base.py b/dios/dios/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..11392b4869beadbe04c0b20703140e7a42bbc556
--- /dev/null
+++ b/dios/dios/base.py
@@ -0,0 +1,696 @@
+#!/usr/bin/env python
+
+from . import operators as ops
+from . import pandas_bridge as pdextra
+from . import lib
+
+from .lib import (
+    _CAST_POLICIES,
+    _throw_MixedItype_err_or_warn,
+    _find_least_common_itype,
+)
+
+from abc import abstractmethod
+import pandas as pd
+import operator as op
+import functools as ftools
+
+__author__ = "Bert Palm"
+__email__ = "bert.palm@ufz.de"
+__copyright__ = "Copyright 2018, Helmholtz-Zentrum für Umweltforschung GmbH - UFZ"
+
+
+class _DiosBase:
+    @property
+    @abstractmethod
+    def _constructor(self):
+        pass
+
+    def __init__(
+        self,
+        data=None,
+        columns=None,
+        index=None,
+        itype=None,
+        cast_policy="save",
+        fastpath=False,
+    ):
+
+        # set via property
+        self.cast_policy = cast_policy
+
+        # we are called internally
+        if fastpath:
+            self._itype = itype or lib.ObjItype
+            if data is not None:
+                self._data = data
+            else:
+                # it is significantly faster to provide an index and fill it
+                # than to successively build the index by adding data
+                self._data = pd.Series(dtype="O", index=columns)
+
+        else:
+
+            if index is not None and not isinstance(index, pd.Index):
+                index = pd.Index(index)
+
+            # itype=None means: infer the itype from the data. So we first set the highest
+            # possible itype, then insert the data, then infer the best-fitting itype.
+            if itype is None and index is None:
+                self._itype = lib.ObjItype
+            else:
+                if index is not None:
+                    self._itype = lib.get_itype(index)
+                if itype is not None:
+                    self._itype = lib.get_itype(itype)
+
+            cols = pd.Index([] if columns is None else columns)
+            if not cols.is_unique:
+                raise ValueError("columns must be unique")
+            self._data = pd.Series(dtype="O", index=cols)
+
+            if data is not None:
+                self._init_insert_data(data, columns, index)
+
+        # self._data may still contain nans at positions where
+        # no data was present, but a column name was given
+        if self._data.hasnans:
+            e = pd.Series(dtype="O", index=index)
+            for c in self.columns[self._data.isna()]:
+                self._insert(c, e.copy())
+
+        self._data.index.name = "columns"
+
+        # we try to infer the itype, but if we still have
+        # no data, we set the itype lazily, i.e. with
+        # the first non-empty _insert()
+        if itype is None:
+            if self.empty:
+                self._itype = "INFER"
+            else:
+                self._itype = _find_least_common_itype(self._data)
+                if not self._itype.unique:
+                    _throw_MixedItype_err_or_warn(self.itype)
+
+    def _init_insert_data(self, data, columns, index):
+        """Insert items of a iterable in self"""
+
+        if pdextra.is_iterator(data):
+            data = list(data)
+
+        if _is_dios_like(data) or isinstance(data, dict):
+            if columns is None:
+                pass  # data is dict-like
+            else:
+                data = {k: data[k] for k in data if k in columns}
+
+        elif isinstance(data, pd.Series):
+            name = data.name or 0
+            if columns is not None and len(columns) > 0:
+                name = self.columns[0]
+            data = {name: data}
+
+        elif pdextra.is_nested_list_like(data):
+            if columns is None:
+                data = {i: d for i, d in enumerate(data)}
+            elif len(data) == len(columns):
+                data = dict(zip(self.columns, data))
+            else:
+                raise ValueError(
+                    f"{len(columns)} columns passed, data implies {len(data)} columns"
+                )
+
+        elif pdextra.is_list_like(data):
+            name = 0 if columns is None or len(columns) < 1 else self.columns[0]
+            data = {name: data}
+
+        else:
+            raise TypeError("data type not understood")
+
+        for k in data:
+            self._insert(k, pd.Series(data[k], index=index))
+
+    # ----------------------------------------------------------------------
+    # checks
+
+    def _is_valid_columns_index(self, obj):
+        if isinstance(obj, pd.Series) and obj.dtype == "O":
+            return True
+        return False
+
+    # ----------------------------------------------------------------------
+    # Indexing Methods
+
+    def _insert(self, col, val):
+        """Insert a fresh new value as pd.Series into self"""
+        val = list(val) if pdextra.is_iterator(val) else val
+
+        if _is_dios_like(val):
+            val = val.squeeze()
+            if not isinstance(val, pd.Series):
+                raise ValueError(f"Cannot insert frame-like with more than one column")
+
+        elif val is None:
+            val = pd.Series()
+
+        elif not isinstance(val, pd.Series):
+            raise TypeError(
+                f"Only data of type pandas.Series can be inserted, passed was {type(val)}"
+            )
+
+        # set the itype lazily, i.e. when the first non-empty
+        # column is inserted
+        if self._itype == "INFER":
+            if not val.empty:
+                self._itype = lib.get_itype(val.index)
+                # cast all pre-inserted empty series
+                self._cast_all(self._itype, self.cast_policy)
+                if not self._itype.unique:
+                    _throw_MixedItype_err_or_warn(self._itype)
+        else:
+            val = lib.cast_to_itype(val, self.itype, policy=self.cast_policy)
+
+        val.name = col
+        self._data.at[col] = val.copy(deep=True)
+
+    def __getitem__(self, key):
+        """dios[key] -> dios/series"""
+        # scalar        -> select a column
+        # slice         -> select rows (on all columns)
+        # bool dios     -> select columns, select rows
+        # mask          -> select rows (on all columns)
+        # list-like     -> select columns
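+        # illustrative examples:
+        #   di["a"]        -> pd.Series (the column "a")
+        #   di[["a", "b"]] -> DictOfSeries with the columns "a" and "b"
+        #   di[di > 0]     -> DictOfSeries; un-selected rows are dropped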
+
+        if pdextra.is_scalar(key):
+            # NOTE: we shallow-copy the index to prevent changes on it
+            # from mirroring back to us and messing up the itype.
+            s = self._data.at[key]
+            s.index = s.index.copy()
+            return s
+
+        if isinstance(key, slice):
+            return self._slice(key)
+
+        if _is_dios_like(key):
+            return self._getitem_bool_dios(key)
+
+        if pdextra.is_bool_indexer(key):
+            return self._getitem_bool_listlike(key)
+
+        # select columns and let pandas handle it
+        data = self._data.loc[key]
+        if self._is_valid_columns_index(data):
+            return self._constructor(
+                data=data, itype=self.itype, cast_policy=self.cast_policy, fastpath=True
+            )
+
+        raise TypeError(f"cannot index columns with this type, {type(key)}")
+
+    def _slice(self, key):
+        """slices self, return copy"""
+        if key == slice(None):
+            return self.copy()
+
+        new = self.copy_empty(columns=True)
+        for c, series in self.items():
+            new._data.at[c] = series[key]
+        return new
+
+    def _getitem_bool_dios(self, key):
+        """Select items by a boolean dios-like drop un-selected indices."""
+
+        if not _is_bool_dios_like(key):
+            raise ValueError("Must pass DictOfSeries with boolean values only")
+
+        new = self.copy_empty(columns=True)
+        for c, series in self.items():
+            if c in key:
+                val = key[c].reindex(index=series.index, fill_value=False)
+                new._data.at[c] = series.loc[val]
+        return new
+
+    def _getitem_bool_listlike(self, key):
+        new = self.copy_empty(columns=True)
+        for c, series in self.items():
+            new._data.at[c] = series.loc[key]
+        return new
+
+    def __setitem__(self, key, value):
+        """dios[key] = value"""
+        key = list(key) if pdextra.is_iterator(key) else key
+        if isinstance(key, tuple):
+            raise KeyError(f"{key}. tuples are not allowed")
+
+        elif pdextra.is_hashable(key):
+            if isinstance(value, pd.Series) or key not in self.columns:
+                self._insert(key, value)
+            elif _is_dios_like(value) or pdextra.is_nested_list_like(value):
+                raise ValueError("Incompatible indexer with multi-dimensional value")
+            else:
+                self._data.at[key][:] = value
+
+        else:
+            data = self.__getitem__(key)
+            assert isinstance(
+                data, self.__class__
+            ), f"getitem returned data of type {type(data)}"
+
+            # special cases
+            if _is_dios_like(value):
+                self._setitem_dios(data, value)
+            # NOTE: pd.Series also considered list-like
+            elif pdextra.is_list_like(value):
+                self._setitem_listlike(data, value)
+
+            # default case
+            else:
+                for c, series in data.items():
+                    series[:] = value
+                    self._data.at[c][series.index] = series
+
+    def _setitem_listlike(self, data, value):
+
+        value = value.values if isinstance(value, pd.Series) else value
+
+        if len(value) != len(data.columns):
+            raise ValueError(
+                f"array-like value of length {len(value)} could "
+                f"not be broadcast to indexing result of shape "
+                f"(.., {len(data.columns)})"
+            )
+
+        for i, (c, series) in enumerate(data.items()):
+            series[:] = value[i]
+            self._data.at[c][series.index] = series
+
+    def _setitem_dios(self, data, value):
+        """Write values from a dios-like to self.
+
+        Columns are not aligned or justified, but indices are.
+        If value has missing indices, NaNs are inserted at those
+        locations, just like `series.loc[:] = val` or `df[:] = val` do.
+
+        E.g.
+         di[::2] = di[::3]   ->   di[::2]
+
+            x |        x |            x |
+        ===== |     ==== |       ====== |
+        0   x |     0  z |       0    z |
+        2   x |  =  3  z |   ->  2  NaN |
+        4   x |     6  z |       4  NaN |
+        6   x |                  6    z |
+
+        Parameters
+        ----------
+        data : dios
+            A possibly trimmed version of self
+        value : dios, pd.DataFrame
+            The value to set, with the same column dimension as data
+        """
+
+        if len(data) != len(value.columns):
+            raise ValueError(
+                f"shape mismatch: values array of shape "
+                f"(.., {len(value.columns)}) could not "
+                f"be broadcast to indexing result of "
+                f"shape (.., {len(data.columns)})"
+            )
+
+        for i, (c, series) in enumerate(data.items()):
+            # .loc cannot handle empty series,
+            # like `emptySeries.loc[:] = [1,2]`
+            if series.empty:
+                continue
+            val = value[value.columns[i]]
+            series.loc[:] = val
+            self._data.at[c].loc[series.index] = series
+
+    def __delitem__(self, key):
+        del self._data[key]
+
+    # ------------------------------------------------------------------------------
+    # Base properties and basic dunder magic
+
+    @property
+    def columns(self):
+        """The column labels of the DictOfSeries"""
+        return self._data.index
+
+    @columns.setter
+    def columns(self, cols):
+        index = pd.Index(cols)
+        if not index.is_unique:
+            raise ValueError("columns index must have unique values")
+        self._data.index = index
+        # rename all columns
+        for i, s in enumerate(self._data):
+            s.name = index[i]
+
+    @property
+    def itype(self):
+        """The ``Itype`` of the DictOfSeries.
+
+        See :ref:`Itype documentation <doc_itype:Itype>` for more info.
+        """
+        if self._itype == "INFER":
+            return None
+        return self._itype
+
+    @itype.setter
+    def itype(self, itype):
+        itype = lib.get_itype(itype)
+        self._cast_all(itype, policy=self.cast_policy)
+        self._itype = itype
+
+    @property
+    def cast_policy(self):
+        """The policy to use for casting new columns if its initial itype does not fit.
+
+        See :ref:`Itype documentation <doc_itype:Itype>` for more info.
+        """
+        return self._policy
+
+    @cast_policy.setter
+    def cast_policy(self, policy):
+        if policy not in _CAST_POLICIES:
+            raise ValueError(f"policy must be one of {_CAST_POLICIES}")
+        self._policy = policy
+
+    def _cast_all(self, itype, policy):
+        c = "?"
+        new = self.copy_empty()
+        try:
+            for c, series in self.items():
+                new._data.at[c] = lib.cast_to_itype(series, itype, policy=policy)
+        except Exception as e:
+            raise type(e)(f"Column {c}: " + str(e)) from e
+        # write the cast data back, otherwise the cast would have no effect
+        self._data = new._data
+
+    def __len__(self):
+        return len(self.columns)
+
+    @property
+    def empty(self):
+        """Indicator whether DictOfSeries is empty.
+
+        Returns
+        -------
+        bool :
+            If DictOfSeries is empty, return True, if not return False.
+
+        See Also
+        --------
+        DictOfSeries.dropempty : drop empty columns
+        DictOfSeries.dropna : drop NAN's from a DictOfSeries
+        pandas.Series.dropna : drop NAN's from a Series
+
+        Notes
+        -----
+            If DictOfSeries contains only NaNs, it is still not considered empty. See the example below.
+
+        Examples
+        --------
+        An example of an actual empty DictOfSeries.
+
+        >>> di_empty = DictOfSeries(columns=['A'])
+        >>> di_empty
+        Empty DictOfSeries
+        Columns: ['A']
+        >>> di_empty.empty
+        True
+
+        If we only have NaNs in our DictOfSeries, it is not considered empty!
+        We will need to drop the NaNs to make the DictOfSeries empty:
+
+        >>> di = DictOfSeries({'A': [np.nan]})
+        >>> di
+            A |
+        ===== |
+        0 NaN |
+        >>> di.empty
+        False
+        >>> di.dropna().empty
+        True
+        """
+        return len(self) == 0 or all(s.empty for s in self._data)
+
+    def __iter__(self):
+        yield from self.columns
+
+    def __reversed__(self):
+        yield from reversed(self.columns)
+
+    def __contains__(self, item):
+        return item in self.columns
+
+    def items(self):
+        yield from self._data.items()
+
+    # ----------------------------------------------------------------------
+    # if copy.copy() is copy.copy(): return copy.copy().copy()
+
+    def __deepcopy__(self, memo=None):
+        return self.copy(deep=True)
+
+    def __copy__(self):
+        return self.copy(deep=True)
+
+    def copy(self, deep=True):
+        """Make a copy of this DictOfSeries' indices and data.
+
+        Parameters
+        ----------
+        deep : bool, default True
+            Make a deep copy, including a copy of the data and the indices.
+            With deep=False neither the indices nor the data are copied.
+
+        Returns
+        -------
+        copy : DictOfSeries
+
+        See Also
+        --------
+        pandas.DataFrame.copy
+        """
+        data = self._data.copy()
+        if deep:
+            for c, series in self.items():
+                data.at[c] = series.copy()
+
+        return self._constructor(
+            data=data, itype=self.itype, cast_policy=self.cast_policy, fastpath=True
+        )
+
+    def copy_empty(self, columns=True):
+        """
+        Return a new DictOfSeries object with the same properties as the original.
+
+        Parameters
+        ----------
+        columns : bool, default True
+            If ``True``, the copy will have the same, but empty, columns as the original.
+
+        Returns
+        -------
+        DictOfSeries: empty copy
+
+        Examples
+        --------
+
+        >>> di = DictOfSeries({'A': range(2), 'B': range(3)})
+        >>> di
+           A |    B |
+        ==== | ==== |
+        0  0 | 0  0 |
+        1  1 | 1  1 |
+             | 2  2 |
+
+        >>> empty = di.copy_empty()
+        >>> empty
+        Empty DictOfSeries
+        Columns: ['A', 'B']
+
+        The properties are the same, e.g.
+
+        >>> empty.itype == di.itype
+        True
+        >>> empty.cast_policy == di.cast_policy
+        True
+        >>> empty.dtypes == di.dtypes
+        columns
+        A    True
+        B    True
+        dtype: bool
+        """
+        data = None
+        if columns is True:  # the explicit `is True` check is intentional
+            data = pd.Series(dtype="O", index=self.columns)
+            for c, series in self.items():
+                # OPTIM: the following code is about 2x faster than
+                # data.at[c] = pd.Series(dtype=self._data.at[c].dtype)
+                data.at[c] = series.reindex([])
+
+        return self._constructor(
+            data=data, itype=self.itype, cast_policy=self.cast_policy, fastpath=True
+        )
+
+    # ------------------------------------------------------------------------------
+    # Operators
+
+    def _op1(self, op):
+        new = self.copy_empty(columns=True)
+        try:
+            for k, series in self.items():
+                new[k] = op(series)
+        except Exception as e:
+            raise type(e)(f"'{ops.OP_MAP[op]} dios' failed: " + str(e)) from e
+        return new
+
+    def _op2(self, op, other, align=True, inplace=False):
+        def raiseif(kself, kother, s):
+            if kself != kother:
+                raise ValueError(
+                    f"{s} does not match, {s} left: {kself}, {s} right: {kother}"
+                )
+
+        def doalign(left, right):
+            return left.align(right, join="inner") if align else (left, right)
+
+        def get_operants():
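+            # yield one (key, left, right) triple per column, choosing the
+            # right-hand operand according to the type of `other`
+            # (dios-like, series, dict-like, nested list, list-like or scalar)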
+            if _is_dios_like(other):
+                raiseif(list(self), list(other), "keys")
+                for k, series in self.items():
+                    yield (k, *doalign(series, other[k]))
+            elif isinstance(other, pd.Series):
+                for k, series in self.items():
+                    yield (k, *doalign(series, other))
+            elif pdextra.is_dict_like(other):
+                raiseif(sorted(self), sorted(other), "keys")
+                for k, series in self.items():
+                    yield (k, series, other[k])
+            elif pdextra.is_nested_list_like(other):
+                raiseif(len(self), len(other), "length")
+                for i, (k, series) in enumerate(self.items()):
+                    yield (k, series, other[i])
+            elif pdextra.is_scalar(other) or pdextra.is_list_like(other):
+                for k, series in self.items():
+                    yield (k, series, other)
+            else:
+                raise NotImplementedError
+
+        new = self.copy_empty(columns=True)
+        try:
+            for k, ser, oth in get_operants():
+                new[k] = op(ser, oth)
+        except Exception as e:
+            raise type(e)(f"'dios {ops.OP_MAP[op]} other' failed: " + str(e)) from e
+
+        if inplace:
+            self._data = new._data
+            return None
+
+        return new
+
+    # unary
+    __neg__ = ftools.partialmethod(_op1, op.neg)
+    __abs__ = ftools.partialmethod(_op1, op.abs)
+    __invert__ = ftools.partialmethod(_op1, op.inv)
+    # comparison
+    __eq__ = ftools.partialmethod(_op2, op.eq, align=False)
+    __ne__ = ftools.partialmethod(_op2, op.ne, align=False)
+    __le__ = ftools.partialmethod(_op2, op.le, align=False)
+    __ge__ = ftools.partialmethod(_op2, op.ge, align=False)
+    __lt__ = ftools.partialmethod(_op2, op.lt, align=False)
+    __gt__ = ftools.partialmethod(_op2, op.gt, align=False)
+    # arithmetic
+    __add__ = ftools.partialmethod(_op2, op.add)
+    __sub__ = ftools.partialmethod(_op2, op.sub)
+    __mul__ = ftools.partialmethod(_op2, op.mul)
+    __mod__ = ftools.partialmethod(_op2, op.mod)
+    __truediv__ = ftools.partialmethod(_op2, op.truediv)
+    __floordiv__ = ftools.partialmethod(_op2, op.floordiv)
+    __pow__ = ftools.partialmethod(_op2, op.pow)
+    __iadd__ = ftools.partialmethod(_op2, op.add, inplace=True)
+    __isub__ = ftools.partialmethod(_op2, op.sub, inplace=True)
+    __imul__ = ftools.partialmethod(_op2, op.mul, inplace=True)
+    __imod__ = ftools.partialmethod(_op2, op.mod, inplace=True)
+    __itruediv__ = ftools.partialmethod(_op2, op.truediv, inplace=True)
+    __ifloordiv__ = ftools.partialmethod(_op2, op.floordiv, inplace=True)
+    __ipow__ = ftools.partialmethod(_op2, op.pow, inplace=True)
+    # bool
+    __and__ = ftools.partialmethod(_op2, op.and_)
+    __or__ = ftools.partialmethod(_op2, op.or_)
+    __xor__ = ftools.partialmethod(_op2, op.xor)
+    __iand__ = ftools.partialmethod(_op2, op.and_, inplace=True)
+    __ior__ = ftools.partialmethod(_op2, op.or_, inplace=True)
+    __ixor__ = ftools.partialmethod(_op2, op.xor, inplace=True)
+
+    # ------------------------------------------------------------------------------
+    # Indexer
+
+    @property
+    def loc(self):
+        """Access a group of rows and columns by label(s) or a boolean array.
+
+        See :ref:`indexing docs <doc_indexing:Pandas-like indexing>`
+        """
+        return _LocIndexer(self)
+
+    @property
+    def iloc(self):
+        """Purely integer-location based indexing for selection by position.
+
+        See :ref:`indexing docs <doc_indexing:Pandas-like indexing>`
+        """
+        return _iLocIndexer(self)
+
+    @property
+    def aloc(self):
+        """Access a group of rows and columns by label(s) or a boolean array with automatic alignment of indexers.
+
+        See :ref:`indexing docs <doc_indexing:Special indexer .aloc>`
+        """
+        return _aLocIndexer(self)
+
+    @property
+    def at(self):
+        """Access a single value for a row/column label pair.
+
+        See :ref:`indexing docs <doc_indexing:Pandas-like indexing>`
+        """
+        return _AtIndexer(self)
+
+    @property
+    def iat(self):
+        """Access a single value for a row/column pair by integer position.
+
+        See :ref:`indexing docs <doc_indexing:Pandas-like indexing>`
+        """
+        return _iAtIndexer(self)
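+
+    # Usage sketch (illustrative only): given a hypothetical
+    # di = DictOfSeries({"a": pd.Series([1, 2, 3])}),
+    #   di.loc[0:1, "a"]   # label-based, like pandas -> values at row labels 0 and 1
+    #   di.iloc[0, 0]      # position-based -> first value of the first column
+    #   di.aloc[di > 1]    # boolean dios indexer, aligned per column -> only entries > 1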
+
+
+def _is_dios_like(obj) -> bool:
+    # must have columns
+    # columns is some kind of pd.Index
+    # iter will iter through columns
+    # a `in` obj check if obj is in columns
+    # obj[key] will give a pd.Series
+    # obj.squeeze() give pd.Series if len(obj) == 1
+    return isinstance(obj, (_DiosBase, pd.DataFrame))
+
+
+def _is_bool_series(obj) -> bool:
+    return isinstance(obj, pd.Series) and obj.dtype == bool
+
+
+def _is_bool_dios_like(obj) -> bool:
+    if not _is_dios_like(obj):
+        return False
+    dtypes = obj.dtypes
+    if (dtypes == bool).all():
+        return True
+    if (dtypes == "O").any():
+        return obj.apply(pdextra.is_bool_indexer).all()
+    return False
+
+
+# keep this here to prevent cyclic import
+from .indexer import _aLocIndexer, _iLocIndexer, _LocIndexer, _iAtIndexer, _AtIndexer
diff --git a/dios/dios/dios.py b/dios/dios/dios.py
new file mode 100644
index 0000000000000000000000000000000000000000..2f908e8f280c2550056829f7283927f4d021cd70
--- /dev/null
+++ b/dios/dios/dios.py
@@ -0,0 +1,1299 @@
+from .base import _DiosBase, _is_dios_like
+from .lib import Opts, OptsFields, dios_options
+from .lib import _find_least_common_itype
+from . import pandas_bridge as pdextra
+
+import functools as ftools
+import pandas as pd
+import numpy as np
+
+
+class DictOfSeries(_DiosBase):
+    """A data frame where every column has its own index.
+
+    DictOfSeries is a collection of pd.Series that aims to be as similar to pd.DataFrame as
+    possible. The advantage over pd.DataFrame is that every `column` has its own row-index,
+    unlike a pd.DataFrame, which provides a single row-index for all columns. This solves
+    problems with unaligned data and data that varies widely in length.
+
+    Indexing with ``di[]``, ``di.loc[]`` and ``di.iloc[]`` should work analogously to these methods
+    from pd.DataFrame. The indexer can be a single label, a slice, a list-like, a boolean list-like,
+    or a boolean DictOfSeries/pd.DataFrame and can be used to selectively get or set data.
+
+    Parameters
+    ----------
+    data : array-like, Iterable, dict, or scalar value
+        Contains data stored in Series.
+
+    columns : array-like
+        Column labels to use for resulting frame. Will default to
+        RangeIndex(0, 1, 2, ..., n) if no column labels are provided.
+
+    index : Index or array-like
+        Index to use to reindex every given series during init. Ignored if omitted.
+
+    itype : Itype, pd.Index, Itype-string-repr or type
+        Every series that is inserted must have an index of this type or any
+        of this type's subtypes.
+        If None, the itype is inferred as soon as the first non-empty series is inserted.
+
+    cast_policy : {'save', 'force', 'never'}, default 'save'
+        Policy used for (down-)casting the index of a series if its type does not match
+        the ``itype``.
+    """
+
+    def __init__(
+        self,
+        data=None,
+        columns=None,
+        index=None,
+        itype=None,
+        cast_policy="save",
+        fastpath=False,
+    ):
+        super().__init__(
+            data=data,
+            columns=columns,
+            index=index,
+            itype=itype,
+            cast_policy=cast_policy,
+            fastpath=fastpath,
+        )
+
+    @property
+    def _constructor(self):
+        """Return the class. Useful for construction in the elder class.
+        A import of DictOfSeries would end up cyclic."""
+        return DictOfSeries
+
+    def _construct_like_self(self, **kwargs):
+        kwargs.setdefault("itype", self.itype)
+        kwargs.setdefault("cast_policy", self.cast_policy)
+        return self._constructor(**kwargs)
+
+    @property
+    def indexes(self):
+        """Return pandas.Series with the indexes of all columns."""
+        return self.for_each("index")
+
+    @property
+    def values(self):
+        """Return a numpy.array of numpy.arrays with the values of all columns.
+
+        The outer array has one entry per column; each inner array holds the values of that column.
+        """
+        s = self.for_each("values")
+        return s.values
+
+    @property
+    def dtypes(self):
+        """Return pandas.Series with the dtypes of all columns."""
+        return self.for_each("dtype")
+
+    @property
+    def lengths(self):
+        """Return pandas.Series with the lenght of all columns."""
+        return self._data.apply(len)
+
+    @property
+    def size(self):
+        return self.lengths.sum()
+
+    @property
+    def shape(self):
+        return tuple(self.lengths), len(self.columns)
+
+    # ------------------------------------------------------------------------------
+    # Dict-like methods
+
+    def clear(self):
+        d = self._data
+        self._data = pd.Series(dtype=d.dtype, index=type(d.index)([]))
+
+    def get(self, key, default=None):
+        return self._data.get(key, default)
+
+    # implemented in _BaseClass
+    # def items(self):
+    #     return self._data.items()
+
+    def keys(self):
+        return self.columns
+
+    def pop(self, *args):
+        # We support a default value, like dict does, in contrast to pandas.
+        # Therefore we need to handle args manually, because dict-style pop()
+        # distinguishes between a single argument and two arguments (key and
+        # default), where the default can be anything, including None. If the
+        # key is not present and only a single argument is given, a KeyError is
+        # raised; with a given default value, that default is returned instead.
+        if len(args) == 0:
+            raise TypeError("pop expected at least 1 arguments, got 0")
+        if len(args) > 2:
+            raise TypeError(f"pop expected at most 2 arguments, got {len(args)}")
+        key, *rest = args
+        if key in self.columns:
+            return self._data.pop(key)
+        elif rest:
+            return rest.pop()
+        raise KeyError(key)
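+
+    # Usage sketch (illustrative): dict-style pop with an optional default,
+    # assuming a column "a" exists and "x" does not:
+    #   di.pop("a")        # -> the series "a", removed from the dios
+    #   di.pop("x", None)  # -> None, nothing is removed
+    #   di.pop("x")        # -> raises KeyError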
+
+    def popitem(self):
+        last = self.columns[-1]
+        return last, self._data.pop(last)
+
+    def setdefault(self, key, default=None):
+        if key not in self.columns:
+            self._insert(key, default)
+        return self._data[key]
+
+    def update(self, other):
+        if not _is_dios_like(other):
+            other = to_dios(other)
+        self.aloc[other, ...] = other
+
+    # ------------------------------------------------------------------------------
+    # High-Level Iteration
+
+    def iteritems(self):
+        yield from self.items()
+
+    def iterrows(self, fill_value=np.nan, squeeze=True):
+        """
+        Iterate over DictOfSeries rows as (index, pandas.Series/DictOfSeries) pairs.
+        **MAY BE VERY EXPENSIVE IN TIME AND/OR MEMORY**
+
+        Parameters
+        ----------
+        fill_value: scalar, default numpy.nan
+            Fill value for a row entry, if the column does not have an entry
+            at the current index location. This ensures that the returned
+            row always contains all columns. If ``None`` is given no value
+            is filled.
+
+            If ``fill_value=None`` and ``squeeze=True`` the resulting row
+            (a pandas.Series) may differ in length between iterator calls.
+            That is because an entry that is not present in a column will
+            also not be present in the resulting row.
+
+        squeeze: bool, default True
+            * ``True`` : A pandas.Series is returned for each row.
+            * ``False`` : A single-rowed DictOfSeries is returned for each row.
+
+        Yields
+        ------
+        index : label
+            The index of the row.
+        data : Series or DictOfSeries
+            The data of the row as a Series if squeeze is True, as
+            a DictOfSeries otherwise.
+
+        See Also
+        --------
+        DictOfSeries.iteritems : Iterate over (column name, Series) pairs.
+        """
+
+        # todo: 2nd possibility for fill_value=Any, squeeze=False
+        #   do it like in case fill_value=None ->
+        #       1. row = aloc the row
+        #       2. e = row.isempty()
+        #       3. row.loc[idx,e] = fill_value
+        #   This approach could be much better, because the dtype of
+        #   the columns is preserved.
+
+        # PROBABLY PERFORMANCE EXPENSIVE
+        if fill_value is None:
+            allidx = self.index_of("all")
+            if squeeze:
+                for i in allidx:
+                    yield i, self.aloc[i:i].dropempty().squeeze(axis=0)
+            else:
+                for i in allidx:
+                    yield i, self.aloc[i:i]
+
+        # PROBABLY MEMORY EXPENSIVE
+        else:
+            if fill_value is np.nan:
+                df = self.to_df()
+            else:
+                nans = self.isna().to_df().fillna(False)
+                df = self.to_df().fillna(fill_value)
+                df[nans] = np.nan
+            if squeeze:
+                yield from df.iterrows()
+            else:
+                for idx, row in df.iterrows():
+                    yield idx, self._constructor(data=row.to_dict(), index=[idx])
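+
+    # Usage sketch (illustrative): with the default ``fill_value=numpy.nan`` every
+    # yielded row covers all columns, e.g.
+    #   for idx, row in di.iterrows():
+    #       ...  # row is a pd.Series labeled by the column names
+    # while ``fill_value=None, squeeze=True`` yields rows that only contain the
+    # columns actually holding a value at ``idx``.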
+
+    # ------------------------------------------------------------------------------
+    # Broadcasting and Reducing
+
+    def for_each(self, attr_or_callable, **kwds):
+        """
+        Apply a callable or a pandas.Series method or property on each column.
+
+        Parameters
+        ----------
+        attr_or_callable: Any
+            A pandas.Series attribute or any callable, to apply on each column.
+            A series attribute can be any property, field or method and also
+            could be specified as a string. If a callable is given it must take a
+            pandas.Series as the only positional argument and return a scalar.
+
+        **kwds: any
+            Keyword arguments passed to the callable.
+
+        Returns
+        -------
+        pandas.Series
+            A series with the results, indexed by the column labels.
+
+        Notes
+        -----
+        The called function or the attribute works on the actual underlying series.
+        If the provided function works inplace it can and will modify the actual data.
+        If this is not desired one should make an explicit copy beforehand. If the
+        function returns new objects or copies, explicit copying is not needed.
+
+        See Also
+        --------
+        DictOfSeries.apply : Apply functions to columns and convert
+                             result to DictOfSeries.
+
+        Examples
+        --------
+        >>> d = DictOfSeries([range(3), range(4)], columns=['a', 'b'])
+        >>> d
+           a |    b |
+        ==== | ==== |
+        0  0 | 0  0 |
+        1  1 | 1  1 |
+        2  2 | 2  2 |
+             | 3  3 |
+
+        Use with a callable..
+
+        >>> d.for_each(max)
+        columns
+        a    2
+        b    3
+        dtype: object
+
+        ..or with a string denoting a pd.Series attribute, which
+        is therefore the same as passing the attribute itself.
+
+        >>> d.for_each('max')
+        columns
+        a    2
+        b    3
+        dtype: object
+
+        >>> d.for_each(pd.Series.max)
+        columns
+        a    2
+        b    3
+        dtype: object
+
+        Both also work with properties:
+
+        >>> d.for_each('dtype')
+        columns
+        a    int64
+        b    int64
+        dtype: object
+        """
+        attrOcall = attr_or_callable
+        if isinstance(attrOcall, str):
+            attrOcall = getattr(pd.Series, attrOcall)
+        call = callable(attrOcall)
+        if not call:
+            attrOcall = attr_or_callable
+        data = pd.Series(dtype="O", index=self.columns)
+        for c, series in self.items():
+            if call:
+                data.at[c] = attrOcall(series, **kwds)
+            else:
+                data.at[c] = getattr(series, attrOcall)
+        return data
+
+    def apply(self, func, axis=0, raw=False, args=(), **kwds):
+        """
+        Apply a function along an axis of the DictOfSeries.
+
+        Parameters
+        ----------
+        func : callable
+            Function to apply on each column.
+        axis : {0 or 'index', 1 or 'columns'}, default 0
+            Axis along which the function is applied:
+
+            * 0 or 'index': apply function to each column.
+            * 1 or 'columns': NOT IMPLEMENTED
+
+        raw : bool, default False
+            Determines if row or column is passed as a Series or ndarray object:
+
+            * ``False`` : passes each row or column as a Series to the
+              function.
+            * ``True`` : the passed function will receive ndarray objects
+              instead.
+              If you are just applying a NumPy reduction function this will
+              achieve much better performance.
+
+        args : tuple
+            Positional arguments to pass to `func` in addition to the
+            array/series.
+        **kwds
+            Additional keyword arguments to pass as keywords arguments to
+            `func`.
+
+        Returns
+        -------
+        Series or DictOfSeries
+            Result of applying ``func`` along the given axis of the
+            DictOfSeries.
+
+        Notes
+        -----
+        The called function or the attribute works on the actual underlying series.
+        If the provided function works inplace it can and will modify the actual data.
+        If this is not desired one should make an explicit copy beforehand. If the
+        function returns new objects or copies, and does not mess with the data, explicit
+        copying is not needed.
+
+
+        Raises
+        ------
+        NotImplementedError
+            * if axis is 'columns' or 1
+
+        See Also
+        --------
+        DictOfSeries.for_each: apply pd.Series methods or properties to each column
+
+        Examples
+        --------
+
+        We use the example DictOfSeries from :ref:`indexing <doc_indexing:Example dios>`.
+
+        >>> di = di[:5]
+        >>> di
+            a |    b |     c |     d |
+        ===== | ==== | ===== | ===== |
+        0   0 | 2  5 | 4   7 | 6   0 |
+        1   7 | 3  6 | 5  17 | 7   1 |
+        2  14 | 4  7 | 6  27 | 8   2 |
+        3  21 | 5  8 | 7  37 | 9   3 |
+        4  28 | 6  9 | 8  47 | 10  4 |
+
+        >>> di.apply(max)
+        columns
+        a    28
+        b     9
+        c    47
+        d     4
+        dtype: int64
+
+        >>> di.apply(pd.Series.count)
+        columns
+        a    5
+        b    5
+        c    5
+        d    5
+        dtype: int64
+
+        One can pass keyword arguments directly..
+
+        >>> di.apply(pd.Series.value_counts, normalize=True)
+              a |      b |       c |      d |
+        ======= | ====== | ======= | ====== |
+        7   0.2 | 7  0.2 | 7   0.2 | 4  0.2 |
+        14  0.2 | 6  0.2 | 37  0.2 | 3  0.2 |
+        21  0.2 | 5  0.2 | 47  0.2 | 2  0.2 |
+        28  0.2 | 9  0.2 | 27  0.2 | 1  0.2 |
+        0   0.2 | 8  0.2 | 17  0.2 | 0  0.2 |
+
+        Or define your own function..
+
+        >>> di.apply(lambda s : 'high' if max(s) > 10 else 'low')
+        columns
+        a    high
+        b     low
+        c    high
+        d     low
+        dtype: object
+
+        More advanced functions that return a list-like can also be given. Note that
+        the returned lists do not necessarily have to be of the same length.
+
+        >>> func = lambda s : ('high', max(s), min(s)) if min(s) > (max(s)//2) else ('low',max(s))
+        >>> di.apply(func)
+             a |       b |      c |      d |
+        ====== | ======= | ====== | ====== |
+        0  low | 0  high | 0  low | 0  low |
+        1   28 | 1     9 | 1   47 | 1    4 |
+               | 2     5 |        |        |
+        """
+        if axis in [1, "columns"]:
+            raise NotImplementedError
+
+        if axis not in [0, "index"]:
+            raise ValueError(axis)
+
+        # we cannot use self._data.apply(func=func, args=args, **kwds)
+        # because this may return a pandas.DataFrame. Also we cannot
+        # use pandas.Series.apply(), because this works on its values.
+        need_dios = need_convert = False
+        result = pd.Series(dtype="O", index=self.columns)
+        for c, series in self.items():
+            series = series.values if raw else series
+            s = func(series, *args, **kwds)
+            result.at[c] = s
+            if pdextra.is_scalar(s):
+                need_convert = True
+            else:
+                need_dios = True
+                if not isinstance(s, pd.Series):
+                    need_convert = True
+        if need_dios:
+            if need_convert:
+                for c, val in result.items():
+                    result.at[c] = pd.Series(val)
+            itype = _find_least_common_itype(result)
+            result = self._constructor(data=result, itype=itype, fastpath=True)
+        return result
+
+    def reduce_columns(self, func, initial=None, skipna=False):
+        """
+        Reduce all columns to a single pandas.Series by a given function.
+
+        Apply a function of two pandas.Series as arguments, cumulatively to all
+        columns, from left to right, so as to reduce the columns to a single
+        pandas.Series. If initial is present, it is placed before the columns
+        in the calculation, and serves as a default when the columns are empty.
+
+        Parameters
+        ----------
+        func : function
+            The function must take two identically indexed pandas.Series and should
+            return a single pandas.Series with the same index.
+
+        initial : column-label or pd.Series, default None
+            The series to start with. If None a dummy series is created, with the
+            indices of all columns and the first seen values.
+
+        skipna : bool, default False
+               If True, skip NaN values.
+
+        Returns
+        -------
+        pandas.Series
+            A series with the reducing result and the index of the start series,
+            defined by ``initial``.
+        """
+        if initial is None:
+            value = pd.Series(index=self.index_of("all"))
+            for d in self._data:
+                value = value.combine_first(d)
+        elif isinstance(initial, pd.Series):
+            value = initial.copy()
+        elif initial in self.columns:
+            value = self._data.at[initial].copy()
+        else:
+            raise ValueError("initial must be pd.Series, a column label or None")
+
+        if skipna:
+            val = value.dropna()
+            data = self.dropna()._data
+        else:
+            val = value
+            data = self._data
+
+        for d in data:
+            idx = val.index.intersection(d.index)
+            if len(idx) > 0:
+                l, r = val.loc[idx], d.loc[idx]
+                val.loc[idx] = func(l, r)
+
+        if skipna:
+            value.loc[val.index] = val
+        return value
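+
+    # Usage sketch (illustrative): a row-wise minimum over all columns can be
+    # expressed as a left fold, assuming numeric columns:
+    #   rowmin = di.reduce_columns(lambda s1, s2: s1.where(s1 < s2, s2))
+    # ``rowmin`` is a pd.Series over the union of all column indices; positions
+    # present in only one column keep that column's value. This is roughly what
+    # ``min(axis=1)`` below does.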
+
+    # ------------------------------------------------------------------------------
+    # Merging and Joining
+
+    def combine_first(self, other, keepna=False):
+        """
+        Update null elements with value in the same location in other.
+
+        Combine two DictOfSeries objects by filling null values in one DictOfSeries with
+        non-null values from other DictOfSeries. The row and column indexes of the resulting
+        DictOfSeries will be the union of the two.
+
+        Parameters
+        ----------
+        keepna : bool, default False
+            By default NaN's are updated by other and new value-index pairs from other are
+            inserted. If set to True, NaN's are not updated and only new value-index pairs are inserted.
+
+        other : DictOfSeries
+            Provided DictOfSeries to use to fill null values.
+
+        Returns
+        -------
+        DictOfSeries
+        """
+        if keepna:
+            nans = self.isna()
+
+        new: DictOfSeries = self.copy()
+        for c in other.columns:
+            if c in self.columns:
+                col = self._data.at[c].combine_first(other[c])
+            else:
+                col = other[c]
+            new._data.at[c] = col
+
+        if keepna:
+            new.aloc[nans] = np.nan
+
+        return new
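+
+    # Usage sketch (illustrative): NaN's in ``di`` are filled from ``other``,
+    # and indices/columns only present in ``other`` are added:
+    #   patched = di.combine_first(other)
+    # With ``keepna=True`` existing NaN's survive and only genuinely new
+    # index/column locations are taken from ``other``.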
+
+    # ------------------------------------------------------------------------------
+    # Misc methods
+
+    def index_of(self, method="all"):
+        """Return an single index with indices from all columns.
+
+        Parameters
+        ----------
+        method : string, default 'all'
+            * 'all' : get all indices from all columns
+            * 'union' : alias for 'all'
+            * 'shared' : get indices that are present in every column
+            * 'intersection' : alias for 'shared'
+            * 'uniques' : get indices that are only present in a single column
+            * 'non-uniques' : get indices that are present in more than one column
+
+        Returns
+        -------
+        pd.Index
+            A single duplicate-free index, representing the indices of all columns as selected by ``method``.
+
+        Examples
+        --------
+        We use the example DictOfSeries from :ref:`indexing <doc_indexing:Example dios>`.
+
+        >>> di
+            a |      b |      c |     d |
+        ===== | ====== | ====== | ===== |
+        0   0 | 2    5 | 4    7 | 6   0 |
+        1   7 | 3    6 | 5   17 | 7   1 |
+        2  14 | 4    7 | 6   27 | 8   2 |
+        3  21 | 5    8 | 7   37 | 9   3 |
+        4  28 | 6    9 | 8   47 | 10  4 |
+        5  35 | 7   10 | 9   57 | 11  5 |
+        6  42 | 8   11 | 10  67 | 12  6 |
+        7  49 | 9   12 | 11  77 | 13  7 |
+        8  56 | 10  13 | 12  87 | 14  8 |
+        9  63 | 11  14 | 13  97 | 15  9 |
+
+        >>> di.index_of()
+        RangeIndex(start=0, stop=16, step=1)
+
+        >>> di.index_of("shared")
+        Int64Index([6, 7, 8, 9], dtype='int64')
+
+        >>> di.index_of("uniques")
+        Int64Index([0, 1, 14, 15], dtype='int64')
+        """
+        indexes = self.indexes
+        if len(indexes) <= 1:
+            return indexes.squeeze()
+
+        if method in ["union", "all"]:
+            res = ftools.reduce(pd.Index.union, indexes)
+        elif method in ["intersection", "shared"]:
+            res = ftools.reduce(pd.Index.intersection, indexes)
+        elif method in ["uniques", "non-uniques"]:
+            res = ftools.reduce(pd.Index.append, indexes)
+            res = res.value_counts(sort=False, dropna=False)
+            if method == "uniques":
+                res = res[res == 1].index
+            else:
+                res = res[res > 1].index
+        else:
+            raise ValueError(method)
+        return res if res.is_unique else res.unique()
+
+    def squeeze(self, axis=None):
+        """Squeeze a 1-dimensional axis objects into scalars."""
+        if axis in [0, "index"]:
+            if (self.lengths == 1).all():
+                return self._data.apply(pd.Series.squeeze)
+            return self
+        elif axis in [1, "columns"]:
+            if len(self) == 1:
+                return self._data.squeeze()
+            return self
+        elif axis is None:
+            if len(self) == 1:
+                return self._data.squeeze().squeeze()
+            if (self.lengths == 1).all():
+                return self._data.apply(pd.Series.squeeze).squeeze()
+            return self
+        raise ValueError(axis)
+
+    def dropna(self, inplace=False):
+        """Return a bolean array that is `True` if the value is a Nan-value"""
+        data = self.for_each("dropna", inplace=inplace)
+        if inplace:
+            return
+        return self._construct_like_self(data=data, fastpath=True)
+
+    def dropempty(self):
+        """Drop empty columns. Return copy."""
+        return self.loc[:, self.notempty()]
+
+    def astype(self, dtype, copy=True, errors="raise"):
+        """Cast the data to the given data type."""
+        data = self.for_each("astype", dtype=dtype, copy=copy, errors=errors)
+        return self._construct_like_self(data=data, fastpath=True)
+
+    def _mask_or_where(self, cond, other=np.nan, inplace=False, mask=True):
+        """helper to mask/where"""
+        data = self if inplace else self.copy()
+
+        if callable(other):
+            other = other(data)
+
+        if callable(cond):
+            cond = cond(data)
+        # if DictOfSeries is bool,
+        # is already checked in aloc
+        elif not _is_dios_like(cond):
+            if not pdextra.is_bool_indexer(cond):
+                raise ValueError(
+                    "Object with boolean values only expected as condition"
+                )
+
+        if mask:
+            data.aloc[cond] = other
+        else:
+            data.aloc[~cond] = other
+
+        if inplace:
+            return None
+        return data
+
+    def where(self, cond, other=np.nan, inplace=False):
+        """
+        Replace values where the condition is False.
+
+        Parameters
+        ----------
+        cond : bool DictOfSeries, Series, array-like, or callable
+            Where cond is True, keep the original value. Where False, replace
+            with corresponding value from other. If cond is callable, it is computed
+            on the DictOfSeries and should return boolean DictOfSeries or array.
+            The callable must not change input DictOfSeries (though dios doesn’t check it).
+            If cond is a bool Series, every column is (row-)aligned against it, before the
+            boolean values are evaluated. Missing indices are treated like False values.
+
+        other : scalar, Series, DictOfSeries, or callable
+            Entries where cond is False are replaced with corresponding value from other.
+            If other is callable, it is computed on the DictOfSeries and should return scalar
+            or DictOfSeries. The callable must not change input DictOfSeries (though dios doesn’t check it).
+            If other is a Series, every column is (row-)aligned against it, before the values
+            are written. NAN's are written for missing indices.
+
+        inplace : bool, default False
+            Whether to perform the operation in place on the data.
+
+        Returns
+        -------
+        DictOfSeries
+
+        See Also
+        --------
+        mask: Mask data where condition is True
+        """
+        return self._mask_or_where(cond=cond, other=other, inplace=inplace, mask=False)
+
+    def mask(self, cond, other=np.nan, inplace=False):
+        """
+        Replace values where the condition is True.
+
+        Parameters
+        ----------
+        cond : bool DictOfSeries, Series, array-like, or callable
+            Where cond is False, keep the original value. Where True, replace
+            with corresponding value from other. If cond is callable, it is computed
+            on the DictOfSeries and should return boolean DictOfSeries or array.
+            The callable must not change input DictOfSeries (though dios doesn’t check it).
+            If cond is a bool Series, every column is (row-)aligned against it, before the
+            boolean values are evaluated. Missing indices are treated like False values.
+
+        other : scalar, Series, DictOfSeries, or callable
+            Entries where cond is True are replaced with corresponding value from other.
+            If other is callable, it is computed on the DictOfSeries and should return scalar
+            or DictOfSeries. The callable must not change input DictOfSeries (though dios doesn’t check it).
+            If other is a Series, every column is (row-)aligned against it, before the values
+            are written. NAN's are written for missing indices.
+
+        inplace : bool, default False
+            Whether to perform the operation in place on the data.
+
+        Returns
+        -------
+        DictOfSeries
+
+        See Also
+        --------
+        where: Replace values where the condition is False
+        """
+        return self._mask_or_where(cond=cond, other=other, inplace=inplace, mask=True)
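+
+    # Usage sketch (illustrative): ``where`` keeps values at True positions,
+    # ``mask`` replaces them, so the two calls below are complementary:
+    #   di.where(di > 0)           # values <= 0 become NaN
+    #   di.mask(di > 0, other=0)   # values > 0 are replaced by 0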
+
+    def memory_usage(self, index=True, deep=False):
+        return self.for_each(pd.Series.memory_usage, index=index, deep=deep).sum()
+
+    def to_df(self, how="outer"):
+        """
+        Transform DictOfSeries to a pandas.DataFrame.
+
+        Because a pandas.DataFrame cannot handle Series of different
+        lengths, but DictOfSeries can, the missing data is filled with
+        NaNs or is dropped, depending on the keyword `how`.
+
+        Parameters
+        ----------
+        how: {'outer', 'inner'}, default 'outer'
+            Define how the resulting DataFrame index is generated:
+            * 'outer': The indices of all columns, merged into one index, are used.
+                If a column has no value at a new index location, `NaN` is filled.
+            * 'inner': Only indices that are present in all columns are used; no filling
+                is needed, but values are dropped if a column has indices that are not
+                shared by all other columns.
+
+        Returns
+        -------
+        pandas.DataFrame: transformed data
+
+        Examples
+        --------
+
+        Missing data locations are filled with NaN's
+
+        >>> a = pd.Series(11, index=range(2))
+        >>> b = pd.Series(22, index=range(3))
+        >>> c = pd.Series(33, index=range(1,9,3))
+        >>> di = DictOfSeries(dict(a=a, b=b, c=c))
+        >>> di
+            a |     b |     c |
+        ===== | ===== | ===== |
+        0  11 | 0  22 | 1  33 |
+        1  11 | 1  22 | 4  33 |
+              | 2  22 | 7  33 |
+
+        >>> di.to_df()
+        columns     a     b     c
+        0        11.0  22.0   NaN
+        1        11.0  22.0  33.0
+        2         NaN  22.0   NaN
+        4         NaN   NaN  33.0
+        7         NaN   NaN  33.0
+
+        or is dropped if `how='inner'`
+
+        >>> di.to_df(how='inner')
+        columns   a   b   c
+        1        11  22  33
+        """
+        if how == "inner":
+            how = "shared"
+        elif how == "outer":
+            how = "all"
+        else:
+            raise ValueError(how)
+
+        index = self.index_of(how)
+        df = pd.DataFrame(columns=self.columns, index=index)
+        for c, series in self.items():
+            # this automatically respects the df-index, that
+            # was set before. Missing locations are already
+            # nans, present locations are set.
+            df[c] = series.copy()
+        return df
+
+    @property
+    def debugDf(self):
+        """Alias for ``to_df()`` as property, for debugging purpose."""
+        return self.to_df()
+
+    def min(self, axis=0, skipna=True):
+        if axis is None:
+            return self.for_each(pd.Series.min, skipna=skipna).min()
+        if axis in [0, "index"]:
+            return self.for_each(pd.Series.min, skipna=skipna)
+        if axis in [1, "columns"]:
+            func = lambda s1, s2: s1.where(s1 < s2, s2)
+            return self.reduce_columns(func, skipna=skipna)
+        raise ValueError(axis)
+
+    def max(self, axis=0, skipna=True):
+        if axis is None:
+            return self.for_each(pd.Series.max, skipna=skipna).max()
+        if axis in [0, "index"]:
+            return self.for_each(pd.Series.max, skipna=skipna)
+        if axis in [1, "columns"]:
+            func = lambda s1, s2: s1.where(s1 > s2, s2)
+            return self.reduce_columns(func, skipna=skipna)
+        raise ValueError(axis)
+
+    # ----------------------------------------------------------------------
+    # Boolean and empty stuff
+
+    def equals(self, other):
+        """
+        Test whether two DictOfSeries contain the same elements.
+
+        This function allows two DictOfSeries to be compared against each other to see
+        if they have the same shape and elements. NaNs in the same location are considered equal.
+        The column headers do not need to have the same type, but the elements within the columns
+        must be the same dtype.
+
+        Parameters
+        ----------
+        other: DictOfSeries
+            The other DictOfSeries to compare with.
+
+        Returns
+        -------
+        bool
+            True if all elements are the same in both DictOfSeries, False otherwise.
+        """
+        if not isinstance(other, _DiosBase):
+            return False
+        try:
+            eq_nans = (self.isna() == other.isna()).all(None)
+            eq_data = (self.dropna() == other.dropna()).all(None)
+            eq_dtypes = (self.dtypes == other.dtypes).all()
+            return eq_nans and eq_dtypes and eq_data
+        except Exception:
+            return False
+
+    def isin(self, values):
+        """Return a boolean dios, that indicates if the corresponding value is in the given array-like."""
+        data = self.for_each("isin", values=values)
+        return self._construct_like_self(data=data, fastpath=True)
+
+    def all(self, axis=0):
+        """
+        Return whether all elements are True, potentially over an axis.
+
+        Returns True unless there is at least one element within a series
+        or along a DictOfSeries axis that is False or equivalent (e.g. zero or empty).
+
+        Parameters
+        ----------
+        axis : {0 or ‘index’, 1 or ‘columns’, None}, default 0
+            Indicate which axis or axes should be reduced.
+             * 0 / ‘index’ : reduce the index, return a Series whose index is the original column labels.
+             * 1 / ‘columns’ : reduce the columns, return a Series whose index is the union of all columns indexes.
+             * None : reduce all axes, return a scalar.
+
+        Returns
+        -------
+        pandas.Series
+
+        See Also
+        --------
+        pandas.Series.all: Return True if all elements are True.
+        any: Return True if one (or more) elements are True.
+        """
+        if axis is None:
+            return self._data.apply(all).all()
+        if axis in [0, "index"]:
+            return self._data.apply(all)
+        if axis in [1, "columns"]:
+            func = lambda s1, s2: s1.astype(bool) & s2.astype(bool)
+            init = pd.Series(True, dtype=bool, index=self.index_of("all"))
+            return self.reduce_columns(func, init)
+        raise ValueError(axis)
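+
+    # Usage sketch (illustrative): for a boolean dios ``b``,
+    #   b.all()           # one bool per column
+    #   b.all(axis=1)     # one bool per row, over the union of all indices
+    #   b.all(axis=None)  # a single bool for the whole dios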
+
+    def any(self, axis=0):
+        """
+        Return whether any element is True, potentially over an axis.
+
+        Returns False unless there is at least one element within a series
+        or along a DictOfSeries axis that is True or equivalent (e.g. non-zero or non-empty).
+
+        Parameters
+        ----------
+        axis : {0 or ‘index’, 1 or ‘columns’, None}, default 0
+            Indicate which axis or axes should be reduced.
+             * 0 / ‘index’ : reduce the index, return a Series whose index is the original column labels.
+             * 1 / ‘columns’ : reduce the columns, return a Series whose index is the union of all columns indexes.
+             * None : reduce all axes, return a scalar.
+
+        Returns
+        -------
+        pandas.Series
+
+        See Also
+        --------
+        pandas.Series.any: Return whether any element is True.
+        all: Return True if all elements are True.
+        """
+        if axis is None:
+            return self._data.apply(any).any()
+        if axis in [0, "index"]:
+            return self._data.apply(any)
+        if axis in [1, "columns"]:
+            func = lambda s1, s2: s1.astype(bool) | s2.astype(bool)
+            init = pd.Series(False, dtype=bool, index=self.index_of("all"))
+            return self.reduce_columns(func, init)
+        raise ValueError(axis)
+
+    def isna(self, drop_empty=False):
+        """
+        Return a boolean DictOfSeries which indicates NA positions.
+        """
+        data = self.dropempty() if drop_empty else self
+        data = data.for_each("isna")
+        return self._construct_like_self(data=data, fastpath=True)
+
+    def notna(self, drop_empty=False):
+        """
+        Return a boolean DictOfSeries which indicates non-NA positions.
+        """
+        data = self.dropempty() if drop_empty else self
+        data = data.for_each("notna")
+        return self._construct_like_self(data=data, fastpath=True)
+
+    def hasnans(self, axis=0, drop_empty=False):
+        """
+        Returns a boolean Series along an axis, which indicates if it contains NA-entries.
+        """
+        data = self.dropempty() if drop_empty else self
+        if axis is None:
+            return data.for_each("hasnans").any()
+        if axis in [0, "index"]:
+            return data.for_each("hasnans")
+        if axis in [1, "columns"]:
+            func = lambda s1, s2: s1.isna() | s2.isna()
+            init = pd.Series(False, dtype=bool, index=self.index_of("all"))
+            return data.reduce_columns(func, init)
+        raise ValueError(axis)
+
+    def fillna(
+        self,
+        value=None,
+        method=None,
+        axis=None,
+        inplace=False,
+        limit=None,
+        downcast=None,
+    ):
+        if axis in [None, 0, "index"]:
+            kws = dict(value=value, method=method, limit=limit, downcast=downcast)
+            data = self.for_each("fillna", inplace=inplace, **kws)
+            if inplace:
+                return
+            return self._construct_like_self(data=data, fastpath=True)
+
+        if axis in [1, "columns"]:
+            raise NotImplementedError
+        raise ValueError(axis)
+
+    def isempty(self):
+        """Returns a boolean Series, which indicates if an column is empty"""
+        return self.for_each("empty").astype(bool)
+
+    def notempty(self):
+        """Returns a boolean Series, which indicates if an column is not empty"""
+        return ~self.isempty()
+
+    def isdata(self):
+        """Alias for ``notna(drop_empty=True)``."""
+        return self.notna(drop_empty=True)
+
+    def isnull(self, drop_empty=False):
+        """Alias for ``isna()``"""
+        return self.isna(drop_empty=drop_empty)
+
+    def notnull(self, drop_empty=False):
+        """Alias, see ``notna()``."""
+        return self.notna(drop_empty=drop_empty)
+
+    def to_dios(self):
+        """
+        A dummy to allow unconditional to_dios calls
+        on pd.DataFrame, pd.Series and dios.DictOfSeries
+        """
+        return self
+
+    # ----------------------------------------------------------------------
+    # Rendering Methods
+
+    def __str__(self):
+        return self.__repr__()
+
+    def __repr__(self):
+        repr = dios_options[OptsFields.dios_repr]
+        showdim = self.lengths.max() > dios_options[OptsFields.disp_max_rows]
+        return self.to_string(method=repr, show_dimensions=showdim)
+
+    def to_string(
+        self,
+        max_rows=None,
+        min_rows=None,
+        max_cols=None,
+        na_rep="NaN",
+        show_dimensions=False,
+        method=Opts.repr_indexed,
+        no_value=" ",
+        empty_series_rep="no data",
+        col_delim=" | ",
+        header_delim="=",
+        col_space=None,
+    ):
+        """Pretty print a dios.
+
+        if `method` == `indexed` (default):
+            every column is represented by its own index and corresponding values
+
+        if `method` == `aligned` [2]:
+            one(!) global index is generated and values from a column appear at
+            the corresponding index-location.
+
+        Parameters
+        ---------
+
+        max_cols :
+            no more columns than `max_cols` are printed [1]
+
+        max_rows :
+            see `min_rows` [1]
+
+        min_rows :
+            no more rows than `min_rows` are printed, if the rows of any series exceed `max_rows` [1]
+
+        na_rep :
+            all NaN-values are replaced by `na_rep`. Default `NaN`
+
+        empty_series_rep :
+            Ignored if not `method='indexed'`.
+            Empty series are represented by the string in `empty_series_rep`
+
+        col_delim : str
+            Ignored if not `method='indexed'`.
+            between all columns `col_delim` is inserted.
+
+        header_delim :
+            Ignored if not `method='indexed'`.
+            between the column names (header) and the data, `header_delim` is inserted,
+            if not None. The string is repeated, up to the width of the column. (str or None).
+
+        no_value :
+            Ignored if not `method='aligned'`.
+            Value that indicates that no entry is present in the underlying series. Bear in mind
+            that this should differ from `na_rep`, otherwise you cannot distinguish missing from NaN values.
+
+        Notes
+        -----
+            [1]: defaults to the corresponding value in `dios_options`
+            [2]: the common-params are directly passed to pd.DataFrame.to_string(..)
+            under the hood, if method is `aligned`
+
+        """
+        if self.empty:
+            return _empty_repr(self)
+
+        max_cols = max_cols or dios_options[OptsFields.disp_max_cols] or 100
+        max_rows = max_rows or dios_options[OptsFields.disp_max_rows] or 200
+        min_rows = min_rows or dios_options[OptsFields.disp_min_rows] or 100
+
+        kwargs = dict(
+            max_rows=max_rows,
+            min_rows=min_rows,
+            max_cols=max_cols,
+            na_rep=na_rep,
+            col_space=col_space,
+            show_dimensions=show_dimensions,
+        )
+
+        if method == Opts.repr_aligned:
+            return _to_aligned_df(self, no_value=no_value).to_string(**kwargs)
+
+        # add pprint relevant options
+        kwargs.update(
+            empty_series_rep=empty_series_rep,
+            col_delim=col_delim,
+            header_delim=header_delim,
+        )
+
+        return pprint_dios(self, **kwargs)
+
+    def to_csv(self, *args, **kwargs):
+        self.to_df().to_csv(*args, **kwargs)
+
+    to_csv.__doc__ = pd.DataFrame.to_csv.__doc__
+
+
+def _empty_repr(di):
+    return f"Empty DictOfSeries\n" f"Columns: {di.columns.to_list()}"
+
+
+def pprint_dios(
+    dios,
+    max_rows=None,
+    min_rows=None,
+    max_cols=None,
+    na_rep="NaN",
+    empty_series_rep="no data",
+    col_space=None,
+    show_dimensions=True,
+    col_delim=" | ",
+    header_delim="=",
+):
+    na_rep = str(na_rep)
+    empty_series_rep = str(empty_series_rep)
+    col_delim = col_delim or " "
+
+    min_rows = min(max_rows, min_rows)
+
+    if dios.empty:
+        return _empty_repr(dios)
+
+    maxlen = dios.lengths.max()
+    data = dios._data
+
+    trunc_cols = len(data) > max_cols
+    if trunc_cols:
+        left, right = data.head(max_cols // 2), data.tail(max_cols // 2)
+        data = left.append(right)
+
+    # now data only contains series that we want to print.
+
+    # if any series exceed max_rows we trim all series to min_rows
+    series_lengths = data.apply(len).to_list()
+    series_maxlen = max(series_lengths)
+    trunc_rows = series_maxlen > max_rows
+    max_rows = min_rows if trunc_rows else series_maxlen
+
+    # we make a list of lists, where the inner list contains all
+    # stringified values of the series up to max_rows+1, where
+    # the additional row is the column-name
+    outer = []
+    for colname in data.index:
+        s = data.at[colname]
+
+        isempty = s.empty
+        if isempty:
+            s = pd.Series(empty_series_rep)
+            idx = False
+            cspace = col_space
+        else:
+            idx = True
+            cspace = col_space // 2 if col_space else col_space
+
+        sstr = s.to_frame().to_string(
+            col_space=cspace,
+            header=[str(colname)],
+            index=idx,
+            na_rep=na_rep,
+            max_rows=max_rows,
+            min_rows=min_rows,
+        )
+        li = sstr.split("\n")
+
+        # HACK: empty series produce an unnecessary space,
+        # because the index is omitted
+        if isempty:
+            cstr, vstr = li
+            if len(cstr.lstrip()) < len(vstr) and (cspace or 0) < len(vstr):
+                li = [cstr[1:], vstr[1:]]
+
+        outer.append(li)
+
+    # now the lengths of all value-strings within a series are the same.
+    # we need these lengths to know how many chars we need to fill,
+    # once we exceed the length of a series, or if we insert whole
+    # columns.
+    valstr_len = [len(c[0]) for c in outer]
+
+    rows = max_rows + 1  # colnames aka. header
+    rows += 1 if trunc_rows else 0  # `...` in rows
+    rows += 1 if header_delim else 0  # underline header
+
+    if header_delim:
+        for i, c in enumerate(outer):
+            colheader = (header_delim * valstr_len[i])[: valstr_len[i]]
+            c.insert(1, colheader)
+
+    dots = " ... "
+    if trunc_cols:
+        outer.insert(max_cols // 2, [dots] * rows)
+        valstr_len.insert(max_cols // 2, len(dots))
+        series_lengths.insert(max_cols // 2, rows)
+
+    txt = ""
+    for r in range(rows):
+        for i, c in enumerate(outer):
+            try:
+                vstr = c[r]
+            except IndexError:
+                vstr = " " * valstr_len[i]
+            txt += vstr + col_delim
+        txt += "\n"
+
+    # add footer
+    if show_dimensions:
+        for i, c in enumerate(outer):
+            # ignore the dot-column
+            if trunc_cols and i == max_cols // 2:
+                txt += dots + " " * len(col_delim)
+            else:
+                txt += f"[{series_lengths[i]}]".ljust(valstr_len[i] + len(col_delim))
+
+        txt += f"\n\nmax: [{maxlen} rows x {len(dios.columns)} columns]"
+        txt += "\n"
+
+    return txt
+
+
+def _to_aligned_df(dios, no_value=" "):
+    if dios.empty:
+        return pd.DataFrame(columns=dios.columns)
+
+    # keep track of all real nans
+    nandict = {}
+    for c in dios:
+        nans = dios[c].isna()
+        nandict[c] = nans[nans].index
+
+    df = dios.to_df()
+    df[df.isna()] = no_value
+
+    # reinsert all real nans
+    for c in df:
+        df.loc[nandict[c], c] = np.nan
+
+    return df
+
+
+def to_dios(obj) -> DictOfSeries:
+    if isinstance(obj, DictOfSeries):
+        return obj
+    return DictOfSeries(data=obj)
+
+
+def __monkey_patch_pandas():
+    def to_dios(self):
+        return DictOfSeries(data=self)
+
+    pd.Series.to_dios = to_dios
+    pd.DataFrame.to_dios = to_dios
+
+
+__monkey_patch_pandas()
diff --git a/dios/dios/indexer.py b/dios/dios/indexer.py
new file mode 100644
index 0000000000000000000000000000000000000000..8f325b282acb458f66a0170231f983ab905f009e
--- /dev/null
+++ b/dios/dios/indexer.py
@@ -0,0 +1,491 @@
+from . import pandas_bridge as pdextra
+from .base import (
+    _DiosBase,
+    _is_dios_like,
+    _is_bool_dios_like,
+)
+
+import numpy as np
+import pandas as pd
+
+
+class _Indexer:
+    def __init__(self, obj: _DiosBase):
+        self.obj = obj
+        self._data = obj._data
+
+    def _unpack_key(self, key):
+
+        key = list(key) if pdextra.is_iterator(key) else key
+
+        if isinstance(key, tuple):
+            if len(key) > 2:
+                raise KeyError("To many indexers")
+            rowkey, colkey = key
+        else:
+            rowkey, colkey = key, slice(None)
+
+        if isinstance(rowkey, tuple) or isinstance(colkey, tuple):
+            raise KeyError(f"{key}. tuples are not allowed.")
+
+        rowkey = list(rowkey) if pdextra.is_iterator(rowkey) else rowkey
+        colkey = list(colkey) if pdextra.is_iterator(colkey) else colkey
+        return rowkey, colkey
+
+    def _set_value_muli_column(self, rowkey, colkey, value, xloc="loc"):
+        """set value helper for loc and iloc"""
+
+        data = getattr(self._data, xloc)[colkey]
+
+        hashable_rkey = pdextra.is_hashable(rowkey)
+        dioslike_value = False
+        iter_value = False
+
+        if _is_dios_like(value):
+            dioslike_value = True
+            if hashable_rkey:
+                raise ValueError(f"Incompatible indexer with DictOfSeries")
+
+        elif pdextra.is_list_like(value):
+            value = value.values if isinstance(value, pd.Series) else value
+            iter_value = True
+            if len(value) != len(data):
+                raise ValueError(
+                    f"shape mismatch: value array of shape (.., {len(value)}) could "
+                    f"not be broadcast to indexing result of shape (.., {len(data)})"
+                )
+        c = "?"
+        try:
+            for i, c in enumerate(data.index):
+                dat = data.at[c]
+                dat_xloc = getattr(dat, xloc)
+
+                if dioslike_value:
+                    # setting on an empty selection fails, e.g. emptySer.loc[:] = [2, 1]
+                    # len(scalar) would fail, but that cannot happen here, because
+                    # a dios-like value with a hashable rowkey was already rejected above
+                    if len(dat_xloc[rowkey]) == 0:
+                        continue
+
+                # unpack the value if necessary
+                if iter_value:
+                    val = value[i]
+                elif dioslike_value:
+                    val = value[c] if c in value else np.nan
+                else:
+                    val = value
+
+                dat_xloc[rowkey] = val
+
+        except Exception as e:
+            raise type(e)(f"failed for column {c}: " + str(e)) from e
+
+
+# #############################################################################
+
+
+class _LocIndexer(_Indexer):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+    def __getitem__(self, key):
+
+        rowkey, colkey = self._unpack_key(key)
+        if _is_dios_like(rowkey) or _is_dios_like(colkey):
+            raise ValueError("Could not index with multidimensional key")
+
+        # simple optimisation
+        if pdextra.is_null_slice(rowkey) and pdextra.is_null_slice(colkey):
+            return self.obj.copy()
+
+        data = self._data.loc[colkey].copy()
+
+        # .loc[any, scalar] -> (a single) series
+        # .loc[scalar, scalar] -> (a single) value
+        if pdextra.is_hashable(colkey):
+            new = data.loc[rowkey]
+
+        # .loc[any, non-scalar]
+        else:
+            k = "?"
+            try:
+
+                for k in data.index:
+                    data.at[k] = data.at[k].loc[rowkey]
+
+            except Exception as e:
+                raise type(e)(f"failed for column {k}: " + str(e)) from e
+
+            # .loc[scalar, non-scalar] -> column-indexed series
+            if pdextra.is_hashable(rowkey):
+                new = data
+
+            # .loc[non-scalar, non-scalar] -> dios
+            else:
+                new = self.obj.copy_empty(columns=False)
+                new._data = data
+
+        return new
+
+    def __setitem__(self, key, value):
+
+        rowkey, colkey = self._unpack_key(key)
+        if _is_dios_like(rowkey) or _is_dios_like(colkey):
+            raise ValueError("Cannot index with multi-dimensional key")
+
+        # .loc[any, scalar] - set on single column
+        if pdextra.is_hashable(colkey):
+
+            # .loc[dont-care, new-scalar] = val
+            if colkey not in self.obj.columns:
+                self.obj._insert(colkey, value)
+
+            # .loc[any, scalar] = multi-dim
+            elif _is_dios_like(value) or pdextra.is_nested_list_like(value):
+                raise ValueError("Incompatible indexer with multi-dimensional value")
+
+            # .loc[any, scalar] = val
+            else:
+                self._data.at[colkey].loc[rowkey] = value
+
+        # .loc[any, non-scalar] = any
+        else:
+            self._set_value_muli_column(rowkey, colkey, value, xloc="loc")
+
+
+# #############################################################################
+
+
+class _iLocIndexer(_Indexer):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+    def __getitem__(self, key):
+        rowkey, colkey = self._unpack_key(key)
+        if _is_dios_like(rowkey) or _is_dios_like(colkey):
+            raise ValueError("Cannot index with multidimensional key")
+
+        # simple optimisation
+        if pdextra.is_null_slice(rowkey) and pdextra.is_null_slice(colkey):
+            return self.obj.copy()
+
+        data = self._data.iloc[colkey].copy()
+
+        # .iloc[any, int] -> single series
+        # .iloc[int, int] -> single value
+        if pdextra.is_integer(colkey):
+            new = data.iloc[rowkey]
+
+        # .iloc[any, non-int]
+        else:
+            k = "?"
+            try:
+
+                for k in data.index:
+                    data.at[k] = data.at[k].iloc[rowkey]
+
+            except Exception as e:
+                raise type(e)(f"failed for column {k}: " + str(e)) from e
+
+            # .iloc[int, non-int] -> column-indexed series
+            if pdextra.is_integer(rowkey):
+                new = data
+
+            # .iloc[non-int, non-int] -> dios
+            else:
+                new = self.obj.copy_empty(columns=False)
+                new._data = data
+
+        return new
+
+    def __setitem__(self, key, value):
+        rowkey, colkey = self._unpack_key(key)
+        if _is_dios_like(rowkey) or _is_dios_like(colkey):
+            raise ValueError("Cannot index with multidimensional key")
+
+        # .iloc[any, int] = Any
+        if pdextra.is_integer(colkey):
+            if _is_dios_like(value) or pdextra.is_nested_list_like(value):
+                raise ValueError("Incompatible indexer with multi-dimensional value")
+            self._data.iat[colkey].iloc[rowkey] = value
+
+        # .iloc[any, non-int] = Any
+        else:
+            self._set_value_muli_column(rowkey, colkey, value, xloc="iloc")
+
+
+# #############################################################################
+
+
+class _aLocIndexer(_Indexer):
+    """align Indexer
+
+    Automatically align (alignable) indexer on all possible axis,
+    and handle indexing with non-existent or missing keys gracefully.
+
+    Also align (alignable) values before setting them with .loc
+    """
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self._usebool = True
+
+    def __call__(self, usebool=True):
+        """We are called if the user want to set `usebool=False', which make
+        boolean alignable indexer treat as non-boolean alignable indexer.
+
+        Explanation: A boolean dios indexer align its indices with the indices
+        of the receiving dios like a non-boolean dios indexer also would do.
+        Additionally all rows with False values are kicked too. To disable
+         that `usebool=False` can be given."""
+        self._usebool = usebool
+        return self
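+
+    # Usage sketch (illustrative): with a boolean dios ``mask``,
+    #   di.aloc[mask]                 # keep only entries where ``mask`` is True
+    #   di.aloc(usebool=False)[mask]  # ignore the values, align on indices only
+    # The second form is equivalent to giving the ellipsis as column key,
+    # i.e. ``di.aloc[mask, ...]``.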
+
+    def __getitem__(self, key):
+        rowkeys, colkeys, lowdim = self._unpack_key_aloc(key)
+        data = pd.Series(dtype="O", index=colkeys)
+        kws = dict(itype=self.obj.itype, cast_policy=self.obj._policy)
+
+        c = "?"
+        try:
+
+            for i, c in enumerate(data.index):
+                data.at[c] = self._data.at[c].loc[rowkeys[i]]
+
+        except Exception as e:
+            raise type(e)(f"failed for column {c}: " + str(e)) from e
+
+        if lowdim:
+            return data.squeeze()
+        else:
+            return self.obj._constructor(data=data, fastpath=True, **kws)
+
+    def __setitem__(self, key, value):
+        rowkeys, colkeys, _ = self._unpack_key_aloc(key)
+
+        def iter_self(colkeys, position=False):
+            c = "?"
+            try:
+
+                for i, c in enumerate(colkeys):
+                    dat = self._data.at[c]
+                    rk = rowkeys[i]
+                    if len(dat.loc[rk]) == 0:
+                        continue
+                    yield dat, rk, i if position else c
+
+            except Exception as e:
+                raise type(e)(f"failed for column {c}: " + str(e)) from e
+
+        # align columns, for rows use series.loc to align
+        if _is_dios_like(value):
+            colkeys = value.columns.intersection(colkeys)
+            for dat, rk, c in iter_self(colkeys):
+                dat.loc[rk] = value[c]
+
+        # no align, no merci
+        elif pdextra.is_nested_list_like(value):
+            if len(colkeys) != len(value):
+                raise ValueError(
+                    f"shape mismatch: values array of shape "
+                    f"(.., {len(value)}) could not "
+                    f"be broadcast to indexing result of "
+                    f"shape (.., {len(colkeys)})"
+                )
+            for dat, rk, i in iter_self(colkeys, position=True):
+                dat.loc[rk] = value[i]
+
+        # align rows by using series.loc
+        elif isinstance(value, pd.Series):
+            for dat, rk, _ in iter_self(colkeys):
+                dat.loc[rk] = value
+
+        # no align, no merci
+        else:
+            for dat, rk, _ in iter_self(colkeys):
+                dat.loc[rk] = value
+
+    def _unpack_key_aloc(self, key):
+        """
+        Return a list of row indexers and a list of existing(!) column labels.
+        Both lists always have the same length and may also both be empty.
+
+        Note:
+            The items of the row indexer list should be passed to pd.Series.loc[]
+        """
+        # if a single column-key is given, the caller may
+        # want to return a single Series, instead of a dios
+        lowdim = False
+
+        def keys_from_bool_dios_like(key):
+            if not _is_bool_dios_like(key):
+                raise ValueError("Must pass dios-like key with boolean values only.")
+            colkey = self.obj.columns.intersection(key.columns)
+            rowkey = []
+            for c in colkey:
+                b = key[c]
+                rowkey += [self._data.at[c].index.intersection(b[b].index)]
+            return rowkey, colkey, lowdim
+
+        def keys_from_dios_like(key):
+            colkey = self.obj.columns.intersection(key.columns)
+            rowkey = [self._data.at[c].index.intersection(key[c].index) for c in colkey]
+            return rowkey, colkey, lowdim
+
+        rowkey, colkey = self._unpack_key(key)
+
+        if _is_dios_like(colkey) or pdextra.is_nested_list_like(colkey):
+            raise ValueError("Could not index with multi-dimensional column key.")
+
+        # giving the ellipsis as column key, is an alias
+        # for giving `usebool=False`. see self.__call__()
+        if colkey is Ellipsis:
+            self._usebool = False
+            colkey = slice(None)
+
+        # .aloc[dios]
+        if _is_dios_like(rowkey):
+
+            if not pdextra.is_null_slice(colkey):
+                raise ValueError(
+                    f"Could not index with a dios-like indexer as rowkey,"
+                    f"and a column key of that type {type(colkey)}"
+                )
+            if self._usebool:
+                return keys_from_bool_dios_like(rowkey)
+            else:
+                return keys_from_dios_like(rowkey)
+
+        # handle gracefully: scalar
+        elif pdextra.is_hashable(colkey):
+            colkey = [colkey] if colkey in self.obj.columns else []
+            lowdim = True
+
+        # column-alignable: list-like, filter only existing columns
+        elif pdextra.is_list_like(colkey) and not pdextra.is_bool_indexer(colkey):
+            colkey = colkey.values if isinstance(colkey, pd.Series) else colkey
+            colkey = self.obj.columns.intersection(colkey)
+
+        # handle gracefully (automatically)
+        # just a simple optimisation
+        elif pdextra.is_null_slice(colkey):
+            colkey = self.obj.columns
+
+        # not alignable, fall back to .loc (boolean list/series, slice(..), etc.)
+        else:
+            colkey = self._data.loc[colkey].index
+
+        if len(colkey) == 0:  # (!) `if not colkey:` fails for pd.Index
+            return [], [], lowdim
+
+        rowkey = self._get_rowkey(rowkey, colkey)
+
+        return rowkey, colkey, lowdim
+
+    def _get_rowkey(self, rowkey, colkey, depth=0):
+
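+        # Normalize `rowkey` into a list of per-column row indexers, one entry
+        # for each label in `colkey`; each item is later passed to
+        # pandas.Series.loc on the respective column.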
+        if pdextra.is_nested_list_like(rowkey) and depth == 0:
+            rowkey = rowkey.values if isinstance(rowkey, pd.Series) else rowkey
+            if len(rowkey) != len(colkey):
+                raise ValueError(
+                    "Nested arrays indexer must have same (outer) "
+                    "length than the number of selected columns."
+                )
+            indexer = []
+            for i, c in enumerate(colkey):
+                # recurse to get the row indexer from inner element
+                indexer += self._get_rowkey(rowkey[i], [c], depth=depth + 1)
+            rowkey = indexer
+
+        # row-alignable: pd.Series(), align rows to every series in colkey (columns)
+        elif isinstance(rowkey, pd.Series):
+            if self._usebool and pdextra.is_bool_indexer(rowkey):
+                rowkey = [
+                    self._data.at[c].index.intersection(rowkey[rowkey].index)
+                    for c in colkey
+                ]
+            else:
+                rowkey = [
+                    self._data.at[c].index.intersection(rowkey.index) for c in colkey
+                ]
+
+        # handle gracefully: scalar, transform to row-slice
+        elif pdextra.is_hashable(rowkey):
+            rowkey = [slice(rowkey, rowkey)] * len(colkey)
+
+        # handle gracefully: list-like, filter only existing rows
+        # NOTE: dios.aloc[series.index] is processed here
+        elif pdextra.is_list_like(rowkey) and not pdextra.is_bool_indexer(rowkey):
+            rowkey = [self._data.at[c].index.intersection(rowkey) for c in colkey]
+
+        # not alignable
+        # the rowkey is processed by .loc in some way in
+        # the calling function (eg. slice(..), boolean list-like, etc.)
+        else:
+            rowkey = [rowkey] * len(colkey)
+
+        return rowkey
+
+
+# #############################################################################
+
+
+class _AtIndexer(_Indexer):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+    def _check_key(self, key):
+        if not (
+            isinstance(key, tuple)
+            and len(key) == 2
+            and pdextra.is_hashable(key[0])
+            and pdextra.is_hashable(key[1])
+        ):
+            raise KeyError(
+                f"{key}. `.at` takes exactly one scalar row-key "
+                "and one scalar column-key"
+            )
+
+    def __getitem__(self, key):
+        self._check_key(key)
+        return self._data.at[key[1]].at[key[0]]
+
+    def __setitem__(self, key, value):
+        self._check_key(key)
+        if _is_dios_like(value) or pdextra.is_nested_list_like(value):
+            raise TypeError(
+                ".at[] cannot be used to set multi-dimensional values, use .aloc[] instead."
+            )
+        self._data.at[key[1]].at[key[0]] = value
+
+
+# #############################################################################
+
+
+class _iAtIndexer(_Indexer):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+    def _check_key(self, key):
+        if not (
+            isinstance(key, tuple)
+            and len(key) == 2
+            and pdextra.is_integer(key[0])
+            and pdextra.is_integer(key[1])
+        ):
+            raise KeyError(
+                f"{key} `.iat` takes exactly one integer positional "
+                f"row-key and one integer positional scalar column-key"
+            )
+
+    def __getitem__(self, key):
+        self._check_key(key)
+        return self._data.iat[key[1]].iat[key[0]]
+
+    def __setitem__(self, key, value):
+        self._check_key(key)
+        if _is_dios_like(value) or pdextra.is_nested_list_like(value):
+            raise TypeError(
+                ".iat[] cannot be used to set multi-dimensional values, use .aloc[] instead."
+            )
+        self._data.iat[key[1]].iat[key[0]] = value
diff --git a/dios/dios/lib.py b/dios/dios/lib.py
new file mode 100644
index 0000000000000000000000000000000000000000..fc5e41305f920708b9e8e47f28af8911bb953f1a
--- /dev/null
+++ b/dios/dios/lib.py
@@ -0,0 +1,420 @@
+import pandas as pd
+import warnings
+
+
+class ItypeWarning(RuntimeWarning):
+    pass
+
+
+class ItypeCastWarning(ItypeWarning):
+    pass
+
+
+class ItypeCastError(RuntimeError):
+    pass
+
+
+class __Itype:
+    def __init__(self):
+        raise RuntimeError("a Itype class does not allow instances of itself.")
+
+
+class DtItype(__Itype):
+    name = "datetime"
+    unique = True
+    subtypes = (pd.DatetimeIndex,)
+    min_pdindex = pd.DatetimeIndex([])
+
+
+class IntItype(__Itype):
+    name = "integer"
+    unique = True
+    subtypes = (pd.RangeIndex, pd.Int64Index, pd.UInt64Index, int)
+    min_pdindex = pd.Int64Index([])
+
+
+class FloatItype(__Itype):
+    name = "float"
+    subtypes = (pd.Float64Index, float)
+    unique = True
+    min_pdindex = pd.Float64Index([])
+
+
+# class MultiItype(__Itype):
+#     name = "multi"
+#     subtypes = (pd.MultiIndex, )
+#     unique = ??
+
+
+class NumItype(__Itype):
+    name = "numeric"
+    _subitypes = (IntItype, FloatItype)
+    subtypes = _subitypes + IntItype.subtypes + FloatItype.subtypes
+    unique = False
+    min_pdindex = pd.Float64Index([])
+
+
+class ObjItype(__Itype):
+    name = "object"
+    unique = False
+    _subitypes = (DtItype, IntItype, FloatItype, NumItype, str)
+    _otheritypes = (
+        pd.CategoricalIndex,
+        pd.IntervalIndex,
+        pd.PeriodIndex,
+        pd.TimedeltaIndex,
+        pd.Index,
+    )
+    subtypes = _subitypes + _otheritypes + DtItype.subtypes + NumItype.subtypes
+    min_pdindex = pd.Index([])
+
+
+def is_itype(obj, itype):
+    """Check if obj is a instance of the given itype or its str-alias was given"""
+
+    # todo: iter through itype as it could be a tuple, if called like ``is_itype(o, (t1,t2))``
+
+    # user gave a Itype, like ``DtItype``
+    if type(obj) == type and issubclass(obj, itype):
+        return True
+
+    # user gave a string, like 'datetime'
+    if isinstance(obj, str) and obj == itype.name:
+        return True
+
+    return False
+
+
+def is_itype_subtype(obj, itype):
+    """Check if obj is a subclass or a instance of a subclass of the given itype"""
+
+    # user gave a subtype, like ``pd.DatetimeIndex``
+    if type(obj) == type and issubclass(obj, itype.subtypes):
+        return True
+
+    # user gave an instance of a subtype, like ``pd.Series(..).index``
+    if isinstance(obj, itype.subtypes):
+        return True
+
+    return False
+
+
+def is_itype_like(obj, itype):
+    """Check if obj is a subclass or a instance of the given itype or any of its subtypes"""
+    return is_itype(obj, itype) or is_itype_subtype(obj, itype)
+
+
+def get_itype(obj):
+    """
+
+    Return the according Itype.
+
+    and return the according Itype
+    Parameters
+    ----------
+    obj : {itype string, Itype, pandas.Index, instance of pd.index}
+        get the itype fitting for the input
+
+    Examples
+    --------
+    >>> get_itype("datetime")
+    <class 'dios.lib.DtItype'>
+
+    >>> s = pd.Series(index=pd.to_datetime([]))
+    >>> get_itype(s.index)
+    <class 'dios.lib.DtItype'>
+
+    >>> get_itype(DtItype)
+    <class 'dios.lib.DtItype'>
+
+    >>> get_itype(pd.DatetimeIndex)
+    <class 'dios.lib.DtItype'>
+    """
+    if type(obj) == type and issubclass(obj, __Itype):
+        return obj
+
+    # check if it is the actual type, not a subtype
+    types = [DtItype, IntItype, FloatItype, NumItype, ObjItype]
+    for t in types:
+        if is_itype(obj, t):
+            return t
+
+    for t in types:
+        if is_itype_subtype(obj, t):
+            return t
+
+    raise ValueError(
+        f"{obj} is not a itype, nor any known subtype of a itype, nor a itype string alias"
+    )
+
+
+def _itype_eq(a, b):
+    return is_itype(a, b)
+
+
+def _itype_lt(a, b):
+    return is_itype_subtype(a, b)
+
+
+def _itype_le(a, b):
+    return is_itype_like(a, b)
+
+
+def _find_least_common_itype(iterable_of_series):
+    """Return the most specific itype that fits the indices of all given series."""
+    itypes = [NumItype, FloatItype, IntItype, DtItype]
+    tlist = [get_itype(s.index) for s in iterable_of_series]
+    found = ObjItype
+    if tlist:
+        # candidates are ordered from general to specific; the last candidate
+        # into which every series itype fits wins (for-else: loop ran without break)
+        for itype in itypes:
+            for t in tlist:
+                if _itype_le(t, itype):
+                    continue
+                break
+            else:
+                found = itype
+    return found
+
+
+################################################################################
+# Casting
+
+
+class CastPolicy:
+    force = "force"
+    save = "save"
+    never = "never"
+
+
+_CAST_POLICIES = [CastPolicy.force, CastPolicy.save, CastPolicy.never]
+
+
+def cast_to_itype(series, itype, policy=CastPolicy.save, err="raise", inplace=False):
+    """Cast a series (more precisely, the type of its index) to fit the itype of a dios.
+
+    Return the cast series if successful, None otherwise.
+
+    Note:
+        This is very basic number-casting, so in most cases, information from
+        the old index will be lost after the cast.
+    """
+
+    if policy not in _CAST_POLICIES:
+        raise ValueError(f"policy={policy}")
+    if err not in ["raise", "ignore"]:
+        raise ValueError(f"err={err}")
+    if not inplace:
+        series = series.copy()
+    itype = get_itype(itype)
+
+    if series.empty:
+        return pd.Series(index=itype.min_pdindex, dtype=series.dtype)
+
+    series.itype = get_itype(series.index)
+
+    # up-casting isn't necessary because a dios with a higher
+    # itype can always take lower itypes.
+    # series can have dt/int/float/mixed
+    # dt    -> dt           -> mixed
+    # int   -> int   -> num -> mixed
+    # float -> float -> num -> mixed
+    # mixed                 -> mixed
+    if _itype_le(series.itype, itype):  # a <= b
+        return series
+
+    e = f"A series index of type '{type(series.index)}' cannot be casted to Itype '{itype.name}'"
+
+    # cast any -> dt always fail.
+    if is_itype(itype, DtItype):
+        pass
+    else:
+        e += f", as forbidden by the cast-policy '{policy}'."
+
+    if policy == CastPolicy.never:
+        pass
+
+    elif policy == CastPolicy.force:
+        # cast any (dt/float/mixed) -> int
+        if is_itype(itype, IntItype):  # a == b
+            series.index = pd.RangeIndex(len(series))
+            return series
+        # cast any (dt/int/mixed) -> float
+        # cast any (dt/float/mixed) -> num
+        if is_itype(itype, FloatItype) or is_itype(itype, NumItype):  # a == b or a == c
+            series.index = pd.Float64Index(range(len(series)))
+            return series
+
+    elif policy == CastPolicy.save:
+        # cast int -> float
+        if is_itype(itype, FloatItype) and is_itype(series.itype, IntItype):
+            series.index = series.index.astype(float)
+            return series
+        # cast float -> int, but only if the cast index stays unique
+        if is_itype(itype, IntItype) and is_itype(series.itype, FloatItype):
+            series.index = series.index.astype(int)
+            if series.index.is_unique:
+                return series
+            e = (
+                f"The cast with policy {policy} from series index type '{type(series.index)}' to "
+                f"itype {itype.name} resulted in a non-unique index."
+            )
+        # casting mixed -> int/float always fails
+
+    if err == "raise":
+        raise ItypeCastError(e)
+    else:
+        return None
+
+
+################################################################################
+# OPTIONS
+
+
+class OptsFields:
+    """storage class for the keys in `dios_options`
+
+    Use like so: ``dios_options[OptsFields.X] = Opts.Y``.
+
+    See Also
+    --------
+        Opts: values for the options dict
+        dios_options: options dict for module
+    """
+
+    mixed_itype_warn_policy = "mixed_itype_policy"
+    disp_max_rows = "disp_max_rows"
+    disp_min_rows = "disp_min_rows"
+    disp_max_cols = "disp_max_vars"
+    dios_repr = "dios_repr"
+
+
+class Opts:
+    """storage class for string values for `dios_options`
+
+    Use like so: ``dios_options[OptsFields.X] = Opts.Y``.
+
+    See Also
+    --------
+        OptsFields: keys for the options dict
+        dios_options: options dict for module
+    """
+
+    itype_warn = "warn"
+    itype_err = "err"
+    itype_ignore = "ignore"
+    repr_aligned = "aligned"
+    repr_indexed = "indexed"
+
+
+class __DocDummy(dict):
+    pass
+
+
+dios_options = __DocDummy()
+dios_options.update(
+    **{
+        OptsFields.disp_max_rows: 60,
+        OptsFields.disp_min_rows: 10,
+        OptsFields.disp_max_cols: 10,
+        OptsFields.mixed_itype_warn_policy: Opts.itype_warn,
+        OptsFields.dios_repr: Opts.repr_indexed,
+    }
+)
+
+opdoc = f"""Options dictionary for module `dios`.
+
+Use like so: ``dios_options[OptsFields.X] = Opts.Y``.
+
+**Items**:
+ * {OptsFields.dios_repr}: {{'indexed', 'aligned'}} default: 'indexed'
+    default representation of a dios:
+     * `indexed`:  show every column with its own index
+     * `aligned`:  transform to a pandas.DataFrame with the indexes merged together.
+ * {OptsFields.disp_max_rows} : int
+    Maximum number of rows before the representation of a DictOfSeries
+    is truncated to `disp_min_rows`
+
+ * {OptsFields.disp_min_rows} : int
+    Minimum number of rows to display if `disp_max_rows` is exceeded
+
+ * {OptsFields.disp_max_cols} : int
+    Maximum number of columns before the representation is truncated
+
+ * {OptsFields.mixed_itype_warn_policy} : {{'warn', 'err', 'ignore'}}
+    How to inform the user about a mixed Itype
+
+See Also
+--------
+    OptsFields: keys for the options dict 
+    Opts: values for the options dict 
+
+"""
+dios_options.__doc__ = opdoc
+
+
+def _throw_MixedItype_err_or_warn(itype):
+    msg = (
+        f"Using '{itype.name}' as itype is not recommend. "
+        f"As soon as series with different index types are inserted,\n"
+        f"indexing and slicing will almost always fail. "
+    )
+
+    if dios_options[OptsFields.mixed_itype_warn_policy] in [
+        "ignore",
+        Opts.itype_ignore,
+    ]:
+        pass
+    elif dios_options[OptsFields.mixed_itype_warn_policy] in [
+        "error",
+        "err",
+        Opts.itype_err,
+    ]:
+        msg += "Suppress this error by specifying an unitary 'itype' or giving an 'index' to DictOfSeries."
+        raise ItypeCastError(msg)
+    else:
+        msg += "Silence this warning by specifying an unitary 'itype' or giving an 'index' to DictOfSeries."
+        warnings.warn(msg, ItypeWarning)
+    return
+
+
+def example_DictOfSeries():
+    """Return a example dios.
+
+    Returns
+    -------
+    DictOfSeries: an example
+
+    Examples
+    --------
+
+    >>> from dios import example_DictOfSeries
+    >>> di = example_DictOfSeries()
+    >>> di
+        a |      b |      c |     d |
+    ===== | ====== | ====== | ===== |
+    0   0 | 2    5 | 4    7 | 6   0 |
+    1   7 | 3    6 | 5   17 | 7   1 |
+    2  14 | 4    7 | 6   27 | 8   2 |
+    3  21 | 5    8 | 7   37 | 9   3 |
+    4  28 | 6    9 | 8   47 | 10  4 |
+    5  35 | 7   10 | 9   57 | 11  5 |
+    6  42 | 8   11 | 10  67 | 12  6 |
+    7  49 | 9   12 | 11  77 | 13  7 |
+    8  56 | 10  13 | 12  87 | 14  8 |
+    9  63 | 11  14 | 13  97 | 15  9 |
+    """
+    from dios import DictOfSeries
+
+    a = pd.Series(range(0, 70, 7))
+    b = pd.Series(range(5, 15, 1))
+    c = pd.Series(range(7, 107, 10))
+    d = pd.Series(range(0, 10, 1))
+
+    for i, s in enumerate([a, b, c, d]):
+        s.index += i * 2
+
+    di = DictOfSeries(dict(a=a, b=b, c=c, d=d))
+    return di.copy()
diff --git a/dios/dios/operators.py b/dios/dios/operators.py
new file mode 100644
index 0000000000000000000000000000000000000000..365992efefa1951aeea7da7ce2a561dbe156e45d
--- /dev/null
+++ b/dios/dios/operators.py
@@ -0,0 +1,42 @@
+# do not import dios-stuff here
+import operator as op
+
+
+_OP1_MAP = {
+    op.inv: "~",
+    op.neg: "-",
+    op.abs: "abs()",
+}
+
+_OP2_COMP_MAP = {
+    op.eq: "==",
+    op.ne: "!=",
+    op.le: "<=",
+    op.ge: ">=",
+    op.gt: ">",
+    op.lt: "<",
+}
+
+_OP2_BOOL_MAP = {
+    op.and_: "&",
+    op.or_: "|",
+    op.xor: "^",
+}
+_OP2_ARITH_MAP = {
+    op.add: "+",
+    op.sub: "-",
+    op.mul: "*",
+    op.pow: "**",
+}
+
+_OP2_DIV_MAP = {
+    op.mod: "%",
+    op.truediv: "/",
+    op.floordiv: "//",
+}
+
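+# Combined lookup table: operator function -> its symbol, e.g. ``op.add`` -> "+".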
+OP_MAP = _OP2_COMP_MAP.copy()
+OP_MAP.update(_OP2_BOOL_MAP)
+OP_MAP.update(_OP2_ARITH_MAP)
+OP_MAP.update(_OP2_DIV_MAP)
+OP_MAP.update(_OP1_MAP)
diff --git a/dios/dios/pandas_bridge.py b/dios/dios/pandas_bridge.py
new file mode 100644
index 0000000000000000000000000000000000000000..3bbc08b0e1519c46c10d96827b2d38dd255cb0d5
--- /dev/null
+++ b/dios/dios/pandas_bridge.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+
+__author__ = "Bert Palm"
+__email__ = "bert.palm@ufz.de"
+__copyright__ = "Copyright 2020, Helmholtz-Zentrum für Umweltforschung GmbH - UFZ"
+
+
+from pandas.core.common import (
+    is_bool_indexer,
+    is_null_slice,
+)
+from pandas.core.dtypes.common import (
+    is_nested_list_like,
+)
+from pandas.api.types import (
+    is_list_like,
+    is_hashable,
+    is_integer,
+    is_dict_like,
+    is_scalar,
+    # Unlike the pandas docstring example suggests, this returns False for lists:
+    # >>> is_iterator([1, 2, 3])
+    # False
+    is_iterator,
+)
diff --git a/dios/docs/.gitignore b/dios/docs/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..59b03a42eb191d1bf06f45dd6cfb3be394bbe06e
--- /dev/null
+++ b/dios/docs/.gitignore
@@ -0,0 +1,8 @@
+# ignore everything
+_api
+_build
+_static
+*.automodsumm
+
+
+
diff --git a/dios/docs/Makefile b/dios/docs/Makefile
new file mode 100644
index 0000000000000000000000000000000000000000..90e4cc6d6ce8338a0d7eacf2f93e96e8e3f2e6a2
--- /dev/null
+++ b/dios/docs/Makefile
@@ -0,0 +1,25 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS    ?=
+SPHINXBUILD   ?= sphinx-build
+SOURCEDIR     = .
+BUILDDIR      = _build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile clean
+
+clean:
+	rm -rf _build _static _api
+	rm -f *.automodsumm
+	mkdir _static
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option.  $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/dios/docs/conf.py b/dios/docs/conf.py
new file mode 100644
index 0000000000000000000000000000000000000000..52d7f39053fdcde753bf26ed2e1ba651ecb12128
--- /dev/null
+++ b/dios/docs/conf.py
@@ -0,0 +1,91 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# This file only contains a selection of the most common options. For a full
+# list see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+# -- Path setup --------------------------------------------------------------
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#
+import os
+import sys
+
+sys.path.insert(0, os.path.abspath(".."))
+
+
+# -- Project information -----------------------------------------------------
+
+project = "dios"
+copyright = "2020, Bert Palm"
+author = "Bert Palm"
+
+
+# -- General configuration ---------------------------------------------------
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    "sphinx.ext.autodoc",
+    "sphinx.ext.autosummary",
+    # "sphinx.ext.doctest",
+    # "sphinx.ext.extlinks",
+    # "sphinx.ext.todo",
+    # "sphinx.ext.intersphinx",
+    # "sphinx.ext.coverage",
+    # "sphinx.ext.mathjax",
+    # "sphinx.ext.ifconfig",
+    "sphinx.ext.autosectionlabel",
+    # link source code
+    "sphinx.ext.viewcode",
+    # add support for NumPy style docstrings
+    "sphinx.ext.napoleon",
+    # doc the whole module
+    "sphinx_automodapi.automodapi",
+    "sphinxcontrib.fulltoc",
+    # markdown sources support
+    "recommonmark",
+    "sphinx_markdown_tables",
+]
+numpydoc_show_class_members = False
+automodsumm_inherited_members = True
+automodapi_inheritance_diagram = False
+automodapi_toctreedirnm = "_api"
+# automodsumm_writereprocessed = True
+autosectionlabel_prefix_document = True
+
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This pattern also affects html_static_path and html_extra_path.
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
+
+source_suffix = [".rst", ".md"]
+
+
+# -- Options for HTML output -------------------------------------------------
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = "sphinx"
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+html_theme = "nature"
+
+# use pandas theme
+# html_theme = "pydata_sphinx_theme"
+
+
+# html_theme_options = {
+# }
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
diff --git a/dios/docs/dios_api.rst b/dios/docs/dios_api.rst
new file mode 100644
index 0000000000000000000000000000000000000000..976d52c43c1ef9d02d9a55855808ff08cd2a60cd
--- /dev/null
+++ b/dios/docs/dios_api.rst
@@ -0,0 +1,11 @@
+
+API
+====
+
+.. automodapi:: dios
+   :include-all-objects:
+   :no-heading:
+
+
+
+
diff --git a/dios/docs/doc_cookbook.md b/dios/docs/doc_cookbook.md
new file mode 100644
index 0000000000000000000000000000000000000000..f32a52968935c927fca45591a1004663e19e37b8
--- /dev/null
+++ b/dios/docs/doc_cookbook.md
@@ -0,0 +1,25 @@
+Cookbook
+=========
+
+Recipes
+-------
+- select common rows from all columns
+- align columns to another column
+- align columns to a given index
+- align dios with dios
+- get/set values by condition
+- apply a value to multiple columns
+- [Broadcast array-likes to multiple columns](#broadcast-array-likes-to-multiple-columns)
+- apply an array-like value to multiple columns
+- nan-policy - mask vs. drop values, when nan's are inserted (mv to Readme ??)
+- itype - when to use, pitfalls and best practice
+- changing the index of series' in dios (one, some, all)
+- changing the dtype of series' in dios (one, some, all)
+- changing properties of series' in dios (one, some, all)
+
+**T_O_D_O**
+
+
+Broadcast array-likes to multiple columns
+-----------------------------------------
+**T_O_D_O**
diff --git a/dios/docs/doc_indexing.md b/dios/docs/doc_indexing.md
new file mode 100644
index 0000000000000000000000000000000000000000..ae67947ecbee8f5dda1577bdf67dd4f2ebef9b42
--- /dev/null
+++ b/dios/docs/doc_indexing.md
@@ -0,0 +1,525 @@
+Pandas-like indexing
+====================
+
+`[]` and `.loc[]`, `.iloc[]` and `.at[]`, `.iat[]` should behave exactly like 
+their counterparts from pandas.DataFrame. They can take as indexer 
+- lists, array-like objects and in general all iterables 
+- boolean lists and iterables
+- slices
+- scalars and any hashable object
+
+Most indexers are directly passed to the underlying column-series or row-series, depending 
+on the position of the indexer and the complexity of the operation. For `.loc`, `.iloc`, `.at` 
+and `.iat` the first position is the *row indexer*, the second the *column indexer*. The second 
+can be omitted and defaults to `slice(None)`. Examples:
+- `di.loc[[1,2,3], ['a']]` : select labels 1,2,3 from column a
+- `di.iloc[[1,2,3], [0,3]]` : select positions 1,2,3 from the columns 0 and 3
+- `di.loc[:, 'a':'c']` : select all rows from columns a to c 
+- `di.at[4,'c']` : select the element with label 4 in column c
+- `di.loc[:]` -> `di.loc[:,:]` : select everything. 
+
+Scalar indexing always returns a pandas.Series if the other indexer is a non-scalar. If both indexers
+are scalars, the element itself is returned. In all other cases a dios is returned. 
+For more pandas-like indexing magic and the differences between the indexers, 
+see the [pandas documentation](https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html).
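+
+A short sketch of the resulting types (using the example dios `di` defined further below):
+
+```
+>>> di.loc[:, 'a']        # scalar column key, non-scalar row key -> pandas.Series
+>>> di.loc[2, 'a']        # two scalar keys -> the single element itself
+>>> di.loc[:, ['a','c']]  # any other combination -> dios
+```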
+
+>**Note:**
+>
+>In contrast to pandas.DataFrame, `.loc[:]` and `.loc[:, :]` always behave identically. The same applies to `iloc` and
+>[`aloc`](#the-special-indexer-aloc). For example, given two pandas.DataFrames `df1` and `df2` with different columns,
+>`df1.loc[:, :] = df2` does align columns, but `df1.loc[:] = df2` does **not**. 
+>
+>Whether this is the desired behavior or a bug, I couldn't verify so far. -- Bert Palm 
+
+**2D-indexer**
+
+`dios[boolean dios-like]` (as single key) - dios accepts boolean 2D-indexers (a boolean pandas.DataFrame 
+or a boolean Dios). 
+
+Columns and rows from the indexer align with the dios. 
+This means that only matching columns are selected, and within these columns rows are selected where 
+i) the indices match and ii) the value in the indexer-bool-dios is True. There is no difference between
+missing indices and present indices holding False values.
+
+Values from unselected rows and columns are dropped, but empty columns are still preserved, 
+with the effect that the resulting Dios always has the same column dimension as the initial dios. 
+
+>**Note:**
+>This is the exact same behavior as pandas.DataFrame's handling of 2D-indexers, except that pandas.DataFrame 
+>fills numpy.nan's at missing locations and therefore also fills up whole missing columns with numpy.nan's.
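+
+A short sketch (again with the example dios `di` defined further below):
+
+```
+>>> mask = di > 10   # a boolean dios with the same columns as di
+>>> di[mask]         # rows where the mask is True are kept, all columns are preserved
+```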
+
+**setting values**
+
+Setting values with `[]` and `.loc[]`, `.iloc[]` and `.at[]`, `.iat[]` works like in pandas. 
+With `.at`/`.iat` only single items can be set; for the others the
+right hand side values can be:
+ - *scalars*: these are broadcast to the selected positions
+ - *lists*: the length of the list must match the number of indexed columns. The items can be everything that 
+    can be applied to a series with the respective indexing method (`loc`, `iloc`, `[]`).
+ - *dios*: the number of columns must match the number of indexed columns - columns do *not* align, 
+    they are just iterated. 
+    Rows do align. Rows that are present on the right but not on the left are ignored. 
+    Rows that are present on the left (bear in mind: these rows were explicitly chosen for writing!), but not present
+    on the right, are filled with `NaN`s, like in pandas.
+ - *pandas.Series*: the column indexer must be a scalar(!); the series is passed down and set with `loc`, `iloc` or `[]` 
+    of the receiving pandas.Series, where it may align, depending on the method. 
+
+**Examples:**
+
+- `dios.loc[2:5, 'a'] = [1,2,3]` is the same as `a=dios['a']; a.loc[2:5]=[1,2,3]; dios['a']=a`
+- `dios.loc[2:5, :] = 99` : set 99 on rows 2 to 5 on all columns
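+
+A further sketch, assuming a hypothetical dios `other` that has exactly two columns:
+
+```
+>>> di.loc[2:5, ['a','b']] = other                        # columns taken in order, rows align
+>>> di.loc[2:5, 'c'] = pd.Series([0,1,2], index=[2,3,4])  # series passed down to column c
+```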
+
+Special indexer `.aloc`
+========================
+
+In addition to the pandas-like indexers we have an `.aloc[..]` (align locator) indexing method. 
+Unlike with `.iloc` and `.loc`, indexers fully align if possible, and 1D-array-likes can be broadcast 
+to multiple columns at once. This method also handles missing indexer-items gracefully. 
+It is used like `.loc`, so a single indexer (`.aloc[indexer]`) or a tuple of row-indexer and 
+column-indexer (`.aloc[row-indexer, column-indexer]`) can be given. It can also handle boolean and *non-boolean*
+2D-indexers.
+
+The main **purpose** of `.aloc` is:
+- to select gracefully, so that rows or columns given as indexer, but not existing, do not raise an error
+- to align series/dios-indexers 
+- vertical broadcasting, i.e. setting multiple columns at once with a list-like value
+
+Aloc usage
+----------
+
+`aloc` is *called* like `loc`, with a single key that acts as row indexer (`aloc[rowkey]`) or with a tuple of
+row indexer and column indexer (`aloc[rowkey, columnkey]`). A 2D-indexer (like a dios or df) can also be given, but 
+only as a single key, like `.aloc[2D-indexer]`, or with the special column key `...`, 
+the ellipsis (`.aloc[2D-indexer, ...]`). The ellipsis may change how the 2D-indexer is
+interpreted, but this will be explained [later](#the-power-of-2d-indexer) in detail.
+
+If a normal (non-2D) row indexer is given, but no column indexer, the latter defaults to `:` aka 
+`slice(None)`, so `.aloc[row-indexer]` becomes `.aloc[row-indexer, :]`, which means that all columns are used.
+In general, a normal row-indexer is applied to every column that was chosen by the column indexer, but to 
+each column separately.
+
+So maybe a first example gives a rough idea:
+```
+>>> s = pd.Series([11] * 4 )
+>>> di = DictOfSeries(dict(a=s[:2]*6, b=s[2:4]*7, c=s[:2]*8, d=s[1:3]*9))
+>>> di
+    a |     b |     c |     d | 
+===== | ===== | ===== | ===== | 
+0  66 | 2  77 | 0  88 | 1  99 | 
+1  66 | 3  77 | 1  88 | 2  99 | 
+
+
+>>> di.aloc[[1,2], ['a', 'b', 'd', 'x']]
+    a |     b |     d | 
+===== | ===== | ===== | 
+1  66 | 2  77 | 1  99 | 
+      |       | 2  99 | 
+```
+
+The return type
+----------------
+
+Unlike with the other two indexer methods `loc` and `iloc`, it is not possible to get a single item returned; 
+the return type is either a pandas.Series, if the column-indexer is a single key (eg. `'a'`), or a dios otherwise.
+The row-indexer does not play any role in the choice of the return type.
+
+> **Note for the curious:** 
+> 
+> This is because a scalar row key (`.aloc[key]`) is translated to `.loc[key:key]` under the hood.
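+
+A short sketch:
+
+```
+>>> di.aloc[3:, 'a']    # single column key -> pandas.Series
+>>> di.aloc[3:, ['a']]  # list column key   -> dios, even with just one column
+>>> di.aloc[3, 'a']     # the row key plays no role -> still a pandas.Series
+```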
+
+Indexer types
+-------------
+In the following, the `.aloc`-specific indexers are listed. Any indexer that is not listed below (slice, boolean lists, ...), 
+but is known to work with `.loc`, is treated as if it was passed to `.loc`, as it actually is under the hood.
+
+Some indexers are linked to later sections, where a more detailed explanation and examples are given.
+
+*Special [column indexers](#select-columns-gracefully) are:*
+- *list / array-like* (or any iterable object): Only labels that are present in the columns are used, others are 
+   ignored. 
+- *pd.Series* : `.values` are taken from the series and handled like a *list*.
+- *scalar* (or any hashable obj) : Select a single column, if the label is present, otherwise nothing. 
+
+
+*Special [row indexers](#selecting-rows-a-smart-way) are:*
+- *list / array-like* (or any iterable object): Only rows whose indices are present in the index of the column are 
+   used, others are ignored. A dios is returned. 
+- *scalar* (or any hashable obj) : Select a single row from a column, if the value is present in the index of 
+   the column, otherwise nothing is selected. [1]
+- *pd.Series* : align the index of the given Series with the column, which means only common indices are used. The 
+   actual values of the series are ignored(!).
+- *boolean pd.Series* : like *pd.Series*, but only True values are evaluated. 
+   False values are equivalent to missing indices. To treat a boolean series as a *normal* indexer series, as described
+   above, one can use `.aloc(usebool=False)[boolean pd.Series]`.
+   
+
+*Special [2D-indexers](#the-power-of-2d-indexer) are:*
+- `.aloc[boolean dios-like]` : works the same as `di[boolean dios-like]` (see there). 
+   Brief: full align, select items where the index is present and the value is True.
+- `.aloc[dios-like, ...]` (with Ellipsis) : Align in columns and rows, ignore the values. Per common column,
+   the common indices are selected. The ellipsis forces `aloc` to ignore the values, so a boolean dios can be 
+   treated as a non-boolean one. Alternatively `.aloc(usebool=False)[boolean dios-like]` could be used.[2]
+- `.aloc[nested list-like]` : The inner lists are used as `aloc`-*list*-row-indexers (see there) on all columns. 
+   One list for one column, which implies that the outer list has the same length as the number of columns. 
+
+*Special handling of 1D-**values***
+
+Values that are list- or array-like, which includes pd.Series, are set on all selected columns. A pd.Series aligns
+like `s1.loc[:] = s2` does. See also the [cookbook](/docs/cookbook.md#broadcast-array-likes-to-multiple-columns).
+
+
+Aloc overview table
+---------------------
+
+| example | type | on  | like `.loc` | handling | conditions / hints | link |
+| ------- | ---- | --- | ----------- | -------- | ------------------ | ---- |
+|[Column indexer](#select-columns-gracefully)| 
+| `.aloc[any, 'a']`         | scalar               | columns |no | select graceful | - | [cols](#select-columns-gracefully)|
+| `.aloc[any, 'b':'z']`       | slice                | columns |yes| slice | - | [cols](#select-columns-gracefully)|
+| `.aloc[any, ['a','c']]`     | list-like            | columns |no | filter graceful | - | [cols](#select-columns-gracefully)|
+| `.aloc[any, [True,False]]`  | bool list-like       | columns |yes| take `True`'s | length must match nr of columns | [cols](#select-columns-gracefully)|
+| `.aloc[any, s]`             | Series        | columns |no | like list,  | only `s.values` are evaluated | [cols](#select-columns-gracefully)|
+| `.aloc[any, bs]`            | bool Series   | columns |yes| like bool-list | see there | [cols](#select-columns-gracefully)|
+|[Row indexer](#selecting-rows-a-smart-way)|  
+| `.aloc[7, any]`             | scalar               | rows    |no | translate to `.loc[key:key]` | - | [rows](#selecting-rows-a-smart-way) |
+| `.aloc[3:42, any]`          | slice                | rows    |yes| slice | - | | 
+| `.aloc[[1,2,24], any]`      | list-like            | rows    |no | filter graceful | - | [rows](#selecting-rows-a-smart-way) |
+| `.aloc[[True,False], any]`  | bool list-like       | rows    |yes| take `True`'s | length must match the length of all (selected) columns | [blist](#boolean-array-likes-as-row-indexer)|
+| `.aloc[s, any]`             | Series        | rows    |no | like `.loc[s.index]` | - | [ser](#pandasseries-and-boolean-pandasseries-as-row-indexer) |
+| `.aloc[bs, any]`            | bool Series   | rows    |no | align + just take `True`'s  | evaluate `usebool`-keyword |  [ser](#pandasseries-and-boolean-pandasseries-as-row-indexer)|
+| `.aloc[[[s],[1,2,3]], any]` | nested list-like     | both    | ? | one row-indexer per column | outer length must match nr of (selected) columns | [nlist](#nested-lists-as-row-indexer) |
+|[2D-indexer](#the-power-of-2d-indexer)| 
+| `.aloc[di]`                 | dios-like            | both    |no | full align  | - | |
+| `.aloc[di, ...]`            | dios-like            | both    |no | full align | ellipsis has no effect | |
+| `.aloc[di>5]`               | bool dios-like       | both    |no | full align + take `True`'s | evaluate `usebool`-keyword | |
+| `.aloc[di>5, ...]`          | (bool) dios-like     | both    |no | full align, **no** bool evaluation | - | |
+
+Example dios
+------------
+
+The Dios used in the examples, unless stated otherwise, looks like so:
+
+```
+>>> dictofser
+    a |      b |      c |     d | 
+===== | ====== | ====== | ===== | 
+0   0 | 2    5 | 4    7 | 6   0 | 
+1   7 | 3    6 | 5   17 | 7   1 | 
+2  14 | 4    7 | 6   27 | 8   2 | 
+3  21 | 5    8 | 7   37 | 9   3 | 
+4  28 | 6    9 | 8   47 | 10  4 | 
+5  35 | 7   10 | 9   57 | 11  5 | 
+6  42 | 8   11 | 10  67 | 12  6 | 
+7  49 | 9   12 | 11  77 | 13  7 | 
+8  56 | 10  13 | 12  87 | 14  8 | 
+```
+
+or the short version:
+
+```
+>>> di
+    a |    b |     c |     d | 
+===== | ==== | ===== | ===== | 
+0   0 | 2  5 | 4   7 | 6   0 | 
+1   7 | 3  6 | 5  17 | 7   1 | 
+2  14 | 4  7 | 6  27 | 8   2 | 
+3  21 | 5  8 | 7  37 | 9   3 | 
+4  28 | 6  9 | 8  47 | 10  4 | 
+```
+
+The example dios can be obtained via a function:
+
+```
+from dios import example_DictOfSeries
+mydios = example_DictOfSeries()
+```
+
+or generated manually like so:
+
+``` 
+>>> a = pd.Series(range(0, 70, 7))
+>>> b = pd.Series(range(5, 15, 1))
+>>> c = pd.Series(range(7, 107, 10))
+>>> d = pd.Series(range(0, 10, 1))
+>>> for i, s in enumerate([a,b,c,d]): s.index += i*2
+>>> dictofser = DictOfSeries(dict(a=a, b=b, c=c, d=d))
+>>> di = dictofser[:5]
+```
+
+
+Select columns, gracefully
+---------------------------
+
+One can use `.aloc[:, key]` to select **single columns** gracefully. 
+The underlying pandas.Series is returned, if the key exists. 
+Otherwise an empty pandas.Series with `dtype=object` is returned.
+
+```
+>>> di.aloc[:, 'a']
+0     0
+1     7
+2    14
+3    21
+4    28
+Name: a, dtype: int64
+
+>>> di.aloc[:, 'x']
+Series([], dtype: object)
+```
+
+
+**Multiple columns**
+
+Just like selecting *single columns gracefully*, but with an array-like indexer. 
+A dios is returned, with a subset of the existing columns. 
+If no key is present an empty dios is returned. 
+
+```
+>>> di.aloc[:, ['c', 99, None, 'a', 'x', 'y']]
+    a |     c | 
+===== | ===== | 
+0   0 | 4   7 | 
+1   7 | 5  17 | 
+2  14 | 6  27 | 
+3  21 | 7  37 | 
+4  28 | 8  47 | 
+
+>>> di.aloc[:, ['x', 'y']]
+Empty DictOfSeries
+Columns: []
+
+>>> s = pd.Series(dict(a='a', b='x', c='c', foo='d'))
+>>> di.aloc[:, s]
+    a |     c |     d | 
+===== | ===== | ===== | 
+0   0 | 4   7 | 6   0 | 
+1   7 | 5  17 | 7   1 | 
+2  14 | 6  27 | 8   2 | 
+3  21 | 7  37 | 9   3 | 
+4  28 | 8  47 | 10  4 | 
+```
+
+**Boolean indexing, indexing with pd.Series and slice indexer**
+
+A **boolean indexer**, for example `[True, False, True, False]`, must have the same length as the number 
+of columns; then only columns where the indexer holds a `True` value are selected.
+
+If the key is a **pandas.Series**, its *values* are used for indexing; in particular the Series's index is ignored. If a 
+series has boolean values it's treated like a boolean indexer, otherwise it's treated as an array-like indexer.
+
+An easy way to select all columns is to use null-**slice**s, like `.aloc[:,:]` or even simpler `.aloc[:]`. 
+This is just like one would do with `loc` or `iloc`. Of course slicing with boundaries also works, 
+eg `.aloc[:, 'a':'f']`. 
+
+>**See also**
+> - [pandas slicing ranges](https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#slicing-ranges) 
+> - [pandas boolean indexing](https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#boolean-indexing)
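+
+A short sketch with the example dios `di` (columns `a` to `d`):
+
+```
+>>> di.aloc[:, [True, False, True, False]]  # boolean list -> columns a and c
+>>> di.aloc[:, pd.Series(['a', 'x', 'c'])]  # only the values are used -> columns a and c
+>>> di.aloc[:, 'b':'d']                     # slicing with boundaries -> columns b, c and d
+```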
+
+
+Selecting Rows a smart way
+--------------------------
+
+For scalar and array-like indexer with label values, the keys are handled gracefully, just like with 
+array-like column indexers.
+
+``` 
+>>> di.aloc[1]
+   a |       b |       c |       d | 
+==== | ======= | ======= | ======= | 
+1  7 | no data | no data | no data | 
+
+>>> di.aloc[99]
+Empty DictOfSeries
+Columns: ['a', 'b', 'c', 'd']
+
+>>> di.aloc[[3,6,7,18]]
+    a |    b |     c |    d | 
+===== | ==== | ===== | ==== | 
+3  21 | 3  6 | 6  27 | 6  0 | 
+      | 6  9 | 7  37 | 7  1 | 
+```
+
+The length of columns can differ:
+``` 
+>>> di.aloc[[3,6,7,18]].aloc[[3,6]]
+    a |    b |     c |    d | 
+===== | ==== | ===== | ==== | 
+3  21 | 3  6 | 6  27 | 6  0 | 
+      | 6  9 |       |      | 
+```
+
+Boolean array-likes as row indexer
+---------------------------------
+
+For array-like indexers that hold boolean values, the length of the indexer and
+the length of all column(s) to index must match.
+``` 
+>>> di.aloc[[True,False,False,True,False]]
+    a |    b |     c |    d | 
+===== | ==== | ===== | ==== | 
+0   0 | 2  5 | 4   7 | 6  0 | 
+3  21 | 5  8 | 7  37 | 9  3 | 
+```
+If the length does not match, an `IndexError` is raised:
+```
+>>> di.aloc[[True,False,False]]
+Traceback (most recent call last):
+  ...
+  IndexError: failed for column a: Boolean index has wrong length: 3 instead of 5
+```
+
+This can be tricky, especially if columns have different lengths:
+``` 
+>>> difflen
+    a |    b |     c |    d | 
+===== | ==== | ===== | ==== | 
+0   0 | 2  5 | 4   7 | 6  0 | 
+1   7 | 3  6 | 6  27 | 7  1 | 
+2  14 | 4  7 |       | 8  2 | 
+
+>>> difflen.aloc[[False,True,False]]
+Traceback (most recent call last):
+  ...
+  IndexError: Boolean index has wrong length: 3 instead of 2
+```
+
+pandas.Series and boolean pandas.Series as row indexer
+------------------------------------------------------
+
+When using a pandas.Series as row indexer with `aloc`, all its magic comes to light.
+The index of the given series aligns itself with the index of each column separately and is in this way used as a filter.
+
+```
+>>> s = di['b'] + 100
+>>> s
+2    105
+3    106
+4    107
+5    108
+6    109
+Name: b, dtype: int64
+
+>>> di.aloc[s]
+    a |    b |     c |    d | 
+===== | ==== | ===== | ==== | 
+2  14 | 2  5 | 4   7 | 6  0 | 
+3  21 | 3  6 | 5  17 |      | 
+4  28 | 4  7 | 6  27 |      | 
+      | 5  8 |       |      | 
+      | 6  9 |       |      | 
+```
+
+As seen in the example above, the series' values are ignored completely. The functionality  
+is similar to `s1.loc[s2.index]`, where `s1` and `s2` are pandas.Series, `s2` is the indexer and `s1` is one column 
+after the other.
+
+If the indexer series holds boolean values, these are **not** ignored. 
+The series aligns the same way as explained above, but additionally only the `True` values are evaluated. 
+Thus `False` values are treated like missing indices. The behavior here is analogous to `s1.loc[s2[s2].index]`.
+
+``` 
+>>> boolseries = di['b'] > 6
+>>> boolseries
+2    False
+3    False
+4     True
+5     True
+6     True
+Name: b, dtype: bool
+
+>>> di.aloc[boolseries]
+    a |    b |     c |    d | 
+===== | ==== | ===== | ==== | 
+4  28 | 4  7 | 4   7 | 6  0 | 
+      | 5  8 | 5  17 |      | 
+      | 6  9 | 6  27 |      | 
+```
+
+Evaluating boolean values is a very handy feature, as it can easily be used with multiple conditions and also fits
+nicely with writing those as one-liners:
+
+``` 
+>>> di.aloc[di['b'] > 6]
+    a |    b |     c |    d | 
+===== | ==== | ===== | ==== | 
+4  28 | 4  7 | 4   7 | 6  0 | 
+      | 5  8 | 5  17 |      | 
+      | 6  9 | 6  27 |      | 
+
+>>> di.aloc[(di['a'] > 6) & (di['b'] > 6)]
+    a |    b |    c |       d | 
+===== | ==== | ==== | ======= | 
+4  28 | 4  7 | 4  7 | no data | 
+```
+
+
+>**Note:**
+>
+>Nevertheless, something like `di.aloc[di['a'] > di['b']]` does not work, because the comparison fails 
+>as long as the two series objects do not have the same index. But maybe one wants to check out 
+>[DictOfSeries.index_of()](https://dios.readthedocs.io/en/latest/_api/dios.DictOfSeries.html#dios.DictOfSeries.index_of).
+
+
+Nested-lists as row indexer
+---------------------------
+
+It is possible to pass different array-like indexers to different columns, by using nested lists as indexer. 
+The outer list's length must match the number of columns of the dios. The items of the outer list all must be
+array-like and not further nested, for example lists, (boolean) pandas.Series, boolean lists, numpy.arrays...
+Every inner list-like item is applied as row indexer to the corresponding column. 
+
+``` 
+>>> di
+    a |    b |     c |     d | 
+===== | ==== | ===== | ===== | 
+0   0 | 2  5 | 4   7 | 6   0 | 
+1   7 | 3  6 | 5  17 | 7   1 | 
+2  14 | 4  7 | 6  27 | 8   2 | 
+3  21 | 5  8 | 7  37 | 9   3 | 
+4  28 | 6  9 | 8  47 | 10  4 | 
+
+>>> di.aloc[ [di['a'], [True,False,True,False,False], [], [7,8,10]] ]
+    a |    b |       c |     d | 
+===== | ==== | ======= | ===== | 
+0   0 | 2  5 | no data | 7   1 | 
+1   7 | 4  7 |         | 8   2 | 
+2  14 |      |         | 10  4 | 
+3  21 |      |         |       | 
+4  28 |      |         |       | 
+
+>>> ar = np.array([2,3])
+>>> di.aloc[[ar, ar+1, ar+2, ar+3]]
+    a |    b |     c |    d | 
+===== | ==== | ===== | ==== | 
+2  14 | 3  6 | 4   7 | 6  0 | 
+3  21 | 4  7 | 5  17 |      | 
+```
+
+Even though this looks like a 2D-indexer, which is explained in the next section, it is not. 
+In contrast to the 2D-indexer, we can also provide a column key to pre-filter the columns.
+
+```
+>>> di.aloc[[ar, ar+1, ar+3], ['a','b','d']]
+    a |    b |    d | 
+===== | ==== | ==== | 
+2  14 | 3  6 | 6  0 | 
+3  21 | 4  7 |      | 
+```
+
+
+
+The power of 2D-indexer
+-----------------------
+
+Overview: 
+
+|                      |        |
+| ------               | ------ |
+| `.aloc[bool-dios]`         | 1. align columns, 2. align rows, 3. just take `True`'s  -- [1] |
+| `.aloc[dios, ...]` (use Ellipsis)        | 1. align columns, 2. align rows, (3.) ignore values  -- [1] |
+[1] evaluate `usebool`-keyword
+
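+A short sketch of the difference:
+
+```
+>>> mask = di > 6
+>>> di.aloc[mask]                 # align columns and rows, then keep only the True positions
+>>> di.aloc[mask, ...]            # align columns and rows only, the boolean values are ignored
+>>> di.aloc(usebool=False)[mask]  # same effect as the ellipsis variant
+```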
+
+**T_O_D_O**
+
diff --git a/dios/docs/doc_itype.md b/dios/docs/doc_itype.md
new file mode 100644
index 0000000000000000000000000000000000000000..ea1d49f733b173a4799d9406f61a9932280d58fa
--- /dev/null
+++ b/dios/docs/doc_itype.md
@@ -0,0 +1,18 @@
+Itype
+=====
+
+DictOfSeries holds multiple series, and each series can have a different index length 
+and index type. Differing index lengths are either solved by some aligning magic, or simply fail if 
+aligning makes no sense (eg. assigning the very same list to series of different lengths, see `.aloc`).
+
+A bigger challenge is the type of the index. If one series has an alphabetical index and another one 
+a numeric index, selecting along columns can fail in every scenario. To keep track of the
+index types and to prohibit the insertion of a *non-fitting* index type, 
+we introduce the `itype`. This can be set on creation of a Dios and also changed during usage. 
+On a change of the itype, the indexes of all series in the dios are cast to the new fitting type,
+if possible. Different cast mechanisms are available. 
+
+If an itype prohibits certain types of indexes and a series with a non-fitting index type is inserted, 
+an implicit type cast is done (with or without a warning) or an error is raised. The warning/error policy
+can be adjusted via global options. 
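+
+A minimal sketch (using the `'datetime'` string alias; the exact constructor arguments may differ, see the API documentation):
+
+```
+>>> from dios import DictOfSeries
+>>> di = DictOfSeries(itype='datetime')   # only datetime-like indices should be inserted
+>>> di['a'] = pd.Series([1, 2], index=pd.date_range('2000-01-01', periods=2))  # fits
+>>> di['b'] = pd.Series([1, 2])           # int index: cast, warn or raise, depending on the policy
+```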
+
diff --git a/dios/docs/genindex.rst b/dios/docs/genindex.rst
new file mode 100644
index 0000000000000000000000000000000000000000..1f62f6e9ecdf2abbb93ecc808c98f21d7afa99a6
--- /dev/null
+++ b/dios/docs/genindex.rst
@@ -0,0 +1,5 @@
+
+.. dummy file to be able to link to the index
+
+Index
+=====
\ No newline at end of file
diff --git a/dios/docs/index.rst b/dios/docs/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..8bfa3a91a0627774a1690216bcb6489d3466f7f9
--- /dev/null
+++ b/dios/docs/index.rst
@@ -0,0 +1,56 @@
+.. dios documentation master file, created by
+   sphinx-quickstart on Sun Apr 19 02:36:37 2020.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+Dios Docs
+=========
+
+.. currentmodule:: dios
+
+The whole package :mod:`dios` is mainly a container for
+the class :class:`dios.DictOfSeries`. See
+
+.. toctree::
+
+   dios.DictOfSeries <_api/dios.DictOfSeries>
+
+.. toctree::
+   :hidden:
+
+   Repository <https://git.ufz.de/rdm/dios>
+   example DictOfSeries <_api/dios.example_DictOfSeries>
+
+
+Most magic happens in getting and setting elements.
+To select any combination of columns and rows,
+read the documentation about indexing:
+
+.. toctree::
+
+   doc_indexing
+
+.. toctree::
+
+   doc_cookbook
+
+For the idea behind the Itype concept and its usage read:
+
+.. toctree::
+
+   doc_itype
+
+For implemented methods and module functions,
+respectively the full module api, see:
+
+.. toctree::
+   :maxdepth: 2
+
+   dios_api
+
+or browse the Index.
+
+.. toctree::
+
+   genindex
+
diff --git a/dios/docs/make.bat b/dios/docs/make.bat
new file mode 100644
index 0000000000000000000000000000000000000000..2119f51099bf37e4fdb6071dce9f451ea44c62dd
--- /dev/null
+++ b/dios/docs/make.bat
@@ -0,0 +1,35 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+	set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=.
+set BUILDDIR=_build
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+	echo.
+	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+	echo.installed, then set the SPHINXBUILD environment variable to point
+	echo.to the full path of the 'sphinx-build' executable. Alternatively you
+	echo.may add the Sphinx directory to PATH.
+	echo.
+	echo.If you don't have Sphinx installed, grab it from
+	echo.http://sphinx-doc.org/
+	exit /b 1
+)
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd
diff --git a/dios/docs/requirements_sphinx.txt b/dios/docs/requirements_sphinx.txt
new file mode 100644
index 0000000000000000000000000000000000000000..d5bc791c87d490720b5910a238d776ac9370caa0
--- /dev/null
+++ b/dios/docs/requirements_sphinx.txt
@@ -0,0 +1,35 @@
+alabaster==0.7.12
+Babel==2.8.0
+certifi==2020.6.20
+chardet==3.0.4
+commonmark==0.9.1
+docutils==0.16
+idna==2.10
+imagesize==1.2.0
+importlib-metadata==1.7.0
+Jinja2==2.11.2
+Markdown==3.2.2
+MarkupSafe==1.1.1
+numpy==1.19.1
+packaging==20.4
+pandas==1.1.1
+Pygments==2.6.1
+pyparsing==2.4.7
+python-dateutil==2.8.1
+pytz==2020.1
+recommonmark==0.6.0
+requests==2.24.0
+six==1.15.0
+snowballstemmer==2.0.0
+Sphinx==3.2.1
+sphinx-automodapi==0.12
+sphinx-markdown-tables==0.0.15
+sphinxcontrib-applehelp==1.0.2
+sphinxcontrib-devhelp==1.0.2
+sphinxcontrib-fulltoc==1.2.0
+sphinxcontrib-htmlhelp==1.0.3
+sphinxcontrib-jsmath==1.0.1
+sphinxcontrib-qthelp==1.0.3
+sphinxcontrib-serializinghtml==1.1.4
+urllib3==1.25.10
+zipp==3.1.0
diff --git a/dios/profiling/__init__.py b/dios/profiling/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6252ac8ced4f8efaec94fe8702e7380bd67fcd16
--- /dev/null
+++ b/dios/profiling/__init__.py
@@ -0,0 +1,2 @@
+from .generate_testsets import *
+from .performance import find_index_range, gen_random_timestamps
diff --git a/dios/profiling/generate_testsets.py b/dios/profiling/generate_testsets.py
new file mode 100644
index 0000000000000000000000000000000000000000..38dee39c12d01fad327d61b98aa8cf7fa469d98b
--- /dev/null
+++ b/dios/profiling/generate_testsets.py
@@ -0,0 +1,122 @@
+import time
+
+import pandas as pd
+import numpy as np
+import datetime as dt
+from ..dios import DictOfSeries
+import pickle
+import os
+
+var_prefix = "var"
+
+
+def _gen_testset(rowsz, colsz, freq="1min", disalign=True, randstart=True):
+    df = pd.DataFrame()
+    dos = DictOfSeries()
+    start = dt.datetime.strptime("2000-01-01 00:00:00", "%Y-%m-%d %H:%M:%S")
+    times = pd.date_range(periods=rowsz, start=start, freq=freq)
+
+    frequ = freq.strip("0123456789")
+    freqv = int(freq[: -len(frequ)])
+
+    for i in range(colsz):
+
+        if randstart:
+            # generate random startpoint for each series
+            r = str(np.random.randint(int(rowsz * 0.05), int(rowsz * 0.6) + 2)) + frequ
+            st = start + pd.Timedelta(r)
+            times = pd.date_range(periods=rowsz, start=st, freq=freq)
+
+        if disalign:
+            if disalign == "random":
+                r = np.random.randint(1, i + 2)
+            else:
+                # total disalign
+                r = i
+            times += pd.Timedelta(f"{r}ns")
+
+        d = np.random.randint(1, 9, rowsz)
+        v = f"var{i}"
+        tmp = pd.DataFrame(index=times, data=d, columns=[v])
+        df = pd.merge(df, tmp, left_index=True, right_index=True, how="outer")
+        dos[v] = tmp.squeeze().copy()
+
+    return df, dos
+
+
+def get_random_df_and_dios(rowsz, colsz, freq="1min", disalign=True, randstart=True):
+    df, _, _, dios, *_ = get_testset(
+        rowsz, colsz, freq=freq, disalign=disalign, randstart=randstart
+    )
+    return df, dios
+
+
+def get_testset(
+    rows,
+    cols,
+    freq="1s",
+    disalign=True,
+    randstart=True,
+    storagedir=None,
+    noresult=False,
+):
+    if storagedir is None:
+        storagedir = os.path.dirname(__file__)
+        storagedir = os.path.join(storagedir, "testsets")
+
+    fname = f"set_f{freq}_d{disalign}_r{randstart}_dim{rows}x{cols}.pkl"
+    fpath = os.path.join(storagedir, fname)
+
+    # try to get pickled data
+    try:
+        with open(fpath, "rb") as fh:
+            if noresult:
+                return
+            tup = pickle.load(fh)
+
+            # file/data was present
+            return tup
+    except (pickle.UnpicklingError, FileNotFoundError):
+        pass
+
+    # generate testset(s)
+    df, dios = _gen_testset(
+        rowsz=rows, colsz=cols, freq=freq, disalign=disalign, randstart=randstart
+    )
+    df = df.sort_index(axis=0, level=0)
+    df_type_a = df.copy().stack(dropna=False).sort_index(axis=0, level=0).copy()
+    df_type_b = df.copy().unstack().sort_index(axis=0, level=0).copy()
+    tup = df, df_type_a, df_type_b, dios
+
+    # store testsets
+    with open(fpath, "wb") as fh:
+        pickle.dump(tup, fh)
+
+    if noresult:
+        return
+
+    return tup
+
+
+def gen_all(rrange, crange):
+    for r in rrange:
+        for c in crange:
+            print(r, " x ", c)
+            t0 = time.time()
+            get_testset(r, c, noresult=True)
+            t1 = time.time()
+            print(t1 - t0)
+
+
+if __name__ == "__main__":
+    # import time
+    #
+    # t0 = time.time()
+    # for i in range(7):
+    #     get_testset(10**i, 10)
+    # t1 = time.time()
+    # print(t1-t0)
+
+    rr = [10 ** r for r in range(1, 6)]
+    c = range(10, 60, 10)
+    gen_all(rr, c)
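For orientation, a minimal usage sketch of the test-set helpers added above; the sizes and the cache file name are illustrative assumptions, while the tuple layout follows `get_testset`:

```python
# usage sketch (assumed sizes); the pickle cache is written to
# dios/profiling/testsets/, e.g. set_f1min_dTrue_rTrue_dim1000x10.pkl
from dios.profiling.generate_testsets import get_testset, get_random_df_and_dios

# full tuple: wide df, stacked df (type a), unstacked df (type b), dios
df, df_a, df_b, dios = get_testset(1000, 10, freq="1min")

# convenience wrapper returning only the wide frame and the dios
df, dios = get_random_df_and_dios(1000, 10, disalign=False, randstart=True)
print(df.shape, len(dios.columns))
```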
diff --git a/dios/profiling/memory.py b/dios/profiling/memory.py
new file mode 100644
index 0000000000000000000000000000000000000000..2226d5d031848f50ff971b345b629b0bb4d5e204
--- /dev/null
+++ b/dios/profiling/memory.py
@@ -0,0 +1,101 @@
+import gc
+from .generate_testsets import get_random_df_and_dios
+
+
+def calc_mem(rows, cols, shifted=False, dtypesz=(64 / 8)):
+    if shifted:
+        idxsz = 8 * rows * cols
+        # as many additional NaNs are inserted as there are variables
+        rowsz = rows * cols * dtypesz
+    else:
+        idxsz = 8 * rows
+        rowsz = rows * dtypesz
+
+    return idxsz + rowsz * cols
+
+
+def bytes2hread(bytes):
+    i = 0
+    units = ["B", "kB", "MB", "GB", "TB"]
+    while bytes > 1000:
+        bytes /= 1024
+        i += 1
+        if i == 4:
+            break
+    return bytes, units[i]
+
+
+def rows_by_time(nsec, mdays):
+    """calc the number of values for one value every n seconds in m days
+    :param nsec: n seconds a value occur
+    :param mdays: this many days of data
+    :return: rows thats needed
+    """
+    return int((60 / nsec) * 60 * 24 * mdays)
+
+
+if __name__ == "__main__":
+
+    # dios      - linear in rows and columns, same size for r=10,c=100 or r=100,c=10
+    do_real_check = True
+    cols = 10
+    rows = 100000
+    # rows = rows_by_time(nsec=600, mdays=365*2)
+
+    mem = calc_mem(rows, cols, shifted=False)
+    memsh = calc_mem(rows, cols, shifted=True)
+
+    df, dios = get_random_df_and_dios(rows, cols, disalign=False, randstart=True)
+    dios_mem = dios.memory_usage()
+    print(f"dios:\n-----------")
+    print("mem: ", *bytes2hread(dios_mem))
+    print("entries:", sum([len(dios[e]) for e in dios]))
+    print()
+
+    ratio = (1 / (memsh - mem)) * dios_mem
+
+    mem = bytes2hread(mem)
+    memsh = bytes2hread(memsh)
+
+    print("df - best case\n---------")
+    print("mem: ", *mem)
+    print("entries:", rows)
+    print()
+    print("df - worst case\n---------")
+    print("mem :", *memsh)
+    print("entries:", rows * cols)
+
+    print()
+    print(f"dfbest, dios, dfworst: 0%, {round(ratio, 4)*100}%, 100% ")
+
+    if not do_real_check:
+        exit(0)
+
+    proveMeRight = False
+
+    if proveMeRight:
+        # best case
+        print()
+        print("best case proove")
+        dfb, _ = get_random_df_and_dios(rows, cols, disalign=False, randstart=False)
+        dfb.info(memory_usage="deep", verbose=False)
+
+    print()
+    print("rand start, same freq")
+    df.info(memory_usage="deep", verbose=False)
+    print("entries:", sum([len(df[e]) for e in df]))
+
+    print()
+    print("rand start, rand freq")
+    df, _ = get_random_df_and_dios(rows, cols, disalign="random", randstart=True)
+    df.info(memory_usage="deep", verbose=False)
+    print("entries:", sum([len(df[e]) for e in df]))
+
+    if proveMeRight:
+        # worst case
+        print()
+        print("worst case proove")
+        df, _ = get_random_df_and_dios(rows, cols, disalign=True, randstart=False)
+        df.info(memory_usage="deep", verbose=False)
+
+    gc.collect()
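The memory model in `calc_mem` is simple enough to sanity-check by hand; a short sketch of the arithmetic (the numbers are illustrative, not measured):

```python
# worked example of the simple memory model above (illustrative, not measured)
from dios.profiling.memory import calc_mem, bytes2hread, rows_by_time

rows, cols = 100_000, 10

best = calc_mem(rows, cols, shifted=False)   # shared index: 8*rows + rows*8*cols
worst = calc_mem(rows, cols, shifted=True)   # fully shifted: 8*rows*cols + rows*cols*8*cols

print(*bytes2hread(best))    # -> roughly 8.4 MB
print(*bytes2hread(worst))   # -> roughly 84 MB

# one value every 10 minutes over two years of data
print(rows_by_time(nsec=600, mdays=730))     # -> 105120 rows
```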
diff --git a/dios/profiling/performance.py b/dios/profiling/performance.py
new file mode 100644
index 0000000000000000000000000000000000000000..caac9a101660d9d26e8c3b5a9e88045d091fd0f8
--- /dev/null
+++ b/dios/profiling/performance.py
@@ -0,0 +1,204 @@
+import pandas as pd
+import numpy as np
+import time
+from .generate_testsets import get_testset, var_prefix
+
+profile_assignment = False
+
+idx = pd.IndexSlice
+rows = 0
+
+fir = ["var", "ts", "ass"]
+sec = ["df", "a", "b", "dios"]
+timingsdf = pd.DataFrame(columns=pd.MultiIndex.from_product([fir, sec]))
+
+
+def df_timmings(df, t0, t1, v1, v2):
+    _t0 = time.time()
+    a = df.loc[t0:t1, :]
+    _t1 = time.time()
+    b = df.loc[:, v1]
+    _t2 = time.time()
+    if profile_assignment:
+        df.loc[t0:t1, v1] = df.loc[t0:t1, v1] * 1111
+    _t3 = time.time()
+
+    timingsdf.at[rows, ("ts", "df")] += _t1 - _t0
+    timingsdf.at[rows, ("var", "df")] += _t2 - _t1
+    timingsdf.at[rows, ("ass", "df")] += _t3 - _t2
+    return a, b, df
+
+
+def a_timings(df, t0, t1, v1, v2):
+    _t0 = time.time()
+    a = df.loc[t0:t1, :]
+    _t1 = time.time()
+    b = df.loc[:, v1]
+    _t2 = time.time()
+    if profile_assignment:
+        df.loc[t0:t1, v1] = df.loc[t0:t1, v1] * 1111
+    _t3 = time.time()
+
+    timingsdf.at[rows, ("ts", "a")] += _t1 - _t0
+    timingsdf.at[rows, ("var", "a")] += _t2 - _t1
+    timingsdf.at[rows, ("ass", "a")] += _t3 - _t2
+    return a, b, df
+
+
+def b_timings(df, t0, t1, v1, v2):
+    _t0 = time.time()
+    a = df.loc[:, t0:t1]
+    _t1 = time.time()
+    b = df.loc[v1, :]
+    _t2 = time.time()
+    if profile_assignment:
+        df.loc[v1, t0:t1] = df.loc[v1, t0:t1] * 1111
+    _t3 = time.time()
+
+    timingsdf.at[rows, ("ts", "b")] += _t1 - _t0
+    timingsdf.at[rows, ("var", "b")] += _t2 - _t1
+    timingsdf.at[rows, ("ass", "b")] += _t3 - _t2
+    return a, b, df
+
+
+def dios_timings(dios, t0, t1, v1, v2):
+    _t0 = time.time()
+    a = dios.loc[t0:t1, :]
+    _t1 = time.time()
+    b = dios.loc[:, v1]
+    _t2 = time.time()
+    if profile_assignment:
+        dios.loc[t0:t1, v1] = dios.loc[t0:t1, v1] * 1111
+    _t3 = time.time()
+
+    timingsdf.at[rows, ("ts", "dios")] += _t1 - _t0
+    timingsdf.at[rows, ("var", "dios")] += _t2 - _t1
+    timingsdf.at[rows, ("ass", "dios")] += _t3 - _t2
+    return a, b, dios
+
+
+def gen_random_timestamps(m, M):
+    r = (M - m) * (np.random.randint(10, 90) + np.random.random()) * 0.01
+    a, b = m + r, M - r
+    return min(a, b), max(a, b)
+
+
+def find_index_range(obj):
+    min_ = None
+    max_ = None
+    for r in obj:
+        m = obj[r].index.min()
+        M = obj[r].index.max()
+        try:
+            min_ = min(min_, m)
+            max_ = max(max_, M)
+        except TypeError:
+            min_ = m
+            max_ = M
+    return min_, max_
+
+
+if __name__ == "__main__":
+    import matplotlib.pyplot as plt
+
+    # do not touch
+    rows = 1
+
+    # maximum increase of rows
+    # 1 = 10 # 2 = 100 # .... # 5 = 100'000
+    iterations = 5
+    runs = 1
+    cols = 10
+
+    profile_assignment = True
+
+    # which to calc and plot
+    use_df = False
+    use_a = True
+    use_b = True
+    use_dios = True
+
+    # plot options
+    normalize_to_df = True
+    plot_xlog = True
+    plot_ylog = True
+
+    # ########################
+
+    v1 = "var1"
+    v2 = "var2"
+    for i in range(iterations):
+        rows *= 10
+
+        timingsdf.loc[rows] = (0,) * len(timingsdf.columns)
+
+        df, a, b, dios = get_testset(rows, cols)
+        t0, t4 = find_index_range(df)
+
+        if use_df or normalize_to_df:
+            for r in range(runs):
+                t1, t2 = gen_random_timestamps(t0, t4)
+                vr1 = var_prefix + str(np.random.randint(0, cols))
+                df_timmings(df, t1, t2, vr1, None)
+
+        if use_a:
+            for r in range(runs):
+                t1, t2 = gen_random_timestamps(t0, t4)
+                vr1 = var_prefix + str(np.random.randint(0, cols))
+                a_timings(a, t1, t2, vr1, None)
+
+        if use_b:
+            for r in range(runs):
+                t1, t2 = gen_random_timestamps(t0, t4)
+                vr1 = var_prefix + str(np.random.randint(0, cols))
+                b_timings(b, t1, t2, vr1, None)
+
+        if use_dios:
+            for r in range(runs):
+                t1, t2 = gen_random_timestamps(t0, t4)
+                vr1 = var_prefix + str(np.random.randint(0, cols))
+                dios_timings(dios, t1, t2, vr1, None)
+
+    # calc the average
+    timingsdf /= runs
+
+    pd.set_option("display.max_columns", 100)
+
+    df = timingsdf
+    if not profile_assignment:
+        df.drop(labels="ass", axis=1, level=0, inplace=True)
+    print("timings:")
+    print(df)
+    df = df.swaplevel(axis=1)
+    if normalize_to_df:
+        a = df.loc[:, "a"] / df.loc[:, "df"]
+        b = df.loc[:, "b"] / df.loc[:, "df"]
+        c = df.loc[:, "df"] / df.loc[:, "df"]
+        d = df.loc[:, "dios"] / df.loc[:, "df"]
+        df.loc[:, "a"] = a.values
+        df.loc[:, "b"] = b.values
+        df.loc[:, "df"] = c.values
+        df.loc[:, "dios"] = d.values
+        all = df.copy()
+        all = all.swaplevel(axis=1)
+        print("\n\ndiff:")
+        print(all)
+
+    a = df.loc[:, ("a", slice(None))]
+    b = df.loc[:, ("b", slice(None))]
+    dios = df.loc[:, ("dios", slice(None))]
+    df = df.loc[:, ("df", slice(None))]
+
+    ax = plt.gca()
+    ax.set_title(f"avg of: {runs} runs, columns: {cols}")
+
+    if use_df:
+        df.plot(logy=plot_ylog, logx=plot_xlog, linestyle="-", ax=ax)
+    if use_a:
+        a.plot(logy=plot_ylog, logx=plot_xlog, linestyle="--", ax=ax)
+    if use_b:
+        b.plot(logy=plot_ylog, logx=plot_xlog, linestyle=":", ax=ax)
+    if use_dios:
+        dios.plot(logy=plot_ylog, logx=plot_xlog, linestyle="-.", ax=ax)
+
+    plt.show()
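The profiling loop above times selections on random sub-ranges of the common index; a small sketch of the two exported helpers in isolation (the test-set size is an assumption):

```python
# sketch of the helpers exported via dios.profiling (assumed test-set size)
from dios.profiling import find_index_range, gen_random_timestamps
from dios.profiling.generate_testsets import get_random_df_and_dios

df, dios = get_random_df_and_dios(1000, 5)

# overall min/max timestamp across all columns of the dios
t_min, t_max = find_index_range(dios)

# a random sub-range lying strictly inside [t_min, t_max]
t0, t1 = gen_random_timestamps(t_min, t_max)
print(t_min, t_max, len(dios.loc[t0:t1, :].columns))
```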
diff --git a/dios/profiling/testsets/.gitignore b/dios/profiling/testsets/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..aa8d4bb337f54e8ae3685732eb6a29dcf4a23364
--- /dev/null
+++ b/dios/profiling/testsets/.gitignore
@@ -0,0 +1,6 @@
+
+# ignore all
+*
+
+# except this file, to ensure the `testsets` dir isn't ignored
+!.gitignore
\ No newline at end of file
diff --git a/dios/requirements.txt b/dios/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..139f044f250877d0aaf33406d2e4e87334691a46
--- /dev/null
+++ b/dios/requirements.txt
@@ -0,0 +1,5 @@
+numpy==1.21.2
+pandas==1.3.3
+python-dateutil==2.8.1
+pytz==2021.1
+six==1.16.0
diff --git a/dios/setup.py b/dios/setup.py
new file mode 100644
index 0000000000000000000000000000000000000000..f7ad2acae0012f9010965e8d4b29f660940899e6
--- /dev/null
+++ b/dios/setup.py
@@ -0,0 +1,35 @@
+from setuptools import setup, find_packages
+import subprocess
+
+
+with open("Readme.md", "r") as fh:
+    long_description = fh.read()
+
+cmd = "git describe --tags --always --dirty"
+version = (
+    subprocess.run(cmd, shell=True, check=False, stdout=subprocess.PIPE)
+    .stdout.decode()
+    .strip()
+)
+print(f"git version: {version}")
+# if '-dirty' in version:
+#     print("Do not make a version from a dirty repro. Exiting now")
+#     exit(1)
+txt = "enter version\n>"
+version = input(txt)
+
+setup(
+    name="dios",
+    version=version,
+    author="Bert Palm",
+    author_email="bert.palm@ufz.de",
+    description="Dictionary of Series - a kind of pandas extension",
+    long_description=long_description,
+    long_description_content_type="text/markdown",
+    url="https://git.ufz.de/rdm/dios",
+    packages=["dios"],
+    install_requires=[
+        "pandas",
+    ],
+    license="GPLv3",
+)
diff --git a/dios/test/__init__.py b/dios/test/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..04a9db79ad4e5abad209b03661ddc26349d4e086
--- /dev/null
+++ b/dios/test/__init__.py
@@ -0,0 +1 @@
+from .test_setup import *
diff --git a/dios/test/run_dios.py b/dios/test/run_dios.py
new file mode 100644
index 0000000000000000000000000000000000000000..336ae3547862a786153b1f8a2901caae06c17dd6
--- /dev/null
+++ b/dios/test/run_dios.py
@@ -0,0 +1,48 @@
+from builtins import range
+
+from dios import *
+import numpy as np
+
+if __name__ == "__main__":
+    dios_options[OptsFields.mixed_itype_warn_policy] = Opts.itype_warn
+    print(dios_options)
+
+    df = pd.DataFrame(columns=range(1000))
+    pd.Series()
+    # print(df)
+    # exit(99)
+
+    # dios_options[OptsFields.disp_max_cols] = 5
+    # dios_options[OptsFields.disp_max_rows] = 100
+    dios_options[OptsFields.disp_min_rows] = 50
+    # dios_options[OptsFields.dios_repr] = Opts.repr_aligned
+
+    n = 10
+    d = DictOfSeries(
+        dict(
+            l=pd.Series(0, index=range(0, 30)),
+            # i123=pd.Series(dtype='O'),
+            a=pd.Series(1, index=range(0, n)),
+            nan=pd.Series(np.nan, index=range(3, n + 3)),
+            b=pd.Series(2, index=range(0, n * 2, 2)),
+            c=pd.Series(3, index=range(n, n * 2)),
+            d=pd.Series(4, index=range(-n // 2, n // 2)),
+            # z=pd.Series([1, 2, 3], index=list("abc"))
+        )
+    )
+
+    def f(s):
+        sec = 10 ** 9
+        s.index = pd.to_datetime(s.index * sec)
+        return s
+
+    dd = d.apply(f)
+    print(d)
+
+    # print(d.to_df())
+    # print(pd.options.display.max_rows)
+    # print(d.to_str(col_delim=' | ', col_space=20, header_delim='0123456789'))
+    # print(d.to_str(col_delim=' | ', col_space=20, max_cols=4 ))
+    di = DictOfSeries(columns=[])
+    print(di)
+    # print(DictOfSeries(data=1, columns=['a']))
diff --git a/dios/test/test__ops__.py b/dios/test/test__ops__.py
new file mode 100644
index 0000000000000000000000000000000000000000..ef2d3074729c464fdbd43d2ad78965280f9b47ea
--- /dev/null
+++ b/dios/test/test__ops__.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+from .test_setup import *
+
+
+__author__ = "Bert Palm"
+__email__ = "bert.palm@ufz.de"
+__copyright__ = "Copyright 2018, Helmholtz-Centrum für Umweltforschung GmbH - UFC"
+
+
+@pytest.mark.parametrize("left", diosFromMatr(DATA_ALIGNED))
+@pytest.mark.parametrize("right", diosFromMatr(DATA_ALIGNED))
+def test__eq__(left, right):
+    a, b = left, right
+    _test = a == b
+    for c in _test:
+        for i in _test[c].index:
+            res = (_test[c])[i]
+            e1 = a[c][i]
+            e2 = b[c][i]
+            exp = e1 == e2
+            assert res == exp
+
+
+@pytest.mark.parametrize("left", diosFromMatr(DATA_ALIGNED))
+@pytest.mark.parametrize("right", diosFromMatr(DATA_ALIGNED))
+@pytest.mark.parametrize("op", OP2)
+def test__op2__aligningops(left, right, op):
+    a, b = left, right
+    test = op(a, b)
+    for c in test:
+        for j in test[c].index:
+            exp = op(a[c][j], b[c][j])
+            res = test[c][j]
+            if not np.isfinite(res):
+                print(f"\n\n{a[c][j]} {OP_MAP[op]} {b[c][j]}")
+                print(f"\nres: {res}, exp:{exp}, op: {OP_MAP[op]}")
+                pytest.skip("test does not support non-finite values")
+                return
+            assert res == exp
+
+
+@pytest.mark.parametrize("left", diosFromMatr(DATA_UNALIGNED))
+@pytest.mark.parametrize("right", diosFromMatr(DATA_UNALIGNED))
+@pytest.mark.parametrize("op", OPNOCOMP)
+def test__op2__UNaligningops(left, right, op):
+    try:
+        a, b = left, right
+        test = op(a, b)
+        for c in test:
+            for j in test[c].index:
+                exp = op(a[c][j], b[c][j])
+                res = test[c][j]
+                if not np.isfinite(res):
+                    print(f"\n\n{a[c][j]} {OP_MAP[op]} {b[c][j]}")
+                    print(f"\nres: {res}, exp:{exp}, op: {OP_MAP[op]}")
+                    pytest.skip("test does not support non-finite values")
+                    return
+                assert res == exp
+    except ZeroDivisionError:
+        pytest.skip("ZeroDivisionError")
+
+
+@pytest.mark.parametrize("data", diosFromMatr(ALL))
+@pytest.mark.parametrize("op", OP1)
+def test__op1__(data, op):
+    test = op(data)
+    res = [entry for col in test for entry in test[col]]
+    e = [entry for col in data for entry in data[col]]
+    for i in range(len(res)):
+        exp = op(e[i])
+        assert res[i] == exp
diff --git a/dios/test/test__setget__.py b/dios/test/test__setget__.py
new file mode 100644
index 0000000000000000000000000000000000000000..281f0c5f95e5f12ba56a1d931961bfa4db7e1cb0
--- /dev/null
+++ b/dios/test/test__setget__.py
@@ -0,0 +1,56 @@
+from .test_setup import *
+from pandas.core.dtypes.common import is_scalar
+
+
+@pytest.mark.parametrize(("idxer", "exp"), [("a", s1), ("c", s3)])
+def test__getitem_single(dios_aligned, idxer, exp):
+    di = dios_aligned[idxer]
+    assert isinstance(di, pd.Series)
+    assert (di == exp).all()
+
+
+@pytest.mark.parametrize(
+    "idxer",
+    [
+        "x",
+        "2",
+        1000,
+        None,
+    ],
+)
+def test__getitem_single_fail(dios_aligned, idxer):
+    with pytest.raises((KeyError, ValueError)):
+        di = dios_aligned[idxer]
+
+
+@pytest.mark.parametrize("idxer", BASIC_INDEXER)
+def test__getitem_(dios_aligned, idxer):
+    di = dios_aligned[idxer]
+
+    assert isinstance(di, DictOfSeries)
+
+
+@pytest.mark.parametrize("idxer", BASIC_INDEXER_FAIL)
+def test__getitem_fail(dios_aligned, idxer):
+    with pytest.raises((ValueError, KeyError)):
+        dios_aligned[idxer]
+
+
+@pytest.mark.parametrize(
+    ("idxer", "exp"),
+    [
+        (slice(None), [s1 == s1, s2 == s2, s3 == s3, s4 == s4]),
+        (dios_aligned__() > 5, [s1 > 5, s2 > 5, s3 > 5, s4 > 5]),
+    ],
+)
+def test__setitem_single(dios_aligned, idxer, exp):
+    di = dios_aligned
+    di[idxer] = 99
+    for i, c in enumerate(di):
+        assert ((di[c] == 99) == exp[i]).all()
+
+
+@pytest.mark.parametrize("idxer", BASIC_INDEXER_FAIL)
+def test__setitem__fail(dios_aligned, idxer):
+    with pytest.raises((ValueError, KeyError, IndexError)):
+        dios_aligned[idxer] = 99
diff --git a/dios/test/test__setget__aloc.py b/dios/test/test__setget__aloc.py
new file mode 100644
index 0000000000000000000000000000000000000000..3c76c82450ef9ba606b5434dc8fcb6915e2571d3
--- /dev/null
+++ b/dios/test/test__setget__aloc.py
@@ -0,0 +1,63 @@
+from .test_setup import *
+from pandas.core.dtypes.common import is_scalar
+
+pytestmark = pytest.mark.skip
+
+
+@pytest.mark.parametrize(("idxer", "exp"), [("a", s1), ("c", s3), ("x", pd.Series())])
+def test__getitem_aloc_singleCol(dios_aligned, idxer, exp):
+    di = dios_aligned.aloc[:, idxer]
+    assert isinstance(di, pd.Series)
+    assert (di == exp).all()
+
+
+@pytest.mark.parametrize(("idxer", "exp"), [((1, "a"), s1), ((3, "c"), s3)])
+def test__getitem_aloc_singleRow_singleCol(dios_aligned, idxer, exp):
+    di = dios_aligned.aloc[idxer]
+    assert is_scalar(di)
+    assert di == exp.loc[idxer[0]]
+
+
+@pytest.mark.parametrize("idxerL", R_LOC_INDEXER)
+@pytest.mark.parametrize("idxerR", C_LOC_INDEXER)
+def test__getitem__aloc(dios_aligned, idxerL, idxerR):
+    di = dios_aligned.copy().aloc[idxerL, idxerR]
+    exp = dios_aligned.copy().loc[idxerL, idxerR]
+    assert isinstance(di, DictOfSeries)
+    assert (di == exp).all(None)
+
+
+# #############################
+# __SETITEM__
+
+
+@pytest.mark.parametrize(
+    ("idxer", "exp"),
+    [
+        (slice(None), [s1 == s1, s2 == s2, s3 == s3, s4 == s4]),
+        (C_BLIST, [s1 == s1, s2 != s2, s3 != s3, s4 == s4]),
+    ],
+)
+def test__setitem_aloc_singleCol(dios_aligned, idxer, exp):
+    di = dios_aligned.copy()
+    di.aloc[:, idxer] = 99
+    for i, c in enumerate(di):
+        assert ((di[c] == 99) == exp[i]).all()
+
+
+VALS = [
+    99,
+    pd.Series(range(4, 10), index=range(4, 10)),
+]
+
+
+@pytest.mark.parametrize("idxerL", R_LOC_INDEXER)
+@pytest.mark.parametrize("idxerR", C_LOC_INDEXER)
+@pytest.mark.parametrize("val", VALS)
+def test__setitem__aloc(dios_aligned, idxerL, idxerR, val):
+    di = dios_aligned.copy()
+    di.aloc[idxerL, idxerR] = val
+    exp = dios_aligned.copy()
+    di.loc[idxerL, idxerR] = val
+    assert isinstance(di, DictOfSeries)
+    assert (di == exp).all(None)
diff --git a/dios/test/test__setget__iloc.py b/dios/test/test__setget__iloc.py
new file mode 100644
index 0000000000000000000000000000000000000000..c0996e362f5122fcfaf268619060b37cc158f307
--- /dev/null
+++ b/dios/test/test__setget__iloc.py
@@ -0,0 +1,40 @@
+from .test_setup import *
+from pandas.core.dtypes.common import is_scalar
+
+
+@pytest.mark.parametrize(
+    ("idxer", "exp"),
+    [(0, s1), (1, s2), (2, s3), (3, s4), (-1, s4), (-2, s3), (-3, s2), (-4, s1)],
+)
+def test__getitem_single_iloc(dios_aligned, idxer, exp):
+    di = dios_aligned.iloc[:, idxer]
+    assert isinstance(di, pd.Series)
+    assert (di == exp).all()
+
+
+@pytest.mark.parametrize(
+    ("idxer", "exp"), [((1, 0), s1), ((3, -2), s3), ((-1, -1), s4)]
+)
+def test__getitem_scalar_iloc(dios_aligned, idxer, exp):
+    di = dios_aligned.iloc[idxer]
+    assert is_scalar(di)
+    assert di == exp.iloc[idxer[0]]
+
+
+@pytest.mark.parametrize(
+    "idxer",
+    [
+        -5,
+        99,
+        "a",
+        "2",
+        None,
+    ],
+)
+def test__getitem_single_iloc_fail(dios_aligned, idxer):
+    with pytest.raises((KeyError, IndexError, TypeError)):
+        di = dios_aligned.iloc[:, idxer]
+
+
+# #############################
+# __SETITEM__
diff --git a/dios/test/test__setget__loc.py b/dios/test/test__setget__loc.py
new file mode 100644
index 0000000000000000000000000000000000000000..a988b22546189d0f5a252f5f0b28af0d0e1bc7c1
--- /dev/null
+++ b/dios/test/test__setget__loc.py
@@ -0,0 +1,62 @@
+from .test_setup import *
+from pandas.core.dtypes.common import is_scalar
+
+
+@pytest.mark.parametrize(("idxer", "exp"), [("a", s1), ("c", s3)])
+def test__getitem_loc_singleCol(dios_aligned, idxer, exp):
+    di = dios_aligned.loc[:, idxer]
+    assert isinstance(di, pd.Series)
+    assert (di == exp).all()
+
+
+@pytest.mark.parametrize(("idxer", "exp"), [((1, "a"), s1), ((3, "c"), s3)])
+def test__getitem_loc_singleRow_singleCol(dios_aligned, idxer, exp):
+    di = dios_aligned.loc[idxer]
+    assert is_scalar(di)
+    assert di == exp.loc[idxer[0]]
+
+
+@pytest.mark.parametrize(
+    "idxer",
+    [
+        "x",
+        "2",
+        1,
+        None,
+    ],
+)
+def test__getitem_loc_singleCol_fail(dios_aligned, idxer):
+    with pytest.raises((KeyError, TypeError)):
+        di = dios_aligned.loc[:, idxer]
+
+
+# #############################
+# __SETITEM__
+
+
+@pytest.mark.parametrize(
+    ("idxer", "exp"),
+    [
+        (slice(None), [s1 == s1, s2 == s2, s3 == s3, s4 == s4]),
+        (C_BLIST, [s1 == s1, s2 != s2, s3 != s3, s4 == s4]),
+    ],
+)
+def test__setitem_loc_singleCol(dios_aligned, idxer, exp):
+    di = dios_aligned.copy()
+    di.loc[:, idxer] = 99
+    for i, c in enumerate(di):
+        assert ((di[c] == 99) == exp[i]).all()
+
+
+VALS = [
+    99,
+]
+
+
+@pytest.mark.parametrize("idxerL", R_LOC_INDEXER)
+@pytest.mark.parametrize("idxerR", C_LOC_INDEXER)
+@pytest.mark.parametrize("val", VALS)
+def test__setitem__loc(dios_aligned, idxerL, idxerR, val):
+    di = dios_aligned.copy()
+    di.loc[idxerL, idxerR] = val
+    assert isinstance(di, DictOfSeries)
diff --git a/dios/test/test__setitem__.py b/dios/test/test__setitem__.py
new file mode 100644
index 0000000000000000000000000000000000000000..89c445f459a702d4782bb183b0d6a6b844073eab
--- /dev/null
+++ b/dios/test/test__setitem__.py
@@ -0,0 +1,29 @@
+from .test_setup import *
+import pytest
+
+#
+# s1 = pd.Series(range(10), index=range(10))
+# s2 = pd.Series(range(5, 10), index=range(5, 10))
+# s3 = pd.Series(range(1, 30, 2), index=range(1, 30, 2))
+# s4 = pd.Series(np.linspace(7, 13, 9), index=range(3, 12))
+# s1.name, s2.name, s3.name, s4.name = 'a', 'b', 'c', 'd'
+# d1 = DictOfSeries(data=dict(a=s1.copy(), b=s2.copy(), c=s3.copy(), d=s4.copy()))
+#
+# blist = [True, False, False, True]
+# b = pd.Series([True, False] * 5, index=[1, 2, 3, 4, 5] + [6, 8, 10, 12, 14])
+# B = d1 > 5
+#
+#
+#
+#
+# BLIST = [True, False, False, True]
+#
+# LISTIDXER = [['a'], ['a', 'c'], pd.Series(['a', 'c'])]
+# BOOLIDXER = [pd.Series(BLIST), d1.copy() > 10]
+# SLICEIDXER = [slice(None), slice(-3, -1), slice(-1, 3), slice(None, None, 3)]
+# MULTIIDXER = []  # [d1 > 9, d1 != d1, d1 == d1]
+# EMPTYIDEXER = [[], pd.Series(), slice(3, 3), slice(3, -1), DictOfSeries()]
+#
+# INDEXERS = LISTIDXER + BOOLIDXER + SLICEIDXER + MULTIIDXER + EMPTYIDEXER
+#
+#
diff --git a/dios/test/test_dflike.py b/dios/test/test_dflike.py
new file mode 100644
index 0000000000000000000000000000000000000000..1af90047d51bff19126c7b46e3491b0f9aa21cb6
--- /dev/null
+++ b/dios/test/test_dflike.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+import pytest
+from .test_setup import *
+
+import pandas as pd
+from pandas.core.dtypes.common import is_dict_like, is_nested_list_like
+import numpy as np
+from copy import deepcopy
+
+__author__ = "Bert Palm"
+__email__ = "bert.palm@ufz.de"
+__copyright__ = "Copyright 2018, Helmholtz-Zentrum für Umweltforschung GmbH - UFZ"
+
+
+arr = np.random.rand(8)
+TESTDATA = [
+    None,  # empty  # 0
+    [1],  # 1
+    arr.copy(),  # 2
+    np.array([arr.copy(), arr.copy(), arr.copy()]),  # 3 - nested list
+    range(4),  # 4
+    dict(a=arr.copy(), b=arr.copy()),  # 5 dict
+    pd.DataFrame(dict(a=arr.copy(), b=arr.copy())),  # 6 df
+]
+
+
+@pytest.mark.parametrize("data", TESTDATA)
+@pytest.mark.parametrize("with_column_param", [False, True])
+def test_dios_create(data, with_column_param):
+
+    data_copy0 = deepcopy(data)
+    data_copy1 = deepcopy(data)
+
+    # create columns list
+    if with_column_param:
+        df = pd.DataFrame(data=data_copy0)
+        col = [f"new_{c}" for c in df]
+    else:
+        col = None
+
+    if is_nested_list_like(data):
+        # nested lists are handled differently by DataFrame and DictOfSeries
+        data_copy1 = data_copy1.transpose()
+
+    df = pd.DataFrame(data=data_copy0, columns=col)
+    dios = DictOfSeries(data=data_copy1, columns=col)
+
+    assert dios.columns.equals(df.columns)
+
+    eq, msg = dios_eq_df(dios, df, with_msg=True)
+    assert eq, msg
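The transpose in `test_dios_create` reflects that nested lists are read row-wise by `pd.DataFrame` but column-wise by `DictOfSeries`; a small sketch of that difference, derived from the test logic above (shapes are assumptions):

```python
# sketch: nested lists are interpreted row-wise by DataFrame but
# column-wise by DictOfSeries, hence the transpose in the test above
import numpy as np
import pandas as pd
from dios import DictOfSeries

arr = np.array([[1, 2, 3], [4, 5, 6]])
df = pd.DataFrame(arr)        # 2 rows x 3 columns
di = DictOfSeries(arr.T)      # transposed input yields the same 3 columns
print(df.shape, len(di.columns))
```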
diff --git a/dios/test/test_dflike__setget__.py b/dios/test/test_dflike__setget__.py
new file mode 100644
index 0000000000000000000000000000000000000000..f8d783a5e1538f67d0b6a970f670acf99f047591
--- /dev/null
+++ b/dios/test/test_dflike__setget__.py
@@ -0,0 +1,93 @@
+from .test_setup import *
+import pytest
+
+
+def _test(res, exp):
+
+    if isinstance(exp, pd.DataFrame):
+        eq, msg = dios_eq_df(res, exp, with_msg=True)
+        assert eq, msg
+
+    else:
+        assert type(exp) == type(res)
+
+        if isinstance(exp, pd.Series):
+            eq, msg = diosSeries_eq_dfSeries(res, exp, with_msg=True)
+            assert eq, msg
+
+        # scalars
+        else:
+            assert res == exp
+
+
+@pytest.mark.parametrize("idxer", BASIC_INDEXER)
+def test_dflike__get__(df_aligned, dios_aligned, idxer):
+    print(idxer)
+    exp = df_aligned[idxer]
+    res = dios_aligned[idxer]
+    _test(res, exp)
+
+
+@pytest.mark.parametrize("locR", R_LOC_INDEXER)
+@pytest.mark.parametrize("locC", C_LOC_INDEXER)
+def test_dflike__get_loc__(df_aligned, dios_aligned, locR, locC):
+    print(locR)
+    print(locC)
+    exp = df_aligned.loc[locR, locC]
+    res = dios_aligned.loc[locR, locC]
+    _test(res, exp)
+
+
+@pytest.mark.parametrize("ilocR", R_iLOC_INDEXER)
+@pytest.mark.parametrize("ilocC", C_iLOC_INDEXER)
+def test_dflike__get_iloc__(df_aligned, dios_aligned, ilocR, ilocC):
+    print(ilocR)
+    print(ilocC)
+    exp = df_aligned.iloc[ilocR, ilocC]
+    res = dios_aligned.iloc[ilocR, ilocC]
+    _test(res, exp)
+
+
+VALS = [
+    99,
+]
+
+
+@pytest.mark.parametrize("idxer", BASIC_INDEXER)
+@pytest.mark.parametrize("val", VALS)
+def test_dflike__set__(df_aligned, dios_aligned, idxer, val):
+    print(idxer)
+    exp = df_aligned
+    res = dios_aligned
+    # NOTE: two tests fail due to inconsistent pandas slicing behavior:
+    #   df[:2]    -> selects 2 rows
+    #   df[:2]=99 -> sets 3 rows
+    exp[idxer] = val
+    res[idxer] = val
+    _test(res, exp)
+
+
+@pytest.mark.parametrize("locR", R_LOC_INDEXER)
+@pytest.mark.parametrize("locC", C_LOC_INDEXER)
+@pytest.mark.parametrize("val", VALS)
+def test_dflike__set_loc__(df_aligned, dios_aligned, locR, locC, val):
+    print(locR)
+    print(locC)
+    exp = df_aligned
+    res = dios_aligned
+    exp.loc[locR, locC] = val
+    res.loc[locR, locC] = val
+    _test(res, exp)
+
+
+@pytest.mark.parametrize("ilocR", R_iLOC_INDEXER)
+@pytest.mark.parametrize("ilocC", C_iLOC_INDEXER)
+@pytest.mark.parametrize("val", VALS)
+def test_dflike__set_iloc__(df_aligned, dios_aligned, ilocR, ilocC, val):
+    print(ilocR)
+    print(ilocC)
+    exp = df_aligned
+    res = dios_aligned
+    exp.iloc[ilocR, ilocC] = val
+    res.iloc[ilocR, ilocC] = val
+    _test(res, exp)
diff --git a/dios/test/test_magic_methods.py b/dios/test/test_magic_methods.py
new file mode 100644
index 0000000000000000000000000000000000000000..adb06a52ba11748fdb34f3b0d3cf799af3cd26ed
--- /dev/null
+++ b/dios/test/test_magic_methods.py
@@ -0,0 +1,16 @@
+from .test_setup import *
+
+
+def test__len__(datetime_series, maxlen=10):
+    dios = DictOfSeries()
+    assert len(dios) == 0
+
+    for i in range(maxlen):
+        dios[f"c{i}"] = datetime_series.copy()
+        assert len(dios) == i + 1
+
+    for i in reversed(range(maxlen)):
+        assert len(dios) == i + 1
+        del dios[f"c{i}"]
+
+    assert len(dios) == 0
diff --git a/dios/test/test_methods.py b/dios/test/test_methods.py
new file mode 100644
index 0000000000000000000000000000000000000000..a202b05f02565a3dcf776f2489b350185ba88e15
--- /dev/null
+++ b/dios/test/test_methods.py
@@ -0,0 +1,50 @@
+from .test_setup import *
+
+
+def test_copy_copy_empty(dios_aligned):
+    di = dios_aligned
+    shallow = di.copy(deep=False)
+    deep = di.copy(deep=True)
+    empty_w_cols = di.copy_empty(columns=True)
+    empty_no_cols = di.copy_empty(columns=False)
+
+    assert di is not shallow
+    assert di is not deep
+    assert di is not empty_w_cols
+    assert di is not empty_no_cols
+
+    for attr in [
+        "itype",
+        "_itype",
+        "_policy",
+    ]:
+        dios_attr = getattr(di, attr)
+        for cop in [shallow, deep, empty_w_cols, empty_no_cols]:
+            copy_attr = getattr(cop, attr)
+            assert dios_attr == copy_attr
+
+    assert di.columns.equals(shallow.columns)
+    assert di.columns.equals(deep.columns)
+    assert di.columns.equals(empty_w_cols.columns)
+    assert not di.columns.equals(empty_no_cols.columns)
+
+    for i in di:
+        assert di._data[i].index is shallow._data[i].index
+        assert di._data[i].index is not deep._data[i].index
+        di._data[i][0] = 999999
+        assert di[i][0] == shallow[i][0]
+        assert di[i][0] != deep[i][0]
+
+
+@pytest.mark.parametrize("left", diosFromMatr(DATA_UNALIGNED))
+# we use comp ops just to get some noise in the data
+@pytest.mark.parametrize("op", OPCOMP)
+def test_all(left, op):
+    a = left
+    ser = (op(a, a)).all()
+    assert isinstance(ser, pd.Series)
+    res = [e for e in ser]
+    exp = [op(a[col], a[col]) for col in a]
+    for i in range(len(res)):
+        assert isinstance(exp[i], pd.Series)
+        assert (res[i] == exp[i]).all()
diff --git a/dios/test/test_setup.py b/dios/test/test_setup.py
new file mode 100644
index 0000000000000000000000000000000000000000..2bd04548693cafd10eeb0d3fff0a1656871e2731
--- /dev/null
+++ b/dios/test/test_setup.py
@@ -0,0 +1,342 @@
+from dios import *
+import pytest
+from numpy.random import randint
+
+try:
+    from dios.operators import (
+        OP_MAP,
+        _OP1_MAP,
+        _OP2_DIV_MAP,
+        _OP2_ARITH_MAP,
+        _OP2_BOOL_MAP,
+        _OP2_COMP_MAP,
+    )
+except ModuleNotFoundError:
+    from dios.dios.operators import (
+        OP_MAP,
+        _OP1_MAP,
+        _OP2_DIV_MAP,
+        _OP2_ARITH_MAP,
+        _OP2_BOOL_MAP,
+        _OP2_COMP_MAP,
+    )
+
+import pandas as pd
+import numpy as np
+
+a = pd.Series(range(0, 70, 7))
+b = pd.Series(range(5, 15, 1))
+c = pd.Series(range(7, 107, 10))
+d = pd.Series(range(0, 10, 1))
+
+s1, s2, s3, s4 = a, b, c, d
+
+
+def df_aligned__():
+    return pd.DataFrame(
+        dict(
+            a=a.copy(),
+            b=b.copy(),
+            c=c.copy(),
+            d=d.copy(),
+        )
+    )
+
+
+def dios_aligned__():
+    return DictOfSeries(
+        dict(
+            a=a.copy(),
+            b=b.copy(),
+            c=c.copy(),
+            d=d.copy(),
+        )
+    )
+
+
+def dios_unaligned__():
+    di = dios_aligned__().copy()
+    for i, s in enumerate(di._data):
+        s.index = s.index + i * 2
+    return di
+
+
+def df_unaligned__():
+    return dios_unaligned__().to_df()
+
+
+def dios_fuzzy__(nr_cols=None, mincol=0, maxcol=10, itype=None):
+    nr_of_cols = nr_cols if nr_cols else randint(mincol, maxcol + 1)
+
+    ns = 10 ** 9
+    sec_per_year = 31536000
+
+    ITYPES = [IntItype, FloatItype, DtItype, ObjItype]
+    if itype is not None:
+        itype = get_itype(itype)
+    else:
+        itype = ITYPES[randint(0, len(ITYPES))]
+
+    if itype == IntItype:
+        f = lambda i: pd.Int64Index(i.astype(int)).unique()
+    elif itype == FloatItype:
+        f = lambda i: pd.Float64Index(i).unique()
+    elif itype == ObjItype:
+        f = lambda i: pd.Index(i.astype(int)).unique().astype(str) + "_str"
+    else:  # itype == DtItype:
+        f = lambda i: pd.to_datetime(i.astype(int) * ns) + pd.Timedelta("30Y")
+
+    di = DictOfSeries(itype=itype)
+    for i in range(nr_of_cols):
+        start = randint(0, sec_per_year)
+        end = start + randint(0, sec_per_year)
+        if end > sec_per_year:
+            start, end = end - sec_per_year, start
+
+        base = randint(0, 10 + 1)
+        exp = randint(1, int(np.log10(end - start + 100)))
+        periods = base ** randint(1, exp + 1)
+        index = np.linspace(start, end, periods)
+        index = f(index)
+
+        arr = randint(0, 10, len(index))
+        di[f"c{i}"] = pd.Series(data=arr, dtype=float, index=index)
+
+    return di
+
+
+@pytest.fixture
+def dios_fuzzy():
+    return dios_fuzzy__().copy()
+
+
+@pytest.fixture
+def df_aligned():
+    return df_aligned__().copy()
+
+
+@pytest.fixture
+def dios_aligned():
+    return dios_aligned__().copy()
+
+
+@pytest.fixture
+def df_unaligned():
+    return df_unaligned__().copy()
+
+
+@pytest.fixture
+def dios_unaligned():
+    return dios_unaligned__().copy()
+
+
+def diosSeries_eq_dfSeries(
+    df_s, di_s, with_msg=False, df_s_name="di_s", di_s_name="df_s"
+):
+    def fail(msg):
+        if with_msg:
+            return False, msg
+        return False
+
+    assert isinstance(df_s, pd.Series)
+    assert isinstance(di_s, pd.Series)
+
+    if df_s.empty and not di_s.empty:
+        return fail(
+            f"value mismatch: " f"{df_s_name} is missing, but " f"{di_s_name} == {di_s}"
+        )
+
+    idiff = di_s.index.difference(df_s.index)
+    if not idiff.empty:
+        return fail(
+            f"index mismatch: "
+            f"{di_s_name}.index: {di_s.index.to_list()}, "
+            f"{df_s_name}.index: {df_s.index.to_list()}, "
+            f"diff: {idiff.to_list()}"
+        )
+
+    # compare series
+    for i in df_s.index:
+        exp = df_s.loc[i]
+
+        # NaNs produced by a df-selection usually correspond to values that
+        # are simply not present in a dios. But if a NaN was inserted into
+        # the dios on purpose, it is a valid value, so we try to access it first.
+        try:
+            val = di_s.loc[i]
+        except KeyError:
+            # nan in df, missing in dios -> OK
+            if np.isnan(exp):
+                continue
+
+            # valid val in df, missing in dios -> FAIL
+            else:
+                return fail(
+                    f"value mismatch: "
+                    f"{di_s_name}.loc[{i}] == {exp}, but "
+                    f"{df_s_name}.loc[{i}] does not exist"
+                )
+
+        # inf = np.isinf(exp) and np.isinf(val)
+        # sig = np.sign(exp) == np.sign(val)
+        # eq_nan = np.isnan(exp) and np.isnan(val)
+        # eq_inf = inf and sig
+        # eq_vals = exp == val
+        # eq = eq_nan or eq_inf or eq_vals
+        eq = np.equal(val, exp)
+        assert np.isscalar(eq)
+
+        if not eq:
+            return fail(
+                f"value mismatch: "
+                f"{di_s_name}.loc[{i}] == {exp}, but "
+                f"{df_s_name}.loc[{i}] == {val}"
+            )
+
+    return True, "equal" if with_msg else True
+
+
+def dios_eq_df(dios, df, dios_dropped_empty_colums=False, with_msg=False):
+    def fail(msg):
+        if with_msg:
+            return False, msg
+        return False
+
+    assert isinstance(df, pd.DataFrame)
+    assert isinstance(dios, DictOfSeries)
+
+    # check: dios has no additional/other columns compared to df
+    notmore = [c for c in dios if c not in df]
+    if notmore:
+        return fail(
+            f"columns mismatch. "
+            f"dios: {dios.columns.to_list()}, "
+            f"df: {df.columns.to_list()}, "
+            f"diff: {notmore}"
+        )
+
+    # check: the df may have empty columns where the dios
+    # has no columns at all
+    miss = [c for c in df if c not in dios]
+    if miss:
+        if dios_dropped_empty_colums:
+            tmp = []
+            for c in miss:
+                if not df[c].dropna().empty:
+                    tmp += [c]
+            if tmp:
+                return fail(f"columns mismatch: " f"dios missing column(s): {tmp}")
+        else:
+            return fail(f"columns mismatch: " f"dios missing column(s): {miss}")
+
+    cols = df.columns.intersection(dios.columns)
+
+    for c in cols:
+        ok, m = diosSeries_eq_dfSeries(
+            df[c], dios[c], di_s_name=f"di[{c}]", df_s_name=f"df[{c}]", with_msg=True
+        )
+        if not ok:
+            return fail(m)
+
+    return True, "equal" if with_msg else True
+
+
+# 0,1
+NICE_SLICE = [slice(None), slice(None, None, 3)]
+R_BLIST = [True, False, False, False, True] * 2
+C_BLIST = [True, False, False, True]
+
+#              3,4               5       6
+R_LOC_SLICE = NICE_SLICE + [slice(2), slice(2, 8)]
+R_LOC_LIST = [[1], [3, 4, 5], pd.Series([3, 7])]
+#              7            8                  9
+R_LOC_BLIST = [R_BLIST, pd.Series(R_BLIST), pd.Series(R_BLIST).values]
+
+#              0,      1,           2,
+C_LOC_LIST = [["a"], ["a", "c"], pd.Series(["a", "c"])]
+C_LOC_SLICE = NICE_SLICE + [slice("b"), slice("b", "c")]
+C_LOC_BLIST = [
+    C_BLIST,
+    pd.Series(C_BLIST, index=list("abcd")),
+    pd.Series(C_BLIST).values,
+]
+
+#                 0 1            2           3            4
+RC_iLOC_SLICE = NICE_SLICE + [slice(4), slice(-3, -1), slice(-1, 3)]
+R_iLOC_LIST = [[7], [6, 8]]
+R_iLOC_BLIST = [
+    R_BLIST,
+    pd.Series(R_BLIST).values,
+]  # only list-likes are allowed, not series-likes
+C_iLOC_LIST = [[0], [1, 3]]
+C_iLOC_BLIST = [C_BLIST, pd.Series(C_BLIST).values]
+
+MULTIIDXER = [
+    df_aligned__() > 9,
+    df_aligned__() != df_aligned__(),
+    df_aligned__() == df_aligned__(),
+    df_aligned__() % 3 == 0,
+]
+EMPTYIDEXER = [
+    [],
+    pd.Series(dtype="O"),
+]
+EMPTY_DF = [pd.DataFrame()]
+
+BASIC_INDEXER = (
+    C_LOC_LIST + R_LOC_SLICE + R_LOC_BLIST + MULTIIDXER + EMPTYIDEXER + EMPTY_DF
+)
+BASIC_INDEXER_FAIL = [
+    ["z"],
+    ["a", "z"],
+    pd.Series(["a", "z"]),
+    pd.DataFrame(dict(a=[1, 2, 3])),
+]
+
+R_LOC_INDEXER = R_LOC_SLICE + R_LOC_LIST + R_LOC_BLIST + EMPTYIDEXER
+C_LOC_INDEXER = C_LOC_SLICE + C_LOC_LIST + C_LOC_BLIST + EMPTYIDEXER
+
+R_iLOC_INDEXER = RC_iLOC_SLICE + R_iLOC_LIST + R_iLOC_BLIST
+C_iLOC_INDEXER = RC_iLOC_SLICE + C_iLOC_LIST + C_iLOC_BLIST
+
+O = [[0, 0, 0], [0, 0, 0]]
+I = [[1, 1, 1], [1, 1, 1]]
+A = [[1, 2, 3], [4, 5, 6]]
+B = [[0, 2, 2], [5, 5, 5]]
+C = [[3, 2, 0], [1, 0, 3]]
+D = [[6, 5, 4], [3, 2, 1]]
+DATA_ALIGNED = [O, I, A, B, C, D]
+
+# outer lists could have different lengths, but this would
+# make the checks too complicated
+EEE = [[], [], []]
+O = [[0, 0], [0, 0, 0], [0, 0, 0, 0]]
+I = [[1, 1, 1], [1, 1, 1], [1]]
+A = [[1], [2, 3], [4, 5, 6]]
+B = [[0, 2, 2], [5], [5, 5]]
+C = [[3, 2, 0], [1, 0, 3], [0, 0, 0]]
+D = [[6], [2], [9]]
+DATA_UNALIGNED = [O, I, A, B, C, D, EEE]
+
+# only use this if a single matrix is used at a time
+ALL = DATA_ALIGNED + DATA_UNALIGNED
+
+OPCOMP = list(_OP2_COMP_MAP)
+OPNOCOMP = list(_OP2_ARITH_MAP) + list(_OP2_BOOL_MAP) + list(_OP2_DIV_MAP)
+OP2 = OPCOMP + OPNOCOMP
+OP1 = list(_OP1_MAP)
+
+
+def diosFromMatr(mlist):
+    l = []
+    for m in mlist:
+        m = np.array(m)
+        l.append(DictOfSeries(m.copy()))
+    return tuple(l)
+
+
+@pytest.fixture()
+def datetime_series():
+    m = randint(2, 1000)
+    idx = pd.date_range("2000", "2010", m)
+    return pd.Series(range(m), idx)
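The equality helpers above treat a NaN coming from a DataFrame selection as equivalent to a value that is simply missing from the dios; a minimal sketch of that convention (toy data, assumed import path):

```python
# minimal sketch of the equality convention used by dios_eq_df:
# a NaN in the DataFrame counts as equal to a value missing from the dios
import numpy as np
import pandas as pd
from dios import DictOfSeries
from dios.test.test_setup import dios_eq_df

df = pd.DataFrame({"a": [1.0, 2.0], "b": [1.0, np.nan]})
di = DictOfSeries({"a": pd.Series([1.0, 2.0]), "b": pd.Series([1.0])})

ok, msg = dios_eq_df(di, df, with_msg=True)
print(ok, msg)   # expected: True "equal"
```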
diff --git a/docs/FunctionIndex.md b/docs/FunctionIndex.md
deleted file mode 100644
index 5f8e3d4e6d4db76ee0b411ba9772c56a7b137242..0000000000000000000000000000000000000000
--- a/docs/FunctionIndex.md
+++ /dev/null
@@ -1,40 +0,0 @@
-# Implemented Quality Check Functions
-
-Index of the main documentation of the implemented functions, their purpose and parametrization.
-
-## Index
-
-- [Miscellaneous](docs/funcs/Miscellaneous.md)
-  - [flagRange](docs/funcs/Miscellaneous.md#flagrange)
-  - [flagSeasonalRange](docs/funcs/Miscellaneous.md#flagseasonalrange)
-  - [flagIsolated](docs/funcs/Miscellaneous.md#flagisolated)
-  - [flagMissing](docs/funcs/Miscellaneous.md#flagmissing)
-  - [flagDummy](docs/funcs/Miscellaneous.md#flagdummy)
-  - [clearFlags](docs/funcs/Miscellaneous.md#clearflags)
-  - [forceFlags](docs/funcs/Miscellaneous.md#forceflags)
-- [Spike Detection](docs/funcs/SpikeDetection.md)
-  - [spikes_flagBasic](docs/funcs/SpikeDetection.md#spikes_flagbasic)
-  - [spikes_flagMad](docs/funcs/SpikeDetection.md#spikes_flagmad)
-  - [spikes_flagSlidingZscore](docs/funcs/SpikeDetection.md#spikes_flagslidingzscore)
-  - [spikes_flagSpektrumBased](docs/funcs/SpikeDetection.md#spikes_flagspektrumbased)
-  - [spikes_flagRaise](docs/funcs/SpikeDetection.md#spikes_flagraise)
-- [Constant Detection](docs/funcs/ConstantDetection.md)
-  - [constants_flagBasic](docs/funcs/ConstantDetection.md#constants_flagbasic)
-  - [constants_flagVarianceBased](docs/funcs/ConstantDetection.md#constants_flagvariancebased)
-- [Break Detection](docs/funcs/BreakDetection.md)
-  - [breaks_flagSpektrumBased](docs/funcs/BreakDetection.md#breaks_flagspektrumbased)
-- [Time Series Harmonization](docs/funcs/TimeSeriesHarmonization.md)
-  - [harm_shift2Grid](docs/funcs/TimeSeriesHarmonization.md#harm_shift2grid)
-  - [harm_aggregate2Grid](docs/funcs/TimeSeriesHarmonization.md#harm_aggregate2grid)
-  - [harm_linear2Grid](docs/funcs/TimeSeriesHarmonization.md#harm_linear2grid)
-  - [harm_interpolate2Grid](docs/funcs/TimeSeriesHarmonization.md#harm_interpolate2grid)
-  - [harm_downsample](docs/funcs/TimeSeriesHarmonization.md#harm_downsample)
-  - [harm_harmonize](docs/funcs/TimeSeriesHarmonization.md#harm_harmonize)
-  - [harm_deharmonize](docs/funcs/TimeSeriesHarmonization.md#harm_deharmonize)
-- [Soil Moisture](docs/funcs/SoilMoisture.md)
-  - [sm_flagSpikes](docs/funcs/SoilMoisture.md#sm_flagspikes)
-  - [sm_flagBreaks](docs/funcs/SoilMoisture.md#sm_flagbreaks)
-  - [sm_flagConstants](docs/funcs/SoilMoisture.md#sm_flagconstants)
-  - [sm_flagFrost](docs/funcs/SoilMoisture.md#sm_flagfrost)
-  - [sm_flagPrecipitation](docs/funcs/SoilMoisture.md#sm_flagprecipitation)
-  - [sm_flagRandomForest](docs/funcs/SoilMoisture.md#sm_flagrandomforest)
diff --git a/docs/funcs/BreakDetection.md b/docs/funcs/BreakDetection.md
deleted file mode 100644
index 936bb1bb6045da3203e1b14cd4ad5a7f94abb2c8..0000000000000000000000000000000000000000
--- a/docs/funcs/BreakDetection.md
+++ /dev/null
@@ -1,67 +0,0 @@
-# Break Detection
-
-## Index
-[breaks_flagSpektrumBased](#breaks_flagspektrumbased)
-
-## breaks_flagSpektrumBased
-
-```                            
-breaks_flagSpektrumBased(thresh_rel=0.1, thresh_abs=0.01, 
-                         first_der_factor=10, first_der_window="12h", 
-                         scnd_der_ratio_range=0.05, scnd_der_ratio_thresh=10, 
-                         smooth=True, smooth_window=None, smooth_poly_deg=2)
-```
-
-| parameter             | data type                                                     | default value | description                                                                                                                                                |
-|-----------------------|---------------------------------------------------------------|---------------|------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| thresh_rel            | float                                                         | `0.1`         | Minimum relative difference between two values to consider the latter as a break candidate. See condition (1)                                              |
-| thresh_abs            | float                                                         | `0.01`        | Minimum absolute difference between two values to consider the latter as a break candidate. See condition (2)                                              |
-| first_der_factor      | float                                                         | `10`          | Multiplication factor for arithmetic mean of the first derivatives surrounding a break candidate. See condition (3).                                       |
-| first_der_window      | [offset string](docs/ParameterDescriptions.md#offset-strings) | `"12h"`       | Window around a break candidate for which the arithmetic mean is calculated. See condition (3)                                                             |
-| scnd_der_ratio_range  | float                                                         | `0.05`        | Maximum deviation from one of the ratio of the second derivatives of a break candidate and its preceding value. See condition (5)                          |
-| scnd_der_ratio_thresh | float                                                         | `10.0`        | Threshold for the ratio of the second derivatives of a break candidate and its succeeding value. See condition (5)                                         |
-| smooth                | bool                                                          | `True`        | Smooth the time series before differentiation using the Savitsky-Golay filter                                                                              |
-| smooth_window         | [offset string](docs/ParameterDescriptions.md#offset-strings) | `None`        | Size of the smoothing window of the Savitsky-Golay filter. The default value `None` results in a window of two times the sampling rate (i.e. three values) |
-| smooth_poly_deg       | integer                                                       | `2`           | Degree of the polynomial used for smoothing with the Savitsky-Golay filter                                                                                 |
-
-The function flags breaks (jumps/drops) by evaluating the derivatives of a time series.
-
-A value $`x_k`$ of a time series $`x_t`$ with timestamps $`t_i`$, is considered to be a break, if:
-
-1. $`x_k`$ represents a sufficiently large relative jump:
-
-   $`|\frac{x_k - x_{k-1}}{x_k}| >`$ `thresh_rel`
-
-2. $`x_k`$ represents a sufficient absolute jump:
-
-   $`|x_k - x_{k-1}| >`$ `thresh_abs`
-
-3. The dataset $`X = x_i, ..., x_{k-1}, x_{k+1}, ..., x_j`$, with $`|t_{k-1} - t_i| = |t_j - t_{k+1}| =`$ `first_der_window`
-   fulfills the following condition:
-   
-   $`|x'_k| >`$ `first_der_factor` $` \cdot \bar{X} `$
-   
-   where $`\bar{X}`$ denotes the arithmetic mean of $`X`$.
-
-4. The ratio (last/this) of the second derivatives is close to 1:
-
-   $` 1 -`$ `scnd_der_ratio_range` $`< |\frac{x''_{k-1}}{x_{k''}}| < 1 + `$`scnd_der_ratio_range`
-
-5. The ratio (this/next) of the second derivatives is sufficiently height:
-
-   $`|\frac{x''_{k}}{x''_{k+1}}| > `$`scnd_der_ratio_thresh`
-
-NOTE:
-- Only works for time series
-- The time series is expected to be harmonized to an
-  [equidistant frequency grid](docs/funcs/TimeSeriesHarmonization.md)
-
-
-This Function is a generalization of the spectrum based spike flagging
-mechanism as presented in [1].
-
-### References
-[1] Dorigo,W. et al.: Global Automated Quality Control of In Situ Soil Moisture
-    Data from the international Soil Moisture Network. 2013. Vadoze Zone J.
-    doi:10.2136/vzj2012.0097.
-
diff --git a/docs/funcs/ConstantDetection.md b/docs/funcs/ConstantDetection.md
deleted file mode 100644
index a0f4e580945ca503fdf8608f99fa7e974e72b575..0000000000000000000000000000000000000000
--- a/docs/funcs/ConstantDetection.md
+++ /dev/null
@@ -1,57 +0,0 @@
-# Constant Detection
-
-A collection of quality check routines to find constant values and plateaus.
-
-## Index
-
-- [constants_flagBasic](#constants_flagbasic)
-- [constants_flagVarianceBased](#constants_flagvariancebased)
-
-
-## constants_flagBasic
-
-```
-constants_flagBasic(window, thresh=0)
-```
-
-| parameter | data type                                                             | default value | description                                                                                                                  |
-|-----------|-----------------------------------------------------------------------|---------------|------------------------------------------------------------------------------------------------------------------------------|
-| window    | integer/[offset string](docs/ParameterDescriptions.md#offset-strings) |               | The minimum count or duration in which the values must be constant to be considered as plateau candidates. See condition (1) |
-| thresh    | float                                                                 |             0 | The maximum difference between values to be still considered as constant. See condition (2)                                     |
-
-This functions flags plateaus/series of constant values of length `window` if
-their difference is smaller than `thresh`.
-
-A set of consecutive values $`x_n, ..., x_{n+k}`$ of a time series $`x_t`$
-is considered to be constant, if:
-1. $`k \ge `$ `window`
-2. $`|x_n - x_{n+s}| \le `$ `thresh`, $`s \in {1,2, ..., k}`$
-
-
-## constants_flagVarianceBased
-
-```
-constants_flagVarianceBased(window="12h", thresh=0.0005,
-                            max_missing=None, max_consec_missing=None)
-```
-
-| parameter          | data type                                                     | default value | description                                                                                            |
-|--------------------|---------------------------------------------------------------|---------------|--------------------------------------------------------------------------------------------------------|
-| window             | [offset string](docs/ParameterDescriptions.md#offset-strings) | `12h`         | The minimum duration in which the values must be constant to be considered as plateau candidates. See condition (1) |
-| thresh             | float                                                         | `0.0005`      | The maximum variance of a group of values, to still consider them as constant. See condition (2)               |
-| max_missing        | integer                                                       | `None`        | The maximum count of missing values that are allowed in the `window`. If not set, this condition is ignored and infinity missing values are allowed.|
-| max_consec_missing | integer                                                       | `None`        | The maximum count of *consecutive* missing values, that are allowed in the `window`. If not set, this condition is ignored and infinity consecutive missing values are allowed. |
-
-
-This function flags plateaus/series of constant values. Any set of consecutive values
-$`x_n,..., x_{n+k}`$ of a timeseries $`x_t`$ is flagged, if:
-
-1. $`k \ge `$`window`
-2. $`\sigma(x_n,..., x_{n+k}) \le`$ `thresh`
-
-NOTE:
-- Only works for time series
-- The time series is expected to be harmonized to an
-  [equidistant frequency grid](docs/funcs/TimeSeriesHarmonization.md)
-- When `max_missing` or `max_consec_missing` are set, plateaus not 
-  fulfilling the respective condition will not be flagged
diff --git a/docs/funcs/FormalDescriptions.md b/docs/funcs/FormalDescriptions.md
deleted file mode 100644
index 12f286add9f9c5a488025d00b6d232eb02c2f38c..0000000000000000000000000000000000000000
--- a/docs/funcs/FormalDescriptions.md
+++ /dev/null
@@ -1,86 +0,0 @@
-# Mathematical descriptions
-
-A collection of detailed mathematical descriptions.
-
-## Index
-
-- [spikes_flagRaise](#spikes_flagraise)
-- [spikes_flagSpektrumBased](#spikes_flagspektrumbased)
-- [breaks_flagSpektrumBased](#breaks_flagspektrumbased)
-- [sm_flagConstants](#sm_flagconstants)
-
-
-## spikes_flagRaise
-
-The value $`x_{k}`$ of a time series $`x`$ with associated 
-timestamps $`t_i`$, is flagged a rise, if:
-
-1. There is any value $`x_{s}`$, preceeding $`x_{k}`$ within `raise_window` range, so that:
-    * $` M = |x_k - x_s | > `$  `thresh` $` > 0`$ 
-2. The weighted average $`\mu^*`$ of the values, preceeding $`x_{k}`$ within `average_window` range indicates, that $`x_{k}`$ doesnt return from an outliererish value course, meaning that:  
-    * $` x_k > \mu^* + ( M `$ / `mean_raise_factor` $`)`$  
-3. Additionally, if `min_slope` is not `None`, $`x_{k}`$ is checked for being sufficiently divergent from its very predecessor $`x_{k-1}`$, meaning that, it is additionally checked if: 
-    * $`x_k - x_{k-1} > `$ `min_slope` 
-    * $`t_k - t_{k-1} > `$ `min_slope_weight`*`intended_freq`
-
-The weighted average $`\mu^*`$ was calculated with weights $`w_{i}`$, defined by: 
-* $`w_{i} = (t_i - t_{i-1})`$ / `intended_freq`, if $`(t_i - t_{i-1})`$ < `intended_freq` and $`w_i =1`$ otherwise. 
-
-
-
-The value $`x_{k}`$ of a time series $`x_t`$ with 
-timestamps $`t_i`$ is considered a spikes, if:
-
-
-## spikes_flagSpektrumBased
-
-
-1. The quotient to its preceding data point exceeds a certain bound:
-    * $` |\frac{x_k}{x_{k-1}}| > 1 + `$ `raise_factor`, or
-    * $` |\frac{x_k}{x_{k-1}}| < 1 - `$ `raise_factor`
-2. The quotient of the second derivative $`x''`$, at the preceding
-   and subsequent timestamps is close enough to 1:
-    * $` |\frac{x''_{k-1}}{x''_{k+1}} | > 1 - `$ `deriv_factor`, and
-    * $` |\frac{x''_{k-1}}{x''_{k+1}} | < 1 + `$ `deriv_factor`
-3. The dataset $`X = x_i, ..., x_{k-1}, x_{k+1}, ..., x_j`$, with 
-   $`|t_{k-1} - t_i| = |t_j - t_{k+1}| =`$ `noise_window` fulfills the 
-   following condition: 
-   `noise_func`$`(X) <`$ `noise_thresh`
-
-## breaks_flagSpektrumBased
-
-A value $`x_k`$ of a time series $`x_t`$ with timestamps $`t_i`$, is considered to be a break, if:
-
-1. $`x_k`$ represents a sufficiently large relative jump:
-
-   $`|\frac{x_k - x_{k-1}}{x_k}| >`$ `thresh_rel`
-
-2. $`x_k`$ represents a sufficient absolute jump:
-
-   $`|x_k - x_{k-1}| >`$ `thresh_abs`
-
-3. The dataset $`X = x_i, ..., x_{k-1}, x_{k+1}, ..., x_j`$, with $`|t_{k-1} - t_i| = |t_j - t_{k+1}| =`$ `first_der_window`
-   fulfills the following condition:
-   
-   $`|x'_k| >`$ `first_der_factor` $` \cdot \bar{X} `$
-   
-   where $`\bar{X}`$ denotes the arithmetic mean of $`X`$.
-
-4. The ratio (last/this) of the second derivatives is close to 1:
-
-   $` 1 -`$ `scnd_der_ratio_margin_1` $`< |\frac{x''_{k-1}}{x_{k''}}| < 1 + `$`scnd_der_ratio_margin_1`
-
-5. The ratio (this/next) of the second derivatives is sufficiently height:
-
-   $`|\frac{x''_{k}}{x''_{k+1}}| > `$`scnd_der_ratio_margin_2`
-   
-## sm_flagConstants   
-
-Any set of consecutive values
-$`x_k,..., x_{k+n}`$, of a time series $`x`$ is flagged, if:
-
-1. $`n > `$`window`
-2. $`\sigma(x_k, x_{k+1},..., x_{k+n}) < `$`thresh`
-3. $`\max(x'_{k-n-s}, x'_{k-n-s+1},..., x'_{k-n+s}) \geq`$ `deriv_min`, with $`s`$ denoting periods per `precipitation_window`
-4. $`\min(x'_{k-n-s}, x'_{k-n-s+1},..., x'_{k-n+s}) \leq`$ `deriv_max`, with $`s`$ denoting periods per `precipitation_window`
-5. $`\mu(x_k, x_{k+1},..., x_{k+n}) \le \max(x) \cdot`$ `tolerance`   
\ No newline at end of file
diff --git a/docs/funcs/Miscellaneous.md b/docs/funcs/Miscellaneous.md
deleted file mode 100644
index f5ad537241171171d76dc89e44f1a3b809198cd9..0000000000000000000000000000000000000000
--- a/docs/funcs/Miscellaneous.md
+++ /dev/null
@@ -1,145 +0,0 @@
-# Miscellaneous
-
-A collection of unrelated quality check functions.
-
-## Index
-
-- [flagRange](#flagrange)
-- [flagSeasonalRange](#flagseasonalrange)
-- [flagIsolated](#flagisolated)
-- [flagPattern](#flagpattern)
-- [flagMissing](#flagmissing)
-- [clearFlags](#clearflags)
-- [forceFlags](#forceflags)
-
-
-
-## flagRange
-
-```
-flagRange(min, max)
-```
-| parameter | data type | default value | description                      |
-| --------- | --------- | ------------- | -----------                      |
-| min       | float     |               | The lower bound for valid values |
-| max       | float     |               | The upper bound for valid values |
-
-
-The function flags all values outside the closed interval
-$`[`$`min`, `max`$`]`$.
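-
-As a rough illustration, the check boils down to a simple interval mask. The following
-is a minimal pandas sketch, not saqc's implementation; the helper name `flag_range_mask`
-and the example values are made up:
-
-```python
-import pandas as pd
-
-def flag_range_mask(values: pd.Series, min: float, max: float) -> pd.Series:
-    """Return True for every value outside the closed interval [min, max]."""
-    return (values < min) | (values > max)
-
-# example: mark values outside [0, 60]
-mask = flag_range_mask(pd.Series([5.0, 61.2, -1.0, 30.0]), min=0, max=60)
-```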
-
-## flagSeasonalRange
-
-```
-flagSeasonalRange(min, max, startmonth=1, endmonth=12, startday=1, endday=31)
-```
-
-| parameter  | data type   | default value | description                      |
-| ---------  | ----------- | ----          | -----------                      |
-| min        | float       |               | The lower bound for valid values |
-| max        | float       |               | The upper bound for valid values |
-| startmonth | integer     | `1`           | The interval start month         |
-| endmonth   | integer     | `12`          | The interval end month           |
-| startday   | integer     | `1`           | The interval start day           |
-| endday     | integer     | `31`          | The interval end day             |
-
-The function does the same as `flagRange`, but only if the timestamp of the
-data point lies in a defined interval, which is built from days and months only.
-In particular, the *year* is not considered in the interval.
-
-The left 
-boundary is defined by `startmonth` and `startday`, the right boundary by `endmonth`
-and `endday`. Both boundaries are inclusive. If the left side occurs later
-in the year than the right side, the interval is extended over the change of
-year (e.g. an interval of [01/12, 01/03], will flag values in December,
-January and February).
-
-NOTE: Only works for time-series-like datasets.
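-
-The year-wrapping logic can be illustrated with a small pandas sketch, not saqc's
-implementation; the helper name `in_season` is made up and the defaults are chosen
-to show the wrap-around case:
-
-```python
-import pandas as pd
-
-def in_season(index: pd.DatetimeIndex, startmonth=12, startday=1, endmonth=3, endday=1):
-    """Boolean mask for timestamps inside the (month, day) interval, year ignored."""
-    start, end = (startmonth, startday), (endmonth, endday)
-    md = list(zip(index.month, index.day))
-    if start <= end:
-        return pd.Series([start <= x <= end for x in md], index=index)
-    # interval wraps over the turn of the year, e.g. [01/12, 01/03]
-    return pd.Series([x >= start or x <= end for x in md], index=index)
-
-idx = pd.date_range("2020-11-01", "2021-04-01", freq="D")
-mask = in_season(idx)  # True from December through the 1st of March
-```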
-
-
-## flagIsolated
-
-```
-flagIsolated(gap_window, group_window)
-```
-
-| parameter    | data type                                                     | default value | description                                                                                                                                    |
-|--------------|---------------------------------------------------------------|---------------|------------------------------------------------------------------------------------------------------------------------------------------------|
-| gap_window   | [offset string](docs/ParameterDescriptions.md#offset-strings) |               | The minimum size of the gaps before and after a group of valid values that is required for this group to be considered isolated. See conditions (2) and (3) |
-| group_window | [offset string](docs/ParameterDescriptions.md#offset-strings) |               | The maximum size of an isolated group of valid data. See condition (1).                                                                        |
-
-The function flags groups of values if they are surrounded by sufficiently
-large data gaps. A gap is defined as a group of missing and/or flagged values.
-
-A continuous group of values
-$`x_{k}, x_{k+1},...,x_{k+n}`$ with timestamps $`t_{k}, t_{k+1}, ..., t_{k+n}`$
-is considered to be isolated, if:
-1. $` t_{k+n} - t_{k} \le `$ `group_window`
-2. None of the values $` x_i, ..., x_{k-1} `$, with $`t_{k-1} - t_{i} \ge `$ `gap_window`, is both valid and unflagged
-3. None of the values $` x_{k+n+1}, ..., x_{j} `$, with $`t_{j} - t_{k+n+1} \ge `$ `gap_window`, is both valid and unflagged
-
-
-## flagMissing
-
-```
-flagMissing(nodata=NaN)
-```
-
-| parameter | data type  | default value  | description                       |
-| --------- | ---------- | -------------- | -----------                       |
-| nodata    | any        | `NAN`          | A value that defines missing data |
-
-The function flags all values indicating missing data.
-
-
-
-
-## flagPattern
-
-```                            
-flagPattern(ref_datafield, sample_freq = '15 Min', method = 'dtw', min_distance = None)
-``` 
-
-
-| parameter             | data type                                                     | default value | description                                                                                                                                                |
-|-----------------------|---------------------------------------------------------------|---------------|------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| ref_datafield         | string                                                        |               | Name of the reference variable in the dataset that holds the pattern                                |
-| sample_freq           | string                                                        | `"15 Min"`    | Sample frequency used to harmonize the data                                                         |
-| method                | string                                                        | `"dtw"`       | `"dtw"` for Dynamic Time Warping (DTW), `"wavelet"` for the Wavelet Pattern Recognition Algorithm   |
-| min_distance          | float                                                         | `None`        | For the DTW algorithm: the minimum distance of two graphs in order to be classified as "different"  |
-
-
-Implementation of the pattern recognition algorithms introduced in [Pattern Recognition](https://git.ufz.de/rdm-software/saqc/-/wikis/Pattern-Recognition). 
-
-
-
-
-
-## clearFlags
-
-```
-clearFlags()
-```
-
-The function removes all previously set flags.
-
-## forceFlags
-
-```
-forceFlags(flag)
-```
-| parameter | data type                                                                   | default value | description                          |
-| --------- | -----------                                                                 | ----          | -----------                          |
-| flag      | float/[flagging constant](docs/ParameterDescriptions.md#flagging-constants) | GOOD          | The flag that is set unconditionally |
-
-The function overwrites all previously set flags with the given flag.
-
-
-## flagDummy
-
-```
-flagDummy()
-```
-
-Identity function, i.e. the function does nothing.
diff --git a/docs/funcs/SoilMoisture.md b/docs/funcs/SoilMoisture.md
deleted file mode 100644
index 705456715f6f2ca78525df44a0bb86ca304c4b76..0000000000000000000000000000000000000000
--- a/docs/funcs/SoilMoisture.md
+++ /dev/null
@@ -1,237 +0,0 @@
-# Soil Moisture
-
-A collection of soil moisture specific quality check routines.
-
-
-## Index
-
-- [sm_flagSpikes](#sm_flagspikes)
-- [sm_flagBreaks](#sm_flagbreaks)
-- [sm_flagConstants](#sm_flagconstants)
-- [sm_flagFrost](#sm_flagfrost)
-- [sm_flagPrecipitation](#sm_flagprecipitation)
-- [sm_flagRandomForest](#sm_flagrandomforest)
-
-
-## sm_flagSpikes
-
-```
-sm_flagSpikes(raise_factor=0.15, deriv_factor=0.2,
-              noise_func="CoVar", noise_window="12h", noise_thresh=1,
-              smooth_window="3h", smooth_poly_deg=2)
-```
-
-| parameter       | data type                                                     | default value |
-|-----------------|---------------------------------------------------------------|---------------|
-| raise_factor    | float                                                         | `0.15`        |
-| deriv_factor    | float                                                         | `0.2`         |
-| noise_func      | [string](#noise-detection-functions)                          | `"CoVar"`     |
-| noise_window    | [offset string](docs/ParameterDescriptions.md#offset-strings) | `"12h"`       |
-| noise_thresh    | float                                                         | `1`           |
-| smooth_window   | [offset string](docs/ParameterDescriptions.md#offset-strings) | `"3h"`        |
-| smooth_poly_deg | integer                                                       | `2`           |
-
-The function is a wrapper around `spikes_flagSpektrumBased`
-with a set of default parameters referring to [1]. For a complete description of 
-the algorithm and the available parameters please refer to the documentation of 
-[spikes_flagSpektrumBased](docs/funcs/SpikeDetection.md#spikes_flagspektrumbased).
-
-[1] Dorigo, W. et al: Global Automated Quality Control of In Situ Soil Moisture
-    Data from the International Soil Moisture Network. 2013.
-    Vadose Zone J. doi:10.2136/vzj2012.0097.
-
-
-## sm_flagBreaks
-
-```
-sm_flagBreaks(thresh_rel=0.1, thresh_abs=0.01,
-              first_der_factor=10, first_der_window="12h",
-              scnd_der_ratio_range=0.05, scnd_der_ratio_thresh=10,
-              smooth=False, smooth_window="3h", smooth_poly_deg=2)
-```
-
-| parameter             | data type                                                     | default value |
-|-----------------------|---------------------------------------------------------------|---------------|
-| thresh_rel            | float                                                         | `0.1`         |
-| thresh_abs            | float                                                         | `0.01`        |
-| first_der_factor      | float                                                         | `10`          |
-| first_der_window      | [offset string](docs/ParameterDescriptions.md#offset-strings) | `"12h"`       |
-| scnd_der_ratio_range  | float                                                         | `0.05`        |
-| scnd_der_ratio_thresh | float                                                         | `10.0`        |
-| smooth                | bool                                                          | `False`       |
-| smooth_window         | [offset string](docs/ParameterDescriptions.md#offset-strings) | `"3h"`        |
-| smooth_poly_deg       | integer                                                       | `2`           |
-
-
-The function is a wrapper around `breaks_flagSpektrumBased`
-with a set of default parameters referring to [1]. For a complete description of 
-the algorithm and the available parameters please refer to the documentation of 
-[breaks_flagSpektrumBased](docs/funcs/BreakDetection.md#breaks_flagspektrumbased).
-
-[1] Dorigo, W. et al: Global Automated Quality Control of In Situ Soil Moisture
-    Data from the International Soil Moisture Network. 2013.
-    Vadose Zone J. doi:10.2136/vzj2012.0097.
-
-
-## sm_flagConstants
-
-```
-sm_flagConstants(window="12h", thresh=0.0005,
-                 precipitation_window="12h",
-                 tolerance=0.95,
-                 deriv_max=0.0025, deriv_min=0,
-                 max_missing=None, max_consec_missing=None,
-                 smooth_window=None, smooth_poly_deg=2)
-```
-
-| parameter            | data type                                                     | default value | description                                                                                                                                                |
-|----------------------|---------------------------------------------------------------|---------------|------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| window               | [offset string](docs/ParameterDescriptions.md#offset-strings) | `"12h"`       | Minimum duration during which values need to be identical to become plateau candidates. See condition (1)                                                  |
-| thresh               | float                                                         | `0.0005`      | Maximum variance of a group of values to still consider them constant. See condition (2)                                                                   |
-| precipitation_window | [offset string](docs/ParameterDescriptions.md#offset-strings) | `"12h"`       | See condition (3) and (4)                                                                                                                                  |
-| tolerance            | float                                                         | `0.95`        | Tolerance factor, see condition (5)                                                                                                                        |
-| deriv_max            | float                                                         | `0.0025`      | See condition (4)                                                                                                                                          |
-| deriv_min            | float                                                         | `0`           | See condition (3)                                                                                                                                          |
-| max_missing          | integer                                                       | `None`        | Maximum number of missing values allowed in `window`, by default this condition is ignored                                                                 |
-| max_consec_missing   | integer                                                       | `None`        | Maximum number of consecutive missing values allowed in `window`, by default this condition is ignored                                                     |
-| smooth_window        | [offset string](docs/ParameterDescriptions.md#offset-strings) | `None`        | Size of the smoothing window of the Savitsky-Golay filter. The default value `None` results in a window of two times the sampling rate (i.e. three values) |
-| smooth_poly_deg      | integer                                                       | `2`           | Degree of the polynomial used for smoothing with the Savitsky-Golay filter                                                                                 |
-
-
-This function flags plateaus/series of constant values in soil moisture data.
-
-The function represents a stricter version of
-[constants_flagVarianceBased](docs/funcs/ConstantDetection.md#constants_flagvariancebased).
-The additional constraints (3)-(5) are designed to match the special cases of constant
-values in soil moisture measurements, namely plateaus following precipitation events
-(conditions (3) and (4)) and plateaus at a certain level (condition (5)).
-
-Any set of consecutive values
-$`x_k,..., x_{k+n}`$, of a time series $`x`$ is flagged, if:
-
-1. $`n > `$`window`
-2. $`\sigma(x_k, x_{k+1},..., x_{k+n}) < `$`thresh`
-3. $`\max(x'_{k-n-s}, x'_{k-n-s+1},..., x'_{k-n+s}) \geq`$ `deriv_min`, with $`s`$ denoting periods per `precipitation_window`
-4. $`\min(x'_{k-n-s}, x'_{k-n-s+1},..., x'_{k-n+s}) \leq`$ `deriv_max`, with $`s`$ denoting periods per `precipitation_window`
-5. $`\mu(x_k, x_{k+1},..., x_{k+n}) \le \max(x) \cdot`$ `tolerance`
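-
-A minimal pandas sketch of the variance part of the test, i.e. conditions (1) and (2)
-above only (illustration, not saqc's implementation; the helper name `constant_candidates`
-is made up and the precipitation related conditions (3)-(5) are omitted):
-
-```python
-import pandas as pd
-
-def constant_candidates(sm: pd.Series, window: str = "12h", thresh: float = 0.0005) -> pd.Series:
-    """True wherever the standard deviation over the trailing `window` stays below `thresh`."""
-    return sm.rolling(window, min_periods=2).std() < thresh
-```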
-
-NOTE:
-- The time series is expected to be harmonized to an
-  [equidistant frequency grid](docs/funcs/TimeSeriesHarmonization.md)
-
-This function is based on [1] and all default parameter values are taken from this publication.
-
-[1] Dorigo, W. et al: Global Automated Quality Control of In Situ Soil Moisture Data
-    from the International Soil Moisture Network. 2013. Vadose Zone J.
-    doi:10.2136/vzj2012.0097.
-
-
-## sm_flagFrost
-
-```
-sm_flagFrost(soil_temp_variable, window="1h", frost_thresh=0)
-```
-
-| parameter          | data type                                                     | default value | description                                                   |
-|--------------------|---------------------------------------------------------------|---------------|---------------------------------------------------------------|
-| soil_temp_variable | string                                                        |               | Name of the soil temperature variable in the dataset          |
-| window             | [offset string](docs/ParameterDescriptions.md#offset-strings) | `"1h"`        | Window around a value checked for frost events                |
-| frost_thresh       | float                                                         | `0`           | Soil temperatures below `frost_thresh` are considered frost   |
-
-
-This function flags soil moisture values if the soil temperature
-(given in `soil_temp_variable`) drops below `frost_thresh`
-within a period of +/- `window`.
-
-This function is an implementation of the soil temperature based flagging
-presented in [1] and all default parameter values are taken from this
-publication.
-
-[1] Dorigo, W. et al: Global Automated Quality Control of In Situ Soil Moisture Data
-    from the International Soil Moisture Network. 2013. Vadose Zone J.
-    doi:10.2136/vzj2012.0097.
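-
-A rough pandas sketch of the frost check (illustration only, not saqc's implementation;
-the helper name `frost_mask` is made up and flag propagation is omitted):
-
-```python
-import pandas as pd
-
-def frost_mask(soil_temp: pd.Series, window: str = "1h", frost_thresh: float = 0.0) -> pd.Series:
-    """True wherever the soil temperature drops below frost_thresh within +/- window."""
-    # centered rolling window of total size 2 * window (requires pandas >= 1.3)
-    total = pd.Timedelta(window) * 2
-    return soil_temp.rolling(total, center=True, min_periods=1).min() < frost_thresh
-
-# soil moisture values at timestamps where frost_mask is True would be flagged
-```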
-
-
-## sm_flagPrecipitation
-
-```
-sm_flagPrecipitation(prec_variable,
-                     raise_window=None,
-                     sensor_depth=0, sensor_accuracy=0,
-                     soil_porosity=0,
-                     std_factor=2, std_window="24h",
-                     ignore_missing=False)
-```
-
-| parameter       | data type                                                     | default value | description                                                               |
-|-----------------|---------------------------------------------------------------|---------------|---------------------------------------------------------------------------|
-| prec_variable   | string                                                        |               | Name of the precipitation variable in the dataset                         |
-| raise_window    | [offset string](docs/ParameterDescriptions.md#offset-strings) | `None`        | Duration during which a rise has to occur                                 |
-| sensor_depth    | float                                                         | `0`           | Depth of the soil moisture sensor in meter                                |
-| sensor_accuracy | float                                                         | `0`           | Soil moisture sensor accuracy in $`m^3 / m^3`$                            |
-| soil_porosity   | float                                                         | `0`           | Porosity of the soil surrounding the soil moisture sensor                 |
-| std_factor      | integer                                                       | `2`           | See condition (2)                                                         |
-| std_window      | [offset string](docs/ParameterDescriptions.md#offset-strings) | `"24h"`       | See condition (2)                                                         |
-| ignore_missing  | bool                                                          | `False`       | Whether to check values even if there is invalid data within `std_window` |
-
-
-This function flags rises in soil moisture data if there are no sufficiently large
-precipitation events in the preceding 24 hours.
-
-A data point $`x_k`$ of a time series $`x`$ with sampling rate $`f`$
-is flagged, if:
-
-1. $`x_k`$ represents a rise in soil moisture, i.e. for
-   $`s = `$ (`raise_window` / $`f`$):
-
-   $`x_k > x_{k-s}`$
-
-2. The rise is sufficiently large and exceeds a threshold based on the 
-   standard deviation $`\sigma`$ of the values in the preceding `std_window`,
-   i.e. the following condition is fulfilled for $`h = `$ `std_window` / $`f`$:
-
-   $`x_k - x_{k-s} >`$ `std_factor` $`\cdot \sigma(x_{k-h},...,x_{k})`$
-
-3. The total amount of precipitation within the last 24 hours does not exceed
-   a certain threshold, i.e. with $`j = `$ "24h" /  $`f`$ the following 
-   condition is fulfilled:
-
-   $` y_{k-j} + y_{k-j+1} + ... + y_{k} \le `$ `sensor_depth` $`\cdot`$ `sensor_accuracy` $`\cdot`$ `soil_porosity`
-   
-
-This function is an implementation of the precipitation based flagging
-presented in [1] and all default parameter values are taken from this
-publication.
-
-[1] Dorigo, W. et al: Global Automated Quality Control of In Situ Soil Moisture Data
-    from the International Soil Moisture Network. 2013. Vadose Zone J.
-    doi:10.2136/vzj2012.0097.
-
-
-## sm_flagRandomForest
-
-```
-sm_flagRandomForest(references, window_values, window_flags, path)
-```
-
-| parameter     | data type                 | default value | description                                                                                                                       |
-|---------------|---------------------------|---------------|-----------------------------------------------------------------------------------------------------------------------------------|
-| references    | string or list of strings |               | the field names of the data series that should be used as reference variables                                                      |
-| window_values | integer                   |               | Window size that is used to derive the gradients of both the field- and reference-series inside the moving window                 |
-| window_flags  | integer                   |               | Window size that is used to count the surrounding automatic flags that have been set before                                       |
-| path          | string                    |               | Path to the respective model object, i.e. its name and the respective value of the grouping variable. e.g. "models/model_0.2.pkl" |
-
-
-This function uses pre-trained machine-learning model objects for flagging. 
-This requires training a model by use of the [training script](../ressources/machine_learning/train_machine_learning.py) provided. 
-For flagging, inputs to the model are the data of the variable of interest, 
-data of reference variables and the automatic flags that were assigned by other 
-tests inside SaQC. Therefore, this function should be executed after all other tests.
-Internally, context information for each point is gathered in the form of moving
-windows. The size of the moving windows for counting the surrounding
-automatic flags and for calculation of gradients in the data is specified by 
-the user during model training. For the model to work, the parameters 
-'references', 'window_values' and 'window_flags' have to be set to the same 
-values as during training. For a more detailed description of the modeling 
-approach see the [training script](ressources/machine_learning/train_machine_learning.py).
-
diff --git a/docs/funcs/SpikeDetection.md b/docs/funcs/SpikeDetection.md
deleted file mode 100644
index 006b30b263af438810fe4d42874b3841aeefabb2..0000000000000000000000000000000000000000
--- a/docs/funcs/SpikeDetection.md
+++ /dev/null
@@ -1,237 +0,0 @@
-# Spike Detection
-
-A collection of quality check routines to find spikes.
-
-## Index
-
-- [spikes_flagBasic](#spikes_flagbasic)
-- [spikes_flagMad](#spikes_flagmad)
-- [spikes_flagSlidingZscore](#spikes_flagslidingzscore)
-- [spikes_flagSpektrumBased](#spikes_flagspektrumbased)
-- [spikes_flagRaise](#spikes_flagraise)
-
-
-## spikes_flagBasic
-
-```
-spikes_flagBasic(thresh, tolerance, window)
-```
-
-| parameter | data type                                                     | default value | description                                                                                    |
-|-----------|---------------------------------------------------------------|---------------|------------------------------------------------------------------------------------------------|
-| thresh    | float                                                         |               | Minimum difference between two values, to consider the latter one as a spike. See condition (1) |
-| tolerance | float                                                         |               | Maximum difference between pre-spike and post-spike values. See condition (2)                  |
-| window    | [offset string](docs/ParameterDescriptions.md#offset-strings) |               | Maximum length of "spiky" value courses. See condition (3)                                     |
-
-A basic outlier test, that is designed to work for harmonized, as well as raw
-(not-harmonized) data.
-
-The values $`x_{n}, x_{n+1}, .... , x_{n+k} `$ of a time series $`x_t`$ with 
-timestamps $`t_i`$ are considered spikes, if:
-
-1. $`|x_{n-1} - x_{n+s}| > `$ `thresh`, $` s \in \{0,1,2,...,k\} `$
-
-2. $`|x_{n-1} - x_{n+k+1}| < `$ `tolerance`
-
-3. $` |t_{n-1} - t_{n+k+1}| < `$ `window`
-
-By this definition, spikes are values that, after a jump of margin `thresh` (1),
-keep that new value level for a time span smaller than
-`window` (3) and then return to the initial value level
-within a margin of `tolerance` (2).
-
-NOTE:
-This characterization of a "spike" not only includes single-value
-outliers, but also plateau-ish value courses.
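-
-A naive sketch of the three conditions with numpy/pandas (illustration only, not
-saqc's implementation; the helper name `basic_spikes` is made up and the sketch
-simply scans forward from every potential jump):
-
-```python
-import pandas as pd
-
-def basic_spikes(x: pd.Series, thresh: float, tolerance: float, window: str) -> pd.Series:
-    """Mask of values that jump by more than `thresh`, stay on that level for less
-    than `window` and then return to within `tolerance` of the pre-jump level."""
-    flags = pd.Series(False, index=x.index)
-    win = pd.Timedelta(window)
-    vals, times = x.to_numpy(), x.index
-    for n in range(1, len(x) - 1):
-        if abs(vals[n] - vals[n - 1]) <= thresh:           # condition (1) for s = 0
-            continue
-        for end in range(n + 1, len(x)):
-            if times[end] - times[n - 1] >= win:           # condition (3) violated
-                break
-            if abs(vals[end] - vals[n - 1]) < tolerance:   # condition (2): course returned
-                flags.iloc[n:end] = True
-                break
-            if abs(vals[end] - vals[n - 1]) <= thresh:     # condition (1) violated
-                break
-    return flags
-```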
-
-
-## spikes_flagMad
-
-```
-spikes_flagMad(window, z=3.5)
-```
-
-| parameter | data type                                                             | default value | description                                                          |
-|-----------|-----------------------------------------------------------------------|---------------|----------------------------------------------------------------------|
-| window    | integer/[offset string](docs/ParameterDescriptions.md#offset-strings) |               | size of the sliding window the modified Z-score is applied on |
-| z         | float                                                                 | `3.5`         | z-parameter of the modified Z-score                                  |
-
-This function flags outliers using the simple median absolute deviation test.
-
-Values are flagged if they fulfill the following condition within a sliding window:
-
-```math
- 0.6745 * |x - m| > mad * z > 0
-```
-
-where $`x`$ denotes the window data, $`m`$ the window median, $`mad`$ the median
-absolute deviation and $`z`$ the $`z`$-parameter of the modified Z-Score.
-
-The window is moved by one time stamp at a time.
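-
-A minimal rolling sketch of the test (illustration only, not saqc's implementation;
-the helper name `mad_mask` is made up and the condition is evaluated for the newest
-value of each window):
-
-```python
-import numpy as np
-import pandas as pd
-
-def mad_mask(x: pd.Series, window: str = "1D", z: float = 3.5) -> pd.Series:
-    """True where the modified Z-score condition holds for the newest window value."""
-    def check(win: np.ndarray) -> float:
-        m = np.median(win)
-        mad = np.median(np.abs(win - m))
-        return float(mad > 0 and 0.6745 * abs(win[-1] - m) > mad * z)
-
-    return x.rolling(window).apply(check, raw=True).fillna(0).astype(bool)
-```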
-
-NOTE:
-This function should only be applied on normalized data.
-
-References:
-[1] https://www.itl.nist.gov/div898/handbook/eda/section3/eda35h.htm
-
-
-## spikes_flagSlidingZscore
-
-```
-spikes_flagSlidingZscore(window, offset, count=1, polydeg=1, z=3.5, method="modZ")
-```
-
-| parameter | data type                                                             | default value | description                                                 |
-|-----------|-----------------------------------------------------------------------|---------------|-------------------------------------------------------------|
-| window    | integer/[offset string](docs/ParameterDescriptions.md#offset-strings) |               | size of the sliding window                                  |
-| offset    | integer/[offset string](docs/ParameterDescriptions.md#offset-strings) |               | offset between two consecutive windows                      |
-| count     | integer                                                               | `1`           | the minimal count a possible outlier needs, to be flagged   |
-| polydeg   | integer                                                               | `1`           | the degree of the polynomial fit used to calculate the residual |
-| z         | float                                                                 | `3.5`         | z-parameter for the *method* (see description)              |
-| method    | [string](#outlier-detection-methods)                                  | `"modZ"`      | the method to detect outliers                               |
-
-This function flags spikes using the given method within sliding windows.
-
-NOTE:
- - `window` and `offset` must be of same type, mixing of offset- and integer-
-    based windows is not supported and will fail
- - offset-strings only work with time-series-like data
-
-The algorithm works as follows:
-  1.  a window of size `window` is cut from the data
-  2.  normalization - the data is fit by a polynomial of the given degree `polydeg`, which is subtracted from the data
-  3.  the outlier detection `method` is applied on the residual, possible outliers are marked
-  4.  the window (on the data) is moved by `offset`
-  5.  start over from 1. until the end of data is reached
-  6.  all potential outliers that are detected `count`-many times are flagged as outliers
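-
-A compact numpy sketch of this scheme (illustration only, not saqc's implementation;
-the helper name `sliding_modz` is made up and the sketch assumes integer `window`/`offset`
-values and NaN-free data):
-
-```python
-import numpy as np
-
-def sliding_modz(values, window=100, offset=50, polydeg=1, z=3.5, count=1):
-    """Mark values that the modified Z-score flags in at least `count` windows."""
-    values = np.asarray(values, dtype=float)
-    hits = np.zeros(len(values), dtype=int)
-    for start in range(0, len(values), offset):
-        chunk = values[start:start + window]                 # step 1: cut a window
-        if len(chunk) <= polydeg:
-            break
-        t = np.arange(len(chunk))
-        fit = np.polyval(np.polyfit(t, chunk, polydeg), t)   # step 2: detrend
-        r = chunk - fit
-        m = np.median(r)
-        mad = np.median(np.abs(r - m))
-        if mad > 0:                                          # step 3: modified Z-score
-            hits[start:start + window] += 0.6745 * np.abs(r - m) > mad * z
-    return hits >= count                                      # step 6: count threshold
-```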
-
-### Outlier Detection Methods
-Currently two outlier detection methods are implemented:
-
-1. `"zscore"`: The Z-score marks every value as a possible outlier, which fulfills the following condition:
-
-   ```math
-    |r - m| > s * z
-   ```
-   where $`r`$ denotes the residual, $`m`$ the residual mean, $`s`$ the residual
-   standard deviation, and $`z`$ the $`z`$-parameter.
-
-2. `"modZ"`: The modified Z-score Marks every value as a possible outlier, which fulfills the following condition:
-
-   ```math
-    0.6745 * |r - m| > mad * z > 0
-   ```
-
-   where $`r`$ denotes the residual, $`m`$ the residual mean, $`mad`$ the residual median absolute
-   deviation, and $`z`$ the $`z`$-parameter.
-
-### References
-[1] https://www.itl.nist.gov/div898/handbook/eda/section3/eda35h.htm
-
-
-## spikes_flagSpektrumBased
-
-```
-spikes_flagSpektrumBased(raise_factor=0.15, deriv_factor=0.2,
-                         noise_func="CoVar", noise_window="12h", noise_thresh=1, 
-                         smooth_window=None, smooth_poly_deg=2)
-```
-
-| parameter       | data type                                                     | default value | description                                                                                                                                                |
-|-----------------|---------------------------------------------------------------|---------------|------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| raise_factor    | float                                                         | `0.15`        | Minimum relative value difference between two values to consider the latter as a spike candidate. See condition (1)                                        |
-| deriv_factor    | float                                                         | `0.2`         | See condition (2)                                                                                                                                          |
-| noise_func      | [string](#noise-detection-functions)                          | `"CoVar"`     | Function to calculate noisiness of the data surrounding potential spikes                                                                                   |
-| noise_window    | [offset string](docs/ParameterDescriptions.md#offset-strings) | `"12h"`       | Determines the range of the time window of the "surrounding" data of a potential spike. See condition (3)                                                  |
-| noise_thresh    | float                                                         | `1`           | Upper threshold for noisiness of data surrounding potential spikes. See condition (3)                                                                      |
-| smooth_window   | [offset string](docs/ParameterDescriptions.md#offset-strings) | `None`        | Size of the smoothing window of the Savitsky-Golay filter. The default value `None` results in a window of two times the sampling rate (i.e. three values) |
-| smooth_poly_deg | integer                                                       | `2`           | Degree of the polynomial used for fitting with the Savitsky-Golay filter                                                                                   |
-
-
-The function flags spikes by evaluating the time series' derivatives
-and applying various conditions to them.
-
-The value $`x_{k}`$ of a time series $`x_t`$ with
-timestamps $`t_i`$ is considered a spike, if:
-
-
-1. The quotient to its preceding data point exceeds a certain bound:
-    * $` |\frac{x_k}{x_{k-1}}| > 1 + `$ `raise_factor`, or
-    * $` |\frac{x_k}{x_{k-1}}| < 1 - `$ `raise_factor`
-2. The quotient of the second derivative $`x''`$, at the preceding
-   and subsequent timestamps is close enough to 1:
-    * $` |\frac{x''_{k-1}}{x''_{k+1}} | > 1 - `$ `deriv_factor`, and
-    * $` |\frac{x''_{k-1}}{x''_{k+1}} | < 1 + `$ `deriv_factor`
-3. The dataset $`X = x_i, ..., x_{k-1}, x_{k+1}, ..., x_j`$, with 
-   $`|t_{k-1} - t_i| = |t_j - t_{k+1}| =`$ `noise_window` fulfills the 
-   following condition: 
-   `noise_func`$`(X) <`$ `noise_thresh`
-   
-NOTE:
-- The dataset is supposed to be harmonized to a time series with an equidistant frequency grid
-- The derivative is calculated after applying a Savitsky-Golay filter to $`x`$
-
-  This function is a generalization of the Spectrum based Spike flagging
-  mechanism presented in [1]
-
-### Noise Detection Functions
-Currently two different noise detection functions are implemented:
-- `"CoVar"`: Coefficient of Variation
-- `"rVar"`: relative Variance
-
-
-### References
-[1] Dorigo, W. et al: Global Automated Quality Control of In Situ Soil Moisture
-    Data from the International Soil Moisture Network. 2013. Vadose Zone J.
-    doi:10.2136/vzj2012.0097.
-    
-## spikes_flagRaise
-
-
-```
-spikes_flagRaise(thresh, raise_window, intended_freq, average_window=None, 
-                 mean_raise_factor=2, min_slope=None, min_slope_weight=0.8, 
-                 numba_boost=True)
-```
-
-| parameter         | data type                                                     | default value | description                                                                                                                                                                 |
-|-------------------|---------------------------------------------------------------|---------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| thresh            | float                                                         |               | The threshold for the total rise (`thresh` $` > 0 `$) or total drop (`thresh` $` < 0 `$) that value courses must not exceed within a timespan of length `raise_window` |
-| raise_window      | [offset string](docs/ParameterDescriptions.md#offset-strings) |               | The timespan the rise/drop thresholding refers to. The window is inclusively defined.                                                                       |
-| intended_freq     | [offset string](docs/ParameterDescriptions.md#offset-strings) |               | The frequency the time series to be flagged is supposed to be sampled at. The window is inclusively defined.                                                |
-| average_window    | [offset string](docs/ParameterDescriptions.md#offset-strings) | `None`        | See condition (2) below. Window is inclusively defined. The window defaults to 1.5 times the size of `raise_window`                                                         |
-| mean_raise_factor | float                                                         | `2`           | See condition (2) below.                                                                                                                                                    |
-| min_slope         | float                                                         | `None`        | See condition (3)                                                                                                                                                           |
-| min_slope_weight  | integer                                                       | `0.8`         | See condition (3)                                                                                                                                                           |
-
-The function flags rises and drops in value courses that exceed the threshold
-given by `thresh` within a timespan shorter than, or equal to, the time window
-given by `raise_window`.
-
-Whether rises or drops are flagged is controlled by the sign of `thresh`
-(positive: rises, negative: drops).
-
-The variety of parameters is owed to the intriguing
-case of values that "return" from outlier-like or anomalous value levels and
-thus exceed the threshold, while actually being usual values.
-
-The value $`x_{k}`$ of a time series $`x`$ with associated
-timestamps $`t_i`$ is flagged as a rise, if:
-
-1. There is any value $`x_{s}`$ preceding $`x_{k}`$ within `raise_window` range, so that:
-    * $` M = |x_k - x_s | > `$ `thresh` $` > 0`$
-2. The weighted average $`\mu^*`$ of the values preceding $`x_{k}`$ within `average_window` range indicates that $`x_{k}`$ does not return from an outlier-like value course, meaning that:
-    * $` x_k > \mu^* + ( M `$ / `mean_raise_factor` $`)`$
-3. Additionally, if `min_slope` is not `None`, $`x_{k}`$ is checked for being sufficiently divergent from its direct predecessor $`x_{k-1}`$, i.e. it is additionally checked if:
-    * $`x_k - x_{k-1} > `$ `min_slope`
-    * $`t_k - t_{k-1} > `$ `min_slope_weight` $`\cdot`$ `intended_freq`
-
-The weighted average $`\mu^*`$ is calculated with weights $`w_{i}`$, defined by:
-* $`w_{i} = (t_i - t_{i-1})`$ / `intended_freq`, if $`(t_i - t_{i-1}) <`$ `intended_freq` and $`w_i = 1`$ otherwise.
-
-Time gap weights and slope weights are applied to account for the case of a non-harmonized time series.
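-
-A small pandas sketch of the time-gap weighted mean $`\mu^*`$ (illustration only,
-not saqc's implementation; the helper name `weighted_mean` is made up):
-
-```python
-import pandas as pd
-
-def weighted_mean(values: pd.Series, intended_freq: str = "10Min") -> float:
-    """Weighted mean with w_i = gap_i / intended_freq, capped at 1 (first weight is 1)."""
-    freq = pd.Timedelta(intended_freq)
-    gaps = values.index.to_series().diff()
-    w = (gaps / freq).clip(upper=1.0).fillna(1.0)
-    return float((values * w).sum() / w.sum())
-```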
-
-NOTE:
-- The dataset is NOT supposed to be harmonized to a time series with an 
-  equidistant frequency grid
diff --git a/docs/funcs/TimeSeriesHarmonization.md b/docs/funcs/TimeSeriesHarmonization.md
deleted file mode 100644
index a86e0d5b7ad3e8753f6d87b7c6f538816e084d5b..0000000000000000000000000000000000000000
--- a/docs/funcs/TimeSeriesHarmonization.md
+++ /dev/null
@@ -1,314 +0,0 @@
-# Time Series Harmonization
-
-A collection of functions to harmonize time series.
-
-## Index
-
-- [harm_shift2Grid](#harm_shift2grid)
-- [harm_aggregate2Grid](#harm_aggregate2grid)
-- [harm_linear2Grid](#harm_linear2grid)
-- [harm_interpolate2Grid](#harm_interpolate2grid)
-- [harm_downsample](#harm_downsample)
-- [harm_harmonize](#harm_harmonize)
-- [harm_deharmonize](#harm_deharmonize)
-
-
-## harm_shift2grid
-
-```
-harm_shift2Grid(freq, method='nshift')
-```
-| parameter | data type                                                     | default value | description                           |
-|-----------|---------------------------------------------------------------|---------------|---------------------------------------|
-| freq      | [offset string](docs/ParameterDescriptions.md#offset-strings) |               | Frequency of the target grid          |
-| method    | [method string](#shift-methods)                               | `"nshift"`    | Method used to shift values and flags |
-
-
-The function "harmonizes" a time series to an equidistant frequency
-grid by shifting data points to multiples of `freq`.
-
-This process includes:
-
-1. All missing values in the data set get [flagged](docs/funcs/Miscellaneous.md#flagmissing).
-   These values will be excluded from the shifting process.
-2. Depending on the `method`, the data points and the associated
-   flags will be assigned to a timestamp in the target grid
-   
-NOTE:
-- The data will be projected to a regular grid ranging from
-  the first to the last timestamp of the original time series
-- Because of the above, the size of the harmonized time series
-  is likely to differ from the size of the original series
-- Data from the original time series might be dropped 
-  (e.g. if there are multiple candidates for a shift, only 
-  one is used), but can be restored by [harm_deharmonize](#harm_deharmonize)
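-
-The gist of the `"nshift"` projection can be sketched with plain pandas (illustration
-only, not saqc's implementation; the helper name `shift_to_grid` is made up and the
-flag handling described above is omitted):
-
-```python
-import pandas as pd
-
-def shift_to_grid(s: pd.Series, freq: str = "15Min") -> pd.Series:
-    """Assign each grid point the closest original value within half a grid interval."""
-    # assumes a sorted, unique datetime index
-    grid = pd.date_range(s.index[0].floor(freq), s.index[-1].ceil(freq), freq=freq)
-    return s.reindex(grid, method="nearest", tolerance=pd.Timedelta(freq) / 2)
-```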
-
-## harm_aggregate2grid
-
-```
-harm_aggregate2Grid(freq, value_func, flag_func="max", method='nagg')
-```
-| parameter  | data type                                                     | default value | description                                     |
-|------------|---------------------------------------------------------------|---------------|-------------------------------------------------|
-| freq       | [offset string](docs/ParameterDescriptions.md#offset-strings) |               | Frequency of the target grid                    |
-| value_func | [function string](#aggregation-functions)                     |               | Function used for data aggregation              |
-| flag_func  | [function string](#aggregation-functions)                     | `"max"`       | Function used for flags aggregation             |
-| method     | [method string](#aggregation-methods)                         | `"nagg"`      | Method used to assign values to the target grid |
-
-
-The function "harmonizes" a time series to an equidistant frequency grid
-by aggregating data points to multiples of `freq` using the `method`.
-
-This process includes:
-
-1. All missing values in the data set get [flagged](docs/funcs/Miscellaneous.md#flagmissing).
-   These values will be excluded from the aggregation process
-2. Values and flags will be aggregated by `value_func` and `flag_func` respectively
-3. Depending on the `method`, the aggregation results will be assigned to a timestamp
-   in the target grid
-
-NOTE:
-- The data will be projected to a regular grid ranging from
-  the first to the last timestamp of the original time series
-- Because of the above, the size of the harmonized time series
-  is likely to differ from the size of the original series
-- Newly introduced intervals not covering any data in the original
-  dataset will be treated as missing data
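-
-For orientation, a plain pandas sketch of a `"fagg"`-style aggregation (illustration
-only, not saqc's implementation; the helper name `aggregate_to_grid` is made up and
-flags aggregation is omitted):
-
-```python
-import pandas as pd
-
-def aggregate_to_grid(s: pd.Series, freq: str = "15Min", value_func: str = "mean") -> pd.Series:
-    """Aggregate all values in (t - freq, t] and assign the result to grid point t."""
-    return s.resample(freq, closed="right", label="right").agg(value_func)
-```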
-
-
-## harm_linear2grid
-
-```
-harm_linear2Grid(freq, method='nagg', func="max")
-```
-
-| parameter | data type                                                                 | default value | description                                       |
-|-----------|---------------------------------------------------------------------------|---------------|---------------------------------------------------|
-| freq      | [offset string](docs/ParameterDescriptions.md#offset-strings)             |               | Frequency of the target grid                      |
-| method    | [shift](#shift-methods)/[aggregation](#aggregation-methods) method string | `"nagg"`      | Method used to propagate flags to the target grid |
-| func      | [function string](#aggregation-functions)                                 | `"max"`       | Function used for flags aggregation               |
-
-
-The function "harmonizes" a time series to an equidistant frequency grid
-by linear interpolation of data points to multiples of `freq`.
-
-This process includes:
-
-1. All missing values in the data set get [flagged](docs/funcs/Miscellaneous.md#flagmissing).
-   These values will be excluded from the aggregation process
-2. Linear interpolation. This is not a gap filling algorithm; only target grid points
-   that are surrounded by valid data points in the original data set within a range
-   of `freq` will be calculated.
-3. Depending on the `method`, the original flags get shifted
-   or aggregated with `func` to the target grid
-
-
-NOTE:
-- Newly introduced intervals not covering any data in the original
-  dataset will be treated as missing data
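-
-A plain pandas sketch of the interpolation step (illustration only, not saqc's
-implementation; the helper name `linear_to_grid` is made up, interpolation is not
-restricted to grid points with valid neighbours within one `freq`, and flag handling
-is omitted):
-
-```python
-import pandas as pd
-
-def linear_to_grid(s: pd.Series, freq: str = "15Min") -> pd.Series:
-    """Linearly interpolate the series onto a regular grid of frequency `freq`."""
-    grid = pd.date_range(s.index[0].ceil(freq), s.index[-1].floor(freq), freq=freq)
-    return s.reindex(s.index.union(grid)).interpolate(method="time").reindex(grid)
-```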
-
-
-## harm_interpolate2grid
-
-```
-harm_interpolate2Grid(freq,
-                      method, order=1,
-                      flag_method='nagg', flag_func="max")
-```
-| parameter   | data type                                                                 | default value | description                                                             |
-|-------------|---------------------------------------------------------------------------|---------------|-------------------------------------------------------------------------|
-| freq        | [offset string](docs/ParameterDescriptions.md#offset-strings)             |               | Frequency of the target grid                                            |
-| method      | [interpolation method string](#interpolation-methods)                     |               | Interpolation method                                                    |
-| order       | integer                                                                   | `1`           | Order of the interpolation, only relevant if applicable in the `method` |
-| flag_method | [shift](#shift-methods)/[aggregation](#aggregation-methods) method string | `"nagg"`      | Method used to propagate flags to the target grid                       |
-| flag_func   | [function string](#aggregation-functions)                                 | `"max"`       | Function used for flags aggregation                                     |
-
-
-The function "harmonizes" a time series to an equidistant frequency grid
-by interpolation of data points to multiples of `freq`.
-
-This process includes:
-
-1. All missing values in the data set get [flagged](docs/funcs/Miscellaneous.md#flagmissing).
-   These values will be excluded from the aggregation process
-2. Interpolation with `method`. This is not a gap filling algorithm;
-   only target grid points that are surrounded by valid data points in the original
-   data set within a range of `freq` will be calculated.
-3. Depending on the `flag_method`, the original flags get shifted
-   or aggregated with `flag_func` to the target grid
-
-NOTE:
-- Newly introduced intervals not covering any data in the original
-  dataset will be treated as missing data
-- We recommend `harm_shift2Grid` over the `method`s
-  `nearest` and `pad`
-
-
-## harm_downsample
-
-```
-harm_downsample(sample_freq, agg_freq,
-                sample_func="mean", agg_func="mean",
-                max_invalid=None)
-```
-| parameter   | data type                                                     | default value | description                                                                                                                    |
-|-------------|---------------------------------------------------------------|---------------|--------------------------------------------------------------------------------------------------------------------------------|
-| sample_freq | [offset string](docs/ParameterDescriptions.md#offset-strings) |               | Frequency of the source grid                                                                                                   |
-| agg_freq    | [offset string](docs/ParameterDescriptions.md#offset-strings) |               | Frequency of the target grid                                                                                                   |
-| sample_func | [function string](#aggregation-function)                      | `"mean"`      | Function used to aggregate data to `sample_freq`. If `None` the data is expected to have a frequency of `sample_freq`          |
-| agg_func    | [function string](#aggregation-function)                      | `"mean"`      | Function used to aggregate data from `sample_freq` to `agg_freq`                                                               |
-| max_invalid | integer                                                       | `None`        | If the number of invalid data points (missing/flagged) within an aggregation interval exceeds `max_invalid` it is set to `NAN` |
-
-The function downsamples a time series from its `sample_freq` to the lower
-sampling rate `agg_freq`, by aggregation with `agg_func`.
-
-If a `sample_func` is given, the data will be aggregated to `sample_freq`
-before downsampling.
-
-NOTE:
-- Although the function is a wrapper around `harm_harmonize`, the deharmonization of "true"
-  downsamples (`sample_freq` < `agg_freq`) is not supported yet.
-
-
-## harm_harmonize
-
-```
-harm_harmonize(freq, inter_method, reshape_method, inter_agg="mean", inter_order=1,
-               inter_downcast=False, reshape_agg="max", reshape_missing_flag=None,
-               reshape_shift_comment=False, data_missing_value=np.nan)
-```
-
-| parameter             | data type                                                                                                         | default value | description                                                                                                                                                                                                                                   |
-|-----------------------|-------------------------------------------------------------------------------------------------------------------|---------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| freq                  | [offset string](docs/ParameterDescriptions.md#offset-strings)                                                     |               | Frequency of the target grid                                                                                                                                                                                                                  |
-| inter_method          | [shift](#shift-methods)/[aggregation](#aggregation-methods)/[interpolation](#interpolation-methods) method string |               | Method used to project values to the target grid                                                                                                                                                                                              |
-| reshape_method        | [shift](#shift-methods)/[aggregation](#aggregation-methods) method string                                         |               | Method used to project flags to the target grid                                                                                                                                                                                               |
-| inter_agg             | [aggregation function string](#aggregation-functions)                                                             | `"mean"`      | Function used for aggregation, if an `inter_method` is given                                                                                                                                                                                  |
-| inter_order           | int                                                                                                               | `1`           | The order of interpolation applied, if an `inter_method` is given                                                                                                                                                                             |
-| inter_downcast        | bool                                                                                                              | `False`       | `True`: Decrease the interpolation order for data chunks that are too short to be interpolated with order `inter_order`. <br/> `False`: Project those data chunks to `NAN`. <br/> Option only relevant if `inter_method` supports an `inter_order` |
-| reshape_agg           | [aggregation function string](#aggregation-functions)                                                             | `"max"`       | Function used for the aggregation of flags. By default (`"max"`) the worst/highest flag is assigned                                                                                                                                           |
-| reshape_missing_flag  | string                                                                                                            | `None`        | Valid flag value, that will be used for empty harmonization intervals. By default (`None`) such intervals are set to `BAD`                                                                                                                    |
-| reshape_shift_comment | bool                                                                                                              | `False`       | `True`: Shifted flags will be reset, other fields associated with a flag might get lost. <br/> `False`: Shifted flags will not be reset. <br/> <br/> Only relevant for multi-column flagger and a given `inter_method`                        |
-| data_missing_value    | Any                                                                                                               | `np.nan`      | The value, indicating missing data                                                                                                                                                                                                            |
-
-
-The function "harmonizes" a time series to an equidistant frequency grid.
-In general this includes projection and/or interpolation of the data to
-timestamps, that are multiples of `freq`.
-
-This process includes:
-
-1. All missing values equal to `data_missing_value` in the data set
-   get [flagged](docs/funcs/Miscellaneous.md#flagmissing).
-   These values will be excluded from the aggregation process
-2. Values will be calculated according to the given `inter_method`
-3. Flags will be calculated according to the given `reshape_method`
-
-NOTE:
-- The data will be projected to a regular grid ranging from
-  the first to the last timestamp of the original time series
-- Because of the above, the size of the harmonized time series
-  is likely to differ from the size of the original series
-- Newly introduced intervals not covering any data in the original
-  dataset will be set to `data_missing_value` and `reshape_missing_flag`
-  respectively
-- Data from the original time series might be dropped, but can
-  be restored by [harm_deharmonize](#harm_deharmonize)
-- Flags calculated on the new harmonized data set can be projected
-  to the original grid by [harm_deharmonize](#harm_deharmonize)
-
-
-## harm_deharmonize
-
-```
-harm_deharmonize(co_flagging=False)
-```
-
-| parameter   | data type | default value | description                                                    |
-|-------------|-----------|---------------|----------------------------------------------------------------|
-| co_flagging | boolean   | `False`       | Controls the behaviour of the flag reprojection, see description |
-
-
-This function projects harmonized datasets back to their original timestamps
-and thereby restores the original data shape.
-
-A combination of calls to one of the `harm_*` functions and `harm_deharmonize`
-allows leveraging information from data sets with differing timestamps/frequencies
-and bringing the generated information back to the original dataset.
-
-`harm_deharmonize` will implicitly revert the methods and functions applied during
-harmonization. I.e.:
-- The harmonized time series will be dropped in favor of the original one
-- Flags are projected to the original timestamps if they are 'worse'/higher
-  than the original. The direction of this projection is inverse to the
-  shift/aggregation direction in `harm_*`, i.e. a forward shift in
-  `harm_*` will result in a backward shift in `harm_deharmonize` and vice
-  versa.
-- The projection behavior is controlled by the value of `co_flagging`:
-  + `False`: Project a flag from the harmonized time series to a single 
-     flag in the deharmonized data set
-  + `True`: Project a flag in the harmonized time series to all flags 
-     in the respective projection interval.
-     
-  Let's say during harmonization a dataset was aggregated to a lower 
-  frequency (e.g. a time series with a frequency of 10 minutes was 
-  resampled to one with a frequency of 1 hour) and needs to be deharmonized.
-  If `co_flagging` is `True`, the flags from the harmonized dataset
-  will be projected to all six values within the aggregation period;
-  if `co_flagging` is `False`, only the next/last/nearest value in the
-  deharmonized dataset will inherit the flag from the harmonized
-  time series.
-  
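-As a purely illustrative sketch of the two projection modes (the series and
-flag values are made up, this is not the saqc implementation), projecting one
-hourly flag back onto six 10-minute timestamps:
-
-```
-import numpy as np
-import pandas as pd
-
-UNFLAGGED, BAD = -np.inf, 255.0
-
-# six 10 minute timestamps that were aggregated into one hourly value
-orig_idx = pd.date_range("2020-01-01 00:10", periods=6, freq="10min")
-hourly_flags = pd.Series([BAD], index=[pd.Timestamp("2020-01-01 01:00")])
-
-# co_flagging=True: every value in the aggregation period inherits the flag
-co_flagged = hourly_flags.reindex(orig_idx, method="bfill")
-
-# co_flagging=False: only the nearest original value inherits the flag
-nearest = hourly_flags.reindex(orig_idx, method="nearest",
-                               tolerance=pd.Timedelta("5min")).fillna(UNFLAGGED)
-```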
-
-## Parameter Descriptions
-
-### Aggregation Functions
-
-| keyword    | description                   |
-|------------|-------------------------------|
-| `"sum"`    | sum of the values             |
-| `"mean"`   | arithmetic mean of the values |
-| `"min"`    | minimum value                 |
-| `"max"`    | maximum value                 |
-| `"median"` | median of the values          |
-| `"first"`  | first value                   |
-| `"last"`   | last value                    |
-
-### Aggregation Methods
-
-| keyword  | description                                                       |
-|----------|-------------------------------------------------------------------|
-| `"fagg"` | aggregation result is propagated to the next target grid point    |
-| `"bagg"` | aggregation result is propagated to the last target grid point    |
-| `"nagg"` | aggregation result is propagated to the closest target grid point |
-
-
-### Shift Methods
-
-| keyword    | description                                                                    |
-|------------|--------------------------------------------------------------------------------|
-| `"fshift"` | propagate the last valid value/flag to the grid point or fill with `BAD`/`NAN` |
-| `"bshift"` | propagate the next valid value/flag to the grid point or fill with `BAD`/`NAN` |
-| `"nshift"` | propagate the closest value/flag to the grid point or fill with `BAD`/`NAN`    |
-
-
-### Interpolation Methods
-
-- All the `pandas.Series` [interpolation methods](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.Series.interpolate.html)
-  are supported
-- Available interpolations:
-  + `"linear"`
-  + `"time"`
-  + `"nearest"`
-  + `"zero"`
-  + `"slinear"`
-  + `"quadratic"`
-  + `"cubic"`
-  + `"spline"`
-  + `"barycentric"`
-  + `"polynomial"`
-  + `"krogh"`
-  + `"piecewise_polynomial"`
-  + `"spline"`
-  + `"pchip"`
-  + `"akima"`
diff --git a/requirements.txt b/requirements.txt
index 23e059f68308f73f7ac60e41726d1f791a686ce0..f1f8206244a79cf318b3a9dbc50407362262ae3d 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,35 +1,16 @@
-attrs==20.3.0
-Click==7.1.2
-cycler==0.10.0
-dios==0.6.0
+Click==8.0.1
 dtw==1.4.0
-kiwisolver==1.3.1
-llvmlite==0.35.0
-importlib-metadata==3.3.0
-joblib==1.0.0
-matplotlib==3.3.3
-mlxtend==0.18.0
-more-itertools==8.6.0
-numba==0.52.0
-numpy==1.19.4
-outlier==0.2
-utils==1.0.1
+hypothesis==6.23.1
+matplotlib==3.4.3
+mlxtend==0.19.0
+numba==0.54.0
+numpy==1.20.3
 outlier-utils==0.0.3
-packaging==20.8
-pandas==1.1.4
-pluggy==0.13.1
-pyparsing==2.4.7
-py==1.10.0
-pyarrow==1.0.1
+pyarrow==4.0.1
+pandas==1.3.3
+pytest==6.2.5
 pytest-lazy-fixture==0.6.3
-pytest==6.2.1
-python-dateutil==2.8.1
-python-intervals==1.10.0.post1
-pytz==2020.5
 PyWavelets==1.1.1
-zipp==3.4.0
-wcwidth==0.2.5
-scipy==1.6.0
-scikit-learn==0.23.2
-six==1.15.0
-astor==0.8.1
+scikit-learn==1.0
+scipy==1.7.1
+typing_extensions==3.10.0.2
diff --git a/ressources/data/config.csv b/ressources/data/config.csv
index e1e00c54e99c0e31b5a95c1ac759ac9063fe5fae..e23612fb9682a0a5e9344967f2b15f65f6d64ab3 100644
--- a/ressources/data/config.csv
+++ b/ressources/data/config.csv
@@ -1,6 +1,6 @@
-varname    ; test                                ; plot
-#----------;-------------------------------------;------
-SM2        ; harm_shift2Grid(freq="15Min")       ; False
-SM2        ; flagMissing(nodata=NAN)             ; False
-'SM(1|2)+' ; flagRange(min=10, max=60)           ; False
-SM2        ; spikes_flagMad(window="30d", z=3.5) ; True
+varname    ; test
+#----------;---------------------------------------
+SM2        ; shift(freq="15Min")
+SM2        ; flagMissing()
+'SM(1|2)+' ; flagRange(min=10, max=60)
+SM2        ; flagMAD(window="30d", z=3.5)
diff --git a/ressources/data/config_ci.csv b/ressources/data/config_ci.csv
index f631338ade105552e37c61d16ea72aab50dab106..e5b883102b8a541fc9fc89e6e1ffb7c8f4e1ca55 100644
--- a/ressources/data/config_ci.csv
+++ b/ressources/data/config_ci.csv
@@ -1,7 +1,8 @@
-varname;test;plot
-SM2;harm_shift2Grid(freq="15Min");False
-'.*';flagRange(min=10, max=60);False
-SM2;flagMissing(nodata=NAN);False
-SM2;flagRange(min=10, max=60);False
-SM2;spikes_flagMad(window="30d", z=3.5);False
-Dummy;flagGeneric(func=(isflagged(SM1) | isflagged(SM2)))
+varname ; test
+#-------; -----------------------------------------------------
+SM2     ; shift(freq="15Min")
+'.*'    ; flagRange(min=10, max=60)
+SM2     ; flagMissing()
+SM2     ; flagRange(min=10, max=60)
+SM2     ; flagMAD(window="30d", z=3.5)
+Dummy   ; genericFlag(func=(isflagged(SM1) | isflagged(SM2)))
diff --git a/ressources/images/cbooks_outliers_incidents.png b/ressources/images/cbooks_outliers_incidents.png
new file mode 100644
index 0000000000000000000000000000000000000000..ea4e0482bf93b543d2ec5d94ac30ef6c61140f7c
Binary files /dev/null and b/ressources/images/cbooks_outliers_incidents.png differ
diff --git a/ressources/machine_learning/data/soil_moisture_mwe.feather b/ressources/machine_learning/data/soil_moisture_mwe.feather
deleted file mode 100644
index b17f4e7371c2fdba0e7555b8e2c030634ed899f6..0000000000000000000000000000000000000000
--- a/ressources/machine_learning/data/soil_moisture_mwe.feather
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:d0abca6000efcb966bdb46800cf99acc6e925d3a8f8de8f13e3d53c8215e63eb
-size 13563392
diff --git a/ressources/machine_learning/models/testmodel_0.2.pkl b/ressources/machine_learning/models/testmodel_0.2.pkl
deleted file mode 100644
index 00b3bc956da02123ecdd6df7f13dc252b91b79b0..0000000000000000000000000000000000000000
--- a/ressources/machine_learning/models/testmodel_0.2.pkl
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9f661a9cb4a6092f31ca018e7e283dc1170a140d64c921e544ec902159289a2b
-size 29718964
diff --git a/ressources/machine_learning/train_machine_learning.py b/ressources/machine_learning/train_machine_learning.py
deleted file mode 100644
index 13f80d21e0cc7fd7096e1704913b603816de2f00..0000000000000000000000000000000000000000
--- a/ressources/machine_learning/train_machine_learning.py
+++ /dev/null
@@ -1,225 +0,0 @@
-import pandas as pd
-import numpy as np
-import random  # for random sampling of training/test
-from sklearn.ensemble import RandomForestClassifier
-from sklearn.metrics import recall_score, precision_score, classification_report
-import joblib  # for saving of model objects
-import os
-import time
-import datetime
-
-###--------------------
-### EXAMPLE PARAMETRIZATION:
-###--------------------
-
-# pd.options.mode.chained_assignment = None  # default='warn'
-# data = pd.read_feather("data/sm/02_data.feather")
-# data = data.reset_index()#data.index has to be reset as I use row nos only for indexing
-#
-# ### Reflagging
-# index_manual = data.Flag == "Manual"
-# data["FlagMan"] = index_manual.astype("int")# True/False as 0 or 1
-# index_auto = data.Flag.str.contains("Auto")
-# data["flag_bin"] = index_auto.astype("int")# True/False as 0 or 1
-#
-# field = "Target"
-# references = ["Var1","Var2"]
-# window_values = 20
-# window_flags = 20
-# modelname="name"
-# path = "models/"
-# sensor_field="SensorID"
-# group_field = "GroupVar"
-
-
-def trainML(
-    data,
-    field,
-    references,
-    sensor_field: str,
-    group_field: str,
-    window_values: int,
-    window_flags: int,
-    path: str,
-    modelname: str,
-    testratio: float,
-    **kwargs
-):
-
-    """This Function trains machine-learning models to reproduce manual flags that were set for a specific variable. Inputs to the training script are the timeseries of the
-    respective target variable at multiple sensors, the automatic flags that were assigned by SaQC as well as multiple reference series.
-    Internally, context information for each point is gathered in form of moving windows to improve the flagging algorithm. By default, the
-    information of the previous and preceeding timestep of each data point t are gathered: For the target and reference series, this refers to the gradient of t+/-1 with respect to t. For
-    the automatic flgs, this denotes whether an automatic flag was set at t+/-1.
-    Next, according to user inputs of window_flags and window_values, the number of flags
-    and the mean gradient within the specified moving windows is calculated, both for t+windowsize and t-windowsize. The moving window calculations are executed for each sensor, seperately,
-    and multiple models are trained, one for each level a grouping variable that can be defined by the user. The model objects that can be used for future flagging are stored
-    along with log-files that give information on the training process, e.g. models`accuracy on training and test. The algorithm used is randomForest at default parameters.
-    For usage of the model inside the SaQC-pipeline, see "machinelearning" in the function reference.
-
-
-    :param data:                        The pandas dataframe holding the data of the target variable at multiple sensors in long format, i.e. concatenated row-wise.
-                                        Along with this, there should be columns with the respective series of reference variables and a column of quality flags. The latter
-                                        should contain both automatic and manual flags.
-    :param field:                       Fieldname of the field in data that is to be flagged
-    :param references:                  A list of strings, denoting the fieldnames of the data series that should be used as reference variables
-    :param sensor_field:                A string denoting the fieldname of unique sensor-IDs
-    :param group_field:                 A string denoting the fieldname of the grouping variable. For each level of this variable, a separate model will be trained.
-    :param window_values:               An integer, denoting the window size that is used to derive the gradients of both the field- and reference-series inside the moving window
-    :param window_flags:                An integer, denoting the window size that is used to count the surrounding automatic flags that have been set before
-    :param path:                        A string denoting the path to the folder where the model objects along with log-files should be saved to
-    :param modelname:                   A string denoting the name of the model. The name is used for naming of the model objects as well as log-files. Naming will
-                                        be: 'modelname'_'value of group_field'.pkl
-    :param testratio:                   A float denoting the ratio of the test- vs. training-set to be drawn from the data, e.g. 0.3
-    """
-
-    def _refCalc(reference, window_values):
-        # Helper function for calculation of moving window values
-        outdata = dios.DictOfSeries()
-        name = reference.name
-        # derive gradients from reference series
-        outdata[name + "_Dt_1"] = reference - reference.shift(1)  # gradient t vs. t-1
-        outdata[name + "_Dt1"] = reference - reference.shift(-1)  # gradient t vs. t+1
-        # moving mean of gradients var1 and var2 before/after
-        outdata[name + "_Dt_" + str(window_values)] = (
-            outdata[name + "_Dt_1"].rolling(window_values, center=False).mean()
-        )  # mean gradient t to t-window
-        outdata[name + "_Dt" + str(window_values)] = (
-            outdata[name + "_Dt_1"].iloc[::-1].rolling(window_values, center=False).mean()[::-1]
-        )  # mean gradient t to t+window
-        return outdata
-
-    randomseed = 36
-    ### Prepare data, i.e. compute moving windows
-    print("Computing time-lags")
-    # save original row index for merging into original dataframe, as NAs will be introduced
-    data = data.rename(columns={"index": "RowIndex"})
-    # define Test/Training
-    data = data.assign(TeTr="Tr")
-    # create empty df for training data
-    traindata = dios.DictOfSeries()
-    # calculate windows
-    for sensor_id in data[sensor_field].unique():
-        print(sensor_id)
-        sensordf = data.loc[data[sensor_field] == sensor_id]
-        index_test = sensordf.RowIndex.sample(
-            n=int(testratio * len(sensordf)), random_state=randomseed
-        )  # draw random sample
-        sensordf.TeTr[index_test] = "Te"  # assign test samples
-
-        sensordf["flag_bin_t_1"] = sensordf["flag_bin"] - sensordf["flag_bin"].shift(1)  # Flag at t-1
-        sensordf["flag_bin_t1"] = sensordf["flag_bin"] - sensordf["flag_bin"].shift(-1)  # Flag at t+1
-        sensordf["flag_bin_t_" + str(window_flags)] = (
-            sensordf["flag_bin"].rolling(window_flags + 1, center=False).sum()
-        )  # n Flags in interval t to t-window_flags
-        sensordf["flag_bin_t" + str(window_flags)] = (
-            sensordf["flag_bin"].iloc[::-1].rolling(window_flags + 1, center=False).sum()[::-1]
-        )  # n Flags in interval t to t+window_flags
-        # forward-orientation not possible, so use right-orientation on reversed data and reverse the result
-
-        # Add context information for field+references
-        for i in [field] + references:
-            sensordf = pd.concat([sensordf, _refCalc(reference=sensordf[i], window_values=window_values),], axis=1,)
-
-        # write back into new dataframe
-        traindata = traindata.append(sensordf)
-
-    # remove rows that contain NAs (new ones occurred during predictor calculation)
-    traindata = traindata.dropna(axis=0, how="any")
-
-    ################
-    ### FIT Model
-    ################
-    n_cores = os.getenv("NSLOTS", 1)
-    print("MODEL TRAINING ON " + str(n_cores) + " CORES")
-
-    # make column in "traindata" to store predictions
-    traindata = traindata.assign(PredMan=0)
-    outinfo_df = []
-    resultfile = open(os.path.join(os.getcwd(), path, modelname + "_resultfile.txt"), "w")
-    starttime = time.time()
-    # For each category of groupvar, fit a separate model
-
-    for groupvar in traindata[group_field].unique():
-        resultfile.write("GROUPVAR: " + str(groupvar) + "\n")
-        print("GROUPVAR: " + str(groupvar))
-        print("TRAINING MODEL...")
-        # drop unneeded columns
-        groupdata = traindata[traindata[group_field] == groupvar].drop(
-            columns=["Time", "RowIndex", "Flag", "flag_bin", "PredMan", group_field, sensor_field,]
-        )
-        forest = RandomForestClassifier(n_estimators=500, random_state=randomseed, oob_score=True, n_jobs=-1)
-        X_tr = groupdata.drop(columns=["TeTr", "FlagMan"])[groupdata.TeTr == "Tr"]
-        Y_tr = groupdata.FlagMan[groupdata.TeTr == "Tr"]
-        forest.fit(y=Y_tr, X=X_tr)
-        # save model object
-        joblib.dump(forest, os.path.join(path, modelname + "_" + str(groupvar) + ".pkl"))
-        # retrieve training predictions
-        print("PREDICTING...")
-        preds_tr = (
-            forest.oob_decision_function_[:, 1] > forest.oob_decision_function_[:, 0]
-        )  # training, derive from OOB class votes
-        preds_tr = preds_tr.astype("int")
-
-        # get test predictions
-        X_te = groupdata.drop(columns=["TeTr", "FlagMan"])[groupdata.TeTr == "Te"]
-        Y_te = groupdata.FlagMan[groupdata.TeTr == "Te"]
-        preds_te = forest.predict(X_te)  # test
-
-        # Collect info on model run (n datapoints, share of flags, Test/Training accuracy...)
-        outinfo = [
-            groupvar,
-            groupdata.shape[0],
-            len(preds_tr),
-            len(preds_te),
-            sum(groupdata.FlagMan[groupdata.TeTr == "Tr"]) / len(preds_tr) * 100,
-            sum(groupdata.FlagMan[groupdata.TeTr == "Te"]) / len(preds_te) * 100,
-            recall_score(Y_tr, preds_tr),
-            recall_score(Y_te, preds_te),
-            precision_score(Y_tr, preds_tr),
-            precision_score(Y_te, preds_te),
-        ]
-        resultfile.write("TRAINING RECALL:" + "\n")
-        resultfile.write(
-            str(recall_score(groupdata.FlagMan[groupdata.TeTr == "Tr"], preds_tr)) + "\n"
-        )  # Training error (Out-of-Bag)
-        resultfile.write("TEST RECALL:" + "\n")
-        resultfile.write(
-            str(recall_score(groupdata.FlagMan[groupdata.TeTr == "Te"], preds_te)) + "\n" + "\n"
-        )  # Test error
-        outinfo_df.append(outinfo)
-        # save back to dataframe
-        traindata.PredMan[(traindata.TeTr == "Tr") & (traindata[group_field] == groupvar)] = preds_tr
-        traindata.PredMan[(traindata.TeTr == "Te") & (traindata[group_field] == groupvar)] = preds_te
-
-    endtime = time.time()
-    print("TIME ELAPSED: " + str(datetime.timedelta(seconds=endtime - starttime)) + " hours")
-    outinfo_df = dios.DictOfSeries.from_records(
-        outinfo_df,
-        columns=[
-            group_field,
-            "n",
-            "n_Tr",
-            "n_Te",
-            "Percent_Flags_Tr",
-            "Percent_Flags_Te",
-            "Recall_Tr",
-            "Recall_Te",
-            "Precision_Tr",
-            "Precision_Te",
-        ],
-    )
-    outinfo_df = outinfo_df.assign(Modelname=modelname)
-    resultfile.write(str(outinfo_df))
-    outinfo_df.to_csv(os.path.join(path, modelname + "_outinfo.csv"), index=False)
-    resultfile.close()
-
-    # write results back into original "data" dataframe
-    data = data.assign(PredMan=np.nan)
-    data.PredMan[traindata.RowIndex] = traindata.PredMan  # based on RowIndex as NAs were created in traindata
-    data.to_feather(os.path.join(path, modelname + "_data_preds.feather"))
-
-
-trainML(
-    data, field, references, sensor_field, group_field, window_values, window_flags, path, modelname, 0.3,
-)
diff --git a/saqc/__init__.py b/saqc/__init__.py
index ddc4f2f02f3121c21a65a3f60c43986e657a2413..34f673b943c96f4b085e3f7af3c7d2f8aa4c7921 100644
--- a/saqc/__init__.py
+++ b/saqc/__init__.py
@@ -3,6 +3,17 @@
 
 __version__ = "1.4"
 
-from saqc.core.core import SaQC
-from saqc.flagger import *
-from saqc.core.register import register
+# import order: from small to big
+from saqc.constants import *
+from saqc.core import (
+    flagging,
+    processing,
+    initFlagsLike,
+    Flags,
+    FloatTranslator,
+    DmpTranslator,
+    PositionalTranslator,
+    SimpleTranslator,
+    SaQC,
+    fromConfig,
+)
diff --git a/saqc/__main__.py b/saqc/__main__.py
index 806377faa01a955c8105ace70425a706ae5ebdbc..fbfa1a706ab2b7b4cbecbb4bb2e4c6dd397ffaca 100644
--- a/saqc/__main__.py
+++ b/saqc/__main__.py
@@ -11,22 +11,27 @@ import numpy as np
 import pandas as pd
 import pyarrow as pa
 
-from saqc.core import SaQC
-from saqc.flagger import CategoricalFlagger
-from saqc.flagger.dmpflagger import DmpFlagger
+from saqc.core import (
+    fromConfig,
+    FloatTranslator,
+    DmpTranslator,
+    PositionalTranslator,
+    SimpleTranslator,
+)
 
 
 logger = logging.getLogger("SaQC")
 
 
-FLAGGERS = {
-    "numeric": CategoricalFlagger([-1, 0, 1]),
-    "category": CategoricalFlagger(["NIL", "OK", "BAD"]),
-    "dmp": DmpFlagger(),
+SCHEMES = {
+    "simple": SimpleTranslator,
+    "float": FloatTranslator,
+    "positional": PositionalTranslator,
+    "dmp": DmpTranslator,
 }
 
 
-def _setup_logging(loglvl):
+def _setupLogging(loglvl):
     logger.setLevel(loglvl)
     handler = logging.StreamHandler()
     formatter = logging.Formatter("[%(asctime)s][%(name)s][%(levelname)s]: %(message)s")
@@ -36,13 +41,15 @@ def _setup_logging(loglvl):
 
 def setupIO(nodata):
     reader = {
-        ".csv"     : partial(pd.read_csv, index_col=0, parse_dates=True),
-        ".parquet" : pd.read_parquet
+        ".csv": partial(pd.read_csv, index_col=0, parse_dates=True),
+        ".parquet": pd.read_parquet,
     }
 
     writer = {
-        ".csv" : partial(pd.DataFrame.to_csv, header=True, index=True, na_rep=nodata),
-        ".parquet" : lambda df, outfile: pa.parquet.write_table(pa.Table.from_pandas(df), outfile)
+        ".csv": partial(pd.DataFrame.to_csv, header=True, index=True, na_rep=nodata),
+        ".parquet": lambda df, outfile: pa.parquet.write_table(
+            pa.Table.from_pandas(df), outfile
+        ),
     }
     return reader, writer
 
@@ -51,7 +58,9 @@ def readData(reader_dict, fname):
     extension = Path(fname).suffix
     reader = reader_dict.get(extension)
     if not reader:
-        raise ValueError(f"Unsupported file format '{extension}', use one of {tuple(reader.keys())}")
+        raise ValueError(
+            f"Unsupported file format '{extension}', use one of {tuple(reader.keys())}"
+        )
     return reader(fname)
 
 
@@ -59,56 +68,73 @@ def writeData(writer_dict, df, fname):
     extension = Path(fname).suffix
     writer = writer_dict.get(extension)
     if not writer:
-        raise ValueError(f"Unsupported file format '{extension}', use one of {tuple(writer.keys())}")
+        raise ValueError(
+            f"Unsupported file format '{extension}', use one of {tuple(writer.keys())}"
+        )
     writer(df, fname)
 
 
 @click.command()
 @click.option(
-    "-c", "--config", type=click.Path(exists=True), required=True, help="path to the configuration file",
+    "-c",
+    "--config",
+    type=click.Path(exists=True),
+    required=True,
+    help="path to the configuration file",
+)
+@click.option(
+    "-d",
+    "--data",
+    type=click.Path(exists=True),
+    required=True,
+    help="path to the data file",
 )
 @click.option(
-    "-d", "--data", type=click.Path(exists=True), required=True, help="path to the data file",
+    "-o", "--outfile", type=click.Path(exists=False), help="path to the output file"
 )
-@click.option("-o", "--outfile", type=click.Path(exists=False), help="path to the output file")
 @click.option(
-    "--flagger", default="category", type=click.Choice(FLAGGERS.keys()), help="the flagging scheme to use",
+    "--scheme",
+    default=None,
+    type=click.Choice(SCHEMES.keys()),
+    help="the flagging scheme to use",
 )
 @click.option("--nodata", default=np.nan, help="nodata value")
 @click.option(
-    "--log-level", default="INFO", type=click.Choice(["DEBUG", "INFO", "WARNING"]), help="set output verbosity"
+    "--log-level",
+    default="INFO",
+    type=click.Choice(["DEBUG", "INFO", "WARNING"]),
+    help="set output verbosity",
 )
-@click.option("--fail/--no-fail", default=True, help="whether to stop the program run on errors")
-def main(config, data, flagger, outfile, nodata, log_level, fail):
+def main(config, data, scheme, outfile, nodata, log_level):
 
-    _setup_logging(log_level)
+    _setupLogging(log_level)
     reader, writer = setupIO(nodata)
 
     data = readData(reader, data)
 
-    saqc = SaQC(flagger=FLAGGERS[flagger], data=data, nodata=nodata, error_policy="raise" if fail else "warn",)
+    saqc = fromConfig(
+        config,
+        data=data,
+        scheme=SCHEMES[scheme or "simple"](),
+    )
 
-    data_result, flagger_result = saqc.readConfig(config).getResult(raw=True)
+    data_result, flags_result = saqc.getResult()
 
     if outfile:
-        data_result = data_result.to_df()
-        flags = flagger_result.flags.to_df()
-        flags_flagged = flagger_result.isFlagged().to_df()
-
-        flags_out = flags.where((flags.isnull() | flags_flagged), flagger_result.GOOD)
-        fields = {"data": data_result, "flags": flags_out}
-
-        if isinstance(flagger_result, DmpFlagger):
-            fields["quality_flag"] = fields.pop("flags")
-            fields["quality_comment"] = flagger_result.comments.to_df()
-            fields["quality_cause"] = flagger_result.causes.to_df()
-
-        out = (
-            pd.concat(fields.values(), axis=1, keys=fields.keys())
-            .reorder_levels(order=[1, 0], axis=1)
-            .sort_index(axis=1, level=0, sort_remaining=False)
+
+        data_result.columns = pd.MultiIndex.from_product(
+            [data_result.columns.tolist(), ["data"]]
         )
-        out.columns = out.columns.rename(["", ""])
+
+        if not isinstance(flags_result.columns, pd.MultiIndex):
+            flags_result.columns = pd.MultiIndex.from_product(
+                [flags_result.columns.tolist(), ["flags"]]
+            )
+
+        out = pd.concat([data_result, flags_result], axis=1).sort_index(
+            axis=1, level=0, sort_remaining=False
+        )
+
         writeData(writer, out, outfile)
 
 
diff --git a/saqc/constants.py b/saqc/constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..7d4cb5f621c4eb0509325e09fcc0ae4ff9448de5
--- /dev/null
+++ b/saqc/constants.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+
+__all__ = [
+    "UNTOUCHED",
+    "UNFLAGGED",
+    "DOUBTFUL",
+    "BAD",
+    "GOOD",
+    "DOUBT",
+]
+
+import numpy as np
+
+UNTOUCHED = np.nan
+UNFLAGGED = -np.inf
+GOOD = 0
+DOUBTFUL = 25.0
+BAD = 255.0
+
+# aliases
+DOUBT = DOUBTFUL
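+
+# note: the values above are ordered by severity, so flag comparisons work
+# with plain float operators, e.g.:
+#
+#   BAD > DOUBTFUL > GOOD > UNFLAGGED   # -> True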
diff --git a/saqc/core/__init__.py b/saqc/core/__init__.py
index 2f42342431e06979befbf39bbdf2e300b38ef2aa..efb119fc34739b57589eccc03a2ea8e530f87642 100644
--- a/saqc/core/__init__.py
+++ b/saqc/core/__init__.py
@@ -1,5 +1,13 @@
 #! /usr/bin/env python
 # -*- coding: utf-8 -*-
 
-from saqc.core.core import SaQC, logger
-from saqc.core.register import register
+from saqc.core.register import flagging, processing
+from saqc.core.flags import Flags, initFlagsLike
+from saqc.core.core import SaQC
+from saqc.core.translator import (
+    FloatTranslator,
+    DmpTranslator,
+    PositionalTranslator,
+    SimpleTranslator,
+)
+from saqc.core.reader import fromConfig
diff --git a/saqc/core/config.py b/saqc/core/config.py
deleted file mode 100644
index e6a9e9f6307f8b2de08f560df56dc691e30f8e86..0000000000000000000000000000000000000000
--- a/saqc/core/config.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-
-class Fields:
-    VARNAME = "varname"
-    START = "start_date"
-    END = "end_date"
-    TEST = "test"
-    PLOT = "plot"
-    LINENUMBER = "line"
diff --git a/saqc/core/core.py b/saqc/core/core.py
index 13b7a7087c29012ef486d3a62c1897006570727f..6b729178193a6193d11365db365f8a57c0ed6296 100644
--- a/saqc/core/core.py
+++ b/saqc/core/core.py
@@ -1,113 +1,75 @@
 #! /usr/bin/env python
 # -*- coding: utf-8 -*-
+from __future__ import annotations
 
-"""
-TODOS:
-  - integrate plotting into the api
-  - `data` and `flagger` as arguments to `getResult`
-"""
-
-import logging
-from copy import deepcopy
-from typing import Any, Dict, List, Optional, Tuple, Callable, Sequence
-from dataclasses import dataclass, replace
+import inspect
+import warnings
+import copy as stdcopy
+from typing import Any, Callable, Tuple, Union, Optional
 
 import pandas as pd
-import dios
 import numpy as np
-import timeit
-import inspect
-
-from saqc.lib.plotting import plotHook, plotAllHook
-from saqc.flagger import BaseFlagger, CategoricalFlagger, SimpleFlagger, DmpFlagger
-from saqc.core.register import FUNC_MAP
-from saqc.funcs.proc_functions import proc_copy
-
-
-logger = logging.getLogger("SaQC")
 
+from dios import DictOfSeries, to_dios
 
-@dataclass
-class FuncCtrl:
-    "ctrl_kws"
-    masking: str          # one of: "none", "field", "all"
-    plot: bool
-    lineno: Optional[int] = None
-    expr: Optional[str] = None
-    inplace: bool = False
-    to_mask: Any = None   # flagger.FLAG constants or a list of those
-
-
-@dataclass
-class Func:
-    name: str
-    func: Callable[[pd.DataFrame, str, BaseFlagger, Any], Tuple[pd.DataFrame, BaseFlagger]]
-    field: str
-    kwargs: Dict[str, Any]
-    ctrl: FuncCtrl
-    regex: bool = False
-    target: Optional[str] = None
-    args: Tuple[Any] = tuple()
-
-
-def _handleErrors(exc, func, policy):
-    msg = f"Execution failed. Variable: '{func.field}', "
-    if func.ctrl.lineno is not None and func.ctrl.expr is not None:
-        msg += f"Config line {func.ctrl.lineno}: '{func.ctrl.expr}', "
-    else:
-        msg += f"Function: {func.name}(), parameters: '{func.kwargs}', "
-    msg += f"Exception:\n{type(exc).__name__}: {exc}"
+from saqc.core.flags import initFlagsLike, Flags
+from saqc.core.register import FUNC_MAP
+from saqc.core.modules import FunctionsMixin
+from saqc.core.translator.basetranslator import Translator, FloatTranslator
+from saqc.lib.tools import toSequence
+from saqc.lib.types import (
+    ExternalFlag,
+    PandasLike,
+)
 
-    if policy == "ignore":
-        logger.debug(msg)
-    elif policy == "warn":
-        logger.warning(msg)
-    else:
-        logger.error(msg)
-        raise exc
 
+# TODO: shouldn't the code/function go to SaQC.__init__ ?
+def _prepInput(
+    data: PandasLike, flags: Optional[Union[DictOfSeries, pd.DataFrame, Flags]]
+) -> Tuple[DictOfSeries, Optional[Flags]]:
+    dios_like = (DictOfSeries, pd.DataFrame)
 
-def _prepInput(flagger, data, flags):
-    dios_like = (dios.DictOfSeries, pd.DataFrame)
+    data = stdcopy.deepcopy(data)
 
     if isinstance(data, pd.Series):
         data = data.to_frame()
 
     if not isinstance(data, dios_like):
-        raise TypeError("'data' must be of type pd.Series, pd.DataFrame or dios.DictOfSeries")
+        raise TypeError(
+            "'data' must be of type pd.Series, pd.DataFrame or dios.DictOfSeries"
+        )
 
     if isinstance(data, pd.DataFrame):
-        if isinstance(data.index, pd.MultiIndex) or isinstance(data.columns, pd.MultiIndex):
+        if isinstance(data.index, pd.MultiIndex) or isinstance(
+            data.columns, pd.MultiIndex
+        ):
             raise TypeError("'data' should not use MultiIndex")
-        data = dios.to_dios(data)
+        data = to_dios(data)
 
     if not hasattr(data.columns, "str"):
         raise TypeError("expected dataframe columns of type string")
 
-    if not isinstance(flagger, BaseFlagger):
-        # NOTE: we should generate that list automatically,
-        #       it won't ever be complete otherwise
-        flaggerlist = [CategoricalFlagger, SimpleFlagger, DmpFlagger]
-        raise TypeError(f"'flagger' must be of type {flaggerlist} or a subclass of {BaseFlagger}")
-
     if flags is not None:
-        if not isinstance(flags, dios_like):
-            raise TypeError("'flags' must be of type dios.DictOfSeries or pd.DataFrame")
 
         if isinstance(flags, pd.DataFrame):
-            if isinstance(flags.index, pd.MultiIndex) or isinstance(flags.columns, pd.MultiIndex):
+            if isinstance(flags.index, pd.MultiIndex) or isinstance(
+                flags.columns, pd.MultiIndex
+            ):
                 raise TypeError("'flags' should not use MultiIndex")
-            flags = dios.to_dios(flags)
 
-        # NOTE: do not test all columns as they not necessarily need to be the same
-        cols = flags.columns & data.columns
-        if not (flags[cols].lengths == data[cols].lengths).all():
-            raise ValueError("the length of 'flags' and 'data' need to be equal")
+        if isinstance(flags, (DictOfSeries, pd.DataFrame, Flags)):
+            # NOTE: only test common columns, data as well as flags could
+            # have more columns than the respective other.
+            cols = flags.columns.intersection(data.columns)
+            for c in cols:
+                if not flags[c].index.equals(data[c].index):
+                    raise ValueError(
+                        f"the index of 'flags' and 'data' missmatch in column {c}"
+                    )
 
-    if flagger.initialized:
-        diff = data.columns.difference(flagger.getFlags().columns)
-        if not diff.empty:
-            raise ValueError("Missing columns in 'flagger': '{list(diff)}'")
+        # this also ensures float dtype
+        if not isinstance(flags, Flags):
+            flags = Flags(flags, copy=True)
 
     return data, flags
 
@@ -126,150 +88,195 @@ def _setup():
 _setup()
 
 
-class SaQC:
-    def __init__(self, flagger, data, flags=None, nodata=np.nan, to_mask=None, error_policy="raise"):
-        data, flags = _prepInput(flagger, data, flags)
-        self._data = data
-        self._nodata = nodata
-        self._to_mask = to_mask
-        self._flagger = self._initFlagger(data, flagger, flags)
-        self._error_policy = error_policy
-        # NOTE: will be filled by calls to `_wrap`
-        self._to_call: List[Func] = []  # todo fix the access everywhere
-
-    def _initFlagger(self, data, flagger, flags):
-        """ Init the internal flagger object.
-
-        Ensures that all data columns are present and user passed flags from
-        a flags frame and/or an already initialised flagger are used.
-        If columns overlap the passed flagger object is prioritised.
-        """
-        # ensure all data columns
-        merged = flagger.initFlags(data)
-        if flags is not None:
-            merged = merged.merge(flagger.initFlags(flags=flags), inplace=True)
-        if flagger.initialized:
-            merged = merged.merge(flagger, inplace=True)
-        return merged
-
-    def readConfig(self, fname):
-        from saqc.core.reader import readConfig
-        out = deepcopy(self)
-        out._to_call.extend(readConfig(fname, self._flagger))
-        return out
+class Accessor:
+    def __init__(self, obj: Union[DictOfSeries, pd.DataFrame, Flags]):
+        self._obj = obj
 
-    def _expandFields(self, func, variables) -> Sequence[Func]:
-        if not func.regex:
-            return [func]
+    def __getitem__(self, key):
+        return self._obj[key]
+
+    @property
+    def columns(self):
+        return self._obj.columns
+
+    def __len__(self):
+        return len(self.columns)
+
+    def __repr__(self):
+        return self._obj.__repr__()
 
-        out = []
-        for field in variables[variables.str.match(func.field)]:
-            out.append(replace(func, field=field))
-        return out
 
-    def evaluate(self):
+class SaQC(FunctionsMixin):
+    def __init__(
+        self,
+        data,
+        flags=None,
+        scheme: Translator = None,
+    ):
+        data, flags = _prepInput(data, flags)
+        self._data = data
+        self._flags = self._initFlags(data, flags)
+        self._translator = scheme or FloatTranslator()
+        self.called = []
+
+    @staticmethod
+    def _initFlags(data: DictOfSeries, flags: Optional[Flags]) -> Flags:
+        """
+        Init the internal Flags-object.
+
+        Ensures that all data columns are present and user passed
+        flags from a frame or an already initialised Flags-object
+        are used.
         """
-        Realize all the registered calculations and return a updated SaQC Object
+        if flags is None:
+            return initFlagsLike(data)
 
-        Paramters
-        ---------
+        # add columns that are present in data but not in flags
+        for c in data.columns.difference(flags.columns):
+            flags[c] = initFlagsLike(data[c])
 
-        Returns
-        -------
-        An updated SaQC Object incorporating the requested computations
+        return flags
+
+    def _construct(self, **injectables) -> SaQC:
         """
+        Construct a new `SaQC`-Object from `self` and optionally inject
+        attributes without any checking or overhead.
 
-        # NOTE: It would be nicer to separate the plotting into an own
-        #       method instead of intermingling it with the computation
-        data, flagger = self._data, self._flagger
-
-        for func in self._to_call:
-            for func in self._expandFields(func, data.columns.union(flagger._flags.columns)):
-                logger.debug(f"processing: {func.field}, {func.name}, {func.kwargs}")
-
-                try:
-                    t0 = timeit.default_timer()
-                    data_result, flagger_result = _saqcCallFunc(func, data, flagger)
-
-                except Exception as e:
-                    t1 = timeit.default_timer()
-                    logger.debug(f"{func.name} failed after {t1 - t0} sec")
-                    _handleErrors(e, func, self._error_policy)
-                    continue
-                else:
-                    t1 = timeit.default_timer()
-                    logger.debug(f"{func.name} finished after {t1 - t0} sec")
-
-                if func.ctrl.plot:
-                    plotHook(
-                        data_old=data,
-                        data_new=data_result,
-                        flagger_old=flagger,
-                        flagger_new=flagger_result,
-                        sources=[],
-                        targets=[func.field],
-                        plot_name=func.name,
-                    )
+        Parameters
+        ----------
+        **injectables: any of the `SaQC` data attributes with name and value
 
-                data = data_result
-                flagger = flagger_result
+        Note
+        ----
+        For internal usage only! Setting values through `injectables` has
+        the potential to mess up certain invariants of the constructed object.
+        """
+        out = SaQC(
+            data=DictOfSeries(),
+            flags=Flags(),
+            scheme=self._translator,
+        )
+        for k, v in injectables.items():
+            if not hasattr(out, k):
+                raise AttributeError(f"failed to set unknown attribute: {k}")
+            setattr(out, k, v)
+        return out
 
-        if any([fdump.ctrl.plot for fdump in self._to_call]):
-            plotAllHook(data, flagger)
+    @property
+    def data(self) -> Accessor:
+        return Accessor(self._data)
 
-        # This is much faster for big datasets that to throw everything in the constructor.
-        # Simply because of _initFlagger -> merge() -> mergeDios() over all columns.
-        new = SaQC(SimpleFlagger(), dios.DictOfSeries(), nodata=self._nodata, error_policy=self._error_policy)
-        new._flagger, new._data = flagger, data
-        return new
+    @property
+    def flags(self) -> Accessor:
+        return Accessor(self._translator.backward(self._flags))
 
-    def getResult(self, raw=False):
+    def getResult(
+        self, raw=False
+    ) -> Union[Tuple[DictOfSeries, Flags], Tuple[pd.DataFrame, pd.DataFrame]]:
         """
-        Realized the registered calculations and return the results
+        Realize the registered calculations and return the results
 
         Returns
         -------
-        data, flagger: (DictOfSeries, DictOfSeries)
+        data, flags : (DictOfSeries, Flags) if ``raw=True``, else (pd.DataFrame, pd.DataFrame)
         """
 
-        realization = self.evaluate()
-        data, flagger = realization._data, realization._flagger
-        if raw is False:
-            return data.to_df(), flagger.toFrame()
-        return data, flagger
+        data, flags = self._data, self._flags
 
-    def _wrap(self, func_name):
-        def inner(field: str, *args, target: str=None, regex: bool = False, to_mask=None, plot=False, inplace=False, **kwargs):
+        if raw:
+            return data, flags
 
-            kwargs.setdefault('nodata', self._nodata)
+        return data.to_df(), self._translator.backward(flags)
 
-            func = FUNC_MAP[func_name]["func"]
+    def _wrap(self, func: Callable):
+        """Enrich a function by special saqc-functionality.
 
-            ctrl_kws = FuncCtrl(
-                masking=FUNC_MAP[func_name]["masking"],
-                to_mask=to_mask or self._to_mask,
-                plot=plot,
-                inplace=inplace,
-                )
-
-            func_dump = Func(
-                name=func_name,
-                func=func,
-                field=field,
-                target=target if target is not None else field,
-                regex=regex,
-                args=args,
-                kwargs=kwargs,
-                ctrl=ctrl_kws,
-            )
+        For each saqc function this realizes:
+            - the source-target workflow,
+            - regexes in ``field``,
+            - using the translator's default for ``to_mask`` if not specified by the user,
+            - translation of ``flag`` and
+            - working inplace.
+        To this end it adds the following keywords to each saqc function:
+        ``target``, ``regex`` and ``inplace``.
 
-            out = self if inplace else self.copy()
-            out._to_call.append(func_dump)
+        The returned function returns a SaQC object.
+        """
 
+        def inner(
+            field: str,
+            *args,
+            target: str = None,
+            regex: bool = False,
+            flag: ExternalFlag = None,
+            **kwargs,
+        ) -> SaQC:
+
+            if regex and target is not None:
+                raise ValueError("explicit `target` not supported with `regex=True`")
+
+            kwargs.setdefault("to_mask", self._translator.TO_MASK)
+
+            # translation
+            if flag is not None:
+                kwargs["flag"] = self._translator(flag)
+
+            # expand regular expressions
+            if regex:
+                fields = self._data.columns.str.match(field)
+                fields = self._data.columns[fields]
+                targets = fields
+            else:
+                fields, targets = toSequence(field), toSequence(target, default=field)
+
+            out = self
+
+            for field, target in zip(fields, targets):
+                if field != target:
+                    out = out._callFunction(
+                        FUNC_MAP["copyField"],
+                        data=out._data,
+                        flags=out._flags,
+                        field=field,
+                        new_field=target,
+                    )
+                    field = target
+
+                out = out._callFunction(
+                    func,
+                    data=out._data,
+                    flags=out._flags,
+                    field=field,
+                    *args,
+                    **kwargs,
+                )
             return out
 
         return inner
 
+    def _callFunction(
+        self,
+        function: Callable,
+        data: DictOfSeries,
+        flags: Flags,
+        field: str,
+        *args: Any,
+        **kwargs: Any,
+    ) -> SaQC:
+
+        assert data.columns.difference(flags.columns).empty
+
+        data, flags = function(data=data, flags=flags, field=field, *args, **kwargs)
+        # we check the passed function-kwargs after the actual call,
+        # because now "hard" errors would already have been raised
+        # (e.g. `TypeError: got multiple values for argument 'data'`,
+        # when the user passes data=...)
+        _warnForUnusedKwargs(function, kwargs, self._translator)
+
+        planned = self.called + [(field, (function, args, kwargs))]
+
+        return self._construct(_data=data, _flags=flags, called=planned)
+
     def __getattr__(self, key):
         """
         All failing attribute accesses are redirected to
@@ -279,108 +286,21 @@ class SaQC:
         """
         if key not in FUNC_MAP:
             raise AttributeError(f"no such attribute: '{key}'")
-        return self._wrap(key)
-
-    def copy(self):
-        return deepcopy(self)
-
-
-def _saqcCallFunc(func_dump, data, flagger):
+        return self._wrap(FUNC_MAP[key])
 
-    # NOTE:
-    # We assure that all columns in data have an equivalent column in flags,
-    # we might have more flagger columns though
-    assert data.columns.difference(flagger.getFlags().columns).empty
-
-    field = func_dump.field
-    target = func_dump.target
-    to_mask = func_dump.ctrl.to_mask
-    masking = func_dump.ctrl.masking
-
-    if (target != field) and (func_dump.regex is False):
-        data, flagger = proc_copy(data, field, flagger, target)
-        field = target
-
-    if masking == 'all':
-        columns = data.columns
-    elif masking == 'none':
-        columns = []
-    elif masking == 'field':
-        columns = [field]
-    else:
-        raise ValueError(f"wrong use of `register(masking={masking})`")
-
-    # warn if the user explicitly pass `to_mask=..` to a function that is
-    # decorated by `register(masking='none')`, and so `to_mask` is ignored.
-    if masking == 'none' and to_mask not in (None, []):
-        logging.warning("`to_mask` is given, but the test ignore masking. Please refer to the documentation: TODO")
-    to_mask = flagger.BAD if to_mask is None else to_mask
-
-    data_in, mask = _maskData(data, flagger, columns, to_mask)
-    data_result, flagger_result = func_dump.func(
-        data_in, field, flagger,
-        *func_dump.args, func_name=func_dump.name, **func_dump.kwargs)
-    data_result = _unmaskData(data, mask, data_result, flagger_result, to_mask)
-
-    # we check the passed function-kwargs after the actual call, because now "hard" errors would already have been
-    # raised (Eg. `TypeError: got multiple values for argument 'data'`, when the user pass data=...)
-    _warnForUnusedKwargs(func_dump, flagger)
-
-    return data_result, flagger_result
-
-
-def _maskData(data, flagger, columns, to_mask):
-    # TODO: this is heavily undertested
-    mask = flagger.isFlagged(field=columns, flag=to_mask, comparator='==')
-    data = data.copy()
-    for c in columns:
-        col_mask = mask[c].values
-        if np.any(col_mask):
-            col_data = data[c].values.astype(np.float64)
-            col_data[col_mask] = np.nan
-            data[c] = col_data
-    return data, mask
-
-
-def _unmaskData(data_old, mask_old, data_new, flagger_new, to_mask):
-    # TODO: this is heavily undertested
-
-    # NOTE:
-    # we only need to respect columns, that were masked,
-    # and are also still present in new data.
-    # this throws out:
-    #  - any newly assigned columns
-    #  - columns that were excluded from masking
-    columns = mask_old.dropempty().columns.intersection(data_new.dropempty().columns)
-    mask_new = flagger_new.isFlagged(field=columns, flag=to_mask, comparator="==")
+    def copy(self, deep=True):
+        if deep:
+            return stdcopy.deepcopy(self)
+        return stdcopy.copy(self)
 
-    for col in columns:
-        was_masked = mask_old[col]
-        is_masked = mask_new[col]
 
-        # if index changed we just go with the new data.
-        # A test should use `register(masking='none')` if it changes
-        # the index but, does not want to have all NaNs on flagged locations.
-        if was_masked.index.equals(is_masked.index):
-            mask = was_masked.values & is_masked.values & data_new[col].isna().values
-
-            # reapplying old values on masked positions
-            if np.any(mask):
-                data = np.where(mask, data_old[col].values, data_new[col].values)
-                data_new[col] = pd.Series(data=data, index=is_masked.index)
-
-    return data_new
-
-
-def _warnForUnusedKwargs(func_dump, flagger):
-    """ Warn for unused kwargs, passed to a SaQC.function.
+def _warnForUnusedKwargs(func, keywords, translator: Translator):
+    """Warn for unused kwargs, passed to a SaQC.function.
 
     Parameters
     ----------
-    func_dump: dict
+    func: SaqcFunction
         Saqc internal data structure that holds all function info.
-    flagger: saqc.flagger.BaseFlagger
-        Flagger object.
 
     Returns
     -------
@@ -388,25 +308,21 @@ def _warnForUnusedKwargs(func_dump, flagger):
 
     Notes
     -----
-    A single warning via the logging module is thrown, if any number of
-    missing kws are detected, naming each missing kw.
+    A single warning is thrown if any missing kws are detected, naming each missing kw.
     """
-    sig_kws = inspect.signature(func_dump.func).parameters
+    sig_kws = inspect.signature(func).parameters
 
-    # we need to ignore kwargs that are injected or
-    # used to control the flagger
-    ignore = flagger.signature + ('nodata',)
+    # we need to ignore kws that are injected or by default hidden in ``**kwargs``
+    ignore = ("to_mask",)
 
     missing = []
-    for kw in func_dump.kwargs:
+    for kw in keywords:
         # there is no need to check for
         # `kw in [KEYWORD_ONLY, VAR_KEYWORD or POSITIONAL_OR_KEYWORD]`
         # because this would have raised an error beforehand.
-        if kw not in sig_kws and kw not in ignore:
+        if kw not in sig_kws and kw not in ignore and kw not in translator.ARGUMENTS:
             missing.append(kw)
 
     if missing:
-        missing = ', '.join(missing)
-        logging.warning(f"Unused argument(s): {missing}")
-
-
+        missing = ", ".join(missing)
+        warnings.warn(f"Unused argument(s): {missing}")
diff --git a/saqc/core/flags.py b/saqc/core/flags.py
new file mode 100644
index 0000000000000000000000000000000000000000..a895e0fb0a00d3f2afa37012ad508b34e385331e
--- /dev/null
+++ b/saqc/core/flags.py
@@ -0,0 +1,503 @@
+#!/usr/bin/env python
+from __future__ import annotations
+
+import pandas as pd
+import dios
+from typing import Mapping, Union, Dict, DefaultDict, Optional, Type, Tuple, Iterable
+
+from saqc.constants import *
+from saqc.core.history import History
+
+
+_VAL = Union[pd.Series, History]
+DictLike = Union[
+    pd.DataFrame,
+    dios.DictOfSeries,
+    Dict[str, _VAL],
+    DefaultDict[str, _VAL],
+]
+
+_Field = str
+SelectT = Union[
+    _Field,
+    Tuple[pd.Series, _Field],
+    Tuple[pd.Index, _Field],
+    Tuple[slice, _Field],
+]
+ValueT = Union[pd.Series, Iterable, float]
+
+
+class _HistAccess:
+    def __init__(self, obj: Flags):
+        self.obj = obj
+
+    def __getitem__(self, key: str) -> History:
+        return self.obj._data[key].copy()
+
+    def __setitem__(self, key: str, value: History):
+        if not isinstance(value, History):
+            raise TypeError("Not a History")
+
+        self.obj._validateHistForFlags(value)
+        self.obj._data[key] = value
+
+
+class Flags:
+    """
+    Saqc's flags container.
+
+    This container class holds the quality flags associated with the data. It holds key-value pairs, where
+    the key is the name of the column and the value is a ``pandas.Series`` of flags. The index of the series
+    and the key-value pair can be assumed to be immutable, which means that only the *values* of the series can
+    be changed, once the series exists.
+    In other words: **an existing column cannot be overwritten by a column with a different index.**
+
+    The flags can be accessed via ``__getitem__`` and ``__setitem__``, in real life known as the `[]`-operator.
+
+    For the curious:
+        Under the hood, the series are stored in a `history`, which allows the advanced user to retrieve all flags
+        that were ever set in this object, but in most cases this is irrelevant. For simplicity one can safely assume
+        that this class just stores the flag series one sets.
+
+    See Also
+    --------
+    initFlagsLike : create a Flags instance, with same dimensions as a reference object.
+    History : class that actually stores the flags
+
+    Examples
+    --------
+    We create an empty instance by calling ``Flags`` without any arguments and then add a column to it.
+
+    >>> import pandas as pd
+    >>> from saqc.constants import UNFLAGGED, BAD, DOUBT, UNTOUCHED
+    >>> flags = Flags()
+    >>> flags
+    Empty Flags
+    Columns: []
+    >>> flags['v0'] = pd.Series([BAD,BAD,UNFLAGGED], dtype=float)
+    >>> flags
+          v0 |
+    ======== |
+    0  255.0 |
+    1  255.0 |
+    2   -inf |
+
+    Once the column exists, we cannot overwrite it with a different series anymore.
+
+    >>> flags['v0'] = pd.Series([666.], dtype=float)
+    Traceback (most recent call last):
+      some file path ...
+    ValueError: Index does not match
+
+    But if we pass a series whose index matches, it will work,
+    because the series is now interpreted as the values to set.
+
+    >>> flags['v0'] = pd.Series([DOUBT,UNTOUCHED,DOUBT], dtype=float)
+    >>> flags
+          v0 |
+    ======== |
+    0   25.0 |
+    1  255.0 |
+    2   25.0 |
+
+    As we see above, the column now holds a combination of the values from the
+    first and the second set. This is because the special constant ``UNTOUCHED``,
+    an alias for ``numpy.nan``, was used. We can inspect all the updates that were
+    made by looking at the history.
+
+    >>> flags.history['v0']
+            0       1
+    0  (255.0)   25.0
+    1   255.0     nan
+    2   (-inf)   25.0
+
+    As we see now, the second call sets ``25.0`` and shadows (represented by the parentheses) ``(255.0)`` in the
+    first row and ``(-inf)`` in the last, but in the second row ``255.0`` is still valid, because it was
+    `not touched` by the set.
+
+    It is also possible to set values by a mask, which can be interpreted as conditional setting.
+    Imagine we want to `reset` all flags to ``0.`` if the existing flags are lower than ``255.``.
+
+    >>> mask = flags['v0'] < BAD
+    >>> mask
+    0     True
+    1    False
+    2     True
+    dtype: bool
+    >>> flags[mask, 'v0'] = 0
+    >>> flags
+          v0 |
+    ======== |
+    0    0.0 |
+    1  255.0 |
+    2    0.0 |
+
+    The objects you can pass as a row selector (``flags[rows, column]``) are:
+
+    - boolean array-like, with or without an index. Must have the same length as the underlying series.
+    - slices working on the index
+    - ``pd.Index``, which must be a subset of the existing index
+
+    For example, to set `all` values to a scalar value, use a Null-slice:
+
+    >>> flags[:, 'v0'] = 99.0
+    >>> flags
+         v0 |
+    ======= |
+    0  99.0 |
+    1  99.0 |
+    2  99.0 |
+
+    After all calls presented here, the history looks like this:
+
+    >>> flags.history['v0']
+            0       1      2       3
+    0  (255.0)  (25.0)  (0.0)   99.0
+    1  (255.0)   (nan)  (nan)   99.0
+    2   (-inf)  (25.0)  (0.0)   99.0
+    """
+
+    def __init__(
+        self, raw_data: Optional[Union[DictLike, Flags]] = None, copy: bool = False
+    ):
+
+        if raw_data is None:
+            raw_data = {}
+
+        if isinstance(raw_data, Flags):
+            if copy:
+                raw_data = raw_data.copy()
+            self._data = raw_data._data
+
+        else:
+            self._data = self._initFromRaw(raw_data, copy)
+
+    @staticmethod
+    def _initFromRaw(data: Mapping, copy: bool) -> Dict[str, History]:
+        """
+        init from dict-like: keys are flag column, values become
+        initial columns of history(s).
+        """
+        result = {}
+
+        for k, item in data.items():
+
+            if not isinstance(k, str):
+                raise ValueError("column names must be string")
+            if k in result:
+                raise ValueError("raw_data must not have duplicate keys")
+
+            # a passed History is not altered. So if the passed History
+            # does not fit for Flags, we fail hard.
+            if isinstance(item, History):
+                Flags._validateHistForFlags(item, colname=k)
+                if copy:
+                    item = item.copy()
+                result[k] = item
+                continue
+            if not isinstance(item, pd.Series):
+                raise TypeError(
+                    f"cannot init from '{type(data).__name__}' of '{type(item).__name__}'"
+                )
+
+            result[k] = History(item.index).append(item)
+
+        return result
+
+    @staticmethod
+    def _validateHistForFlags(history: History, colname=None):
+        if history.empty:
+            return history
+
+        errm = f"History "
+        if colname:
+            errm += f"of column {colname} "
+
+        # this ensures that the mask does not shadow UNFLAGGED with a NaN.
+        if history.max().hasnans:
+            raise ValueError(errm + "is not valid (result of max() contains NaNs)")
+
+        return history
+
+    @property
+    def _constructor(self) -> Type["Flags"]:
+        return type(self)
+
+    # ----------------------------------------------------------------------
+    # meta data
+
+    @property
+    def columns(self) -> pd.Index:
+        """
+        Column index of the flags container
+
+        Returns
+        -------
+        columns: pd.Index
+            The columns index
+        """
+        return pd.Index(self._data.keys())
+
+    @columns.setter
+    def columns(self, value: pd.Index):
+        """
+        Set new columns names.
+
+        Parameters
+        ----------
+        value : pd.Index
+            New column names
+        """
+        if not isinstance(value, pd.Index):
+            value = pd.Index(value)
+
+        if not value.is_unique or not pd.api.types.is_string_dtype(value):
+            raise TypeError("value must be pd.Index, with unique indices of type str")
+
+        if not len(value) == len(self):
+            raise ValueError("index must match current index in length")
+
+        _data = {}
+
+        for old, new in zip(self.columns, value):
+            _data[new] = self._data[old]
+
+        self._data = _data
+
+    @property
+    def empty(self) -> bool:
+        """
+        True if flags has no columns.
+
+        Returns
+        -------
+        bool
+            ``True`` if the container has no columns, otherwise ``False``.
+        """
+        return len(self._data) == 0
+
+    def __len__(self) -> int:
+        return len(self._data)
+
+    def __contains__(self, item):
+        return item in self.columns
+
+    # ----------------------------------------------------------------------
+    # item access
+
+    def __getitem__(self, key: str) -> pd.Series:
+        return self._data[key].max()
+
+    def __setitem__(self, key: SelectT, value: ValueT):
+        # force-KW is only internally available
+
+        if isinstance(key, tuple):
+            if len(key) != 2:
+                raise KeyError(
+                    "a single 'column' or a tuple of 'mask, column' must be passt"
+                )
+            mask, key = key
+
+            tmp = pd.Series(UNTOUCHED, index=self._data[key].index, dtype=float)
+
+            # make a mask from an index, because it seems
+            # that passing an index is a very common workflow
+            if isinstance(mask, pd.Index):
+                mask = pd.Series(True, index=mask, dtype=bool)
+                mask = mask.reindex(tmp.index, fill_value=False)
+
+            # raises (correct) KeyError
+            try:
+                if pd.api.types.is_list_like(value) and len(value) != len(tmp):
+                    raise ValueError
+                tmp[mask] = value
+            except Exception:
+                raise ValueError(
+                    f"bad mask. cannot use mask of length {len(mask)} on "
+                    f"data of length {len(tmp)}"
+                )
+            else:
+                value = tmp
+
+        # technically it would be possible to select a field and set
+        # the entire column to a scalar flag value (float), but there is
+        # a high chance that this is not what the user intended.
+        # if desired use ``flags[:, field] = flag``
+        if not isinstance(value, pd.Series):
+            raise ValueError(
+                "expected a value of type 'pd.Series', "
+                "if a scalar should be set, please use 'flags[:, field] = flag'"
+            )
+
+        if key not in self._data:
+            self._data[key] = History(value.index)
+
+        self._data[key].append(value, meta=None)
+
+    def __delitem__(self, key):
+        self._data.pop(key)
+
+    def drop(self, key: str):
+        """
+        Delete a flags column.
+
+        Parameters
+        ----------
+        key : str
+            column name
+
+        Notes
+        -----
+        The column is dropped in place, not on a copy.
+        """
+        self.__delitem__(key)
+
+    # ----------------------------------------------------------------------
+    # accessor
+
+    @property
+    def history(self) -> _HistAccess:
+        """
+        Accessor for the flags history.
+
+        To get a copy of the current history use ``flags.history['var']``.
+        To set a new history use ``flags.history['var'] = value``.
+        The passed value must be an instance of History or be convertible to one.
+
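+        For example, a (sketched) way to transfer the history of one column to
+        another column:
+
+        >>> flags.history['v1'] = flags.history['v0']
+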
+        Returns
+        -------
+        history : History
+            Accessor for the flags history
+
+        See Also
+        --------
+        saqc.core.History : History storage class.
+        """
+        return _HistAccess(self)
+
+    # ----------------------------------------------------------------------
+    # copy
+
+    def copy(self, deep=True):
+        """
+        Copy the flags container.
+
+        Parameters
+        ----------
+        deep : bool, default True
+            If False, the new container shares the underlying History objects,
+            otherwise the underlying data is also copied.
+
+        Returns
+        -------
+        copy of flags
+        """
+        new = self._constructor()
+        new._data = {c: h.copy() if deep else h for c, h in self._data.items()}
+        return new
+
+    def __copy__(self, deep=True):
+        return self.copy(deep=deep)
+
+    def __deepcopy__(self, memo=None):
+        """
+        Parameters
+        ----------
+        memo, default None
+            Standard signature. Unused
+        """
+        return self.copy(deep=True)
+
+    # ----------------------------------------------------------------------
+    # transformation and representation
+
+    def toDios(self) -> dios.DictOfSeries:
+        """
+        Transform the flags container to a ``dios.DictOfSeries``.
+
+        Returns
+        -------
+        dios.DictOfSeries
+        """
+        di = dios.DictOfSeries(columns=self.columns)
+
+        for k in self._data.keys():
+            di[k] = self[k]
+
+        return di.copy()
+
+    def toFrame(self) -> pd.DataFrame:
+        """
+        Transform the flags container to a ``pd.DataFrame``.
+
+        Returns
+        -------
+        pd.DataFrame
+        """
+        return self.toDios().to_df()
+
+    def __repr__(self) -> str:
+        return str(self.toDios()).replace("DictOfSeries", type(self).__name__)
+
+
+def initFlagsLike(
+    reference: Union[pd.Series, DictLike, Flags],
+    name: str = None,
+) -> Flags:
+    """
+    Create an empty Flags object from a reference data structure.
+
+    Parameters
+    ----------
+    reference : pd.DataFrame, pd.Series, dios.DictOfSeries, dict of pd.Series
+        The reference structure to initialize for.
+
+    name : str, default None
+        Only respected if `reference` is of type ``pd.Series``.
+        The column name that is used for the Flags. If ``None``,
+        the name of the series itself is taken; if that is also unset,
+        a ValueError is raised.
+
+    Notes
+    -----
+    Implementation detail:
+
+    The resulting Flags object does not necessarily have the exact same (inner)
+    dimensions as the reference. This may happen if the passed structure already
+    holds History objects; those are reduced to 1-column Histories. Nevertheless,
+    the returned flags are perfectly suitable to be used in SaQC as a flags
+    container along with the passed reference structure (data).
+
+    Returns
+    -------
+    flags: Flags
+        A new Flags object with one empty history per reference column.
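+
+    For example, a minimal sketch (the column name and values are illustrative):
+
+    >>> ref = {'data1': pd.Series([1.0, 2.0])}
+    >>> initFlagsLike(ref).columns
+    Index(['data1'], dtype='object')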
+    """
+    result = {}
+
+    if isinstance(reference, Flags):
+        reference = reference._data
+
+    if isinstance(reference, pd.Series):
+        if name is None:
+            name = reference.name
+        if name is None:
+            raise ValueError(
+                "either the passed pd.Series must be named or a name must be passed"
+            )
+        if not isinstance(name, str):
+            raise TypeError(f"name must be str not '{type(name).__name__}'")
+        reference = reference.to_frame(name=name)
+
+    for k, item in reference.items():
+
+        if not isinstance(k, str):
+            raise TypeError(
+                f"cannot use '{k}' as a column name, currently only string keys are allowed"
+            )
+        if k in result:
+            raise ValueError("reference must not have duplicate column names")
+        if not isinstance(item, (pd.Series, History)):
+            raise TypeError("items in reference must be of type pd.Series")
+
+        result[k] = History(item.index)
+
+    return Flags(result)
diff --git a/saqc/core/history.py b/saqc/core/history.py
new file mode 100644
index 0000000000000000000000000000000000000000..a46c998f20a3bb2eb7dbad53e66b8569440bdc44
--- /dev/null
+++ b/saqc/core/history.py
@@ -0,0 +1,462 @@
+#!/usr/bin/env python
+from __future__ import annotations
+
+from copy import deepcopy, copy
+import itertools
+
+from typing import Dict, Tuple, Type, Union, List, Any
+from typing_extensions import Literal
+import pandas as pd
+import numpy as np
+
+from saqc.constants import *
+
+
+class History:
+    """
+    Saqc internal storage for the history of a (single) flags column.
+
+    The flag-history (FH) stores the history of a flags column. Each time
+    ``append`` is called a new column is appended to the FH. The column
+    names are increasing integers starting with 0. After initialisation
+    the FH is empty and has no columns at all.
+
+    To get the worst flags (highest value) that are currently stored in
+    the FH, we provide a ``max()`` method. It returns a pd.Series indicating
+    the worst flag per row.
+
+    For more details, a discussion of why this is needed, how it works and
+    possible alternative implementations, see #GL143 [1].
+
+    [1] https://git.ufz.de/rdm-software/saqc/-/issues/143
+
+    Parameters
+    ----------
+    index: pd.Index
+        An index that fits the flags to be inserted.
+
+    See Also
+    --------
+    createHistoryFromData: function to create History from existing data
+    """
+
+    def __init__(self, index: pd.Index):
+
+        self.hist = pd.DataFrame(index=index)
+        self.meta = []
+
+    @property
+    def index(self) -> pd.Index:
+        """
+        The index of FH.
+
+        The index is the same for all columns.
+
+        Notes
+        -----
+        The index should always be equal to the index of the flags series
+        the FH is associated with. If this is not the case, something went
+        wrong in saqc internals or in a user-defined test.
+
+        Returns
+        -------
+        index : pd.Index
+        """
+        return self.hist.index
+
+    @property
+    def columns(self) -> pd.Index:
+        """
+        Columns of the FH.
+
+        The columns are always continuously
+        increasing integers, starting from 0.
+
+        Returns
+        -------
+        columns : pd.Index
+        """
+        return self.hist.columns
+
+    @property
+    def empty(self) -> bool:
+        """
+        Indicator whether History is empty.
+
+        True if History is entirely empty (no items).
+
+        Returns
+        -------
+        bool
+            ``True`` if the History has no columns, otherwise ``False``.
+        """
+        return len(self) == 0
+
+    def _insert(self, s: pd.Series, pos: int) -> History:
+        """
+        Insert data at an arbitrary position in the FH.
+
+        No validation of series is done here.
+
+        Parameters
+        ----------
+        s : pd.Series
+            the series to insert
+
+        pos : int
+            the position to insert
+
+        Returns
+        -------
+        History
+        """
+        # Note:
+        # all following code must handle a passed empty series
+
+        # ensure continuous increasing columns
+        assert 0 <= pos <= len(self)
+
+        self.hist[pos] = s.astype("category")
+
+        return self
+
+    def append(self, value: Union[pd.Series, History], meta: dict = None) -> History:
+        """
+        Create a new FH column and insert given pd.Series to it.
+
+        Parameters
+        ----------
+        value : pd.Series or History
+            The data to append. Must have dtype float and the index must
+            match the index of the History.
+
+        meta : dict, default None
+            metadata dictionary to store with the series. Ignored if ``value`` is of
+            type History. None defaults to an empty dictionary.
+
+        Returns
+        -------
+        history with appended series
+
+        Raises
+        ------
+        TypeError: if value is not pd.Series
+        ValueError: on index mismatch or wrong dtype
+        """
+        if isinstance(value, History):
+            return self._appendHistory(value)
+
+        if not isinstance(value, pd.Series):
+            raise TypeError("'value' is not a pd.Series")
+
+        if meta is None:
+            meta = {}
+
+        if not isinstance(meta, dict):
+            raise TypeError("'meta' must be of type None or dict")
+
+        val = self._validateValue(value)
+        if not val.index.equals(self.index):
+            raise ValueError("Index does not match")
+
+        self._insert(val, pos=len(self))
+        self.meta.append(deepcopy(meta))
+        return self
+
+    def _appendHistory(self, value: History):
+        """
+        Append multiple columns of a history to self.
+
+        Parameters
+        ----------
+        value : History
+            Holding the columns to append
+
+        Returns
+        -------
+        History with appended columns.
+
+        Raises
+        ------
+        ValueError : If the index of the passed history does not match.
+
+        Notes
+        -----
+        This ignores the column names of the passed History.
+        """
+        self._validate(value.hist, value.meta)
+        if not value.index.equals(self.index):
+            raise ValueError("Index does not match")
+
+        n = len(self.columns)
+        # don't overwrite the `.columns` of the input down the line
+        value_hist = value.hist.copy(deep=False)
+        value_meta = deepcopy(value.meta)
+
+        # rename columns, to avoid ``pd.DataFrame.loc`` becoming confused
+        columns = pd.Index(range(n, n + len(value_hist.columns)))
+        value_hist.columns = columns
+
+        hist = self.hist.astype(float)
+        hist.loc[:, columns] = value_hist.astype(float)
+        self.hist = hist.astype("category", copy=True)
+        self.meta += value_meta
+        return self
+
+    def max(self, raw=False) -> pd.Series:
+        """
+        Get the maximum (worst) flag value per row of the FH.
+
+        Parameters
+        ----------
+        raw : bool, default False
+            If False, rows that hold no flags at all are filled with ``UNFLAGGED``,
+            otherwise they are kept as NaN.
+
+        Returns
+        -------
+        pd.Series: maximum values
+        """
+        result = self.hist.astype(float)
+        if result.empty:
+            result = pd.DataFrame(data=UNTOUCHED, index=self.hist.index, columns=[0])
+
+        result = result.ffill(axis=1).iloc[:, -1]
+
+        if raw:
+            return result
+        else:
+            return result.fillna(UNFLAGGED)
+
+    def reindex(self, index: pd.Index, fill_value_last: float = UNFLAGGED) -> History:
+        """
+        Reindex the History. Be careful this alters the past.
+
+        Parameters
+        ----------
+        index : pd.Index
+            the index to reindex to.
+        fill_value_last : float, default UNFLAGGED
+            value to fill NaNs (UNTOUCHED) in the last column.
+            Defaults to ``UNFLAGGED``.
+
+        Returns
+        -------
+        History
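+
+        For example, extending the index appends rows filled with NaN; NaNs in
+        the last column are then replaced by ``fill_value_last`` (a sketch):
+
+        >>> h = h.reindex(h.index.append(pd.Index([99])))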
+        """
+        hist = self.hist.astype(float).reindex(
+            index=index, copy=False, fill_value=np.nan
+        )
+
+        # Note: all following code must handle empty frames
+        hist.iloc[:, -1:] = hist.iloc[:, -1:].fillna(fill_value_last)
+
+        self.hist = hist.astype("category")
+
+        return self
+
+    def apply(
+        self,
+        index: pd.Index,
+        func: callable,
+        func_kws: dict,
+        func_handle_df: bool = False,
+        copy: bool = True,
+    ):
+        """
+        Apply a function on each column in history.
+
+        The function must take a `pd.Series` as first argument, which is a column
+        from `hist`. If ``func_handle_df=True``, the function takes a ``pd.DataFrame``
+        as first argument, holding all columns at once.
+        Bear in mind:
+        - the function must not alter the passed objects
+        - the function is not allowed to add or remove columns
+        - the function must return the same type as its first argument
+        - the returned object must have the same index as the ``index`` passed to ``apply``
+
+        Parameters
+        ----------
+        index: pd.Index
+            Index the new history should have. This is used to ensure the passed
+            function worked correctly and is also used if the function is not applied,
+            because the initial history is empty. In that case the (empty) history is
+            reindexed to this index.
+
+        func : callable
+            function to apply on `History.hist` (flags DataFrame)
+
+        func_kws : dict
+            keyword arguments passed to `func`
+
+        func_handle_df : bool, default False
+            If `True`, the DataFrame under `History.hist` is passed to the given function,
+            thus the function must handle a `pd.DataFrame` as first input. If `False`, each
+            column is passed separately, thus the function must handle a `pd.Series`.
+
+        copy : bool, default True
+            If False, alter the underlying history, otherwise return a copy.
+
+
+        Returns
+        -------
+        history with altered columns
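+
+        For example, a sketch that shifts every stored flag value by one:
+
+        >>> shifted = h.apply(h.index, func=lambda s: s + 1, func_kws={})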
+        """
+        hist = pd.DataFrame(index=index)
+
+        if func_handle_df:
+            # we need to pass the data as floats as functions may fail with Categorical
+            hist = func(self.hist.astype(float), **func_kws)
+
+        else:
+            for pos in self.columns:
+                hist[pos] = func(self.hist[pos].astype(float), **func_kws)
+
+        History._validate(hist, self.meta)
+
+        if copy:
+            history = History(index=None)  # noqa
+            history.meta = deepcopy(self.meta)
+        else:
+            history = self
+
+        history.hist = hist.astype("category")
+
+        return history
+
+    def copy(self, deep=True) -> History:
+        """
+        Make a copy of the FH.
+
+        Parameters
+        ----------
+        deep : bool, default True
+            - ``True``: make a deep copy
+            - ``False``: make a shallow copy
+
+        Returns
+        -------
+        copy : History
+            the copied FH
+        """
+        if deep:
+            return deepcopy(self)
+        else:
+            return copy(self)
+
+    def __len__(self) -> int:
+        return len(self.hist.columns)
+
+    def __repr__(self):
+
+        if self.empty:
+            return str(self.hist).replace("DataFrame", "History")
+
+        r = self.hist.astype(str)
+
+        return str(r)[1:]
+
+    # --------------------------------------------------------------------------------
+    # validation
+    #
+
+    @staticmethod
+    def _validate(hist: pd.DataFrame, meta: List[Any]) -> Tuple[pd.DataFrame, List]:
+        """
+        Check type, columns, index and dtype of `hist`, and whether `meta` matches it.
+        """
+
+        # check hist
+        if not isinstance(hist, pd.DataFrame):
+            raise TypeError(
+                f"'hist' must be of type pd.DataFrame, but is of type {type(hist).__name__}"
+            )
+        # isin([float, ..]) does not work !
+        if not (
+            (hist.dtypes == float)
+            | (hist.dtypes == np.float32)
+            | (hist.dtypes == np.float64)
+            | (hist.dtypes == "category")
+        ).all():
+            raise ValueError(
+                "dtype of all columns in hist must be float or categorical"
+            )
+
+        if not hist.empty and (
+            not hist.columns.equals(pd.Index(range(len(hist.columns))))
+            or hist.columns.dtype != int
+        ):
+            raise ValueError(
+                "column names must be continuous increasing int's, starting with 0."
+            )
+
+        # check meta
+        if not isinstance(meta, list):
+            raise TypeError(
+                f"'meta' must be of type list, but is of type {type(meta).__name__}"
+            )
+        if not all([isinstance(e, dict) for e in meta]):
+            raise TypeError("All elements in meta must be of type 'dict'")
+
+        # check combinations of hist and meta
+        if not len(hist.columns) == len(meta):
+            raise ValueError(
+                "'meta' must have as many entries as columns exist in hist"
+            )
+
+        return hist, meta
+
+    @staticmethod
+    def _validateValue(obj: pd.Series) -> pd.Series:
+        """
+        index is not checked !
+        """
+        if not isinstance(obj, pd.Series):
+            raise TypeError(
+                f"value must be of type pd.Series, but {type(obj).__name__} was given"
+            )
+
+        if not ((obj.dtype == float) or isinstance(obj.dtype, pd.CategoricalDtype)):
+            raise ValueError("dtype must be float or categorical")
+
+        return obj
+
+
+def createHistoryFromData(
+    hist: pd.DataFrame,
+    meta: List[Dict],
+    copy: bool = False,
+):
+    """
+    Create a History from existing data.
+
+    Parameters
+    ----------
+    hist : pd.Dataframe
+        Data that define the flags of the history.
+
+    meta : List of dict
+        A list holding meta information for each column, therefore it must
+        have the same number of entries as columns exist in `hist`.
+
+    copy : bool, default False
+        If `True`, the input data is copied, otherwise not.
+
+
+    Notes
+    -----
+    To create a very simple History from a flags dataframe ``f`` use
+    ``mask = pd.DataFrame(True, index=f.index, columns=f.columns)``
+    and
+    ``meta = [{}] * len(f.columns)``.
+
+    Returns
+    -------
+    History
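+
+    For example, a minimal sketch (column layout and values are illustrative):
+
+    >>> hist = pd.DataFrame({0: [255.0, np.nan]})
+    >>> h = createHistoryFromData(hist, meta=[{}])
+    >>> len(h)
+    1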
+    """
+    History._validate(hist, meta)
+
+    if copy:
+        hist = hist.copy()
+        meta = deepcopy(meta)
+
+    history = History(index=None)  # noqa
+    history.hist = hist.astype("category", copy=False)
+    history.meta = meta
+    return history
diff --git a/saqc/core/modules/__init__.py b/saqc/core/modules/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..c3e7f09a588c2f7be9c3f75c00eaba864fcfc4b8
--- /dev/null
+++ b/saqc/core/modules/__init__.py
@@ -0,0 +1,45 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from saqc.core.modules.breaks import Breaks
+from saqc.core.modules.noise import Noise
+from saqc.core.modules.changepoints import ChangePoints
+from saqc.core.modules.constants import Constants
+from saqc.core.modules.curvefit import Curvefit
+from saqc.core.modules.drift import Drift
+from saqc.core.modules.flagtools import FlagTools
+from saqc.core.modules.generic import Generic
+from saqc.core.modules.interpolation import Interpolation
+from saqc.core.modules.outliers import Outliers
+from saqc.core.modules.pattern import Pattern
+from saqc.core.modules.resampling import Resampling
+from saqc.core.modules.residues import Residues
+from saqc.core.modules.rolling import Rolling
+from saqc.core.modules.scores import Scores
+from saqc.core.modules.tools import Tools
+from saqc.core.modules.transformation import Transformation
+from saqc.core.register import FUNC_MAP
+
+
+class FunctionsMixin(
+    Breaks,
+    Noise,
+    ChangePoints,
+    Constants,
+    Curvefit,
+    Drift,
+    FlagTools,
+    Generic,
+    Interpolation,
+    Outliers,
+    Pattern,
+    Resampling,
+    Residues,
+    Rolling,
+    Scores,
+    Tools,
+    Transformation,
+):
+    def _defer(self, fname, flocals):
+        flocals.pop("self", None)
+        fkwargs = flocals.pop("kwargs", {})
+        return self._wrap(FUNC_MAP[fname])(**flocals, **fkwargs)
diff --git a/saqc/core/modules/breaks.py b/saqc/core/modules/breaks.py
new file mode 100644
index 0000000000000000000000000000000000000000..6def4f2da89f6a5234741b9c5b3fa13e0044c3b0
--- /dev/null
+++ b/saqc/core/modules/breaks.py
@@ -0,0 +1,35 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+from saqc.constants import BAD, UNFLAGGED
+import saqc
+from saqc.lib.types import FreqString
+
+
+class Breaks:
+    def flagMissing(
+        self, field: str, flag: float = BAD, to_mask: float = UNFLAGGED, **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("flagMissing", locals())
+
+    def flagIsolated(
+        self,
+        field: str,
+        gap_window: FreqString,
+        group_window: FreqString,
+        flag: float = BAD,
+        **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("flagIsolated", locals())
+
+    def flagJumps(
+        self,
+        field: str,
+        thresh: float,
+        window: FreqString,
+        min_periods: int = 1,
+        flag: float = BAD,
+        **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("flagJumps", locals())
diff --git a/saqc/core/modules/changepoints.py b/saqc/core/modules/changepoints.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9a350bf3e12e44763b4ac6ad7cab4078171b37f
--- /dev/null
+++ b/saqc/core/modules/changepoints.py
@@ -0,0 +1,49 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+from typing import Callable, Tuple
+
+import numpy as np
+from typing_extensions import Literal
+
+from saqc.constants import BAD
+import saqc
+from saqc.lib.types import FreqString
+
+
+class ChangePoints:
+    def flagChangePoints(
+        self,
+        field: str,
+        stat_func: Callable[[np.ndarray, np.ndarray], float],
+        thresh_func: Callable[[np.ndarray, np.ndarray], float],
+        window: FreqString | Tuple[FreqString, FreqString],
+        min_periods: int | Tuple[int, int],
+        closed: Literal["right", "left", "both", "neither"] = "both",
+        reduce_window: FreqString = None,
+        reduce_func: Callable[[np.ndarray, np.ndarray], int] = lambda x, _: x.argmax(),
+        flag: float = BAD,
+        **kwargs,
+    ) -> saqc.SaQC:
+        return self._defer("flagChangePoints", locals())
+
+    def assignChangePointCluster(
+        self,
+        field: str,
+        stat_func: Callable[[np.array, np.array], float],
+        thresh_func: Callable[[np.array, np.array], float],
+        window: str | Tuple[str, str],
+        min_periods: int | Tuple[int, int],
+        closed: Literal["right", "left", "both", "neither"] = "both",
+        reduce_window: str = None,
+        reduce_func: Callable[
+            [np.ndarray, np.ndarray], float
+        ] = lambda x, _: x.argmax(),
+        model_by_resids: bool = False,
+        set_flags: bool = False,
+        assign_cluster: bool = True,
+        flag: float = BAD,
+        **kwargs,
+    ) -> saqc.SaQC:
+        return self._defer("assignChangePointCluster", locals())
diff --git a/saqc/core/modules/constants.py b/saqc/core/modules/constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..b11c62cd3a00b1d6e9eef2524c716a9360c76bdf
--- /dev/null
+++ b/saqc/core/modules/constants.py
@@ -0,0 +1,26 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+from saqc.constants import BAD
+import saqc
+from saqc.lib.types import FreqString
+
+
+class Constants:
+    def flagByVariance(
+        self,
+        field: str,
+        window: FreqString = "12h",
+        thresh: float = 0.0005,
+        maxna: int = None,
+        maxna_group: int = None,
+        flag: float = BAD,
+        **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("flagByVariance", locals())
+
+    def flagConstants(
+        self, field: str, thresh: float, window: FreqString, flag: float = BAD, **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("flagConstants", locals())
diff --git a/saqc/core/modules/curvefit.py b/saqc/core/modules/curvefit.py
new file mode 100644
index 0000000000000000000000000000000000000000..91a2cd6b7ea17ca43b0c99e492ad6b6a48e84741
--- /dev/null
+++ b/saqc/core/modules/curvefit.py
@@ -0,0 +1,25 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+from typing import Union
+
+from dios import DictOfSeries
+from typing_extensions import Literal
+
+from saqc.constants import BAD
+import saqc
+
+
+class Curvefit:
+    def fitPolynomial(
+        self,
+        field: str,
+        window: Union[int, str],
+        order: int,
+        set_flags: bool = True,
+        min_periods: int = 0,
+        return_residues: bool = False,
+        flag: float = BAD,
+        **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("fitPolynomial", locals())
diff --git a/saqc/core/modules/drift.py b/saqc/core/modules/drift.py
new file mode 100644
index 0000000000000000000000000000000000000000..ed248633066cebbdf8f379625c0b51376802c2e4
--- /dev/null
+++ b/saqc/core/modules/drift.py
@@ -0,0 +1,99 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+from typing import Sequence, Callable, Optional
+
+import numpy as np
+from scipy.spatial.distance import pdist
+
+from saqc.constants import BAD
+import saqc
+from saqc.funcs import LinkageString
+from saqc.lib.types import FreqString, CurveFitter
+
+
+class Drift:
+    def flagDriftFromNorm(
+        self,
+        field: str,
+        fields: Sequence[str],
+        freq: FreqString,
+        spread: float,
+        frac: float = 0.5,
+        metric: Callable[[np.ndarray, np.ndarray], float] = lambda x, y: pdist(
+            np.array([x, y]), metric="cityblock"
+        )
+        / len(x),
+        method: LinkageString = "single",
+        flag: float = BAD,
+        **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("flagDriftFromNorm", locals())
+
+    def flagDriftFromReference(
+        self,
+        field: str,
+        fields: Sequence[str],
+        freq: FreqString,
+        thresh: float,
+        metric: Callable[[np.ndarray, np.ndarray], float] = lambda x, y: pdist(
+            np.array([x, y]), metric="cityblock"
+        )
+        / len(x),
+        flag: float = BAD,
+        **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("flagDriftFromReference", locals())
+
+    def flagDriftFromScaledNorm(
+        self,
+        field: str,
+        set_1: Sequence[str],
+        set_2: Sequence[str],
+        freq: FreqString,
+        spread: float,
+        frac: float = 0.5,
+        metric: Callable[[np.ndarray, np.ndarray], float] = lambda x, y: pdist(
+            np.array([x, y]), metric="cityblock"
+        )
+        / len(x),
+        method: LinkageString = "single",
+        flag: float = BAD,
+        **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("flagDriftFromScaledNorm", locals())
+
+    def correctDrift(
+        self,
+        field: str,
+        maintenance_field: str,
+        model: Callable[..., float],
+        cal_range: int = 5,
+        flag: float = BAD,
+        **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("correctDrift", locals())
+
+    def correctRegimeAnomaly(
+        self,
+        field: str,
+        cluster_field: str,
+        model: CurveFitter,
+        tolerance: Optional[FreqString] = None,
+        epoch: bool = False,
+        **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("correctRegimeAnomaly", locals())
+
+    def correctOffset(
+        self,
+        field: str,
+        max_jump: float,
+        spread: float,
+        window: FreqString,
+        min_periods: int,
+        tolerance: Optional[FreqString] = None,
+        **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("correctOffset", locals())
diff --git a/saqc/core/modules/flagtools.py b/saqc/core/modules/flagtools.py
new file mode 100644
index 0000000000000000000000000000000000000000..a782d9700c1e869a13d6d12d2a6d95ba4dd6c1b7
--- /dev/null
+++ b/saqc/core/modules/flagtools.py
@@ -0,0 +1,37 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+from typing import Any, Union
+
+import pandas as pd
+from dios import DictOfSeries
+from typing_extensions import Literal
+
+from saqc.constants import BAD
+import saqc
+
+
+class FlagTools:
+    def clearFlags(self, field: str, **kwargs) -> saqc.SaQC:
+        return self._defer("clearFlags", locals())
+
+    def forceFlags(self, field: str, flag: float = BAD, **kwargs) -> saqc.SaQC:
+        return self._defer("forceFlags", locals())
+
+    def flagDummy(self, field: str, **kwargs) -> saqc.SaQC:
+        return self._defer("flagDummy", locals())
+
+    def flagUnflagged(self, field: str, flag: float = BAD, **kwargs) -> saqc.SaQC:
+        return self._defer("flagUnflagged", locals())
+
+    def flagManual(
+        self,
+        field: str,
+        mdata: Union[pd.Series, pd.DataFrame, DictOfSeries],
+        mflag: Any = 1,
+        method: Literal["plain", "ontime", "left-open", "right-open"] = "plain",
+        flag: float = BAD,
+        **kwargs,
+    ) -> saqc.SaQC:
+        return self._defer("flagManual", locals())
diff --git a/saqc/core/modules/generic.py b/saqc/core/modules/generic.py
new file mode 100644
index 0000000000000000000000000000000000000000..898be936416168d97c0480e7cd73a687532dfa7b
--- /dev/null
+++ b/saqc/core/modules/generic.py
@@ -0,0 +1,32 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+from typing import Callable
+
+import numpy as np
+import pandas as pd
+
+from saqc.constants import UNFLAGGED, BAD
+import saqc
+
+
+class Generic:
+    def genericProcess(
+        self,
+        field: str,
+        func: Callable[[pd.Series], pd.Series],
+        to_mask: float = UNFLAGGED,
+        **kwargs,
+    ) -> saqc.SaQC:
+        return self._defer("genericProcess", locals())
+
+    def genericFlag(
+        self,
+        field: str,
+        func: Callable[[pd.Series], pd.Series],
+        flag: float = BAD,
+        to_mask: float = UNFLAGGED,
+        **kwargs,
+    ) -> saqc.SaQC:
+        return self._defer("genericFlag", locals())
diff --git a/saqc/core/modules/interpolation.py b/saqc/core/modules/interpolation.py
new file mode 100644
index 0000000000000000000000000000000000000000..8787bd3250b3438702186cf1c7eb6c7b2200e169
--- /dev/null
+++ b/saqc/core/modules/interpolation.py
@@ -0,0 +1,50 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+from typing import Union, Callable
+
+import numpy as np
+import pandas as pd
+
+from saqc.constants import UNFLAGGED
+import saqc
+from saqc.funcs.interpolation import _SUPPORTED_METHODS
+
+
+class Interpolation:
+    def interpolateByRolling(
+        self,
+        field: str,
+        window: Union[str, int],
+        func: Callable[[pd.Series], float] = np.median,
+        center: bool = True,
+        min_periods: int = 0,
+        flag: float = UNFLAGGED,
+        **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("interpolateByRolling", locals())
+
+    def interpolateInvalid(
+        self,
+        field: str,
+        method: _SUPPORTED_METHODS,
+        order: int = 2,
+        limit: int = 2,
+        downgrade: bool = False,
+        flag: float = UNFLAGGED,
+        **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("interpolateInvalid", locals())
+
+    def interpolateIndex(
+        self,
+        field: str,
+        freq: str,
+        method: _SUPPORTED_METHODS,
+        order: int = 2,
+        limit: int = 2,
+        downgrade: bool = False,
+        **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("interpolateIndex", locals())
diff --git a/saqc/core/modules/noise.py b/saqc/core/modules/noise.py
new file mode 100644
index 0000000000000000000000000000000000000000..8985f56f2eae96d36f8df749092f26c9ec54f54e
--- /dev/null
+++ b/saqc/core/modules/noise.py
@@ -0,0 +1,27 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+import numpy as np
+import pandas as pd
+from typing import Callable
+
+from saqc.constants import BAD
+import saqc
+from saqc.lib.types import FreqString
+
+
+class Noise:
+    def flagByStatLowPass(
+        self,
+        field: str,
+        func: Callable[[np.ndarray, pd.Series], float],
+        window: FreqString,
+        thresh: float,
+        sub_window: FreqString = None,
+        sub_thresh: float = None,
+        min_periods: int = None,
+        flag: float = BAD,
+        **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("flagByStatLowPass", locals())
diff --git a/saqc/core/modules/outliers.py b/saqc/core/modules/outliers.py
new file mode 100644
index 0000000000000000000000000000000000000000..8e64e99d34a8ce0ee22b12b40b8394263d9cc4a6
--- /dev/null
+++ b/saqc/core/modules/outliers.py
@@ -0,0 +1,117 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+from typing import Optional, Union, Callable, Sequence
+
+import numpy as np
+import pandas as pd
+from typing_extensions import Literal
+
+from saqc.constants import BAD
+import saqc
+from saqc.lib.types import FreqString
+
+
+class Outliers:
+    def flagByStray(
+        self,
+        field: str,
+        freq: Optional[Union[int, FreqString]] = None,
+        min_periods: int = 11,
+        iter_start: float = 0.5,
+        alpha: float = 0.05,
+        flag: float = BAD,
+        **kwargs,
+    ) -> saqc.SaQC:
+        return self._defer("flagByStray", locals())
+
+    def flagMVScores(
+        self,
+        field: str,
+        fields: Sequence[str],
+        trafo: Callable[[pd.Series], pd.Series] = lambda x: x,
+        alpha: float = 0.05,
+        n: int = 10,
+        func: Callable[[pd.Series], float] = np.sum,
+        iter_start: float = 0.5,
+        partition: Optional[Union[int, FreqString]] = None,
+        partition_min: int = 11,
+        stray_range: Optional[FreqString] = None,
+        drop_flagged: bool = False,  # TODO: still a case ?
+        thresh: float = 3.5,
+        min_periods: int = 1,
+        flag: float = BAD,
+        **kwargs,
+    ) -> saqc.SaQC:
+        return self._defer("flagMVScores", locals())
+
+    def flagRaise(
+        self,
+        field: str,
+        thresh: float,
+        raise_window: FreqString,
+        freq: FreqString,
+        average_window: Optional[FreqString] = None,
+        raise_factor: float = 2.0,
+        slope: Optional[float] = None,
+        weight: float = 0.8,
+        flag: float = BAD,
+        **kwargs,
+    ) -> saqc.SaQC:
+        return self._defer("flagRaise", locals())
+
+    def flagMAD(
+        self,
+        field: str,
+        window: FreqString,
+        z: float = 3.5,
+        flag: float = BAD,
+        **kwargs,
+    ) -> saqc.SaQC:
+        return self._defer("flagMAD", locals())
+
+    def flagOffset(
+        self,
+        field: str,
+        thresh: float,
+        tolerance: float,
+        window: Union[int, FreqString],
+        thresh_relative: Optional[float] = None,
+        flag: float = BAD,
+        **kwargs,
+    ) -> saqc.SaQC:
+        return self._defer("flagOffset", locals())
+
+    def flagByGrubbs(
+        self,
+        field: str,
+        window: Union[FreqString, int],
+        alpha: float = 0.05,
+        min_periods: int = 8,
+        pedantic: bool = False,
+        flag: float = BAD,
+        **kwargs,
+    ) -> saqc.SaQC:
+        return self._defer("flagByGrubbs", locals())
+
+    def flagRange(
+        self,
+        field: str,
+        min: float = -np.inf,
+        max: float = np.inf,
+        flag: float = BAD,
+        **kwargs,
+    ) -> saqc.SaQC:
+        return self._defer("flagRange", locals())
+
+    def flagCrossStatistic(
+        self,
+        field: str,
+        fields: Sequence[str],
+        thresh: float,
+        method: Literal["modZscore", "Zscore"] = "modZscore",
+        flag: float = BAD,
+        **kwargs,
+    ) -> saqc.SaQC:
+        return self._defer("flagCrossStatistic", locals())
diff --git a/saqc/core/modules/pattern.py b/saqc/core/modules/pattern.py
new file mode 100644
index 0000000000000000000000000000000000000000..2dbb020bec70fa15b20c44d21f2e8c92edbea4ff
--- /dev/null
+++ b/saqc/core/modules/pattern.py
@@ -0,0 +1,27 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+from typing import Sequence
+
+from saqc.constants import BAD
+import saqc
+
+
+class Pattern:
+    def flagPatternByDTW(
+        self,
+        field,
+        reference,
+        max_distance=0.0,
+        normalize=True,
+        plot=False,
+        flag=BAD,
+        **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("flagPatternByDTW", locals())
+
+    def flagPatternByWavelet(
+        self, field, reference, widths=(1, 2, 4, 8), waveform="mexh", flag=BAD, **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("flagPatternByWavelet", locals())
diff --git a/saqc/core/modules/resampling.py b/saqc/core/modules/resampling.py
new file mode 100644
index 0000000000000000000000000000000000000000..14350a1b9a34646528d5f6512819576035de8fea
--- /dev/null
+++ b/saqc/core/modules/resampling.py
@@ -0,0 +1,73 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+from typing import Optional, Callable
+
+import numpy as np
+import pandas as pd
+from typing_extensions import Literal
+
+from saqc.constants import BAD
+import saqc
+from saqc.funcs.interpolation import _SUPPORTED_METHODS
+
+
+class Resampling:
+    def linear(self, field: str, freq: str, **kwargs) -> saqc.SaQC:
+        return self._defer("linear", locals())
+
+    def interpolate(
+        self,
+        field: str,
+        freq: str,
+        method: _SUPPORTED_METHODS,
+        order: int = 1,
+        **kwargs,
+    ) -> saqc.SaQC:
+        return self._defer("interpolate", locals())
+
+    def shift(
+        self,
+        field: str,
+        freq: str,
+        method: Literal["fshift", "bshift", "nshift"] = "nshift",
+        freq_check: Optional[Literal["check", "auto"]] = None,
+        **kwargs,
+    ) -> saqc.SaQC:
+        return self._defer("shift", locals())
+
+    def resample(
+        self,
+        field: str,
+        freq: str,
+        func: Callable[[pd.Series], pd.Series] = np.mean,
+        method: Literal["fagg", "bagg", "nagg"] = "bagg",
+        maxna: Optional[int] = None,
+        maxna_group: Optional[int] = None,
+        maxna_flags: Optional[int] = None,  # TODO: still a case ??
+        maxna_group_flags: Optional[int] = None,
+        flag_func: Callable[[pd.Series], float] = max,
+        freq_check: Optional[Literal["check", "auto"]] = None,
+        **kwargs,
+    ) -> saqc.SaQC:
+        return self._defer("resample", locals())
+
+    def reindexFlags(
+        self,
+        field: str,
+        method: Literal[
+            "inverse_fagg",
+            "inverse_bagg",
+            "inverse_nagg",
+            "inverse_fshift",
+            "inverse_bshift",
+            "inverse_nshift",
+            "inverse_interpolation",
+        ],
+        source: str,
+        freq: Optional[str] = None,
+        drop: Optional[bool] = False,
+        **kwargs,
+    ) -> saqc.SaQC:
+        return self._defer("reindexFlags", locals())
diff --git a/saqc/core/modules/residues.py b/saqc/core/modules/residues.py
new file mode 100644
index 0000000000000000000000000000000000000000..d2ee8db46a0217c386360ed0ed8831aa2efaa7d2
--- /dev/null
+++ b/saqc/core/modules/residues.py
@@ -0,0 +1,38 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+from typing import Optional, Union, Callable
+
+import numpy as np
+from typing_extensions import Literal
+
+from saqc.constants import BAD
+import saqc
+
+
+class Residues:
+    def calculatePolynomialResidues(
+        self,
+        field: str,
+        window: Union[str, int],
+        order: int,
+        set_flags: bool = True,  # TODO, not valid anymore, if still needed, maybe assign user-passed ``flag``?
+        min_periods: Optional[int] = 0,
+        flag: float = BAD,
+        **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("calculatePolynomialResidues", locals())
+
+    def calculateRollingResidues(
+        self,
+        field: str,
+        window: Union[str, int],
+        func: Callable[[np.ndarray], np.ndarray] = np.mean,
+        set_flags: bool = True,
+        min_periods: Optional[int] = 0,
+        center: bool = True,
+        flag: float = BAD,
+        **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("calculateRollingResidues", locals())
diff --git a/saqc/core/modules/rolling.py b/saqc/core/modules/rolling.py
new file mode 100644
index 0000000000000000000000000000000000000000..11131514b158ce4d62a83fe145b49a6e8ef864e3
--- /dev/null
+++ b/saqc/core/modules/rolling.py
@@ -0,0 +1,25 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from typing import Union, Callable
+
+import numpy as np
+import pandas as pd
+
+from saqc.constants import BAD
+
+
+class Rolling:
+    def roll(
+        self,
+        field: str,
+        window: Union[str, int],
+        func: Callable[[pd.Series], float] = np.mean,
+        set_flags: bool = True,  # TODO: not applicable anymore
+        min_periods: int = 0,
+        center: bool = True,
+        return_residues=False,  # TODO: this should not be public, a wrapper would be better
+        flag: float = BAD,
+        **kwargs
+    ):
+        return self._defer("roll", locals())
diff --git a/saqc/core/modules/scores.py b/saqc/core/modules/scores.py
new file mode 100644
index 0000000000000000000000000000000000000000..cc015d98fc63a466f22346cfd174a3402362460d
--- /dev/null
+++ b/saqc/core/modules/scores.py
@@ -0,0 +1,29 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+from typing import Sequence, Callable, Union
+
+import numpy as np
+import pandas as pd
+from typing_extensions import Literal
+
+import saqc
+
+
+class Scores:
+    def assignKNNScore(
+        self,
+        field: str,
+        fields: Sequence[str],
+        target: str = "kNNscores",
+        n: int = 10,
+        func: Callable[[pd.Series], float] = np.sum,
+        freq: Union[float, str] = np.inf,
+        min_periods: int = 2,
+        method: Literal["ball_tree", "kd_tree", "brute", "auto"] = "ball_tree",
+        metric: str = "minkowski",
+        p: int = 2,
+        **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("assignKNNScore", locals())
diff --git a/saqc/core/modules/tools.py b/saqc/core/modules/tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..bd6f189fe7ff391e5a0215b1de7848464107c5d7
--- /dev/null
+++ b/saqc/core/modules/tools.py
@@ -0,0 +1,46 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+from typing import Optional
+from typing_extensions import Literal
+
+import saqc
+from saqc.lib.types import FreqString
+
+
+class Tools:
+    def copyField(self, field: str, new_field: str, **kwargs) -> saqc.SaQC:
+        return self._defer("copyField", locals())
+
+    def dropField(self, field: str, **kwargs) -> saqc.SaQC:
+        return self._defer("dropField", locals())
+
+    def renameField(self, field: str, new_name: str, **kwargs) -> saqc.SaQC:
+        return self._defer("renameField", locals())
+
+    def maskTime(
+        self,
+        field: str,
+        mode: Literal["periodic", "mask_field"],
+        mask_field: Optional[str] = None,
+        start: Optional[str] = None,
+        end: Optional[str] = None,
+        closed: bool = True,
+        **kwargs,
+    ) -> saqc.SaQC:
+        return self._defer("maskTime", locals())
+
+    def plot(
+        self,
+        field: str,
+        path: Optional[str] = None,
+        max_gap: Optional[FreqString] = None,
+        stats: bool = False,
+        plot_kwargs: Optional[dict] = None,
+        fig_kwargs: Optional[dict] = None,
+        stats_dict: Optional[dict] = None,
+        store_kwargs: Optional[dict] = None,
+        **kwargs,
+    ) -> saqc.SaQC:
+        return self._defer("plot", locals())
diff --git a/saqc/core/modules/transformation.py b/saqc/core/modules/transformation.py
new file mode 100644
index 0000000000000000000000000000000000000000..622040d42797c52440aa3ccec96a52dda1126bc9
--- /dev/null
+++ b/saqc/core/modules/transformation.py
@@ -0,0 +1,20 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+from typing import Callable, Optional, Union
+
+import pandas as pd
+
+import saqc
+
+
+class Transformation:
+    def transform(
+        self,
+        field: str,
+        func: Callable[[pd.Series], pd.Series],
+        freq: Optional[Union[float, str]] = None,
+        **kwargs
+    ) -> saqc.SaQC:
+        return self._defer("transform", locals())
diff --git a/saqc/core/reader.py b/saqc/core/reader.py
index 9e07640f23a1a225ab558dc5f01e1dee095aec86..219f5b4c4a5fbaef7cd8bc3cb91d8974a385fd18 100644
--- a/saqc/core/reader.py
+++ b/saqc/core/reader.py
@@ -1,111 +1,76 @@
 #! /usr/bin/env python
 # -*- coding: utf-8 -*-
 
+import io
 import ast
-
-import numpy as np
-
+from pathlib import Path
 import pandas as pd
 
-from saqc.core.config import Fields as F
+from saqc.core.core import SaQC
 from saqc.core.visitor import ConfigFunctionParser
-from saqc.core.core import Func, FuncCtrl
-from saqc.core.register import FUNC_MAP
-
 from saqc.lib.tools import isQuoted
 
-COMMENT = "#"
-EMPTY = "None"
-
 
-def _handleEmptyLines(df):
-    if F.VARNAME not in df.columns:
-        # at least the first line was empty, so we search the header
-        df = df.reset_index()
-        i = (df == F.VARNAME).first_valid_index()
-        df.columns = df.iloc[i]
-        df = df.iloc[i + 1 :]
+COMMENT = "#"
+SEPARATOR = ";"
 
-    # mark empty lines
-    mask = (df.isnull() | (df == "")).all(axis=1)
-    df.loc[mask] = EMPTY
-    return df
 
+def readFile(fname):
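+    # Expected file layout (a sketch, header and variable names are
+    # illustrative): two ';'-separated columns per line, the first holding the
+    # variable name, the second the test to apply; '#' starts a comment, e.g.:
+    #
+    #   varname ; test
+    #   x       ; flagRange(min=0, max=100)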
 
-def _handleComments(df):
-    # mark commented lines
-    df.loc[df[F.VARNAME].str.startswith(COMMENT)] = EMPTY
+    fobj = (
+        io.open(fname, "r", encoding="utf-8")
+        if isinstance(fname, (str, Path))
+        else fname
+    )
 
-    for col in df:
-        try:
-            df[col] = df[col].str.split(COMMENT, expand=True).iloc[:, 0].str.strip()
-        except AttributeError:
-            # NOTE:
-            # if `df[col]` is not of type string, we know, that
-            # there are no comments and the `.str` access fails
-            pass
+    out = []
+    for i, line in enumerate(fobj):
+        row = line.strip().split(COMMENT, 1)[0]
+        if not row:
+            # skip empty and comment-only lines
+            continue
 
-    return df
+        parts = [p.strip() for p in row.split(SEPARATOR)]
+        if len(parts) != 2:
+            raise RuntimeError(
+                "The configuration format expects exactly two columns, one "
+                "for the variable name and one for the test to apply, but "
+                f"in line {i} we got: \n'{line}'"
+            )
+        out.append(
+            [
+                i + 1,
+            ]
+            + parts
+        )
 
+    if isinstance(fname, (str, Path)):
+        fobj.close()
 
-def _injectOptionalColumns(df):
-    # inject optional columns
-    if F.PLOT not in df:
-        empty = (df == EMPTY).all(axis=1)
-        df[F.PLOT] = "False"
-        df[empty] = EMPTY
+    df = pd.DataFrame(
+        out[1:],
+        columns=[
+            "row",
+        ]
+        + out[0][1:],
+    ).set_index("row")
     return df
 
 
-def _parseConfig(df, flagger):
+def fromConfig(fname, *args, **kwargs):
+    saqc = SaQC(*args, **kwargs)
+    config = readFile(fname)
 
-    funcs = []
-    for lineno, (_, target, expr, plot) in enumerate(df.itertuples()):
-        if target == "None" or pd.isnull(target) or pd.isnull(expr):
-            continue
+    for _, field, expr in config.itertuples():
 
         regex = False
-        if isQuoted(target):
+        if isQuoted(field):
+            field = field[1:-1]
             regex = True
-            target = target[1:-1]
 
         tree = ast.parse(expr, mode="eval")
-        func_name, kwargs = ConfigFunctionParser(flagger).parse(tree.body)
-        f = Func(
-            field=kwargs.get("field", target),
-            target=target,
-            name=func_name,
-            func=FUNC_MAP[func_name]["func"],
-            kwargs=kwargs,
-            regex=regex,
-            ctrl=FuncCtrl(
-                masking=FUNC_MAP[func_name]["masking"],
-                plot=plot,
-                lineno=lineno+2,
-                expr=expr
-            )
-        )
-        funcs.append(f)
-    return funcs
-
-
-def readConfig(fname, flagger):
-    df = pd.read_csv(
-        fname,
-        sep=r"\s*;\s*",
-        engine="python",
-        dtype=str,
-        quoting=3,
-        keep_default_na=False,  # don't replace "" by nan
-        skip_blank_lines=False,
-    )
+        func, kwargs = ConfigFunctionParser().parse(tree.body)
 
-    df = _handleEmptyLines(df)
-    df = _injectOptionalColumns(df)
-    df = _handleComments(df)
+        saqc = getattr(saqc, func)(field=field, regex=regex, **kwargs)
 
-    df[F.VARNAME] = df[F.VARNAME].replace(r"^\s*$", np.nan, regex=True)
-    df[F.TEST] = df[F.TEST].replace(r"^\s*$", np.nan, regex=True)
-    df[F.PLOT] = df[F.PLOT].replace({"False": "", EMPTY: "", np.nan: ""})
-    df = df.astype({F.PLOT: bool})
-    return _parseConfig(df, flagger)
+    return saqc
diff --git a/saqc/core/register.py b/saqc/core/register.py
index 3c9802559897cb9b3c78ef6bc3318967349f345b..2ccb2b318cd7c53ae957fe6c3c1b79b8b650a2c3 100644
--- a/saqc/core/register.py
+++ b/saqc/core/register.py
@@ -1,16 +1,416 @@
 #!/usr/bin/env python
+from typing import Dict, Optional, Union, Tuple, Callable, Sequence
+from typing_extensions import Literal
+from functools import wraps
+import dataclasses
+import numpy as np
+import pandas as pd
+import dios
 
-from typing import Dict, Any
+from saqc.constants import *
+from saqc.core.flags import initFlagsLike, Flags, History
 
 # NOTE:
 # the global SaQC function store,
 # will be filled by calls to register
-FUNC_MAP: Dict[str, Any] = {}
+FUNC_MAP: Dict[str, Callable] = {}
 
+MaskingStrT = Literal["all", "field", "none"]
+FuncReturnT = Tuple[dios.DictOfSeries, Flags]
 
-def register(masking='all'):
+
+@dataclasses.dataclass
+class CallState:
+    func: Callable
+    func_name: str
+
+    flags: Flags
+    field: str
+
+    args: tuple
+    kwargs: dict
+
+    masking: MaskingStrT
+    mthresh: float
+    mask: dios.DictOfSeries
+
+
+def processing():
+    # executed on module import
     def inner(func):
-        FUNC_MAP[func.__name__] = {"func": func, "masking": masking}
-        return func
+        @wraps(func)
+        def callWrapper(data, field, flags, *args, **kwargs):
+            kwargs["to_mask"] = _getMaskingThresh(kwargs)
+            return func(data, field, flags, *args, **kwargs)
+
+        FUNC_MAP[func.__name__] = callWrapper
+        return callWrapper
+
     return inner
 
+
+def flagging(masking: MaskingStrT = "all"):
+
+    # executed on module import
+    if masking not in ("all", "field", "none"):
+        raise ValueError(
+            f"invalid masking argument '{masking}', choose one of ('all', 'field', 'none')"
+        )
+
+    def inner(func):
+        func_name = func.__name__
+
+        # executed whenever a register-decorated function is called,
+        # regardless of whether it is called directly or via `SaQC.func`.
+        @wraps(func)
+        def callWrapper(data, field, flags, *args, **kwargs):
+            args = data, field, flags, *args
+            args, kwargs, old_state = _preCall(func, args, kwargs, masking, func_name)
+            result = func(*args, **kwargs)
+            return _postCall(result, old_state)
+
+        FUNC_MAP[func_name] = callWrapper
+        callWrapper._masking = masking
+
+        return callWrapper
+
+    return inner
+
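+# A usage sketch (the function name and body are illustrative): decorating a
+# function registers it in FUNC_MAP and wraps it with the pre-/post-call
+# machinery defined below.
+#
+#   @flagging(masking="field")
+#   def flagDummy(data, field, flags, **kwargs):
+#       return data, flags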
+
+def _preCall(
+    func: Callable, args: tuple, kwargs: dict, masking: MaskingStrT, fname: str
+):
+    """
+    Handler that runs before any call to a saqc-function.
+
+    This is called before each call to a saqc-function, regardless of whether
+    it is called via the SaQC interface or directly after importing it.
+
+    Parameters
+    ----------
+    func : callable
+        the function, which is called after this returns. This is not called here!
+
+    args : tuple
+        args to the function
+
+    kwargs : dict
+        kwargs to the function
+
+    masking : str
+        a string indicating which columns in data need masking
+
+    See Also
+    --------
+    _postCall: runs after a saqc-function call
+
+    Returns
+    -------
+    args: tuple
+        arguments to be passed to the actual call
+    kwargs: dict
+        keyword-arguments to be passed to the actual call
+    state: CallState
+        control keyword-arguments passed to `_postCall`
+
+    """
+    mthresh = _getMaskingThresh(kwargs)
+    kwargs["to_mask"] = mthresh
+
+    data, field, flags, *args = args
+
+    # handle data - masking
+    columns = _getMaskingColumns(data, field, masking)
+    masked_data, mask = _maskData(data, flags, columns, mthresh)
+
+    # store current state
+    state = CallState(
+        func=func,
+        func_name=fname,
+        flags=flags,
+        field=field,
+        args=args,
+        kwargs=kwargs,
+        masking=masking,
+        mthresh=mthresh,
+        mask=mask,
+    )
+
+    args = masked_data, field, flags.copy(), *args
+    return args, kwargs, state
+
+
+def _postCall(result, old_state: CallState) -> FuncReturnT:
+    """
+    Handler that runs after any call to a saqc-function.
+
+    This is called after each call to a saqc-function, regardless of whether
+    it was called via the SaQC interface or directly after importing it.
+
+    Parameters
+    ----------
+    result : tuple
+        the result from the called function, namely: data and flags
+
+    old_state : CallState
+        the call state recorded by `_preCall`
+
+    Returns
+    -------
+    data, flags : dios.DictOfSeries, saqc.Flags
+    """
+    data, flags = result
+    flags = _restoreFlags(flags, old_state)
+    data = _unmaskData(data, old_state)
+    return data, flags
+
+
+def _getMaskingColumns(data: dios.DictOfSeries, field: str, masking: MaskingStrT):
+    """
+    Return the columns to mask, based on `masking` (decorator keyword).
+
+    Depending on the `masking` keyword, the following is returned:
+        * 'all'  : all columns from data
+        * 'none' : an empty pd.Index
+        * 'field': a single-entry Index holding `field`
+
+    Returns
+    -------
+    columns: pd.Index
+        Data columns that need to be masked.
+
+    Raises
+    ------
+    ValueError: if given masking literal is not supported
+    """
+    if masking == "all":
+        return data.columns
+    if masking == "none":
+        return pd.Index([])
+    if masking == "field":
+        return pd.Index([field])
+
+    raise ValueError(f"wrong use of `register(masking={masking})`")
+
+
+def _getMaskingThresh(kwargs):
+    """
+    Check the correct usage of the `to_mask` keyword, if passed, otherwise return a default.
+
+    Parameters
+    ----------
+    kwargs : dict
+        The kwargs that will be passed to the saqc-function, possibly containing ``to_mask``.
+
+    Returns
+    -------
+    threshold: float
+        Data is masked wherever the flags are equal to or worse than this threshold.
+
+    Notes
+    -----
+    If ``to_mask`` is **not** in the kwargs, the threshold defaults to
+     - ``-np.inf``
+    If a boolean ``to_mask`` is found in the kwargs, the threshold defaults to
+     - ``-np.inf``, if ``True``
+     - ``+np.inf``, if ``False``
+    If a numeric ``to_mask`` is found in the kwargs, its value is taken as the threshold.
+    """
+    if "to_mask" not in kwargs:
+        return UNFLAGGED
+
+    thresh = kwargs["to_mask"]
+
+    if not isinstance(thresh, (bool, float, int)):
+        raise TypeError("'to_mask' must be of type bool or float")
+
+    if thresh is True:  # masking ON
+        thresh = UNFLAGGED
+
+    if thresh is False:  # masking OFF
+        thresh = np.inf
+
+    thresh = float(thresh)  # handle int
+
+    return thresh
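+
+
+# Illustration of the `to_mask` -> threshold mapping described above, assuming
+# the default scheme where UNFLAGGED == -np.inf (values are examples only):
+#
+#   _getMaskingThresh({})                  -> -inf   (mask everything flagged)
+#   _getMaskingThresh({"to_mask": True})   -> -inf
+#   _getMaskingThresh({"to_mask": False})  -> +inf   (mask nothing)
+#   _getMaskingThresh({"to_mask": 127.0})  -> 127.0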
+
+
+def _isflagged(
+    flagscol: Union[np.array, pd.Series], thresh: float
+) -> Union[np.array, pd.Series]:
+    """
+    Return a boolean mask of the flags according to `thresh`. The return type matches the input type.
+    """
+    if thresh == UNFLAGGED:
+        return flagscol > UNFLAGGED
+
+    return flagscol >= thresh
+
+
+def _restoreFlags(flags: Flags, old_state: CallState):
+    """
+    Generate flags from the temporary result-flags and the original flags.
+
+    Parameters
+    ----------
+    flags : Flags
+        The flags-frame, which is the result from a saqc-function
+
+    old_state : CallState
+        The state before the saqc-function was called
+
+    Returns
+    -------
+    Flags
+    """
+    out = old_state.flags.copy()
+    meta = {
+        "func": old_state.func_name,
+        "args": old_state.args,
+        "keywords": old_state.kwargs,
+    }
+    new_columns = flags.columns.difference(old_state.flags.columns)
+
+    # masking == 'none'
+    # - no data was masked (not relevant here, but helps understanding)
+    # - the saqc-function got a copy of the whole flags frame with all full histories
+    #   (but is not allowed to change them; we have -> @processing for this case)
+    # - the saqc-function appended none or some columns to each history
+    #
+    # masking == 'all'
+    # - all data was masked by flags (not relevant here, but helps understanding)
+    # - the saqc-function got a complete new flags frame, with empty Histories
+    # - the saqc-function appended none or some columns to each history
+    #
+    # masking == 'field'
+    # - the column `field` was masked by flags (no relevance here)
+    # - the saqc-function got a complete new flags frame, with empty `History`s
+    # - the saqc-function appended none or some columns to none or some `History`s
+    #
+    # NOTE:
+    # Actually the flags SHOULD have been cleared only at `field` (as the
+    # masking-parameter implies), but the current implementation in `_prepareFlags`
+    # clears all columns. Nevertheless, the following code only updates `field`
+    # (and new columns), not all columns.
+
+    if old_state.masking in ("none", "all"):
+        columns = flags.columns
+    else:  # field
+        columns = pd.Index([old_state.field])
+
+    for col in columns.union(new_columns):
+
+        if col not in out:  # ensure existence
+            out.history[col] = History(index=flags.history[col].index)
+
+        old_history = out.history[col]
+        new_history = flags.history[col]
+
+        # We only want to add the new columns that were appended during the last
+        # function call. If no such columns exist, we end up with an empty new_history.
+        start = len(old_history.columns)
+        new_history = _sliceHistory(new_history, slice(start, None))
+
+        # NOTE:
+        # Nothing to update -> i.e. a function did not set any flags at all.
+        # This has implications for function writers: returning early from a
+        # function before `flags.__getitem__` was called at least once makes the
+        # function call invisible to the flags/history machinery and likely
+        # breaks translation schemes such as the `PositionalTranslator`.
+        if new_history.empty:
+            continue
+
+        squeezed = new_history.max(raw=True)
+        out.history[col] = out.history[col].append(squeezed, meta=meta)
+
+    return out
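+
+
+# Rough sketch of the "squeeze" step above (illustrative values; the exact
+# NaN/UNTOUCHED handling is up to `History.max`): if a function appended two
+# new columns to the history of `field`, e.g.
+#
+#       existing | new columns
+#   t0    0.0    |  255.0   NaN
+#   t1    0.0    |    NaN  25.0
+#
+# only the row-wise maximum of the new columns (255.0 at t0, 25.0 at t1) is
+# appended to the original history as a single column, together with `meta`.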
+
+
+def _maskData(
+    data: dios.DictOfSeries, flags: Flags, columns: Sequence[str], thresh: float
+) -> Tuple[dios.DictOfSeries, dios.DictOfSeries]:
+    """
+    Mask data with NaNs, if the flags are worse than the threshold.
+        - only the passed `columns` are masked (preselected via the decorator's `masking` keyword)
+
+    Returns
+    -------
+    masked : dios.DictOfSeries
+        masked data, same dim as original
+    mask : dios.DictOfSeries
+        dios holding iloc-to-value pairs of the masked data points for every masked column
+    """
+    mask = dios.DictOfSeries(columns=columns)
+
+    # we use numpy here because it is faster
+    for c in columns:
+        col_mask = _isflagged(flags[c].to_numpy(), thresh)
+
+        if col_mask.any():
+            col_data = data[c].to_numpy(dtype=np.float64)
+
+            mask[c] = pd.Series(col_data[col_mask], index=np.where(col_mask)[0])
+
+            col_data[col_mask] = np.nan
+            data[c] = col_data
+
+    return data, mask
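+
+
+# Example of the returned `mask` structure (illustrative values): for a column
+# "x" with data [1.0, 2.0, 3.0] where positions 0 and 2 are flagged at or above
+# `thresh`, the masked data becomes [NaN, 2.0, NaN] and `mask["x"]` holds the
+# original values keyed by integer position: {0: 1.0, 2: 3.0}.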
+
+
+def _unmaskData(data: dios.DictOfSeries, old_state: CallState) -> dios.DictOfSeries:
+    """
+    Restore the masked data.
+
+    Notes
+    -----
+    Even though this returns data, it works in place!
+    """
+    if old_state.masking == "none":
+        return data
+
+    # we have two options to implement this:
+    #
+    # =================================
+    # set new data on old
+    # =================================
+    # col in old, in masked, in new:
+    #    index differ : old <- new (replace column)
+    #    else         : old <- new (set on masked: ``old[masked & new.notna()] = new``)
+    # col in new only : old <- new (create column)
+    # col in old only : old (delete column)
+    #
+    #
+    # =================================
+    # set old data on new (implemented)
+    # =================================
+    # col in old, in masked, in new :
+    #    index differ : new (keep column)
+    #    else         : new <- old (set on masked, ``new[masked & new.isna()] = old``)
+    # col in new only : new (keep column)
+    # col in old only : new (ignore, was deleted)
+
+    columns = old_state.mask.columns.intersection(
+        data.columns
+    )  # in old, in masked, in new
+
+    for c in columns:
+
+        # ignore
+        if data[c].empty or old_state.mask[c].empty:
+            continue
+
+        # get the positions of values to unmask
+        candidates = old_state.mask[c]
+        # only restore positions that are still NaN, i.e. that the function did not overwrite
+        unmask = candidates[data[c].iloc[candidates.index].isna().to_numpy()]
+        if unmask.empty:
+            continue
+        data[c].iloc[unmask.index] = unmask
+
+    return data
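+
+
+# Worked example of the "set old data on new" strategy (illustrative values):
+# suppose column "x" was masked at positions 0 and 2 (original values 1.0 and
+# 3.0). If the called function left position 0 as NaN but wrote 9.9 to
+# position 2, only position 0 is restored to 1.0; position 2 keeps 9.9.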
+
+
+def _sliceHistory(history: History, sl: slice) -> History:
+    history.hist = history.hist.iloc[:, sl]
+    history.meta = history.meta[sl]
+    return history
diff --git a/saqc/core/translator/__init__.py b/saqc/core/translator/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..17a0490bb274c77fd847a11cde24a80efd3ca020
--- /dev/null
+++ b/saqc/core/translator/__init__.py
@@ -0,0 +1,9 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from saqc.core.translator.basetranslator import (
+    Translator,
+    FloatTranslator,
+    SimpleTranslator,
+)
+from saqc.core.translator.positionaltranslator import PositionalTranslator
+from saqc.core.translator.dmptranslator import DmpTranslator
diff --git a/saqc/core/translator/basetranslator.py b/saqc/core/translator/basetranslator.py
new file mode 100644
index 0000000000000000000000000000000000000000..b6bec9146327a4372be7dd19e8f3312d5de22f39
--- /dev/null
+++ b/saqc/core/translator/basetranslator.py
@@ -0,0 +1,203 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from __future__ import annotations
+
+from typing import Dict, Union, Any
+
+import numpy as np
+import pandas as pd
+
+from dios import DictOfSeries
+
+from saqc.core.flags import (
+    Flags,
+    UNFLAGGED,
+    UNTOUCHED,
+    BAD,
+    GOOD,
+)
+from saqc.lib.types import ExternalFlag
+
+
+ForwardMap = Dict[ExternalFlag, float]
+BackwardMap = Dict[float, ExternalFlag]
+
+
+class Translator:
+    """
+    This class provides the basic translation mechanism and should serve as
+    a base class for every other translation scheme.
+
+    The general translation is realized through dictionary lookups, although
+    we might need to extend this logic to also allow calls to translation
+    functions in the future. Currently at least one `dict` defining the
+    'forward' translation from 'user flags' -> 'internal flags' needs to be
+    provided.
+    Optionally a second `dict` can be passed to map 'internal flags' -> 'user flags';
+    if the latter is not given, this 'backward' translation will be inferred as
+    the inverse of the 'forward' translation.
+
+    The translation mechanism imposes a few restrictions:
+    - The scheme must be well defined, i.e. we need a backward translation for
+      every forward translation (each value in `self._forward` needs a key in
+      `self._backward`).
+    - We need translations for the special flags `saqc.constants.UNFLAGGED` and
+      `saqc.constants.BAD`. That implies that every valid translation scheme
+      provides at least one user flag that maps to `BAD` and one that maps to
+      `UNFLAGGED`.
+    """
+
+    # (internal) threshold flag above which values will be masked
+    TO_MASK: Union[float, bool] = True
+
+    # additional arguments and default values the translation scheme accepts
+    ARGUMENTS: Dict[str, Any] = {}
+
+    def __init__(self, forward: ForwardMap, backward: BackwardMap):
+        """
+        Parameters
+        ----------
+        forward : dict
+            A mapping defining the forward translation of scalar flag values
+
+        backward : dict
+            A mapping defining the backward translation of scalar flag values
+
+        Note
+        ----
+        `backward` needs to provide a mapping for the two special flags
+        `saqc.constants.UNFLAGGED`, `saqc.constants.BAD`
+        """
+        if UNFLAGGED not in backward or BAD not in backward:
+            raise ValueError(
+                f"need translations for the special flags `UNFLAGGED` ({UNFLAGGED}) and `BAD` ({BAD})"
+            )
+        self._forward = forward
+        self._backward = backward
+
+    @staticmethod
+    def _translate(
+        flags: Union[Flags, pd.DataFrame, pd.Series],
+        trans_map: Union[ForwardMap, BackwardMap],
+    ) -> DictOfSeries:
+        """
+        Translate a given flag data structure to another according to the
+        mapping given in `trans_map`
+
+        Parameters
+        ----------
+        flags : Flags, pd.DataFrame or pd.Series
+            The flags to translate
+
+        trans_map : dict
+            The mapping to apply to the flag values
+
+        Returns
+        -------
+        dios.DictOfSeries
+        """
+        if isinstance(flags, pd.Series):
+            flags = flags.to_frame()
+
+        out = DictOfSeries()
+        expected = pd.Index(trans_map.values())
+        for field in flags.columns:
+            out[field] = flags[field].replace(trans_map)
+            diff = pd.Index(out[field]).difference(expected)
+            if not diff.empty:
+                raise ValueError(
+                    f"flags were not translated: {diff.drop_duplicates().to_list()}"
+                )
+        return out
+
+    def __call__(self, flag: ExternalFlag) -> float:
+        """
+        Translate a scalar 'external flag' to an 'internal flag'
+
+        Parameters
+        ----------
+        flag : float, int, str
+            The external flag to translate
+
+        Returns
+        -------
+        float
+        """
+        if flag not in self._forward:
+            if flag not in self._backward:
+                raise ValueError(f"invalid flag: {flag}")
+            return float(flag)
+        return self._forward[flag]
+
+    def forward(self, flags: pd.DataFrame) -> Flags:
+        """
+        Translate from 'external flags' to 'internal flags'
+
+        Parameters
+        ----------
+        flags : pd.DataFrame
+            The external flags to translate
+
+        Returns
+        -------
+        Flags object
+        """
+        return Flags(self._translate(flags, self._forward))
+
+    def backward(
+        self, flags: Flags, raw: bool = False
+    ) -> Union[pd.DataFrame, DictOfSeries]:
+        """
+        Translate from 'internal flags' to 'external flags'
+
+        Parameters
+        ----------
+        flags : Flags
+            The internal flags to translate
+
+        raw : bool, default False
+            If True, return a dios.DictOfSeries instead of a pd.DataFrame
+
+        Returns
+        -------
+        pd.DataFrame or dios.DictOfSeries
+        """
+        out = self._translate(flags, self._backward)
+        if not raw:
+            out = out.to_df()
+        return out
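+
+
+# A minimal sketch of a custom scheme built on this base class -- the name
+# `BinaryTranslator` and its mapping are purely illustrative:
+#
+#   class BinaryTranslator(Translator):
+#       _FORWARD = {0: UNFLAGGED, 1: BAD}
+#       _BACKWARD = {UNFLAGGED: 0, UNTOUCHED: 0, BAD: 1}
+#
+#       def __init__(self):
+#           super().__init__(forward=self._FORWARD, backward=self._BACKWARD)
+#
+# Both special flags UNFLAGGED and BAD must appear as keys of the backward
+# mapping, otherwise `Translator.__init__` raises a ValueError.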
+
+
+class FloatTranslator(Translator):
+
+    """
+    Acts as the default Translator and provides a changeable subset of the
+    internal float flags
+    """
+
+    _MAP = {
+        -np.inf: -np.inf,
+        **{k: k for k in np.arange(0, 256, dtype=float)},
+    }
+
+    def __init__(self):
+        super().__init__(self._MAP, self._MAP)
+
+
+class SimpleTranslator(Translator):
+
+    """
+    Provides a simple string-based translation scheme, mapping the labels
+    'UNFLAGGED', 'OK' and 'BAD' to the internal float flags
+    """
+
+    _FORWARD = {
+        "UNFLAGGED": -np.inf,
+        "BAD": BAD,
+        "OK": GOOD,
+    }
+
+    _BACKWARD = {
+        UNFLAGGED: "UNFLAGGED",
+        UNTOUCHED: "UNFLAGGED",
+        BAD: "BAD",
+        GOOD: "OK",
+    }
+
+    def __init__(self):
+        super().__init__(forward=self._FORWARD, backward=self._BACKWARD)
diff --git a/saqc/core/translator/dmptranslator.py b/saqc/core/translator/dmptranslator.py
new file mode 100644
index 0000000000000000000000000000000000000000..8544a770e197f34e56daca88e54eb30f752baf50
--- /dev/null
+++ b/saqc/core/translator/dmptranslator.py
@@ -0,0 +1,212 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from __future__ import annotations
+
+import json
+from typing import Any
+from functools import reduce
+
+import numpy as np
+import pandas as pd
+
+from saqc.core.history import History
+from saqc.core.flags import (
+    Flags,
+    UNFLAGGED,
+    UNTOUCHED,
+    GOOD,
+    DOUBTFUL,
+    BAD,
+)
+from saqc.core.translator.basetranslator import BackwardMap, Translator, ForwardMap
+
+
+_QUALITY_CAUSES = [
+    "",
+    "BATTERY_LOW",
+    "BELOW_MINIMUM",
+    "ABOVE_MAXIMUM",
+    "BELOW_OR_ABOVE_MIN_MAX",
+    "ISOLATED_SPIKE",
+    "DEFECTIVE_SENSOR",
+    "LEFT_CENSORED_DATA",
+    "RIGHT_CENSORED_DATA",
+    "OTHER",
+    "AUTOFLAGGED",
+]
+
+_QUALITY_LABELS = [
+    "quality_flag",
+    "quality_cause",
+    "quality_comment",
+]
+
+
+class DmpTranslator(Translator):
+
+    """
+    Implements the translation from and to the flagging scheme implemented in
+    the UFZ - Datamanagementportal
+    """
+
+    ARGUMENTS = {"comment": "", "cause": "OTHER"}
+
+    _FORWARD: ForwardMap = {
+        "NIL": UNFLAGGED,
+        "OK": GOOD,
+        "DOUBTFUL": DOUBTFUL,
+        "BAD": BAD,
+    }
+
+    _BACKWARD: BackwardMap = {
+        UNFLAGGED: "NIL",
+        UNTOUCHED: "NIL",
+        GOOD: "OK",
+        DOUBTFUL: "DOUBTFUL",
+        BAD: "BAD",
+    }
+
+    def __init__(self):
+        super().__init__(forward=self._FORWARD, backward=self._BACKWARD)
+
+    def forward(self, df: pd.DataFrame) -> Flags:
+        """
+        Translate from 'external flags' to 'internal flags'
+
+        Parameters
+        ----------
+        df : pd.DataFrame
+            The external flags to translate
+
+        Returns
+        -------
+        Flags object
+        """
+
+        self.validityCheck(df)
+
+        data = {}
+
+        for field in df.columns.get_level_values(0):
+
+            field_flags = df[field]
+            field_history = History(field_flags.index)
+
+            for (flag, cause, comment), values in field_flags.groupby(_QUALITY_LABELS):
+                try:
+                    comment = json.loads(comment)
+                except json.decoder.JSONDecodeError:
+                    comment = {"test": "unknown", "comment": ""}
+
+                histcol = pd.Series(UNTOUCHED, index=field_flags.index)
+                histcol.loc[values.index] = self(flag)
+
+                meta = {
+                    "func": comment["test"],
+                    "keywords": {"comment": comment["comment"], "cause": cause},
+                }
+                field_history.append(histcol, meta=meta)
+
+            data[str(field)] = field_history
+
+        return Flags(data)
+
+    def backward(self, flags: Flags) -> pd.DataFrame:
+        """
+        Translate from 'internal flags' to 'external flags'
+
+        Parameters
+        ----------
+        flags : Flags
+            The internal flags to translate
+
+        Returns
+        -------
+        pd.DataFrame
+            The translated (external) flags
+        """
+        tflags = super().backward(flags, raw=True)
+
+        out = pd.DataFrame(
+            index=reduce(lambda x, y: x.union(y), tflags.indexes).sort_values(),
+            columns=pd.MultiIndex.from_product([flags.columns, _QUALITY_LABELS]),
+        )
+
+        for field in tflags.columns:
+
+            df = pd.DataFrame(
+                {
+                    "quality_flag": tflags[field],
+                    "quality_cause": "",
+                    "quality_comment": "",
+                }
+            )
+
+            history = flags.history[field]
+            for col in history.columns:
+
+                valid = (history.hist[col] != UNFLAGGED) & history.hist[col].notna()
+
+                # extract from meta
+                meta = history.meta[col]
+                keywords = meta.get("keywords", {})
+                comment = json.dumps(
+                    {
+                        "test": meta.get("func", "unknown"),
+                        "comment": keywords.get("comment", self.ARGUMENTS["comment"]),
+                    }
+                )
+                cause = keywords.get("cause", self.ARGUMENTS["cause"])
+                df.loc[valid, "quality_comment"] = comment
+                df.loc[valid, "quality_cause"] = cause
+
+            out[field] = df.reindex(out.index)
+
+        self.validityCheck(out)
+        return out
+
+    @classmethod
+    def validityCheck(cls, df: pd.DataFrame) -> None:
+        """
+        Check whether the given flags, causes and comments are valid.
+
+        Parameters
+        ----------
+        df : pd.DataFrame
+            The external flags to check
+        """
+
+        cols = df.columns
+        if not isinstance(cols, pd.MultiIndex):
+            raise TypeError("DMP-Flags need multi-index columns")
+
+        if not cols.get_level_values(1).isin(_QUALITY_LABELS).all(axis=None):
+            raise TypeError(
+                f"DMP-Flags expect the labels {list(_QUALITY_LABELS)} in the secondary level"
+            )
+
+        for field in df.columns.get_level_values(0):
+
+            # we might have NaN injected by DictOfSeries -> DataFrame conversions
+            field_df = df[field].dropna(how="all", axis="index")
+            flags = field_df["quality_flag"]
+            causes = field_df["quality_cause"]
+            comments = field_df["quality_comment"]
+
+            if not flags.isin(cls._FORWARD.keys()).all(axis=None):
+                raise ValueError(
+                    f"invalid quality flag(s) found, only the following values are supported: {set(cls._FORWARD.keys())}"
+                )
+
+            if not causes.isin(_QUALITY_CAUSES).all(axis=None):
+                raise ValueError(
+                    f"invalid quality cause(s) found, only the following values are supported: {_QUALITY_CAUSES}"
+                )
+
+            if (~flags.isin(("OK", "NIL")) & (causes == "")).any(axis=None):
+                raise ValueError(
+                    "quality flags other than 'OK and 'NIL' need a non-empty quality cause"
+                )
+
+            if ((causes == "OTHER") & (comments == "")).any(axis=None):
+                raise ValueError(
+                    "quality cause 'OTHER' needs a non-empty quality comment"
+                )
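+
+
+# Illustrative layout of the external DMP flags frame that `forward` expects
+# and `backward` produces (variable name and values are made up; the second
+# column level always holds the _QUALITY_LABELS):
+#
+#                 var1
+#   quality_flag  quality_cause  quality_comment
+#   "BAD"         "BATTERY_LOW"  '{"test": "flagRange", "comment": ""}'
+#   "NIL"         ""             ""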
diff --git a/saqc/core/translator/positionaltranslator.py b/saqc/core/translator/positionaltranslator.py
new file mode 100644
index 0000000000000000000000000000000000000000..9a8f085f8c4f522726331a049cf48f4078f7392b
--- /dev/null
+++ b/saqc/core/translator/positionaltranslator.py
@@ -0,0 +1,103 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+import numpy as np
+import pandas as pd
+
+from saqc.core.flags import (
+    Flags,
+    History,
+    UNTOUCHED,
+    UNFLAGGED,
+    GOOD,
+    DOUBTFUL,
+    BAD,
+)
+from saqc.core.translator.basetranslator import Translator, ForwardMap, BackwardMap
+
+
+class PositionalTranslator(Translator):
+
+    """
+    Implements the translation from and to the flagging scheme implemented by CHS
+    """
+
+    TO_MASK = DOUBTFUL + 1
+
+    _FORWARD: ForwardMap = {
+        -6: UNFLAGGED,
+        -5: UNFLAGGED,
+        -2: UNFLAGGED,
+        0: UNFLAGGED,
+        1: DOUBTFUL,
+        2: BAD,
+    }
+    _BACKWARD: BackwardMap = {
+        UNTOUCHED: 0,
+        UNFLAGGED: 0,
+        GOOD: 0,
+        DOUBTFUL: 1,
+        BAD: 2,
+    }
+
+    def __init__(self):
+        super().__init__(forward=self._FORWARD, backward=self._BACKWARD)
+
+    def forward(self, flags: pd.DataFrame) -> Flags:
+        """
+        Translate from 'external flags' to 'internal flags'
+
+        Parameters
+        ----------
+        flags : pd.DataFrame
+            The external flags to translate
+
+        Returns
+        -------
+        Flags object
+        """
+
+        data = {}
+        for field, field_flags in flags.items():
+
+            # explode the flags into separate columns and drop the leading `9`
+            df = pd.DataFrame(
+                field_flags.astype(str).str.slice(start=1).apply(tuple).tolist(),
+                index=field_flags.index,
+            ).astype(int)
+
+            # the exploded values form the History of `field`
+            fflags = super()._translate(df, self._FORWARD)
+            field_history = History(field_flags.index)
+            for _, s in fflags.items():
+                field_history.append(s)
+            data[str(field)] = field_history
+
+        return Flags(data)
+
+    def backward(self, flags: Flags) -> pd.DataFrame:
+        """
+        Translate from 'internal flags' to 'external flags'
+
+        Parameters
+        ----------
+        flags : Flags
+            The internal flags to translate
+
+        Returns
+        -------
+        pd.DataFrame
+        """
+        out = {}
+        for field in flags.columns:
+            thist = flags.history[field].hist.replace(self._BACKWARD).astype(int)
+            # concatenate the single flag values
+            ncols = thist.shape[-1]
+            init = 9 * 10 ** ncols
+            bases = 10 ** np.arange(ncols - 1, -1, -1)
+
+            tflags = init + (thist * bases).sum(axis=1)
+            out[field] = tflags
+
+        return pd.DataFrame(out).fillna(-9999).astype(int)
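+
+
+# Worked example of the positional encoding (illustrative values): a history
+# with three columns whose flags translate to 0, 2 and 1 is encoded as
+#
+#   9 * 10**3 + 0*100 + 2*10 + 1*1 == 9021
+#
+# i.e. a leading `9` marker followed by one digit per history column.
+# `forward` reverses this: it strips the leading `9` from "9021" and maps the
+# remaining digits 0, 2 and 1 back to UNFLAGGED, BAD and DOUBTFUL.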
diff --git a/saqc/core/visitor.py b/saqc/core/visitor.py
index 79a285f7b0bd6afadde9a424cc678d734ed12fd7..e4b768f4d0b427e55b03831a7fafddffa4da4e18 100644
--- a/saqc/core/visitor.py
+++ b/saqc/core/visitor.py
@@ -2,10 +2,10 @@
 # -*- coding: utf-8 -*-
 
 import ast
-
 import numpy as np
 import pandas as pd
 
+from saqc.constants import *
 from saqc.core.register import FUNC_MAP
 import saqc.lib.ts_operators as ts_ops
 
@@ -39,17 +39,18 @@ ENVIRONMENT = {
     "zLog": ts_ops.zeroLog,
 }
 
-RESERVED = {"GOOD", "BAD", "UNFLAGGED", "NODATA"}
+# TODO:
+# get from saqc.constants
+RESERVED = {"GOOD", "BAD", "UNFLAGGED"}
 
 
 class ConfigExpressionParser(ast.NodeVisitor):
-
     """
     Generic configuration functions will be rewritten as lambda functions
     and variables that need a look up in `data` will act as arguments, e.g.:
-      `flagGeneric(func=(x != NODATA) & (y < 3))`
+      `flagGeneric(func=(x != 4) & (y < 3))`
       will be rewritten to
-      `lambda x, y: (x != NODATA) & (y < 3)`
+      `lambda x, y: (x != 4) & (y < 3)`
 
     The main purpose of this class is to identify all such lambda arguments
     and check the given expression for accordance with the restrictions
@@ -134,15 +135,16 @@ class ConfigFunctionParser(ast.NodeVisitor):
         ast.Index,
         ast.USub,
         ast.List,
+        ast.Attribute,
     )
 
-    def __init__(self, flagger):
+    def __init__(self):
 
         self.kwargs = {}
         self.environment = {
-            "GOOD": flagger.GOOD,
-            "BAD": flagger.BAD,
-            "UNFLAGGED": flagger.UNFLAGGED,
+            "GOOD": GOOD,
+            "BAD": BAD,
+            "UNFLAGGED": UNFLAGGED,
             **ENVIRONMENT,
         }
 
@@ -157,7 +159,11 @@ class ConfigFunctionParser(ast.NodeVisitor):
         if node.args:
             raise TypeError("only keyword arguments are supported")
 
-        func_name = node.func.id
+        if isinstance(node.func, ast.Attribute):
+            func_name = f"{node.func.value.id}.{node.func.attr}"  # type: ignore
+        else:
+            func_name = node.func.id  # type: ignore
+
         if func_name not in FUNC_MAP:
             raise NameError(f"unknown function '{func_name}'")
 
@@ -202,7 +208,7 @@ class ConfigFunctionParser(ast.NodeVisitor):
         co = compile(
             ast.fix_missing_locations(ast.Interactive(body=[vnode])),
             "<ast>",
-            mode="single"
+            mode="single",
         )
         # NOTE: only pass a copy to not clutter the self.environment
         exec(co, {**self.environment}, self.kwargs)
diff --git a/saqc/flagger/__init__.py b/saqc/flagger/__init__.py
deleted file mode 100644
index dd5b607158f13f3922cdf734f21fa98be19a96cb..0000000000000000000000000000000000000000
--- a/saqc/flagger/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-from saqc.flagger.baseflagger import BaseFlagger
-from saqc.flagger.categoricalflagger import CategoricalFlagger
-from saqc.flagger.simpleflagger import SimpleFlagger
-from saqc.flagger.dmpflagger import DmpFlagger
-from saqc.flagger.continuousflagger import ContinuousFlagger
-from saqc.flagger.positionalflagger import PositionalFlagger
diff --git a/saqc/flagger/baseflagger.py b/saqc/flagger/baseflagger.py
deleted file mode 100644
index b46515d68543bcfe1f4caf987440c621d4821005..0000000000000000000000000000000000000000
--- a/saqc/flagger/baseflagger.py
+++ /dev/null
@@ -1,480 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import operator as op
-from copy import deepcopy
-from abc import ABC, abstractmethod
-
-from typing import TypeVar, Union, Any, List, Optional
-
-import pandas as pd
-import numpy as np
-import dios
-
-from saqc.lib.tools import assertScalar, mergeDios, toSequence, customRoller
-
-COMPARATOR_MAP = {
-    "!=": op.ne,
-    "==": op.eq,
-    ">=": op.ge,
-    ">": op.gt,
-    "<=": op.le,
-    "<": op.lt,
-}
-
-# TODO: get some real types here (could be tricky...)
-LocT = Any
-FlagT = Any
-diosT = dios.DictOfSeries
-BaseFlaggerT = TypeVar("BaseFlaggerT")
-PandasT = Union[pd.Series, diosT]
-FieldsT = Union[str, List[str]]
-
-
-class BaseFlagger(ABC):
-    @abstractmethod
-    def __init__(self, dtype):
-        # NOTE: the type of the _flags DictOfSeries
-        self.dtype = dtype
-        self.extra_defaults = dict()
-        # NOTE: the arggumens of setFlags supported from
-        #       the configuration functions
-        self.signature = ("flag",)
-        self._flags: Optional[diosT] = None
-
-    @property
-    def initialized(self):
-        return self._flags is not None
-
-    @property
-    def flags(self):
-        return self._flags.copy()
-
-    def initFlags(self, data: diosT = None, flags: diosT = None) -> BaseFlaggerT:
-        """
-        initialize a flagger based on the given 'data' or 'flags'
-        if 'data' is not None: return a flagger with flagger.UNFLAGGED values
-        if 'flags' is not None: return a flagger with the given flags
-        """
-
-        if data is None and flags is None:
-            raise TypeError("either 'data' or 'flags' are required")
-
-        if data is not None and flags is not None:
-            raise TypeError("either 'data' or 'flags' can be given")
-
-        if data is not None:
-            if not isinstance(data, diosT):
-                data = dios.DictOfSeries(data)
-
-            flags = dios.DictOfSeries(columns=data.columns)
-            for c in flags.columns:
-                flags[c] = pd.Series(self.UNFLAGGED, index=data[c].index)
-        else:
-            if not isinstance(flags, diosT):
-                flags = dios.DictOfSeries(flags)
-
-        flags = flags.astype(self.dtype)
-        newflagger = self.copy(flags=flags)
-        return newflagger
-
-    def merge(self, other: BaseFlaggerT, subset: Optional[List] = None, join: str = "merge", inplace=False):
-        """
-        Merge the given flagger 'other' into self
-        """
-        # NOTE: add more checks !?
-        if not isinstance(other, self.__class__):
-            raise TypeError(f"flagger of type '{self.__class__}' needed")
-
-        if inplace:
-            self._flags = mergeDios(self._flags, other._flags, subset=subset, join=join)
-            return self
-        else:
-            return self.copy(flags=mergeDios(self._flags, other._flags, subset=subset, join=join))
-
-    def slice(self, field: FieldsT = None, loc: LocT = None, drop: FieldsT = None, inplace=False) -> BaseFlaggerT:
-        """ Return a potentially trimmed down copy of self. """
-        if drop is not None:
-            if field is not None:
-                raise TypeError("either 'field' or 'drop' can be given, but not both")
-            field = self._flags.columns.drop(drop, errors="ignore")
-        flags = self.getFlags(field=field, loc=loc).to_dios()
-
-        if inplace:
-            self._flags = flags
-            return self
-        else:
-            return self.copy(flags=flags)
-
-    def toFrame(self):
-        """ Return a pd.DataFrame holding the flags
-        Return
-        ------
-        frame: pandas.DataFrame
-
-        Note
-        ----
-        This is a convenience funtion hiding the implementation detail dios.DictOfSeries.
-        Subclasses with special flag structures (i.e. DmpFlagger) should overwrite the
-        this methode in order to provide a usefull user output.
-        """
-        return self._flags.to_df()
-
-    def getFlags(self, field: FieldsT = None, loc: LocT = None, full=False):
-        """ Return a potentially, to `loc`, trimmed down version of flags.
-
-        Parameters
-        ----------
-        field : str, list of str or None, default None
-            Field(s) to request.
-        loc :
-            limit result to specific rows.
-        full : object
-            If True, an additional dict is returned, holding all extras that
-            the flagger may specify. These extras can be feed back to a/the
-            flagger with `setFlags(...with_extras=True)`.
-
-        Return
-        ------
-        flags: pandas.Series or dios.DictOfSeries
-            If field is a scalar a series is returned, otherwise a dios.
-        extras: dict
-            Present only if `full=True`. A dict that hold all extra information.
-
-        Note
-        ----
-        This is more or less a __getitem__(key)-like function, where
-        self._flags is accessed and key is a single key or a tuple.
-        Either key is [loc] or [loc,field]. loc also can be a 2D-key,
-        aka. a booldios
-
-        The resulting dict (full=True) can be feed to setFlags to update extra Columns.
-        but field must be a scalar then, because setFlags only can process a scalar field.
-        """
-
-        # loc should be a valid 2D-indexer and
-        # then field must be None. Otherwise aloc
-        # will fail and throw the correct Error.
-        if isinstance(loc, diosT) and field is None:
-            indexer = loc
-
-        else:
-            loc = slice(None) if loc is None else loc
-            field = slice(None) if field is None else self._check_field(field)
-            indexer = (loc, field)
-
-        # this is a bug in `dios.aloc`, which may return a shallow copied dios, if `slice(None)` is passed
-        # as row indexer. Thus is because pandas `.loc` return a shallow copy if a null-slice is passed to a series.
-        flags = self._flags.aloc[indexer].copy()
-        if full:
-            return flags, {}
-        else:
-            return flags
-
-    def setFlags(
-            self,
-            field: str,
-            loc: LocT = None,
-            flag: FlagT = None,
-            force: bool = False,
-            inplace: bool = False,
-            with_extra: bool = False,
-            flag_after: Union[str, int] = None,
-            flag_before: Union[str, int] = None,
-            win_flag: FlagT = None,
-            **kwargs
-    ) -> BaseFlaggerT:
-        """Overwrite existing flags at loc.
-
-        If `force=False` (default) only flags with a lower priority are overwritten,
-        otherwise, if `force=True`, flags are overwritten unconditionally.
-
-        Examples
-        --------
-        One can use this to update extra columns without knowing their names. Eg. like so:
-
-        >>> field = 'var0'
-        >>> flags, extra = flagger.getFlags(field, full=True)
-        >>> newflags = magic_that_alter_index(flags)
-        >>> for k, v in extra.items()
-        ...     extra[k] = magic_that_alter_index(v)
-        >>> flagger = flagger.setFlags(field, flags=newflags, with_extra=True, **extra)
-        """
-
-        assert "iloc" not in kwargs, "deprecated keyword, `iloc=slice(i:j)`. Use eg. `loc=srs.index[i:j]` instead."
-
-        assertScalar("field", self._check_field(field), optional=False)
-        flag = self.BAD if flag is None else flag
-        out = self if inplace else deepcopy(self)
-
-        if with_extra and not isinstance(flag, pd.Series):
-            raise ValueError("flags must be pd.Series if `with_extras=True`.")
-
-        trimmed = self.getFlags(field=field, loc=loc)
-        if force:
-            mask = pd.Series(True, index=trimmed.index, dtype=bool)
-        else:
-            mask = trimmed < flag
-
-        # set flags of the test
-        out._flags.aloc[mask, field] = flag
-
-        # calc and set window flags
-        if flag_after is not None or flag_before is not None:
-            win_mask, win_flag = self._getWindowMask(field, mask, flag_after, flag_before, win_flag, flag, force)
-            out._flags.aloc[win_mask, field] = win_flag
-
-        return out
-
-    def _getWindowMask(self, field, mask, flag_after, flag_before, win_flag, flag, force):
-        """ Return a mask which is True where the additional window flags should get set.
-
-        Parameters
-        ----------
-        field : str
-            column identifier.
-        mask : boolean pd.Series
-            identified locations where flags was set
-        flag_after : offset or int
-            set additional flags after each flag that was set
-        flag_before : offset or int
-            set additional flags before each flag that was set
-        win_flag : any
-            Should be valid flag of the flagger or None. Defaults to `flag` if None.
-        flag : any
-            The flag that was used by flagger.setFlags(). Only used to determine `win_flag` if the latter is None.
-        force : bool
-            If True, the additional flags specified by `flag_after` and `flag_before` are set unconditionally and so
-            also could overwrite worse flags.
-
-        Returns
-        -------
-        mask: boolean pandas.Series
-            locations where additional flags should be set. The mask has the same (complete) length than `.flags[field]`
-        win_flag: the flag to set
-
-        Raises
-        ------
-        ValueError : If `win_flag` is None and `flag` is not a scalar.
-        ValueError : If `win_flag` is not a valid flagger flag
-        NotImplementedError: if `flag_before` is given
-        """
-
-        # win_flag default to flag if not explicitly given
-        if win_flag is None:
-            win_flag = flag
-            if not np.isscalar(win_flag):
-                raise ValueError("win_flag (None) cannot default to flag, if flag is not a scalar. "
-                                 "Pls specify `win_flag` or omit `flag_after` and `flag_before`.")
-        else:
-            if not self.isValidFlag(win_flag):
-                raise ValueError(f"invalid win_flag: {win_flag}")
-
-        # blow up the mask to the whole size of flags
-        base = mask.reindex_like(self._flags[field]).fillna(False)
-        before, after = False, False
-
-        if flag_before is not None:
-            closed = 'both'
-            if isinstance(flag_before, int):
-                flag_before, closed = flag_before + 1, None
-            r = customRoller(base, window=flag_before, min_periods=1, closed=closed, expand=True, forward=True)
-            before = r.sum().astype(bool)
-
-        if flag_after is not None:
-            closed = 'both'
-            if isinstance(flag_after, int):
-                flag_after, closed = flag_after + 1, None
-            r = customRoller(base, window=flag_after, min_periods=1, closed=closed, expand=True)
-            after = r.sum().astype(bool)
-
-        # does not include base, to avoid overriding flags that just was set
-        # by the test, because flag and win_flag may differ.
-        mask = ~base & (after | before)
-
-        # also do not to overwrite worse flags
-        if not force:
-            mask &= self.getFlags(field) < win_flag
-
-        return mask, win_flag
-
-    def clearFlags(self, field: str, loc: LocT = None, inplace=False, **kwargs) -> BaseFlaggerT:
-        assertScalar("field", field, optional=False)
-        if "force" in kwargs:
-            raise ValueError("Keyword 'force' is not allowed here.")
-        if "flag" in kwargs:
-            raise ValueError("Keyword 'flag' is not allowed here.")
-        return self.setFlags(field=field, loc=loc, flag=self.UNFLAGGED, force=True, inplace=inplace, **kwargs)
-
-    def isFlagged(self, field=None, loc: LocT = None, flag: FlagT = None, comparator: str = ">") -> PandasT:
-        """
-        Returns boolean data that indicate where data has been flagged.
-
-        Parameters
-        ----------
-        field : str, list-like, default None
-            The field(s)/column(s) of the data to be tested if flagged.
-            If None all columns are used.
-
-        loc : mask, slice, pd.Index, etc., default None
-            The location/rows of the data to be tested if flagged.
-            If None all rows are used.
-
-        flag : str, category, list-like, default None
-            The flag(s) that define data as flagged. If None, `flagger.GOOD`
-            is used.
-
-        comparator : {'<', '<=', '==', '!=', '>=', '>'}, default '>'
-            Defines how the comparison is done. The `flags` are always on the
-            left-hand-side, thus, the flag to compare is always on the right-
-            hand-side. Eg. a call with all defaults, return the equivalent
-            of `flagger.getFlags() > flagger.GOOD`
-
-        Returns
-        -------
-        pandas.Series or dios.DictOfSeries : Return Series if field is a scalar,
-        otherwise DictOfSeries.
-        """
-        if isinstance(flag, pd.Series):
-            raise TypeError("flag: pd.Series is not allowed")
-        flags_to_compare = set(toSequence(flag, self.GOOD))
-
-        flags = self.getFlags(field, loc)
-        cp = COMPARATOR_MAP[comparator]
-
-        # notna() to prevent nans to become True,
-        # eg.: `np.nan != 0 -> True`
-        flagged = flags.notna()
-
-        # passing an empty list must result
-        # in a everywhere-False data
-        if len(flags_to_compare) == 0:
-            flagged[:] = False
-        else:
-            for f in flags_to_compare:
-                if not self.isValidFlag(f):
-                    raise ValueError(f"invalid flag: {f}")
-                flagged &= cp(flags, f)
-
-        return flagged
-
-    def copy(self, flags=None) -> BaseFlaggerT:
-        if flags is None:
-            out = deepcopy(self)
-        else:
-            # if flags is given and self.flags is big,
-            # this hack will bring some speed improvement
-            # NOTE: there should be nicer way to do this,
-            #       why not through a constructur method?
-            saved = self._flags
-            self._flags = None
-            out = deepcopy(self)
-            out._flags = flags.copy()
-            self._flags = saved
-        return out
-
-    def isValidFlag(self, flag: FlagT) -> bool:
-        """
-        Check if given flag is known to this flagger.
-
-        Parameters
-        ----------
-        flag: str
-            The flag to be checked.
-
-        Returns
-        -------
-        bool
-        """
-        # This is a very rudimentary fallback for the check
-        # and the child flagger may should implement a better
-        # version of it.
-        return flag == self.BAD or flag == self.GOOD or flag == self.UNFLAGGED or self.isSUSPICIOUS(flag)
-
-    def replaceField(self, field, flags, inplace=False, **kwargs):
-        """ Replace or delete all data for a given field.
-
-        Parameters
-        ----------
-        field : str
-            The field to replace / delete. If the field already exist, the respected data
-            is replaced, otherwise the data is inserted in the respected field column.
-        flags : pandas.Series or None
-            If None, the series denoted by `field` will be deleted. Otherwise
-            a series of flags (dtype flagger.dtype) that will replace the series
-            currently stored under `field`
-        inplace : bool, default False
-            If False, a flagger copy is returned, otherwise the flagger is not copied.
-        **kwargs : dict
-            ignored.
-
-        Returns
-        -------
-        flagger: saqc.flagger.BaseFlagger
-            The flagger object or a copy of it (if inplace=True).
-
-        Raises
-        ------
-        ValueError: (delete) if field does not exist
-        TypeError: (replace / insert) if flags are not pd.Series
-        """
-
-        assertScalar("field", field, optional=False)
-
-        out = self if inplace else deepcopy(self)
-
-        # delete
-        if flags is None:
-            if field not in self._flags:
-                raise ValueError(f"{field}: field does not exist")
-            del out._flags[field]
-
-        # insert / replace
-        else:
-            if not isinstance(flags, pd.Series):
-                raise TypeError(f"`flags` must be pd.Series.")
-            out._flags[field] = flags.astype(self.dtype)
-        return out
-
-    def _check_field(self, field):
-        """ Check if (all) field(s) in self._flags. """
-
-        # wait for outcome of
-        # https://git.ufz.de/rdm-software/saqc/issues/46
-        failed = []
-        if isinstance(field, str):
-            if field not in self._flags:
-                failed += [field]
-        else:
-            try:
-                for f in field:
-                    if f not in self._flags:
-                        failed += [f]
-            # not iterable, probably a slice or
-            # any indexer we dont have to check
-            except TypeError:
-                pass
-
-        if failed:
-            raise ValueError(f"key(s) missing in flags: {failed}")
-        return field
-
-    @property
-    @abstractmethod
-    def UNFLAGGED(self) -> FlagT:
-        """ Return the flag that indicates unflagged data """
-
-    @property
-    @abstractmethod
-    def GOOD(self) -> FlagT:
-        """ Return the flag that indicates the very best data """
-
-    @property
-    @abstractmethod
-    def BAD(self) -> FlagT:
-        """ Return the flag that indicates the worst data """
-
-    @abstractmethod
-    def isSUSPICIOUS(self, flag: FlagT) -> bool:
-        """ Return bool that indicates if the given flag is valid, but neither
-        UNFLAGGED, BAD, nor GOOD."""
diff --git a/saqc/flagger/categoricalflagger.py b/saqc/flagger/categoricalflagger.py
deleted file mode 100644
index 20d2680343bfc659a5e95809732852b26e913a23..0000000000000000000000000000000000000000
--- a/saqc/flagger/categoricalflagger.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-from collections import OrderedDict
-
-import pandas as pd
-
-from saqc.flagger.baseflagger import BaseFlagger
-from saqc.lib.tools import assertDictOfSeries
-
-
-class Flags(pd.CategoricalDtype):
-    def __init__(self, flags):
-        # NOTE: all flag schemes need to support
-        #       at least 3 flag categories:
-        #       * unflagged
-        #       * good
-        #       * bad
-        assert len(flags) > 2
-        super().__init__(flags, ordered=True)
-
-
-class CategoricalFlagger(BaseFlagger):
-    def __init__(self, flags):
-        super().__init__(dtype=Flags(flags))
-        self._categories = self.dtype.categories
-
-    @property
-    def UNFLAGGED(self):
-        return self._categories[0]
-
-    @property
-    def GOOD(self):
-        return self._categories[1]
-
-    @property
-    def BAD(self):
-        return self._categories[-1]
-
-    def isSUSPICIOUS(self, flag):
-        return flag in self._categories[2:-1]
diff --git a/saqc/flagger/continuousflagger.py b/saqc/flagger/continuousflagger.py
deleted file mode 100644
index 37e96508d224f8973eeb61fffbf4068630d439ed..0000000000000000000000000000000000000000
--- a/saqc/flagger/continuousflagger.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import pandas as pd
-import numpy as np
-import intervals
-
-from saqc.flagger.baseflagger import BaseFlagger
-
-
-class ContinuousFlagger(BaseFlagger):
-    def __init__(self, min_=0.0, max_=1.0, unflagged=-1.0):
-        assert unflagged < 0 <= min_ < max_
-        super().__init__(dtype=np.float64)
-        self._interval = intervals.closed(min_, max_)
-        self._unflagged_flag = unflagged
-        self.signature = ("flag", "factor", "modify")
-
-    def setFlags(self, field, loc=None, iloc=None, flag=None, force=False, factor=1, modify=False, **kwargs):
-        # NOTE: incomplete, as the option to
-        #       update flags is temporarily gone
-        return super().setFlags(field=field, loc=loc, iloc=iloc, flag=self._checkFlag(flag), force=force, **kwargs)
-
-    # NOTE:
-    # we should probably override _assureDtype here
-
-    def _isDtype(self, flag):
-        if isinstance(flag, pd.Series):
-            # NOTE: it should be made sure, that all
-            #       values fall into the interval
-            return flag.dtype == self.dtype
-        return flag in self._interval or flag == self.UNFLAGGED
-
-    @property
-    def UNFLAGGED(self):
-        return self._unflagged_flag
-
-    @property
-    def GOOD(self):
-        return self._interval.lower
-
-    @property
-    def BAD(self):
-        return self._interval.upper
-
-    def isSUSPICIOUS(self, flag):
-        return flag in intervals.open(self.GOOD, self.BAD)
diff --git a/saqc/flagger/dmpflagger.py b/saqc/flagger/dmpflagger.py
deleted file mode 100644
index d4ff7cd5b4d54171df97a66d9d5d13b3812172e8..0000000000000000000000000000000000000000
--- a/saqc/flagger/dmpflagger.py
+++ /dev/null
@@ -1,248 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import json
-from copy import deepcopy
-from typing import TypeVar, Optional, List
-
-import pandas as pd
-
-import dios
-
-from saqc.flagger.baseflagger import diosT
-from saqc.flagger.categoricalflagger import CategoricalFlagger
-from saqc.lib.tools import assertScalar, mergeDios, mutateIndex
-
-DmpFlaggerT = TypeVar("DmpFlaggerT")
-
-
-class Keywords:
-    VERSION = "$version"
-
-
-class FlagFields:
-    FLAG = "quality_flag"
-    CAUSE = "quality_cause"
-    COMMENT = "quality_comment"
-
-
-class ColumnLevels:
-    VARIABLES = "variables"
-    FLAGS = "flags"
-
-
-FLAGS = ["NIL", "OK", "DOUBTFUL", "BAD"]
-
-
-class DmpFlagger(CategoricalFlagger):
-    def __init__(self, **kwargs):
-        super().__init__(FLAGS)
-        self.flags_fields = [FlagFields.FLAG, FlagFields.CAUSE, FlagFields.COMMENT]
-        self.extra_defaults = dict(cause=FLAGS[0], comment="")
-        self.signature = ("flag", "comment", "cause", "force")
-
-        self._global_comments = kwargs
-        self._flags = None
-        self._causes = None
-        self._comments = None
-
-    @property
-    def causes(self):
-        return self._causes
-
-    @property
-    def comments(self):
-        return self._comments
-
-    def toFrame(self):
-        out = pd.concat(
-            [self._flags.to_df(), self._causes.to_df(), self._comments.to_df()],
-            axis=1,
-            keys=[FlagFields.FLAG, FlagFields.CAUSE, FlagFields.COMMENT],
-        )
-        out = out.reorder_levels(order=[1, 0], axis=1).sort_index(axis=1, level=0, sort_remaining=False)
-        return out
-
-    def initFlags(self, data: dios.DictOfSeries = None, flags: dios.DictOfSeries = None):
-        """
-        initialize a flagger based on the given 'data' or 'flags'
-        if 'data' is not None: return a flagger with flagger.UNFALGGED values
-        if 'flags' is not None: return a flagger with the given flags
-        """
-
-        # implicit set self._flags, and make deepcopy of self aka. DmpFlagger
-        newflagger = super().initFlags(data=data, flags=flags)
-        newflagger._causes = newflagger._flags.astype(str)
-        newflagger._comments = newflagger._flags.astype(str)
-        newflagger._causes[:], newflagger._comments[:] = "", ""
-        return newflagger
-
-    def slice(self, field=None, loc=None, drop=None, inplace=False):
-        newflagger = super().slice(field=field, loc=loc, drop=drop, inplace=inplace)
-        flags = newflagger._flags
-        newflagger._causes = self._causes.aloc[flags, ...]
-        newflagger._comments = self._comments.aloc[flags, ...]
-        return newflagger
-
-    def merge(self, other: DmpFlaggerT, subset: Optional[List] = None, join: str = "merge", inplace=False):
-        assert isinstance(other, DmpFlagger)
-        flags = mergeDios(self._flags, other._flags, subset=subset, join=join)
-        causes = mergeDios(self._causes, other._causes, subset=subset, join=join)
-        comments = mergeDios(self._comments, other._comments, subset=subset, join=join)
-        if inplace:
-            self._flags = flags
-            self._causes = causes
-            self._comments = comments
-            return self
-        else:
-            return self._construct_new(flags, causes, comments)
-
-    def getFlags(self, field=None, loc=None, full=False):
-        # loc should be a valid 2D-indexer and
-        # then field must be None. Otherwise aloc
-        # will fail and throw the correct Error.
-        if isinstance(loc, diosT) and field is None:
-            indexer = loc
-        else:
-            loc = slice(None) if loc is None else loc
-            field = slice(None) if field is None else self._check_field(field)
-            indexer = (loc, field)
-
-        # this is a bug in `dios.aloc`, which may return a shallow copied dios, if `slice(None)` is passed
-        # as row indexer. Thus is because pandas `.loc` return a shallow copy if a null-slice is passed to a series.
-        flags = self._flags.aloc[indexer].copy()
-
-        if full:
-            causes = self._causes.aloc[indexer].copy()
-            comments = self._comments.aloc[indexer].copy()
-            return flags, dict(cause=causes, comment=comments)
-        else:
-            return flags
-
-    def setFlags(
-        self,
-        field,
-        loc=None,
-        flag=None,
-        cause="OTHER",
-        comment="",
-        force=False,
-        inplace=False,
-        with_extra=False,
-        flag_after=None,
-        flag_before=None,
-        win_flag=None,
-        **kwargs
-    ):
-        assert "iloc" not in kwargs, "deprecated keyword, iloc"
-        assertScalar("field", self._check_field(field), optional=False)
-
-        out = self if inplace else deepcopy(self)
-
-        if with_extra:
-            for val in [comment, cause, flag]:
-                if not isinstance(val, pd.Series):
-                    raise TypeError(f"`flag`, `cause`, `comment` must be pd.Series, if `with_extra=True`.")
-            assert flag.index.equals(comment.index) and flag.index.equals(cause.index)
-
-        else:
-            flag = self.BAD if flag is None else flag
-            comment = json.dumps(
-                {**self._global_comments,
-                 "comment": comment,
-                 "test": kwargs.get("func_name", "")}
-            )
-
-        flags = self.getFlags(field=field, loc=loc)
-        if force:
-            mask = pd.Series(True, index=flags.index, dtype=bool)
-        else:
-            mask = flags < flag
-
-        # set flags of the test
-        out._flags.aloc[mask, field] = flag
-        out._causes.aloc[mask, field] = cause
-        out._comments.aloc[mask, field] = comment
-
-        # calc and set window flags
-        if flag_after is not None or flag_before is not None:
-            win_mask, win_flag = self._getWindowMask(field, mask, flag_after, flag_before, win_flag, flag, force)
-            out._flags.aloc[win_mask, field] = win_flag
-            out._causes.aloc[win_mask, field] = cause
-            out._comments.aloc[win_mask, field] = comment
-
-        return out
-
-    def replaceField(self, field, flags, inplace=False, cause=None, comment=None, **kwargs):
-        """ Replace or delete all data for a given field.
-
-        Parameters
-        ----------
-        field : str
-            The field to replace / delete. If the field already exist, the respected data
-            is replaced, otherwise the data is inserted in the respected field column.
-        flags : pandas.Series or None
-            If None, the series denoted by `field` will be deleted. Otherwise
-            a series of flags (dtype flagger.dtype) that will replace the series
-            currently stored under `field`
-        causes : pandas.Series
-            A series of causes (dtype str).
-        comments : pandas.Series
-            A series of comments (dtype str).
-        inplace : bool, default False
-            If False, a flagger copy is returned, otherwise the flagger is not copied.
-        **kwargs : dict
-            ignored.
-
-        Returns
-        -------
-        flagger: saqc.flagger.BaseFlagger
-            The flagger object or a copy of it (if inplace=True).
-
-        Raises
-        ------
-        ValueError: (delete) if field does not exist
-        TypeError: (replace / insert) if flags, causes, comments are not pd.Series
-        AssertionError: (replace / insert) if flags, causes, comments does not have the same index
-
-        Notes
-        -----
-        If deletion is requested (`flags=None`), `causes` and `comments` are don't-care.
-
-        Flags, causes and comments must have the same index, if flags is not None, also
-        each is casted implicit to the respected dtype.
-        """
-        assertScalar("field", field, optional=False)
-        out = self if inplace else deepcopy(self)
-        causes, comments = cause, comment
-
-        # delete
-        if flags is None:
-            if field not in self._flags:
-                raise ValueError(f"{field}: field does not exist")
-            del out._flags[field]
-            del out._comments[field]
-            del out._causes[field]
-
-        # insert / replace
-        else:
-            for val in [flags, causes, comments]:
-                if not isinstance(val, pd.Series):
-                    raise TypeError(f"`flag`, `cause`, `comment` must be pd.Series.")
-            assert flags.index.equals(comments.index) and flags.index.equals(causes.index)
-            out._flags[field] = flags.astype(self.dtype)
-            out._causes[field] = causes.astype(str)
-            out._comments[field] = comments.astype(str)
-        return out
-
-    def _construct_new(self, flags, causes, comments) -> DmpFlaggerT:
-        new = DmpFlagger()
-        new._global_comments = self._global_comments
-        new._flags = flags
-        new._causes = causes
-        new._comments = comments
-        return new
-
-    @property
-    def SUSPICIOUS(self):
-        return FLAGS[-2]
diff --git a/saqc/flagger/positionalflagger.py b/saqc/flagger/positionalflagger.py
deleted file mode 100644
index 00af0b2b1c9b678ab0a778740d14599022f949d6..0000000000000000000000000000000000000000
--- a/saqc/flagger/positionalflagger.py
+++ /dev/null
@@ -1,144 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import json
-from copy import deepcopy
-
-import pandas as pd
-
-from dios import DictOfSeries
-from saqc.flagger.baseflagger import BaseFlagger, COMPARATOR_MAP
-from saqc.flagger.dmpflagger import DmpFlagger
-from saqc.lib.tools import toSequence, assertScalar
-
-
-FLAGS = ("-1", "0", "1", "2")
-
-
-class PositionalFlagger(BaseFlagger):
-    def __init__(self):
-        super().__init__(dtype=str)
-
-    def setFlags(
-            self,
-            field: str,
-            loc=None,
-            position=-1,
-            flag=None,
-            force: bool = False,
-            inplace: bool = False,
-            with_extra=False,
-            flag_after=None,
-            flag_before=None,
-            win_flag=None,
-            **kwargs
-    ):
-        assertScalar("field", field, optional=False)
-
-        # prepping
-        flag = str(self.BAD if flag is None else flag)
-        self.isValidFlag(flag, fail=True)
-        out = self if inplace else deepcopy(self)
-        out_flags = out._flags[field]
-
-        idx = self.getFlags(field, loc).index
-        mask = pd.Series(True, index=idx, dtype=bool)
-        mask = mask.reindex_like(out_flags).fillna(False)
-
-        # replace unflagged with the magic starter '9'
-        out_flags = out_flags.str.replace(f"^{self.UNFLAGGED}", "9", regex=True)
-
-        # bring all flags to the desired length
-        # length = position # if position > 0 else out_flags.str.len().max()
-        if position == -1:
-            length = position = out_flags.str.len().max()
-        else:
-            length = position = position + 1
-        out_flags = out_flags.str.pad(length + 1, fillchar=self.GOOD, side="right")
-
-        # we rigorously overwrite existing flags
-        new_flags = out_flags.str[position]
-        new_flags.loc[mask] = flag
-
-        # calc window flags
-        if flag_after is not None or flag_before is not None:
-            win_mask, win_flag = self._getWindowMask(field, mask, flag_after, flag_before, win_flag, flag, force)
-            new_flags.loc[win_mask] = win_flag
-
-        out._flags[field] = out_flags.str[:position] + new_flags + out_flags.str[position+1:]
-        return out
-
-    def isFlagged(self, field=None, loc=None, flag=None, comparator=">"):
-
-        field = slice(None) if field is None else field
-        flags = self._getMaxFlag(field, loc).astype(int)
-        flags = flags.loc[:, field]
-
-        # notna() to prevent nans to become True,
-        # eg.: `np.nan != 0 -> True`
-        flagged = flags.notna()
-        flags_to_compare = set(toSequence(flag, self.GOOD))
-        if not flags_to_compare:
-            flagged[:] = False
-            return flagged
-
-        cp = COMPARATOR_MAP[comparator]
-        for f in flags_to_compare:
-            self.isValidFlag(f, fail=True)
-            flagged &= cp(flags, int(f))
-        return flagged
-
-    def isValidFlag(self, flag, fail=False):
-        check = flag in FLAGS
-        if check is False and fail is True:
-            raise ValueError(f"invalid flag {flag}, given values should be in '{FLAGS}'")
-        return check
-
-    def _getMaxFlag(self, field, loc):
-
-        data = {}
-        flags = self.getFlags(field, loc)
-        if isinstance(flags, pd.Series):
-            flags = flags.to_frame()
-        for col_name, col in flags.iteritems():
-            mask = col != self.UNFLAGGED
-            col = col.str.replace("^9", "0", regex=True)
-            col[mask] = col[mask].apply(lambda x: max(list(x)))
-            data[col_name] = col
-        return DictOfSeries(data)
-
-    def toDmpFlagger(self):
-        self = PositionalFlagger().initFlags(flags=self._flags)
-        dmp_flagger = DmpFlagger().initFlags(data=self._flags)
-        flag_map = {
-            self.BAD: dmp_flagger.BAD,
-            self.SUSPICIOUS: dmp_flagger.SUSPICIOUS,
-            self.GOOD: dmp_flagger.GOOD,
-        }
-        for pos_flag, dmp_flag in flag_map.items():
-            loc = self.isFlagged(flag=pos_flag, comparator="==")
-            dmp_flagger._flags.aloc[loc] = dmp_flag
-
-        dmp_flagger._comments.loc[:] = self._flags.to_df().applymap(lambda v: json.dumps({"flag": v}))
-        dmp_flagger._causes.loc[:] = "OTHER"
-        return dmp_flagger
-
-    @property
-    def UNFLAGGED(self):
-        return FLAGS[0]
-
-    @property
-    def GOOD(self):
-        return FLAGS[1]
-
-    @property
-    def SUSPICIOUS(self):
-        return FLAGS[2]
-
-    @property
-    def BAD(self):
-        return FLAGS[3]
-
-    def isSUSPICIOUS(self, flag):
-        return flag == self.SUSPICIOUS
-
diff --git a/saqc/flagger/simpleflagger.py b/saqc/flagger/simpleflagger.py
deleted file mode 100644
index 4cda7b7ef09a1493b9da2e27603419eb9d40f671..0000000000000000000000000000000000000000
--- a/saqc/flagger/simpleflagger.py
+++ /dev/null
@@ -1,12 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-from saqc.flagger.categoricalflagger import CategoricalFlagger
-
-
-FLAGS = [-1, 0, 1]
-
-
-class SimpleFlagger(CategoricalFlagger):
-    def __init__(self):
-        super().__init__(FLAGS)
diff --git a/saqc/funcs/__init__.py b/saqc/funcs/__init__.py
index e5c5153cbfef7a22c55982abbf7ae8cb369ffe74..006f061ab0cd5bbd2110d69906a5f05ef59c7c99 100644
--- a/saqc/funcs/__init__.py
+++ b/saqc/funcs/__init__.py
@@ -2,12 +2,21 @@
 # -*- coding: utf-8 -*-
 
 # imports needed to make the functions register themself
-from saqc.core.register import register
-from saqc.funcs.functions import *
-from saqc.funcs.breaks_detection import *
-from saqc.funcs.constants_detection import *
-from saqc.funcs.soil_moisture_tests import *
-from saqc.funcs.spikes_detection import *
-from saqc.funcs.harm_functions import *
-from saqc.funcs.modelling import *
-from saqc.funcs.proc_functions import *
+from saqc.core.register import flagging
+from saqc.funcs.breaks import *
+from saqc.funcs.changepoints import *
+from saqc.funcs.constants import *
+from saqc.funcs.curvefit import *
+from saqc.funcs.drift import *
+from saqc.funcs.generic import *
+from saqc.funcs.interpolation import *
+from saqc.funcs.outliers import *
+from saqc.funcs.pattern import *
+from saqc.funcs.resampling import *
+from saqc.funcs.residues import *
+from saqc.funcs.rolling import *
+from saqc.funcs.scores import *
+from saqc.funcs.tools import *
+from saqc.funcs.transformation import *
+from saqc.funcs.flagtools import *
+from saqc.funcs.noise import *
diff --git a/saqc/funcs/breaks.py b/saqc/funcs/breaks.py
new file mode 100644
index 0000000000000000000000000000000000000000..e139b886153ddc0e4a52ef7510a4f32f4402b96b
--- /dev/null
+++ b/saqc/funcs/breaks.py
@@ -0,0 +1,196 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+
+"""Detecting breakish changes in timeseries value courses.
+
+This module provides functions to detect and flag break-like changes in the data value course, such as gaps
+(:py:func:`flagMissing`), jumps/drops (:py:func:`flagJumps`) or isolated values (:py:func:`flagIsolated`).
+"""
+
+from typing import Tuple
+
+import numpy as np
+import pandas as pd
+import pandas.tseries.frequencies
+
+from dios import DictOfSeries
+
+from saqc.constants import *
+from saqc.lib.tools import groupConsecutives
+from saqc.lib.types import FreqString
+from saqc.funcs.changepoints import assignChangePointCluster
+from saqc.core.flags import Flags
+from saqc.core.history import History
+from saqc.core.register import _isflagged, flagging
+
+
+# NOTE:
+# masking="none" as we otherwise might interprete
+# the masked values as missing data
+@flagging(masking="none")
+def flagMissing(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    flag: float = BAD,
+    to_mask: float = UNFLAGGED,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    The function flags all values indicating missing data.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, holding the data-to-be-flagged.
+    flags : saqc.Flags
+        Container to store quality flags to data.
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The quality flags of data
+    """
+
+    datacol = data[field]
+    mask = datacol.isna()
+
+    mask = ~_isflagged(flags[field], to_mask) & mask
+
+    flags[mask, field] = flag
+    return data, flags
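
For illustration, the interplay of the `isna` mask and the `_isflagged` exclusion above can be reproduced with plain pandas. The following sketch uses made-up flag constants and a toy series and approximates `_isflagged` by a simple threshold comparison; it is not the saqc API itself.

import numpy as np
import pandas as pd

BAD, UNFLAGGED = 255.0, -np.inf   # stand-ins for saqc.constants

values = pd.Series(
    [1.0, np.nan, 3.0, np.nan],
    index=pd.date_range("2021-01-01", periods=4, freq="D"),
)
old_flags = pd.Series([UNFLAGGED, BAD, UNFLAGGED, UNFLAGGED], index=values.index)

missing = values.isna()                  # candidate positions
already_flagged = old_flags > UNFLAGGED  # rough stand-in for _isflagged(flags[field], to_mask)
to_flag = missing & ~already_flagged     # only flag what is not flagged yet

print(values.index[to_flag])             # only the last NaN would receive the new flag
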
+
+
+@flagging(masking="field")
+def flagIsolated(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    gap_window: FreqString,
+    group_window: FreqString,
+    flag: float = BAD,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    The function flags arbitrarily large groups of values, if they are surrounded by sufficiently
+    large data gaps.
+
+    A gap is a timespan containing either no data at all or only invalid data (usually `nan`).
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, holding the data-to-be-flagged.
+    flags : saqc.Flags
+        A flags object
+    gap_window : str
+        The minimum size of the gap before and after a group of valid values for that group to be
+        considered isolated. See conditions (2) and (3).
+    group_window : str
+        The maximum temporal extension a group isolated by gaps of size `gap_window` may have,
+        to actually be flagged as an isolated group. See condition (1).
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The flags object, holding flags and additional information related to `data`.
+
+    Notes
+    -----
+    A series of values :math:`x_k,x_{k+1},...,x_{k+n}`, with associated timestamps :math:`t_k,t_{k+1},...,t_{k+n}`,
+    is considered to be isolated, if:
+
+    1. :math:`t_{k+n} - t_k <` `group_window`
+    2. None of the :math:`x_j` with :math:`0 < t_k - t_j <` `gap_window`, is valid (preceding gap).
+    3. None of the :math:`x_j` with :math:`0 < t_j - t_{k+n} <` `gap_window`, is valid (succeeding gap).
+
+    See Also
+    --------
+    :py:func:`flagMissing`
+    """
+    gap_window = pd.tseries.frequencies.to_offset(gap_window)
+    group_window = pd.tseries.frequencies.to_offset(group_window)
+
+    mask = data[field].isna()
+
+    bools = pd.Series(data=0, index=mask.index, dtype=bool)
+    for srs in groupConsecutives(mask):
+        if np.all(~srs):
+            # we found a chunk of non-nan values
+            start = srs.index[0]
+            stop = srs.index[-1]
+            if stop - start <= group_window:
+                # the chunk is large enough
+                left = mask[start - gap_window : start].iloc[:-1]
+                if left.all():
+                    # the section before our chunk is nan-only
+                    right = mask[stop : stop + gap_window].iloc[1:]
+                    if right.all():
+                        # the section after our chunk is nan-only
+                        # -> we found a chunk of isolated non-values
+                        bools[start:stop] = True
+
+    flags[bools, field] = flag
+    return data, flags
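
The isolation criterion can be illustrated on a toy, regularly sampled series with plain pandas only. The sketch below emulates `groupConsecutives` with a cumsum-based run labelling; the data and the window sizes are invented for the example.

import numpy as np
import pandas as pd

s = pd.Series(
    [np.nan, np.nan, np.nan, 1.0, 2.0, np.nan, np.nan, np.nan, 5.0, 6.0, 7.0, 8.0],
    index=pd.date_range("2021-01-01", periods=12, freq="h"),
)
gap_window = pd.Timedelta("2h")    # required invalid span before and after a group
group_window = pd.Timedelta("2h")  # maximal temporal extension of an isolated group

nan_mask = s.isna()
isolated = pd.Series(False, index=s.index)

# label consecutive runs of equal nan_mask values (emulates groupConsecutives)
run_id = nan_mask.ne(nan_mask.shift()).cumsum()
for _, run in s.groupby(run_id):
    if run.isna().any():
        continue                                  # only look at chunks of valid values
    start, stop = run.index[0], run.index[-1]
    if stop - start > group_window:
        continue                                  # condition (1): chunk short enough
    left = nan_mask[start - gap_window : start].iloc[:-1]
    right = nan_mask[stop : stop + gap_window].iloc[1:]
    if left.all() and right.all():                # conditions (2) and (3)
        isolated[start:stop] = True

print(isolated[isolated].index)  # only the chunk [1.0, 2.0] is isolated; the last one is too long
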
+
+
+@flagging(masking="field")
+def flagJumps(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    thresh: float,
+    window: FreqString,
+    min_periods: int = 1,
+    flag: float = BAD,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Flag data points where the mean of the values changes significantly (where the value course "jumps").
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The reference variable, the deviation from which determines the flagging.
+    flags : saqc.Flags
+        A flags object, holding flags and additional informations related to `data`.
+    thresh : float
+        The threshold by which the mean of the values has to change to trigger flagging.
+    window : str
+        The temporal extension of the rolling windows from which the mean values to be
+        compared are obtained.
+    min_periods : int, default 1
+        Minimum number of periods that have to be present in a window of size `window`,
+        so that the mean value obtained from that window is regarded valid.
+    flag : float, default BAD
+        flag to set.
+    """
+    return assignChangePointCluster(
+        data,
+        field,
+        flags,
+        stat_func=lambda x, y: np.abs(np.mean(x) - np.mean(y)),
+        thresh_func=lambda x, y: thresh,
+        window=window,
+        min_periods=min_periods,
+        set_flags=True,
+        model_by_resids=False,
+        assign_cluster=False,
+        flag=flag,
+        **kwargs
+    )
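
The jump criterion delegated to `assignChangePointCluster` above boils down to comparing the means of a backward-facing and a forward-facing window. A simplified, pandas-only sketch of that idea follows; it uses fixed sample-count windows, made-up data and no reduction step, so it is not the saqc implementation itself.

import numpy as np
import pandas as pd

rng = np.random.default_rng(0)
s = pd.Series(
    np.r_[rng.normal(0, 0.1, 50), rng.normal(5, 0.1, 50)],  # level shift in the middle
    index=pd.date_range("2021-01-01", periods=100, freq="10min"),
)

n, thresh = 12, 1.0                                   # ~2h of 10min samples
bwd_mean = s.rolling(n, min_periods=3).mean()         # backward-facing window
fwd = pd.api.indexers.FixedForwardWindowIndexer(window_size=n)
fwd_mean = s.rolling(fwd, min_periods=3).mean()       # forward-facing window

jumps = (bwd_mean - fwd_mean).abs() > thresh
print(jumps[jumps].index)                             # a region around the level shift
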
diff --git a/saqc/funcs/breaks_detection.py b/saqc/funcs/breaks_detection.py
deleted file mode 100644
index d6da3d33d97f98bb2377244622c709f6487a5f8a..0000000000000000000000000000000000000000
--- a/saqc/funcs/breaks_detection.py
+++ /dev/null
@@ -1,251 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import numpy as np
-import pandas as pd
-import dios
-
-from scipy.signal import savgol_filter
-
-from saqc.core.register import register
-from saqc.lib.tools import retrieveTrustworthyOriginal, detectDeviants
-
-
-@register(masking='all')
-def breaks_flagRegimeAnomaly(data, field, flagger, cluster_field, norm_spread, linkage_method='single',
-                             metric=lambda x, y: np.abs(np.nanmean(x) - np.nanmean(y)),
-                             norm_frac=0.5, set_cluster=True, set_flags=True, **kwargs):
-    """
-    A function to flag values belonging to an anomalous regime regarding modelling regimes of field.
-
-    "Normality" is determined in terms of a maximum spreading distance, regimes must not exceed in respect
-    to a certain metric and linkage method.
-
-    In addition, only a range of regimes is considered "normal", if it models more then `norm_frac` percentage of
-    the valid samples in "field".
-
-    Note, that you must detect the regime changepoints prior to calling this function.
-
-    Note, that it is possible to perform hypothesis tests for regime equality by passing the metric
-    a function for p-value calculation and selecting linkage method "complete".
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    cluster_field : str
-        The name of the column in data, holding the cluster labels for the samples in field. (has to be indexed
-        equal to field)
-    norm_spread : float
-        A threshold denoting the valuelevel, up to wich clusters a agglomerated.
-    linkage_method : {"single", "complete", "average", "weighted", "centroid", "median", "ward"}, default "single"
-        The linkage method used for hierarchical (agglomerative) clustering of the variables.
-    metric : Callable[[numpy.array, numpy.array], float], default lambda x, y: np.abs(np.nanmean(x) - np.nanmean(y))
-        A metric function for calculating the dissimilarity between 2 regimes. Defaults to just the difference in mean.
-    norm_frac : float
-        Has to be in [0,1]. Determines the minimum percentage of samples,
-        the "normal" group has to comprise to be the normal group actually.
-    set_cluster : bool, default True
-        If True, all data, considered "anormal", gets assigned a negative clusterlabel.
-    set_flags : bool, default True
-        Wheather or not to flag abnormal values (do not flag them, if you want to correct them
-        afterwards, becasue flagged values usually are not visible in further tests.).
-
-    kwargs
-
-    Returns
-    -------
-
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger
-        The flagger object, holding flags and additional informations related to `data`.
-        Flags values may have changed, relatively to the flagger input.
-
-    """
-
-    clusterser = data[cluster_field]
-    cluster = np.unique(clusterser)
-    cluster_dios = dios.DictOfSeries({i: data[field][clusterser == i] for i in cluster})
-    plateaus = detectDeviants(cluster_dios, metric, norm_spread, norm_frac, linkage_method, 'samples')
-
-    if set_flags:
-        for p in plateaus:
-            flagger = flagger.setFlags(field, loc=cluster_dios.iloc[:, p].index, **kwargs)
-
-    if set_cluster:
-        for p in plateaus:
-            if cluster[p] > 0:
-                clusterser[clusterser == cluster[p]] = -cluster[p]
-
-    data[cluster_field] = clusterser
-    return data, flagger
-
-
-@register(masking='field')
-def breaks_flagSpektrumBased(
-    data,
-    field,
-    flagger,
-    thresh_rel=0.1,
-    thresh_abs=0.01,
-    first_der_factor=10,
-    first_der_window="12h",
-    scnd_der_ratio_range=0.05,
-    scnd_der_ratio_thresh=10,
-    smooth=True,
-    smooth_window=None,
-    smooth_poly_deg=2,
-    **kwargs
-):
-
-    """
-    The Function is a generalization of the Spectrum based break flagging mechanism as presented in:
-
-    The function flags breaks (jumps/drops) in input measurement series by evaluating its derivatives.
-    A measurement y_t is flagged a, break, if:
-
-    (1) y_t is changing relatively to its preceeding value by at least (100*`rel_change_rate_min`) percent
-    (2) y_(t-1) is difffering from its preceeding value, by a margin of at least `thresh_abs`
-    (3) Absolute first derivative |(y_t)'| has to be at least `first_der_factor` times as big as the arithmetic middle
-        over all the first derivative values within a 2 times `first_der_window_size` hours window, centered at t.
-    (4) The ratio of the second derivatives at t and t+1 has to be "aproximately" 1.
-        ([1-`scnd_der_ration_margin_1`, 1+`scnd_ratio_margin_1`])
-    (5) The ratio of the second derivatives at t+1 and t+2 has to be larger than `scnd_der_ratio_margin_2`
-
-    NOTE 1: As no reliable statement about the plausibility of the meassurements before and after the jump is possible,
-    only the jump itself is flagged. For flagging constant values following upon a jump, use a flagConstants test.
-
-    NOTE 2: All derivatives in the reference publication are obtained by applying a Savitzky-Golay filter to the data
-    before differentiating.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    thresh_rel : float, default 0.1
-        Float in [0,1]. See (1) of function description above to learn more
-    thresh_abs : float, default 0.01
-        Float > 0. See (2) of function descritpion above to learn more.
-    first_der_factor : float, default 10
-        Float > 0. See (3) of function descritpion above to learn more.
-    first_der_window_range : str, default '12h'
-        Offset string. See (3) of function description to learn more.
-    scnd_der_ratio_margin_1 : float, default 0.05
-        Float in [0,1]. See (4) of function descritpion above to learn more.
-    scnd_der_ratio_margin_2 : float, default 10
-        Float in [0,1]. See (5) of function descritpion above to learn more.
-    smooth : bool, default True
-        Method for obtaining dataseries' derivatives.
-        * False: Just take series step differences (default)
-        * True: Smooth data with a Savitzky Golay Filter before differentiating.
-    smooth_window : {None, str}, default 2
-        Effective only if `smooth` = True
-        Offset string. Size of the filter window, used to calculate the derivatives.
-    smooth_poly_deg : int, default 2
-        Effective only, if `smooth` = True
-        Polynomial order, used for smoothing with savitzk golay filter.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional informations related to `data`.
-        Flags values may have changed, relatively to the flagger input.
-
-    References
-    ----------
-    The Function is a generalization of the Spectrum based break flagging mechanism as presented in:
-
-    [1] Dorigo,W. et al.: Global Automated Quality Control of In Situ Soil Moisture
-        Data from the international Soil Moisture Network. 2013. Vadoze Zone J.
-        doi:10.2136/vzj2012.0097.
-
-    Find a brief mathematical description of the function here:
-
-    [2] https://git.ufz.de/rdm-software/saqc/-/blob/testfuncDocs/docs/funcs
-        /FormalDescriptions.md#breaks_flagspektrumbased
-    """
-
-    # retrieve data series input at its original sampling rate
-    dataseries, data_rate = retrieveTrustworthyOriginal(data, field, flagger)
-
-    if smooth_window is None:
-        smooth_window = 3 * pd.Timedelta(data_rate)
-    else:
-        smooth_window = pd.Timedelta(smooth_window)
-
-    # relative - change - break criteria testing:
-    abs_change = np.abs(dataseries.shift(+1) - dataseries)
-    breaks = (abs_change > thresh_abs) & (abs_change / dataseries > thresh_rel)
-    breaks = breaks[breaks]
-
-    # First derivative criterion
-    smoothing_periods = int(np.ceil((smooth_window.seconds / data_rate.n)))
-    if smoothing_periods % 2 == 0:
-        smoothing_periods += 1
-
-    for brake in breaks.index:
-        # slice out slice-to-be-filtered (with some safety extension of 12 times the data rate)
-        slice_start = brake - pd.Timedelta(first_der_window) - smoothing_periods * pd.Timedelta(data_rate)
-        slice_end = brake + pd.Timedelta(first_der_window) + smoothing_periods * pd.Timedelta(data_rate)
-        data_slice = dataseries[slice_start:slice_end]
-
-        # obtain first derivative:
-        if smooth is True:
-            first_deri_series = pd.Series(
-                data=savgol_filter(data_slice, window_length=smoothing_periods, polyorder=smooth_poly_deg, deriv=1,),
-                index=data_slice.index,
-            )
-        else:
-            first_deri_series = data_slice.diff()
-
-        # condition constructing and testing:
-        test_slice = first_deri_series[brake - pd.Timedelta(first_der_window) : brake + pd.Timedelta(first_der_window)]
-
-        test_sum = abs((test_slice.sum() * first_der_factor) / test_slice.size)
-
-        if abs(first_deri_series[brake]) > test_sum:
-            # second derivative criterion:
-            slice_start = brake - 12 * pd.Timedelta(data_rate)
-            slice_end = brake + 12 * pd.Timedelta(data_rate)
-            data_slice = data_slice[slice_start:slice_end]
-
-            # obtain second derivative:
-            if smooth is True:
-                second_deri_series = pd.Series(
-                    data=savgol_filter(
-                        data_slice, window_length=smoothing_periods, polyorder=smooth_poly_deg, deriv=2,
-                    ),
-                    index=data_slice.index,
-                )
-            else:
-                second_deri_series = data_slice.diff().diff()
-
-            # criterion evaluation:
-            first_second = (
-                (1 - scnd_der_ratio_range)
-                < abs((second_deri_series.shift(+1)[brake] / second_deri_series[brake]))
-                < 1 + scnd_der_ratio_range
-            )
-
-            second_second = abs(second_deri_series[brake] / second_deri_series.shift(-1)[brake]) > scnd_der_ratio_thresh
-
-            if (~first_second) | (~second_second):
-                breaks[brake] = False
-
-        else:
-            breaks[brake] = False
-
-    flagger = flagger.setFlags(field, breaks, **kwargs)
-
-    return data, flagger
diff --git a/saqc/funcs/changepoints.py b/saqc/funcs/changepoints.py
new file mode 100644
index 0000000000000000000000000000000000000000..58af0ad4c1f54e2eb053271c08ed76e933e04a4a
--- /dev/null
+++ b/saqc/funcs/changepoints.py
@@ -0,0 +1,336 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+import pandas as pd
+import numpy as np
+import numba
+
+from typing import Callable, Tuple
+from typing_extensions import Literal
+
+from dios import DictOfSeries
+
+from saqc.constants import *
+from saqc.lib.tools import customRoller
+from saqc.core import flagging, Flags
+from saqc.lib.types import FreqString
+
+
+@flagging(masking="field")
+def flagChangePoints(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    stat_func: Callable[[np.ndarray, np.ndarray], float],
+    thresh_func: Callable[[np.ndarray, np.ndarray], float],
+    window: FreqString | Tuple[FreqString, FreqString],
+    min_periods: int | Tuple[int, int],
+    closed: Literal["right", "left", "both", "neither"] = "both",
+    reduce_window: FreqString = None,
+    reduce_func: Callable[[np.ndarray, np.ndarray], int] = lambda x, _: x.argmax(),
+    flag: float = BAD,
+    **kwargs,
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Flag data points where the parametrization of the process that is assumed to generate
+    the data changes significantly.
+
+    The change points detection is based on a sliding window search.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+
+    field : str
+        The reference variable, the deviation from which determines the flagging.
+
+    flags : saqc.flags
+        A flags object, holding flags and additional information related to `data`.
+
+    stat_func : Callable[numpy.array, numpy.array]
+         A function that assigns a value to every twin window. The backward-facing
+         window content will be passed as the first array, the forward-facing window
+         content as the second.
+
+    thresh_func : Callable[numpy.array, numpy.array]
+        A function that determines the value level; a timestamp whose stat_func value
+        exceeds this level is considered to denote a change point.
+
+    window : str, tuple of string
+        Size of the rolling windows the calculation is performed in. If it is a single
+        frequency offset, it applies for the backward- and the forward-facing window.
+
+        If two offsets (as a tuple) is passed the first defines the size of the
+        backward facing window, the second the size of the forward facing window.
+
+    min_periods : int or tuple of int
+        Minimum number of observations in a window required to perform the changepoint
+        test. If it is a tuple of two int, the first refers to the backward-,
+        the second to the forward-facing window.
+
+    closed : {'right', 'left', 'both', 'neither'}, default 'both'
+        Determines the closure of the sliding windows.
+
+    reduce_window : {None, str}, default None
+        The sliding window search is not an exact changepoint search method; usually a whole
+        "region" of change is detected around a changepoint rather than a single point.
+        If `reduce_window` is given, for every window of size `reduce_window` only the value
+        with index `reduce_func(x, y)` is kept and the others are dropped.
+        If `reduce_window` is None, the reduction window size equals the
+        twin window size the changepoints have been detected with.
+
+    reduce_func : Callable[[numpy.ndarray, numpy.ndarray], int], default lambda x, y: x.argmax()
+        A function that must return an index value upon input of two arrays x and y.
+        First input parameter will hold the result from the stat_func evaluation for every
+        reduction window. Second input parameter holds the result from the thresh_func evaluation.
+        The default reduction function just selects the value that maximizes the stat_func.
+
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    """
+    return assignChangePointCluster(
+        data,
+        field,
+        flags,
+        stat_func=stat_func,
+        thresh_func=thresh_func,
+        window=window,
+        min_periods=min_periods,
+        closed=closed,
+        reduce_window=reduce_window,
+        reduce_func=reduce_func,
+        set_flags=True,
+        model_by_resids=False,
+        assign_cluster=False,
+        flag=flag,
+        **kwargs,
+    )
+
+
+@flagging(masking="field")
+def assignChangePointCluster(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    stat_func: Callable[[np.array, np.array], float],
+    thresh_func: Callable[[np.array, np.array], float],
+    window: str | Tuple[str, str],
+    min_periods: int | Tuple[int, int],
+    closed: Literal["right", "left", "both", "neither"] = "both",
+    reduce_window: str = None,
+    reduce_func: Callable[[np.ndarray, np.ndarray], float] = lambda x, _: x.argmax(),
+    model_by_resids: bool = False,
+    set_flags: bool = False,
+    assign_cluster: bool = True,
+    flag: float = BAD,
+    **kwargs,
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Assigns labels to the data, aiming to reflect continuous regimes of the processes the data is assumed to be
+    generated by.
+    The regime change points detection is based on a sliding window search.
+
+    Note that the cluster labels will be stored in the `field` column of the input data, so that the data that is
+    clustered gets overwritten.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+
+    field : str
+        The reference variable, the deviation from which determines the flagging.
+
+    flags : saqc.flags
+        A flags object, holding flags and additional information related to `data`.
+
+    stat_func : Callable[[numpy.array, numpy.array], float]
+        A function that assigns a value to every twin window. The left (backward-facing) window content
+        will be passed as the first argument, the right (forward-facing) window content as the second.
+
+    thresh_func : Callable[[numpy.array, numpy.array], float]
+        A function that determines the value level; a timestamp whose stat_func value exceeds this level
+        is considered to denote a changepoint.
+
+    window : str, tuple of string
+        Size of the rolling windows the calculation is performed in. If it is a single
+        frequency offset, it applies for the backward- and the forward-facing window.
+
+        If two offsets (as a tuple) is passed the first defines the size of the
+        backward facing window, the second the size of the forward facing window.
+
+    min_periods : int or tuple of int
+        Minimum number of observations in a window required to perform the changepoint
+        test. If it is a tuple of two int, the first refers to the backward-,
+        the second to the forward-facing window.
+
+    closed : {'right', 'left', 'both', 'neither'}, default 'both'
+        Determines the closure of the sliding windows.
+
+    reduce_window : {None, str}, default None
+        The sliding window search is not an exact changepoint search method; usually a whole
+        "region" of change is detected around a changepoint rather than a single point.
+        If `reduce_window` is given, for every window of size `reduce_window` only the value
+        with index `reduce_func(x, y)` is kept and the others are dropped.
+        If `reduce_window` is None, the reduction window size equals the
+        twin window size the changepoints have been detected with.
+
+    reduce_func : Callable[[numpy.array, numpy.array], numpy.array], default lambda x, y: x.argmax()
+        A function that must return an index value upon input of two arrays x and y.
+        First input parameter will hold the result from the stat_func evaluation for every
+        reduction window. Second input parameter holds the result from the thresh_func evaluation.
+        The default reduction function just selects the value that maximizes the stat_func.
+
+    set_flags : bool, default False
+        If True, the points where a change in the data modelling regime is detected get flagged.
+
+    model_by_resids : bool, default False
+        If True, the data is replaced by the stat_funcs results instead of regime labels.
+
+    assign_cluster : bool, default True
+        Set to False if called by a function that only wants to calculate flags.
+
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    """
+    data = data.copy()
+    data_ser = data[field].dropna()
+    if isinstance(window, (list, tuple)):
+        bwd_window, fwd_window = window
+    else:
+        bwd_window = fwd_window = window
+
+    if isinstance(min_periods, (list, tuple)):
+        bwd_min_periods, fwd_min_periods = min_periods
+    else:
+        bwd_min_periods = fwd_min_periods = min_periods
+
+    if reduce_window is None:
+        s = int(
+            pd.Timedelta(bwd_window).total_seconds()
+            + pd.Timedelta(fwd_window).total_seconds()
+        )
+        reduce_window = f"{s}s"
+
+    roller = customRoller(data_ser, window=bwd_window)
+    bwd_start, bwd_end = roller.window_indexer.get_window_bounds(
+        len(data_ser), min_periods=bwd_min_periods, closed=closed
+    )
+
+    roller = customRoller(data_ser, window=fwd_window, forward=True)
+    fwd_start, fwd_end = roller.window_indexer.get_window_bounds(
+        len(data_ser), min_periods=fwd_min_periods, closed=closed
+    )
+
+    min_mask = ~(
+        (fwd_end - fwd_start <= fwd_min_periods)
+        | (bwd_end - bwd_start <= bwd_min_periods)
+    )
+    fwd_end = fwd_end[min_mask]
+    split = bwd_end[min_mask]
+    bwd_start = bwd_start[min_mask]
+    masked_index = data_ser.index[min_mask]
+    check_len = len(fwd_end)
+    data_arr = data_ser.values
+
+    try_to_jit = True
+    jit_sf = numba.jit(stat_func, nopython=True)
+    jit_tf = numba.jit(thresh_func, nopython=True)
+    try:
+        jit_sf(data_arr[bwd_start[0] : bwd_end[0]], data_arr[fwd_start[0] : fwd_end[0]])
+        jit_tf(data_arr[bwd_start[0] : bwd_end[0]], data_arr[fwd_start[0] : fwd_end[0]])
+        stat_func = jit_sf
+        thresh_func = jit_tf
+    except (numba.TypingError, numba.UnsupportedError, IndexError):
+        try_to_jit = False
+
+    args = data_arr, bwd_start, fwd_end, split, stat_func, thresh_func, check_len
+
+    if try_to_jit:
+        stat_arr, thresh_arr = _slidingWindowSearchNumba(*args)
+    else:
+        stat_arr, thresh_arr = _slidingWindowSearch(*args)
+
+    result_arr = stat_arr > thresh_arr
+
+    if model_by_resids:
+        residues = pd.Series(np.nan, index=data[field].index)
+        residues[masked_index] = stat_arr
+        data[field] = residues
+        flags[:, field] = UNFLAGGED
+        return data, flags
+
+    det_index = masked_index[result_arr]
+    detected = pd.Series(True, index=det_index)
+    if reduce_window:
+        l = detected.shape[0]
+        roller = customRoller(detected, window=reduce_window)
+        start, end = roller.window_indexer.get_window_bounds(
+            num_values=l, min_periods=1, closed="both", center=True
+        )
+
+        detected = _reduceCPCluster(
+            stat_arr[result_arr], thresh_arr[result_arr], start, end, reduce_func, l
+        )
+        det_index = det_index[detected]
+
+    if assign_cluster:
+        cluster = pd.Series(False, index=data[field].index)
+        cluster[det_index] = True
+        cluster = cluster.cumsum()
+        # (better to start cluster labels with number one)
+        cluster += 1
+        data[field] = cluster
+        flags[:, field] = UNFLAGGED
+
+    if set_flags:
+        flags[det_index, field] = flag
+    return data, flags
+
+
+@numba.jit(parallel=True, nopython=True)
+def _slidingWindowSearchNumba(
+    data_arr, bwd_start, fwd_end, split, stat_func, thresh_func, num_val
+):
+    stat_arr = np.zeros(num_val)
+    thresh_arr = np.zeros(num_val)
+    for win_i in numba.prange(0, num_val - 1):
+        x = data_arr[bwd_start[win_i] : split[win_i]]
+        y = data_arr[split[win_i] : fwd_end[win_i]]
+        stat_arr[win_i] = stat_func(x, y)
+        thresh_arr[win_i] = thresh_func(x, y)
+    return stat_arr, thresh_arr
+
+
+def _slidingWindowSearch(
+    data_arr, bwd_start, fwd_end, split, stat_func, thresh_func, num_val
+):
+    stat_arr = np.zeros(num_val)
+    thresh_arr = np.zeros(num_val)
+    for win_i in range(0, num_val - 1):
+        x = data_arr[bwd_start[win_i] : split[win_i]]
+        y = data_arr[split[win_i] : fwd_end[win_i]]
+        stat_arr[win_i] = stat_func(x, y)
+        thresh_arr[win_i] = thresh_func(x, y)
+    return stat_arr, thresh_arr
+
+
+def _reduceCPCluster(stat_arr, thresh_arr, start, end, obj_func, num_val):
+    out_arr = np.zeros(shape=num_val, dtype=bool)
+    for win_i in numba.prange(0, num_val):
+        s, e = start[win_i], end[win_i]
+        x = stat_arr[s:e]
+        y = thresh_arr[s:e]
+        pos = s + obj_func(x, y) + 1
+        out_arr[s:e] = False
+        out_arr[pos] = True
+
+    return out_arr
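
To make the mechanics of `_slidingWindowSearch` concrete, here is a numpy-only walk-through on a toy array with fixed-size windows. `stat_func`, `thresh_func`, the window length and the data are arbitrary choices, and the numba/`customRoller` machinery is left out.

import numpy as np

data = np.r_[np.zeros(20), np.ones(20)]  # toy series with one obvious changepoint at index 20
k = 5                                    # samples per backward / forward window

stat_func = lambda x, y: abs(x.mean() - y.mean())
thresh_func = lambda x, y: 0.5

stat = np.zeros(len(data))
thresh = np.zeros(len(data))
for split in range(k, len(data) - k):
    x, y = data[split - k:split], data[split:split + k]  # backward / forward chunk
    stat[split] = stat_func(x, y)
    thresh[split] = thresh_func(x, y)

print(np.flatnonzero(stat > thresh))  # a small region of candidates around index 20
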
diff --git a/saqc/funcs/constants.py b/saqc/funcs/constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..395f0c808dc21e01557d104696e2219d99e9def1
--- /dev/null
+++ b/saqc/funcs/constants.py
@@ -0,0 +1,153 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from operator import mod
+from typing import Tuple
+
+import numpy as np
+import pandas as pd
+import operator
+
+from dios import DictOfSeries
+
+from saqc.constants import *
+from saqc.core import flagging, Flags
+from saqc.lib.ts_operators import varQC
+from saqc.lib.tools import customRoller, getFreqDelta, statPass
+from saqc.lib.types import FreqString
+
+
+@flagging(masking="field")
+def flagConstants(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    thresh: float,
+    window: FreqString,
+    flag: float = BAD,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    This function flags plateaus/series of constant values of length `window` if
+    their maximum total change is smaller than `thresh`.
+
+    Any interval of values y(t),...,y(t+n) is flagged, if:
+
+    (1) n > `window`
+    (2) |y(t + i) - y(t + j)| < `thresh`, for all i,j in [0, 1, ..., n]
+
+    Flagged values are considered (semi-)constant.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        Name of the column, holding the data-to-be-flagged.
+    flags : saqc.Flags
+        Container to store quality flags to data.
+    thresh : float
+        Upper bound for the maximum total change of an interval to be flagged constant.
+    window : str
+        Lower bound for the size of an interval to be flagged constant.
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The flags object, holding flags and additional information related to `data`.
+        Flag values may have changed relative to the flags input.
+    """
+    if not isinstance(window, str):
+        raise TypeError("window must be offset string.")
+    d = data[field]
+
+    # min_periods=2 ensures that at least two non-nan values are present
+    # in each window and also min() == max() == d[i] is not possible.
+    kws = dict(window=window, min_periods=2, expand=False)
+
+    # 1. find starting points of consecutive constant values as a boolean mask
+    # 2. fill the whole window with True's
+    rolling = customRoller(d, **kws)
+    starting_points_mask = rolling.max() - rolling.min() <= thresh
+    rolling = customRoller(starting_points_mask, **kws, forward=True)
+    # mimic any()
+    mask = (rolling.sum() > 0) & d.notna()
+
+    flags[mask, field] = flag
+    return data, flags
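
The two-step rolling logic above can be approximated with ordinary pandas rolling windows. The sketch below replaces `customRoller` with an integer sample window and a `FixedForwardWindowIndexer`, on made-up data; it is only meant to illustrate the start-point/expansion idea, not to reproduce saqc's exact window semantics.

import numpy as np
import pandas as pd

s = pd.Series(
    [1.0, 1.0, 1.001, 1.002, 1.0, 3.0, 4.0, 5.0, 6.0, 7.0],
    index=pd.date_range("2021-01-01", periods=10, freq="h"),
)
thresh, n = 0.01, 3  # n samples instead of an offset window

r = s.rolling(n, min_periods=2)
starting_points = (r.max() - r.min()) <= thresh  # backward-facing max-min check

# expand every hit over the window it came from (stand-in for the forward roller)
fwd = pd.api.indexers.FixedForwardWindowIndexer(window_size=n)
mask = (starting_points.astype(int).rolling(fwd, min_periods=1).sum() > 0) & s.notna()

print(mask[mask].index)  # the first five (quasi-)constant values
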
+
+
+@flagging(masking="field")
+def flagByVariance(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    window: FreqString = "12h",
+    thresh: float = 0.0005,
+    maxna: int = None,
+    maxna_group: int = None,
+    flag: float = BAD,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Function flags plateaus/series of constant values. Any interval of values y(t),..y(t+n) is flagged, if:
+
+    (1) n > `window`
+    (2) variance(y(t),...,y(t+n)) < `thresh`
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, holding the data-to-be-flagged.
+    flags : saqc.Flags
+        Container to store quality flags to data.
+    window : str
+        Only intervals of minimum size `window` have the chance to get flagged as constant intervals.
+    thresh : float
+        The upper bound the variance of an interval must not exceed for the interval to be flagged a plateau.
+    maxna : int, default None
+        Maximum number of NaNs tolerated in an interval. If more NaNs are present, the
+        interval is not flagged as plateau.
+    maxna_group : int, default None
+        Same as `maxna` but for consecutive NaNs.
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The flags object, holding flags and additional information related to `data`.
+        Flag values may have changed relative to the flags input.
+    """
+    dataseries = data[field]
+    delta = getFreqDelta(dataseries.index)
+    if not delta:
+        raise IndexError("Timeseries irregularly sampled!")
+
+    if maxna is None:
+        maxna = np.inf
+
+    if maxna_group is None:
+        maxna_group = np.inf
+
+    min_periods = int(np.ceil(pd.Timedelta(window) / pd.Timedelta(delta)))
+    window = pd.Timedelta(window)
+    to_set = statPass(
+        dataseries,
+        lambda x: varQC(x, maxna, maxna_group),
+        window,
+        thresh,
+        operator.lt,
+        min_periods=min_periods,
+    )
+
+    flags[to_set, field] = flag
+    return data, flags
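
Stripped of the NaN bookkeeping done by `statPass`/`varQC`, the variance criterion is a rolling variance threshold. A minimal pandas sketch with invented data and parameters:

import numpy as np
import pandas as pd

s = pd.Series(
    np.r_[np.full(7, 2.0), [5.0, 9.0, 1.0, 7.0, 3.0]],  # constant stretch, then noise
    index=pd.date_range("2021-01-01", periods=12, freq="h"),
)
window, thresh, min_periods = "3h", 0.0005, 3

plateau = s.rolling(window, min_periods=min_periods).var() < thresh
print(plateau[plateau].index)  # timestamps inside the constant stretch at the start
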
diff --git a/saqc/funcs/constants_detection.py b/saqc/funcs/constants_detection.py
deleted file mode 100644
index d402056901b32bd78997f89fb2671305f7dfe2ae..0000000000000000000000000000000000000000
--- a/saqc/funcs/constants_detection.py
+++ /dev/null
@@ -1,131 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import numpy as np
-import pandas as pd
-
-from saqc.core.register import register
-from saqc.lib.ts_operators import varQC
-from saqc.lib.tools import retrieveTrustworthyOriginal, customRoller
-
-
-@register(masking='field')
-def constants_flagBasic(data, field, flagger, thresh, window, **kwargs):
-    """
-    This functions flags plateaus/series of constant values of length `window` if
-    their maximum total change is smaller than thresh.
-
-    Function flags plateaus/series of constant values. Any interval of values y(t),..y(t+n) is flagged, if:
-
-    (1) n > `window`
-    (2) |(y(t + i) - (t + j)| < `thresh`, for all i,j in [0, 1, ..., n]
-
-    Flag values are (semi-)constant.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    thresh : float
-        Upper bound for the maximum total change of an interval to be flagged constant.
-    window : str
-        Lower bound for the size of an interval to be flagged constant.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional informations related to `data`.
-        Flags values may have changed, relatively to the flagger input.
-    """
-
-    d = data[field]
-    if not isinstance(window, str):
-        raise TypeError('window must be offset string.')
-
-    # min_periods=2 ensures that at least two non-nan values are present
-    # in each window and also min() == max() == d[i] is not possible.
-    kws = dict(window=window, min_periods=2, expand=False)
-
-    # find all consecutive constant values in one direction...
-    r = customRoller(d, **kws)
-    m1 = r.max() - r.min() <= thresh
-    # and in the other
-    r = customRoller(d, forward=True, **kws)
-    m2 = r.max() - r.min() <= thresh
-    mask = m1 | m2
-
-    flagger = flagger.setFlags(field, mask, **kwargs)
-    return data, flagger
-
-
-@register(masking='field')
-def constants_flagVarianceBased(
-    data, field, flagger, window="12h", thresh=0.0005, max_missing=None, max_consec_missing=None, **kwargs
-):
-
-    """
-    Function flags plateaus/series of constant values. Any interval of values y(t),..y(t+n) is flagged, if:
-
-    (1) n > `window`
-    (2) variance(y(t),...,y(t+n) < `thresh`
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    window : str
-        Only intervals of minimum size "window" have the chance to get flagged as constant intervals
-    thresh : float
-        The upper bound, the variance of an interval must not exceed, if the interval wants to be flagged a plateau.
-    max_missing : {None, int}, default None
-        Maximum number of nan values tolerated in an interval, for retrieving a valid
-        variance from it. (Intervals with a number of nans exceeding "max_missing"
-        have no chance to get flagged a plateau!)
-    max_consec_missing : {None, int}, default None
-        Maximum number of consecutive nan values allowed in an interval to retrieve a
-        valid  variance from it. (Intervals with a number of nans exceeding
-        "max_consec_missing" have no chance to get flagged a plateau!)
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional informations related to `data`.
-        Flags values may have changed, relatively to the flagger input.
-    """
-
-    dataseries, data_rate = retrieveTrustworthyOriginal(data, field, flagger)
-
-    if max_missing is None:
-        max_missing = np.inf
-    if max_consec_missing is None:
-        max_consec_missing = np.inf
-
-    min_periods = int(np.ceil(pd.Timedelta(window) / pd.Timedelta(data_rate)))
-
-    plateaus = dataseries.rolling(window=window, min_periods=min_periods).apply(
-        lambda x: True if varQC(x, max_missing, max_consec_missing) <= thresh else np.nan, raw=False,
-    )
-
-    # are there any candidates for beeing flagged plateau-ish
-    if plateaus.sum() == 0:
-        return data, flagger
-
-    plateaus.fillna(method="bfill", limit=min_periods - 1, inplace=True)
-
-    # result:
-    plateaus = (plateaus[plateaus == 1.0]).index
-
-    flagger = flagger.setFlags(field, plateaus, **kwargs)
-    return data, flagger
diff --git a/saqc/funcs/curvefit.py b/saqc/funcs/curvefit.py
new file mode 100644
index 0000000000000000000000000000000000000000..c29d2bbb4afda3ace36e582c53df0a333baa13b1
--- /dev/null
+++ b/saqc/funcs/curvefit.py
@@ -0,0 +1,206 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from math import floor
+from typing import Tuple, Union
+from typing_extensions import Literal
+import numpy as np
+import pandas as pd
+from dios import DictOfSeries
+
+from saqc.constants import *
+from saqc.core import flagging, Flags
+from saqc.lib.tools import getFreqDelta
+from saqc.lib.ts_operators import (
+    polyRollerIrregular,
+    polyRollerNumba,
+    polyRoller,
+    polyRollerNoMissingNumba,
+    polyRollerNoMissing,
+)
+
+
+@flagging(masking="field")
+def fitPolynomial(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    window: Union[int, str],
+    order: int,
+    set_flags: bool = True,
+    min_periods: int = 0,
+    return_residues: bool = False,
+    flag: float = BAD,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Function fits a polynomial model to the data and returns the fitted data curve.
+
+    The fit is calculated by fitting a polynomial of degree `order` to a data slice
+    of size `window`, that has x at its center.
+
+    Note that the resulting fit is stored in the `field` column of the input data, so that the original data the
+    polynomial is fitted to gets overwritten.
+
+    Note that, if data[field] is not aligned to an equidistant frequency grid, the window size passed
+    has to be an offset string.
+
+    Note that calculating the residues tends to be quite costly, because a function fit is performed for every
+    sample. To improve performance, consider the following possibilities:
+
+    In case your data is sampled at an equidistant frequency grid:
+
+    (1) If you know your data to have no significant number of missing values, or if you do not want to
+        calculate residues for windows containing missing values anyway, performance can be increased by setting
+        min_periods=window.
+
+    Note that in the current implementation, the initial and final window/2 values do not get fitted.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, holding the data-to-be-modelled.
+    flags : saqc.Flags
+        Container to store quality flags to data.
+    window : {str, int}
+        The size of the window you want to use for fitting. If an integer is passed, the size
+        refers to the number of periods for every fitting window. If an offset string is passed,
+        the size refers to the total temporal extension. The window will be centered around the value-to-be-fitted.
+        For regularly sampled timeseries the period number will be cast down to an odd number if
+        even.
+    order : int
+        The degree of the polynomial used for fitting
+    set_flags : bool, default True
+        Whether or not to assign new flags to the calculated residuals. If True, a residual gets assigned the worst
+        flag present in the interval the data for its calculation was obtained from.
+    min_periods : {int, None}, default 0
+        The minimum number of periods, that has to be available in every values fitting surrounding for the polynomial
+        fit to be performed. If there are not enough values, np.nan gets assigned. Default (0) results in fitting
+        regardless of the number of values present (results in overfitting for too sparse intervals). To automatically
+        set the minimum number of periods to the number of values in an offset defined window size, pass np.nan.
+    return_residues : bool, default False
+        Internal parameter. Makes the method return the residues instead of the fit.
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        Data values may have changed relatively to the data input.
+    flags : saqc.Flags
+        The quality flags of data
+    """
+    # TODO: some (rather large) parts are functionally similar to saqc.funcs.rolling.roll
+    if data[field].empty:
+        flags[:, field] = UNTOUCHED
+        return data, flags
+    data = data.copy()
+    to_fit = data[field]
+    regular = getFreqDelta(to_fit.index)
+    if not regular:
+        if isinstance(window, int):
+            raise NotImplementedError(
+                "Integer based window size is not supported for not-harmonized"
+                "sample series."
+            )
+        # get interval centers
+        centers = (
+            to_fit.rolling(
+                pd.Timedelta(window) / 2, closed="both", min_periods=min_periods
+            ).count()
+        ).floor()
+        centers = centers.drop(centers[centers.isna()].index)
+        centers = centers.astype(int)
+        residues = to_fit.rolling(
+            pd.Timedelta(window), closed="both", min_periods=min_periods
+        ).apply(polyRollerIrregular, args=(centers, order))
+
+        def center_func(x, y=centers):
+            pos = x.index[int(len(x) - y[x.index[-1]])]
+            return y.index.get_loc(pos)
+
+        centers_iloc = (
+            centers.rolling(window, closed="both")
+            .apply(center_func, raw=False)
+            .astype(int)
+        )
+        temp = residues.copy()
+        for k in centers_iloc.iteritems():
+            residues.iloc[k[1]] = temp[k[0]]
+        residues[residues.index[0] : residues.index[centers_iloc[0]]] = np.nan
+        residues[residues.index[centers_iloc[-1]] : residues.index[-1]] = np.nan
+    else:
+        if isinstance(window, str):
+            window = pd.Timedelta(window) // regular
+        if window % 2 == 0:
+            window = int(window - 1)
+        if min_periods is None:
+            min_periods = window
+        if to_fit.shape[0] < 200000:
+            numba = False
+        else:
+            numba = True
+
+        val_range = np.arange(0, window)
+        center_index = window // 2
+        if min_periods < window:
+            if min_periods > 0:
+                to_fit = to_fit.rolling(
+                    window, min_periods=min_periods, center=True
+                ).apply(lambda x, y: x[y], raw=True, args=(center_index,))
+
+            # we need a missing value marker that is not nan,
+            # because nan values dont get passed by pandas rolling method
+            miss_marker = to_fit.min()
+            miss_marker = np.floor(miss_marker - 1)
+            na_mask = to_fit.isna()
+            to_fit[na_mask] = miss_marker
+            if numba:
+                residues = to_fit.rolling(window).apply(
+                    polyRollerNumba,
+                    args=(miss_marker, val_range, center_index, order),
+                    raw=True,
+                    engine="numba",
+                    engine_kwargs={"nopython": True},
+                )
+                # due to a pandas bug, rolling with center=True doesn't work when using the numba engine,
+                # so the result is shifted back to the window centers afterwards
+                residues = residues.shift(-int(center_index))
+            else:
+                residues = to_fit.rolling(window, center=True).apply(
+                    polyRoller,
+                    args=(miss_marker, val_range, center_index, order),
+                    raw=True,
+                )
+            residues[na_mask] = np.nan
+        else:
+            # we only fit fully populated intervals:
+            if numba:
+                residues = to_fit.rolling(window).apply(
+                    polyRollerNoMissingNumba,
+                    args=(val_range, center_index, order),
+                    engine="numba",
+                    engine_kwargs={"nopython": True},
+                    raw=True,
+                )
+                # due to a pandas bug, rolling with center=True doesn't work when using the numba engine,
+                # so the result is shifted back to the window centers afterwards
+                residues = residues.shift(-int(center_index))
+            else:
+                residues = to_fit.rolling(window, center=True).apply(
+                    polyRollerNoMissing,
+                    args=(val_range, center_index, order),
+                    raw=True,
+                )
+
+    if return_residues:
+        residues = to_fit - residues
+
+    data[field] = residues
+    if set_flags:
+        # TODO: we do not get any flags here, because of masking=field
+        worst = flags[field].rolling(window, center=True, min_periods=min_periods).max()
+        flags[field] = worst
+
+    return data, flags
diff --git a/saqc/funcs/drift.py b/saqc/funcs/drift.py
new file mode 100644
index 0000000000000000000000000000000000000000..54e658eec8a00703a23ba18f88c84ad5e6abe81d
--- /dev/null
+++ b/saqc/funcs/drift.py
@@ -0,0 +1,882 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from typing import Optional, Tuple, Sequence, Callable
+from typing_extensions import Literal
+
+import numpy as np
+import pandas as pd
+import functools
+
+from scipy import stats
+from scipy.optimize import curve_fit
+from scipy.spatial.distance import pdist
+
+from dios import DictOfSeries
+from saqc.constants import *
+from saqc.core.register import flagging
+from saqc.core import Flags
+from saqc.funcs.changepoints import assignChangePointCluster
+from saqc.funcs.tools import dropField, copyField
+from saqc.lib.tools import detectDeviants
+from saqc.lib.types import FreqString, CurveFitter
+
+
+LinkageString = Literal[
+    "single", "complete", "average", "weighted", "centroid", "median", "ward"
+]
+
+
+@flagging(masking="all")
+def flagDriftFromNorm(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    fields: Sequence[str],
+    freq: FreqString,
+    spread: float,
+    frac: float = 0.5,
+    metric: Callable[[np.ndarray, np.ndarray], float] = lambda x, y: pdist(
+        np.array([x, y]), metric="cityblock"
+    )
+    / len(x),
+    method: LinkageString = "single",
+    flag: float = BAD,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    The function flags value courses that significantly deviate from a group of normal value courses.
+
+    "Normality" is determined in terms of a maximum spreading distance, that members of a normal group must not exceed.
+    In addition, only a group is considered "normal" if it contains more then `frac` percent of the
+    variables in "fields".
+
+    See the Notes section for a more detailed presentation of the algorithm
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        A dummy parameter.
+    flags : saqc.Flags
+        A flags object, holding flags and additional informations related to `data`.
+    fields : list of str
+        List of fieldnames in data, determining which variables are to be included into the flagging process.
+    freq : str
+        An offset string, determining the size of the separate data chunks the algorithm is applied to, piecewise.
+    spread : float
+        A parameter limiting the maximum "spread" of the timeseries allowed in the "normal" group. See the Notes
+        section for more details.
+    frac : float, default 0.5
+        Has to be in [0,1]. Determines the minimum fraction of variables the "normal" group has to comprise to
+        actually be the normal group. The higher that value, the more stable the algorithm will be with respect
+        to false positives. The behavior for values below 0.5 is undefined.
+    metric : Callable[[numpy.array, numpy.array], float]
+        A distance function. It should be a function of 2 1-dimensional arrays and return a float scalar value.
+        This value is interpreted as the distance of the two input arrays. The default is the averaged Manhattan metric.
+        See the Notes section to get an idea of why this could be a good choice.
+    method : {"single", "complete", "average", "weighted", "centroid", "median", "ward"}, default "single"
+        The linkage method used for hierarchical (agglomerative) clustering of the timeseries.
+        See the Notes section for more details.
+        The keyword gets passed on to scipy.hierarchy.linkage. See its documentation to learn more about the different
+        keywords (References [1]).
+        See wikipedia for an introduction to hierarchical clustering (References [2]).
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed relatively to the input flags.
+
+    Notes
+    -----
+    The following steps are performed for every data "segment" of length `freq` in order to find the
+    "abnormal" data:
+
+    1. Calculate the distances :math:`d(x_i, x_j)` for all variables :math:`x_i` named in `fields`
+       (with :math:`d` denoting the distance function passed to the parameter `metric`).
+    2. Calculate a dendrogram with a hierarchical linkage algorithm, specified by the parameter `method`.
+    3. Flatten the dendrogram at the level where the agglomeration costs exceed the value given by the
+       parameter `spread`.
+    4. Check if there is a cluster containing more than `frac` percent of the variables in `fields`.
+
+        1. if yes: flag all the variables that are not in that cluster (inside the segment)
+        2. if no: flag nothing
+
+    The main parameter giving control over the algorithm's behavior is the `spread` parameter, which determines
+    the maximum spread of a normal group by limiting the costs a cluster agglomeration must not exceed in any
+    linkage step.
+    For singleton clusters, these costs just equal half the distance the timeseries in the clusters have to
+    each other. So, no two timeseries can be clustered together that are more than
+    2 * `spread` distant from each other.
+    When timeseries get clustered together, the new cluster's distance to all the other timeseries/clusters is
+    calculated according to the linkage method specified by `method`. By default, it is the minimum distance
+    the members of the clusters have to each other.
+    With that in mind, it is advisable to choose a distance function that can be well interpreted in the unit
+    dimension of the measurement and whose interpretation is invariant with respect to the length of the
+    timeseries. That is why the "averaged Manhattan metric" is the default, since it corresponds to the
+    averaged value distance two timeseries have (as opposed to, for example, the Euclidean distance).
+
+    References
+    ----------
+    Documentation of the underlying hierarchical clustering algorithm:
+        [1] https://docs.scipy.org/doc/scipy/reference/generated/scipy.cluster.hierarchy.linkage.html
+    Introduction to Hierarchical clustering:
+        [2] https://en.wikipedia.org/wiki/Hierarchical_clustering
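+
+    Examples
+    --------
+    A minimal sketch of the default metric on two short synthetic value courses: the averaged Manhattan
+    distance reads directly in the unit of the measurement and does not grow with the series length.
+
+    >>> import numpy as np
+    >>> from scipy.spatial.distance import pdist
+    >>> metric = lambda x, y: pdist(np.array([x, y]), metric="cityblock") / len(x)
+    >>> float(metric(np.array([10.0, 12.0, 14.0]), np.array([11.0, 12.0, 16.0])))
+    1.0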
+    """
+    data_to_flag = data[fields].to_df()
+    data_to_flag.dropna(inplace=True)
+
+    segments = data_to_flag.groupby(pd.Grouper(freq=freq))
+    for segment in segments:
+
+        if segment[1].shape[0] <= 1:
+            continue
+
+        drifters = detectDeviants(segment[1], metric, spread, frac, method, "variables")
+
+        for var in drifters:
+            flags[segment[1].index, fields[var]] = flag
+
+    return data, flags
+
+
+@flagging(masking="all")
+def flagDriftFromReference(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    fields: Sequence[str],
+    freq: FreqString,
+    thresh: float,
+    metric: Callable[[np.ndarray, np.ndarray], float] = lambda x, y: pdist(
+        np.array([x, y]), metric="cityblock"
+    )
+    / len(x),
+    flag: float = BAD,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    The function flags value courses that deviate from a reference course by a margin exceeding a certain threshold.
+
+    The deviation is measured by the distance function passed to parameter metric.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The reference variable; the deviation from it determines the flagging.
+    flags : saqc.Flags
+        A flags object, holding flags and additional informations related to `data`.
+    fields : list of str
+        List of fieldnames in data, determining which variables are to be included into the flagging process.
+    freq : str
+        An offset string, determining the size of the separate data chunks the algorithm is applied to, piecewise.
+    thresh : float
+        The maximum threshold by which normal variables may deviate from the reference variable.
+    metric : Callable[[numpy.array, numpy.array], float]
+        A distance function. It should be a function of 2 1-dimensional arrays and return a float scalar value.
+        This value is interpreted as the distance of the two input arrays. The default is the averaged Manhattan metric.
+        See the Notes section to get an idea of why this could be a good choice.
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed relatively to the input flags.
+
+    Notes
+    -----
+    It is advisable to choose a distance function that can be well interpreted in the unit
+    dimension of the measurement and whose interpretation is invariant with respect to the length of the timeseries.
+    That is why the "averaged Manhattan metric" is the default, since it corresponds to the
+    averaged value distance two timeseries have (as opposed to, for example, the Euclidean distance).
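+
+    Examples
+    --------
+    A minimal sketch of the flagging criterion with hypothetical values and the default metric: a variable gets
+    flagged inside a data chunk whenever its distance to the reference course exceeds `thresh`.
+
+    >>> import numpy as np
+    >>> from scipy.spatial.distance import pdist
+    >>> metric = lambda x, y: pdist(np.array([x, y]), metric="cityblock") / len(x)
+    >>> reference = np.array([10.0, 12.0, 14.0])
+    >>> candidate = np.array([13.0, 15.0, 17.0])
+    >>> bool(metric(candidate, reference) > 2.0)  # with thresh=2.0 the candidate would be flagged
+    True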
+    """
+    data_to_flag = data[fields].to_df()
+    data_to_flag.dropna(inplace=True)
+
+    fields = list(fields)
+    if field not in fields:
+        fields.append(field)
+
+    var_num = len(fields)
+
+    segments = data_to_flag.groupby(pd.Grouper(freq=freq))
+    for segment in segments:
+
+        if segment[1].shape[0] <= 1:
+            continue
+
+        for i in range(var_num):
+            dist = metric(segment[1].iloc[:, i].values, segment[1].loc[:, field].values)
+
+            if dist > thresh:
+                flags[segment[1].index, fields[i]] = flag
+
+    return data, flags
+
+
+@flagging(masking="all")
+def flagDriftFromScaledNorm(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    set_1: Sequence[str],
+    set_2: Sequence[str],
+    freq: FreqString,
+    spread: float,
+    frac: float = 0.5,
+    metric: Callable[[np.ndarray, np.ndarray], float] = lambda x, y: pdist(
+        np.array([x, y]), metric="cityblock"
+    )
+    / len(x),
+    method: LinkageString = "single",
+    flag: float = BAD,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    The function linearly rescales one set of variables to another set of variables
+    with a different scale and then flags value courses that significantly deviate
+    from a group of normal value courses.
+
+    The two sets of variables are assumed to be linearly scalable one to another, so the
+    scaling transformation is obtained via linear regression: a linear regression is
+    performed on each pair of variables, giving a slope and an intercept. The
+    transformation is then calculated as the median of all the calculated slopes and
+    intercepts.
+
+    Once the transformation is performed, the function flags those values
+    that deviate from a group of normal values. "Normality" is determined in terms of
+    a maximum spreading distance that members of a normal group must not exceed. In
+    addition, a group is only considered "normal" if it contains more than `frac`
+    percent of the variables in `set_1` and `set_2`.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+
+    field : str
+        A dummy parameter.
+
+    flags : saqc.Flags
+        A flags object, holding flags and additional informations related to `data`.
+
+    set_1 : list of str
+        The first set of fieldnames in data to be included into the flagging process.
+
+    set_2 : list of str
+        The second set of fieldnames in data to be included into the flagging process.
+
+    freq : str
+        An offset string, determining the size of the separate data chunks the
+        algorithm is applied to, piecewise.
+
+    spread : float
+        A parameter limiting the maximum "spread" of the timeseries allowed in the
+        "normal" group. See the Notes section of `flagDriftFromNorm` for more details.
+
+    frac : float, default 0.5
+        Has to be in [0,1]. Determines the minimum fraction of variables the
+        "normal" group has to comprise to actually be the normal group. The
+        higher that value, the more stable the algorithm will be with respect to
+        false positives. The behavior for values below 0.5 is undefined.
+
+    metric : Callable[[numpy.array, numpy.array], float]
+        A distance function. It should be a function of 2 1-dimensional arrays and
+        return a float scalar value. This value is interpreted as the distance of the
+        two input arrays. The default is the averaged Manhattan metric. See the Notes
+        section of `flagDriftFromNorm` to get an idea of why this could be a good choice.
+
+    method : str, default "single"
+        The linkage method used for hierarchical (agglomerative) clustering of the
+        timeseries. See the Notes section for more details. The keyword gets passed
+        on to scipy.hierarchy.linkage. See its documentation to learn more about the
+        different keywords (References [1]). See wikipedia for an introduction to
+        hierarchical clustering (References [2]).
+
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed relatively to the input flags.
+
+    References
+    ----------
+    Documentation of the underlying hierarchical clustering algorithm:
+        [1] https://docs.scipy.org/doc/scipy/reference/generated/scipy.cluster.hierarchy.linkage.html
+    Introduction to Hierarchical clustering:
+        [2] https://en.wikipedia.org/wiki/Hierarchical_clustering
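+
+    Examples
+    --------
+    A minimal sketch of the rescaling step with hypothetical, perfectly linearly related values: the slope
+    and intercept obtained from `scipy.stats.linregress` map the first set onto the scale of the second
+    (the function uses the median over all pairs).
+
+    >>> import numpy as np
+    >>> from scipy import stats
+    >>> set_1_values = np.array([1.0, 2.0, 3.0, 4.0])
+    >>> set_2_values = 2.0 * set_1_values + 5.0
+    >>> slope, intercept, *_ = stats.linregress(set_1_values, set_2_values)
+    >>> bool(np.allclose(intercept + slope * set_1_values, set_2_values))
+    True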
+    """
+    fields = list(set_1) + list(set_2)
+    data_to_flag = data[fields].to_df()
+    data_to_flag.dropna(inplace=True)
+
+    convert_slope = []
+    convert_intercept = []
+
+    for field1 in set_1:
+        for field2 in set_2:
+            slope, intercept, *_ = stats.linregress(
+                data_to_flag[field1], data_to_flag[field2]
+            )
+            convert_slope.append(slope)
+            convert_intercept.append(intercept)
+
+    factor_slope = np.median(convert_slope)
+    factor_intercept = np.median(convert_intercept)
+
+    dat = DictOfSeries()
+    for field1 in set_1:
+        dat[field1] = factor_intercept + factor_slope * data_to_flag[field1]
+    for field2 in set_2:
+        dat[field2] = data_to_flag[field2]
+
+    dat_to_flag = dat[fields].to_df()
+
+    segments = dat_to_flag.groupby(pd.Grouper(freq=freq))
+    for segment in segments:
+
+        if segment[1].shape[0] <= 1:
+            continue
+
+        drifters = detectDeviants(segment[1], metric, spread, frac, method, "variables")
+
+        for var in drifters:
+            flags[segment[1].index, fields[var]] = flag
+
+    return data, flags
+
+
+@flagging(masking="all")
+def correctDrift(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    maintenance_field: str,
+    model: Callable[..., float],
+    cal_range: int = 5,
+    flag: float = BAD,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    The function corrects drifting behavior.
+
+    See the Notes section for an overview over the correction algorithm.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the data column, you want to correct.
+    flags : saqc.Flags
+        Container to store quality flags to data.
+    maintenance_field : str
+        The fieldname of the data column holding the support-points information.
+        The maintenance data is expected to have the following form:
+        the series' timestamps represent the beginnings of maintenance
+        events, whereas the values represent the endings of the maintenance intervals.
+    model : Callable
+        A model function describing the drift behavior that is to be corrected.
+        The model function must always contain the keyword parameters 'origin' and 'target'.
+        The starting parameter must always be the parameter by which the data is passed to the model.
+        After the data parameter, there can occur an arbitrary number of model calibration arguments in
+        the signature.
+        See the Notes section for an extensive description.
+    cal_range : int, default 5
+        The number of values the mean is computed over, for obtaining the value level directly after and
+        directly before a maintenance event. These values are needed for shift calibration (see above description).
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        Data values may have changed relatively to the data input.
+    flags : saqc.Flags
+        The quality flags of data
+
+    Notes
+    -----
+    It is assumed that, between support points, there is a drift effect shifting the measurements in a way that
+    can be described by a model function M(t, *p, origin, target) (with 0 <= t <= 1, p being a parameter set, and
+    origin, target being floats).
+
+    Note that it is possible for the model to have no free parameters p at all (mainly linear drift).
+
+    The drift model, directly after the last support point (t=0),
+    should evaluate to the origin calibration level (origin), and directly before the next support point
+    (t=1), it should evaluate to the target calibration level (target):
+
+    M(0, *p, origin, target) = origin
+    M(1, *p, origin, target) = target
+
+    The model is then fitted to every data chunk in between support points by optimizing the parameters p, thus
+    obtaining the optimal parameter set P.
+
+    The new values at t are computed via:
+
+    new_vals(t) = old_vals(t) + M(t, *P, origin, new_target) - M(t, *P, origin, target)
+
+    where new_target represents the value level immediately after the next support point.
+
+    Examples
+    --------
+    Some examples of meaningful drift models.
+
+    Linear drift model (no free parameters):
+
+    >>> M = lambda t, origin, target: origin + t * target
+
+    Exponential drift model (exponential rise!):
+
+    >>> expFunc = lambda t, a, b, c: a + b * (np.exp(c * t) - 1)
+    >>> M = lambda t, c, origin, target: expFunc(t, origin, (target - origin) / (np.exp(abs(c)) - 1), abs(c))
+
+    Exponential and linear drift models are part of the ts_operators library, under the names
+    expDriftModel and linearDriftModel.
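+
+    A quick sketch (with hypothetical numbers) checking the boundary conditions M(0)=origin and M(1)=target
+    for the exponential model above:
+
+    >>> import numpy as np
+    >>> expFunc = lambda t, a, b, c: a + b * (np.exp(c * t) - 1)
+    >>> M = lambda t, c, origin, target: expFunc(t, origin, (target - origin) / (np.exp(abs(c)) - 1), abs(c))
+    >>> float(M(0.0, 2.0, origin=10.0, target=20.0))
+    10.0
+    >>> round(float(M(1.0, 2.0, origin=10.0, target=20.0)), 6)
+    20.0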
+
+    """
+    # 1: extract fit intervals:
+    if data[maintenance_field].empty:
+        flags[:, field] = UNTOUCHED
+        return data, flags
+
+    data = data.copy()
+    to_correct = data[field]
+    maint_data = data[maintenance_field]
+
+    to_correct_clean = to_correct.dropna()
+    d = {"drift_group": np.nan, to_correct.name: to_correct_clean.values}
+    drift_frame = pd.DataFrame(d, index=to_correct_clean.index)
+
+    # group the drift frame
+    for k in range(0, maint_data.shape[0] - 1):
+        # assign group numbers for the timespans in between one maintenance ending and the beginning of the next
+        # maintenance; the maintenance periods themselves remain assigned np.nan
+        drift_frame.loc[
+            maint_data.values[k] : pd.Timestamp(maint_data.index[k + 1]), "drift_group"
+        ] = k
+
+    # define target values for correction
+    drift_grouper = drift_frame.groupby("drift_group")
+    shift_targets = drift_grouper.aggregate(lambda x: x[:cal_range].mean()).shift(-1)
+
+    for k, group in drift_grouper:
+        data_series = group[to_correct.name]
+        data_fit, data_shiftTarget = _driftFit(
+            data_series, shift_targets.loc[k, :][0], cal_range, model
+        )
+        data_fit = pd.Series(data_fit, index=group.index)
+        data_shiftTarget = pd.Series(data_shiftTarget, index=group.index)
+        data_shiftVektor = data_shiftTarget - data_fit
+        shiftedData = data_series + data_shiftVektor
+        to_correct[shiftedData.index] = shiftedData
+
+    data[field] = to_correct
+
+    return data, flags
+
+
+@flagging(masking="all")
+def correctRegimeAnomaly(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    cluster_field: str,
+    model: CurveFitter,
+    tolerance: Optional[FreqString] = None,
+    epoch: bool = False,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Function fits the passed model to the different regimes in data[field] and tries to correct
+    those values that have been assigned a negative label by data[cluster_field].
+
+    Currently, the only correction mode supported is the "parameter propagation."
+
+    This means, any regime :math:`z`, labeled negatively and being modeled by the parameters p, gets corrected via:
+
+    :math:`z_{correct} = z + (m(p^*) - m(p))`,
+
+    where :math:`p^*` denotes the parameter set belonging to the fit of the nearest not-negatively labeled cluster.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the data column, you want to correct.
+    flags : saqc.Flags
+        Container to store flags of the data.
+    cluster_field : str
+        A string denoting the field in data, holding the cluster label for the data you want to correct.
+    model : Callable
+        The model function to be fitted to the regimes.
+        It must be a function of the form :math:`f(x, *p)`, where :math:`x` is the ``numpy.array`` holding the
+        independent variables and :math:`p` are the model parameters that are to be obtained by fitting.
+        Depending on the `epoch` parameter, the independent variable x will either be the timestamps
+        of every regime transformed to seconds from epoch, or just the seconds elapsed since the regime start.
+    tolerance : {None, str}, default None
+        If an offset string is passed, a data chunk of length `tolerance` right at the
+        start and right at the end of every regime is ignored when fitting the model. This is to account for the
+        unreliability of data near the changepoints of regimes.
+    epoch : bool, default False
+        If True, use "seconds from epoch" as x input to the model func, instead of "seconds from regime start".
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        Data values may have changed relatively to the data input.
+    flags : saqc.Flags
+        The quality flags of data
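+
+    Examples
+    --------
+    A minimal sketch of the "parameter propagation" with a hypothetical constant model: a negatively labeled
+    regime gets shifted by the difference of the fitted constants of the nearest normal regime and itself.
+
+    >>> import numpy as np
+    >>> model = lambda x, p1: np.full(x.shape[0], p1)   # constant model with one parameter
+    >>> z = np.array([2.0, 2.5, 1.5])                   # anomalous regime values
+    >>> p, p_star = 2.0, 5.0                            # own fit vs. fit of the nearest normal regime
+    >>> x = np.arange(z.shape[0], dtype=float)
+    >>> z + (model(x, p_star) - model(x, p))
+    array([5. , 5.5, 4.5])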
+    """
+    cluster_ser = data[cluster_field]
+    unique_successive = pd.unique(cluster_ser.values)
+    data_ser = data[field]
+    regimes = data_ser.groupby(cluster_ser)
+    para_dict = {}
+    x_dict = {}
+    x_mask = {}
+    if tolerance is not None:
+        # get seconds
+        tolerance = pd.Timedelta(tolerance).total_seconds()
+    for label, regime in regimes:
+        if epoch is False:
+            # get seconds data:
+            xdata = (regime.index - regime.index[0]).to_numpy(dtype=float) * 10 ** (-9)
+        else:
+            # get seconds from epoch data
+            xdata = regime.index.to_numpy(dtype=float) * 10 ** (-9)
+        ydata = regime.values
+        valid_mask = ~np.isnan(ydata)
+        if tolerance is not None:
+            valid_mask &= xdata > xdata[0] + tolerance
+            valid_mask &= xdata < xdata[-1] - tolerance
+        try:
+            p, *_ = curve_fit(model, xdata[valid_mask], ydata[valid_mask])
+        except (RuntimeError, ValueError):
+            p = np.array([np.nan])
+        para_dict[label] = p
+        x_dict[label] = xdata
+        x_mask[label] = valid_mask
+
+    first_normal = unique_successive > 0
+    first_valid = np.array(
+        [
+            ~pd.isna(para_dict[unique_successive[i]]).any()
+            for i in range(0, unique_successive.shape[0])
+        ]
+    )
+    first_valid = np.where(first_normal & first_valid)[0][0]
+    last_valid = 1
+
+    for k in range(0, unique_successive.shape[0]):
+        if (unique_successive[k] < 0) & (
+            not pd.isna(para_dict[unique_successive[k]]).any()
+        ):
+            ydata = data_ser[regimes.groups[unique_successive[k]]].values
+            xdata = x_dict[unique_successive[k]]
+            ypara = para_dict[unique_successive[k]]
+            if k > 0:
+                target_para = para_dict[unique_successive[k - last_valid]]
+            else:
+                # first regime has no "last valid" to its left, so we use first valid to the right:
+                target_para = para_dict[unique_successive[k + first_valid]]
+            y_shifted = ydata + (model(xdata, *target_para) - model(xdata, *ypara))
+            data_ser[regimes.groups[unique_successive[k]]] = y_shifted
+            if k > 0:
+                last_valid += 1
+        elif pd.isna(para_dict[unique_successive[k]]).any() & (k > 0):
+            last_valid += 1
+        else:
+            last_valid = 1
+
+    data[field] = data_ser
+    return data, flags
+
+
+@flagging(masking="all")
+def correctOffset(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    max_jump: float,
+    spread: float,
+    window: FreqString,
+    min_periods: int,
+    tolerance: Optional[FreqString] = None,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the data column, you want to correct.
+    flags : saqc.Flags
+        Container to store flags of the data.
+    max_jump : float
+        When searching for changepoints in mean, this is the threshold a mean difference in the
+        sliding window search must exceed to trigger changepoint detection.
+    spread : float
+        Threshold denoting the maximum amount regimes are allowed to absolutely differ in their means
+        to form the "normal group" of values.
+    window : str
+        Size of the adjacent windows that are used to search for the mean changepoints.
+    min_periods : int
+        Minimum number of periods a search window has to contain, for the result of the changepoint
+        detection to be considered valid.
+    tolerance : {None, str}, default None
+        If an offset string is passed, a data chunk of length `tolerance` right after the
+        start and right before the end of any regime is ignored when calculating a regime's mean for the data
+        correction. This is to account for the unreliability of data near the changepoints of regimes.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        Data values may have changed relatively to the data input.
+    flags : saqc.Flags
+        The quality flags of data
+    """
+    data, flags = copyField(data, field, flags, field + "_CPcluster")
+    data, flags = assignChangePointCluster(
+        data,
+        field + "_CPcluster",
+        flags,
+        lambda x, y: np.abs(np.mean(x) - np.mean(y)),
+        lambda x, y: max_jump,
+        window=window,
+        min_periods=min_periods,
+    )
+    data, flags = assignRegimeAnomaly(data, field, flags, field + "_CPcluster", spread)
+    data, flags = correctRegimeAnomaly(
+        data,
+        field,
+        flags,
+        field + "_CPcluster",
+        lambda x, p1: np.array([p1] * x.shape[0]),
+        tolerance=tolerance,
+    )
+    data, flags = dropField(data, field + "_CPcluster", flags)
+
+    return data, flags
+
+
+def _driftFit(x, shift_target, cal_mean, driftModel):
+    x_index = x.index - x.index[0]
+    x_data = x_index.total_seconds().values
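+    # normalize the time axis to [0, 1], so the drift model convention M(0) = origin, M(1) = target applies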
+    x_data = x_data / x_data[-1]
+    y_data = x.values
+    origin_mean = np.mean(y_data[:cal_mean])
+    target_mean = np.mean(y_data[-cal_mean:])
+
+    dataFitFunc = functools.partial(driftModel, origin=origin_mean, target=target_mean)
+    # if drift model has free parameters:
+    try:
+        # try fitting free parameters
+        fit_paras, *_ = curve_fit(dataFitFunc, x_data, y_data)
+        data_fit = dataFitFunc(x_data, *fit_paras)
+        data_shift = driftModel(
+            x_data, *fit_paras, origin=origin_mean, target=shift_target
+        )
+    except RuntimeError:
+        # if fit fails -> make no correction
+        data_fit = np.array([0] * len(x_data))
+        data_shift = np.array([0] * len(x_data))
+    # when there are no free parameters in the model:
+    except ValueError:
+        data_fit = dataFitFunc(x_data)
+        data_shift = driftModel(x_data, origin=origin_mean, target=shift_target)
+
+    return data_fit, data_shift
+
+
+@flagging(masking="all")
+def flagRegimeAnomaly(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    cluster_field: str,
+    spread: float,
+    method: LinkageString = "single",
+    metric: Callable[[np.ndarray, np.ndarray], float] = lambda x, y: np.abs(
+        np.nanmean(x) - np.nanmean(y)
+    ),
+    frac: float = 0.5,
+    flag: float = BAD,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    A function to flag values belonging to an anomalous regime regarding the modelling regimes of `field`.
+
+    "Normality" is determined in terms of a maximum spreading distance that regimes must not exceed with respect
+    to a certain metric and linkage method.
+
+    In addition, a range of regimes is only considered "normal" if it models more than `frac` percent of
+    the valid samples in `field`.
+
+    Note that you must detect the regime changepoints prior to calling this function.
+
+    Note that it is possible to perform hypothesis tests for regime equality by passing the metric
+    a function for p-value calculation and selecting linkage method "complete".
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, holding the data-to-be-flagged.
+    flags : saqc.Flags
+        Container to store flags of the data.
+    cluster_field : str
+        The name of the column in data, holding the cluster labels for the samples in field (has to share the
+        index of field).
+    spread : float
+        A threshold denoting the value level up to which clusters are agglomerated.
+    method : {"single", "complete", "average", "weighted", "centroid", "median", "ward"}, default "single"
+        The linkage method used for hierarchical (agglomerative) clustering of the variables.
+    metric : Callable[[numpy.array, numpy.array], float], default lambda x, y: np.abs(np.nanmean(x) - np.nanmean(y))
+        A metric function for calculating the dissimilarity between 2 regimes. Defaults to just the difference in mean.
+    frac : float
+        Has to be in [0,1]. Determines the minimum fraction of samples
+        the "normal" group has to comprise to actually be the normal group.
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The flags object, holding flags and additional informations related to `data`.
+        Flags values may have changed, relatively to the flags input.
+    """
+    return assignRegimeAnomaly(
+        data,
+        field,
+        flags,
+        cluster_field,
+        spread,
+        method=method,
+        metric=metric,
+        frac=frac,
+        set_cluster=False,
+        set_flags=True,
+        flag=flag,
+        **kwargs
+    )
+
+
+@flagging(masking="all")
+def assignRegimeAnomaly(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    cluster_field: str,
+    spread: float,
+    method: LinkageString = "single",
+    metric: Callable[[np.array, np.array], float] = lambda x, y: np.abs(
+        np.nanmean(x) - np.nanmean(y)
+    ),
+    frac: float = 0.5,
+    set_cluster: bool = True,  # todo: hide by a wrapper
+    set_flags: bool = False,  # todo: hide by a wrapper
+    flag: float = BAD,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    A function to detect values belonging to an anomalous regime regarding the modelling
+    regimes of `field`.
+
+    The function changes the value of the anomalous regimes' cluster labels to be negative.
+    "Normality" is determined in terms of a maximum spreading distance that regimes must
+    not exceed with respect to a certain metric and linkage method. In addition,
+    a range of regimes is only considered "normal" if it models more than `frac`
+    percent of the valid samples in `field`. Note that you must detect the regime
+    changepoints prior to calling this function. (They are expected to be stored in the
+    parameter `cluster_field`.)
+
+    Note that it is possible to perform hypothesis tests for regime equality by
+    passing the metric a function for p-value calculation and selecting linkage
+    method "complete".
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, holding the data-to-be-flagged.
+    flags : saqc.Flags
+        Container to store flags of the data.
+    cluster_field : str
+        The name of the column in data, holding the cluster labels for the samples in field (has to share the
+        index of field).
+    spread : float
+        A threshold denoting the value level up to which clusters are agglomerated.
+    method : str, default "single"
+        The linkage method used for hierarchical (agglomerative) clustering of the
+        variables.
+    metric : Callable[[numpy.array, numpy.array], float], default lambda x, y: np.abs(np.nanmean(x) - np.nanmean(y))
+        A metric function for calculating the dissimilarity between 2 regimes. Defaults
+        to just the difference in mean.
+    frac : float
+        Has to be in [0,1]. Determines the minimum fraction of samples
+        the "normal" group has to comprise to actually be the normal group.
+    set_cluster : bool, default True
+        If True, all data considered "abnormal" gets assigned a negative cluster label.
+        This option is present for further use (correction) of the anomaly information.
+    set_flags : bool, default False
+        Whether or not to flag abnormal values (do not flag them, if you want to
+        correct them afterwards, because flagged values usually are not visible in
+        further tests).
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The flags object, holding flags and additional informations related to `data`.
+        Flags values may have changed, relatively to the flags input.
+    """
+    series = data[cluster_field]
+    cluster = np.unique(series)
+    cluster_dios = DictOfSeries({i: data[field][series == i] for i in cluster})
+    plateaus = detectDeviants(cluster_dios, metric, spread, frac, method, "samples")
+
+    if set_flags:
+        for p in plateaus:
+            flags[cluster_dios.iloc[:, p].index, field] = flag
+
+    if set_cluster:
+        for p in plateaus:
+            if cluster[p] > 0:
+                series[series == cluster[p]] = -cluster[p]
+
+    data[cluster_field] = series
+    return data, flags
diff --git a/saqc/funcs/flagtools.py b/saqc/funcs/flagtools.py
new file mode 100644
index 0000000000000000000000000000000000000000..30924b46bda36e2e9ef24d7e4b8599caecf892d9
--- /dev/null
+++ b/saqc/funcs/flagtools.py
@@ -0,0 +1,302 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from typing import Any, Tuple, Union
+from typing_extensions import Literal
+import pandas as pd
+from dios import DictOfSeries
+
+from saqc.constants import BAD, UNFLAGGED
+from saqc.core import flagging, processing, Flags
+import warnings
+
+
+@flagging(masking="field")
+def forceFlags(
+    data: DictOfSeries, field: str, flags: Flags, flag: float = BAD, **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Set whole column to a flag value.
+
+    Parameters
+    ----------
+    data : DictOfSeries
+        data container
+    field : str
+        columns name that holds the data
+    flags : saqc.Flags
+        flags object
+    flag : float, default BAD
+        flag to set
+    kwargs : dict
+        unused
+
+    Returns
+    -------
+    data : DictOfSeries
+    flags : saqc.Flags
+
+    See Also
+    --------
+    clearFlags : set whole column to UNFLAGGED
+    flagUnflagged : set flag value at all unflagged positions
+    """
+    flags[:, field] = flag
+    return data, flags
+
+
+# masking='none' is sufficient because call is redirected
+@flagging(masking="none")
+def clearFlags(
+    data: DictOfSeries, field: str, flags: Flags, **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Set whole column to UNFLAGGED.
+
+    Parameters
+    ----------
+    data : DictOfSeries
+        data container
+    field : str
+        columns name that holds the data
+    flags : saqc.Flags
+        flags object
+    kwargs : dict
+        unused
+
+    Returns
+    -------
+    data : DictOfSeries
+    flags : saqc.Flags
+
+    Notes
+    -----
+    This function ignores the ``to_mask`` keyword, because the data is not relevant
+    for processing.
+    A warning is triggered if the ``flag`` keyword is given, because the flags are
+    always set to `UNFLAGGED`.
+
+
+    See Also
+    --------
+    forceFlags : set whole column to a flag value
+    flagUnflagged : set flag value at all unflagged positions
+    """
+    # NOTE: do we really need this?
+    if "flag" in kwargs:
+        kwargs = {**kwargs}  # copy
+        flag = kwargs.pop("flag")
+        warnings.warn(f"`flag={flag}` is ignored here.")
+
+    return forceFlags(data, field, flags, flag=UNFLAGGED, **kwargs)
+
+
+@flagging(masking="none")
+def flagUnflagged(
+    data: DictOfSeries, field: str, flags: Flags, flag: float = BAD, **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Function sets a flag at all unflagged positions.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, holding the data-to-be-flagged.
+    flags : saqc.Flags
+        A flags object, holding flags and additional informations related to `data`.
+    flag : float, default BAD
+        flag value to set
+    kwargs : Dict
+        unused
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The quality flags of data
+
+    Notes
+    -----
+    This function ignores the ``to_mask`` keyword, because the data is not relevant
+    for processing.
+
+    See Also
+    --------
+    clearFlags : set whole column to UNFLAGGED
+    forceFlags : set whole column to a flag value
+    """
+    unflagged = flags[field].isna() | (flags[field] == UNFLAGGED)
+    flags[unflagged, field] = flag
+    return data, flags
+
+
+@flagging(masking="field")
+def flagManual(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    mdata: Union[pd.Series, pd.DataFrame, DictOfSeries],
+    mflag: Any = 1,
+    method: Literal["plain", "ontime", "left-open", "right-open"] = "plain",
+    flag: float = BAD,
+    **kwargs,
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Flag data by given, "manually generated" data.
+
+    The data is flagged at locations where `mdata` is equal to a provided flag (`mflag`).
+    The format of mdata can be an indexed object, like pd.Series, pd.Dataframe or dios.DictOfSeries,
+    but also can be a plain list- or array-like.
+    How indexed mdata is aligned to data is specified via the `method` parameter.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, holding the data-to-be-flagged.
+    flags : saqc.Flags
+        A flags object, holding flags and additional informations related to `data`.
+    mdata : {pd.Series, pd.Dataframe, DictOfSeries}
+        The "manually generated" data
+    mflag : scalar
+        The flag that indicates data points in `mdata`, of which the projection in data should be flagged.
+
+    method : {'plain', 'ontime', 'left-open', 'right-open'}, default plain
+        Defines how mdata is projected on data. Except for the 'plain' method, the methods assume mdata to have an
+        index.
+
+        * 'plain': mdata must have the same length as data and is projected one-to-one on data.
+        * 'ontime': works only with indexed mdata. mdata entries are matched with data entries that have the same index.
+        * 'right-open': mdata defines intervals, the values are to be projected on.
+          The intervals are defined by any two consecutive timestamps t_1 and t_2 in mdata.
+          The value at t_1 gets projected onto all data timestamps t with t_1 <= t < t_2.
+        * 'left-open': like 'right-open', but the projected interval now covers all t with t_1 < t <= t_2.
+
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : original data
+    flags : modified flags
+
+    Examples
+    --------
+    An example for mdata:
+
+    >>> mdata = pd.Series([1,0,1], index=pd.to_datetime(['2000-02', '2000-03', '2001-05']))
+    >>> mdata
+    2000-02-01    1
+    2000-03-01    0
+    2001-05-01    1
+    dtype: int64
+
+    On *daily* data, with the 'ontime' method, only the provided timestamps are used.
+    Bear in mind that only exact timestamps apply; any offset will result in ignoring
+    the timestamp.
+
+    >>> _, fl = flagManual(data, field, flags, mdata, mflag=1, method='ontime')
+    >>> fl[field] > UNFLAGGED
+    2000-01-31    False
+    2000-02-01    True
+    2000-02-02    False
+    2000-02-03    False
+    ..            ..
+    2000-02-29    False
+    2000-03-01    True
+    2000-03-02    False
+    Freq: D, dtype: bool
+
+    With the 'right-open' method, the mdata is forward filled:
+
+    >>> _, fl = flagManual(data, field, flags, mdata, mflag=1, method='right-open')
+    >>> fl[field] > UNFLAGGED
+    2000-01-31    False
+    2000-02-01    True
+    2000-02-02    True
+    ..            ..
+    2000-02-29    True
+    2000-03-01    False
+    2000-03-02    False
+    Freq: D, dtype: bool
+
+    With the 'left-open' method, backward filling is used:
+
+    >>> _, fl = flagManual(data, field, flags, mdata, mflag=1, method='left-open')
+    >>> fl[field] > UNFLAGGED
+    2000-01-31    False
+    2000-02-01    False
+    2000-02-02    True
+    ..            ..
+    2000-02-29    True
+    2000-03-01    True
+    2000-03-02    False
+    Freq: D, dtype: bool
+    """
+    dat = data[field]
+
+    if isinstance(mdata, (pd.DataFrame, DictOfSeries)):
+        mdata = mdata[field]
+
+    hasindex = isinstance(mdata, (pd.Series, pd.DataFrame, DictOfSeries))
+    if not hasindex and method != "plain":
+        raise ValueError("mdata has no index")
+
+    if method == "plain":
+
+        if hasindex:
+            mdata = mdata.to_numpy()
+
+        if len(mdata) != len(dat):
+            raise ValueError("mdata must have same length then data")
+
+        mdata = pd.Series(mdata, index=dat.index)
+
+    # reindex will do the job later
+    elif method == "ontime":
+        pass
+
+    elif method in ["left-open", "right-open"]:
+        mdata = mdata.reindex(dat.index.union(mdata.index))
+
+        # -->)[t0-->)[t1--> (ffill)
+        if method == "right-open":
+            mdata = mdata.ffill()
+
+        # <--t0](<--t1](<-- (bfill)
+        if method == "left-open":
+            mdata = mdata.bfill()
+
+    else:
+        raise ValueError(method)
+
+    mask = mdata == mflag
+    mask = mask.reindex(dat.index).fillna(False)
+
+    flags[mask, field] = flag
+    return data, flags
+
+
+@flagging()
+def flagDummy(
+    data: DictOfSeries, field: str, flags: Flags, **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Function does nothing but returning data and flags.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, holding the data-to-be-flagged.
+    flags : saqc.Flags
+        A flags object, holding flags and additional informations related to `data`.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The quality flags of data
+    """
+    return data, flags
diff --git a/saqc/funcs/functions.py b/saqc/funcs/functions.py
deleted file mode 100644
index c93731500a732099cfaf71a4b37d6e753ab50644..0000000000000000000000000000000000000000
--- a/saqc/funcs/functions.py
+++ /dev/null
@@ -1,1005 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-from functools import partial
-from inspect import signature
-
-import dios
-import numpy as np
-import pandas as pd
-import scipy
-import itertools
-import collections
-import numba
-from mlxtend.evaluate import permutation_test
-from scipy import stats
-from scipy.cluster.hierarchy import linkage, fcluster
-
-
-from saqc.lib.tools import groupConsecutives, detectDeviants
-from saqc.lib.tools import groupConsecutives, seasonalMask
-from saqc.funcs.proc_functions import proc_fork, proc_drop, proc_projectFlags
-from saqc.funcs.modelling import modelling_mask
-
-from saqc.core.register import register
-from saqc.core.visitor import ENVIRONMENT
-from dios import DictOfSeries
-from typing import Any
-
-
-def _dslIsFlagged(flagger, var, flag=None, comparator=">="):
-    """
-    helper function for `flagGeneric`
-    """
-    return flagger.isFlagged(var.name, flag=flag, comparator=comparator)
-
-
-def _execGeneric(flagger, data, func, field, nodata):
-    # TODO:
-    # - check series.index compatibility
-    # - field is only needed to translate 'this' parameters
-    #    -> maybe we could do the translation on the tree instead
-
-    sig = signature(func)
-    args = []
-    for k, v in sig.parameters.items():
-        k = field if k == "this" else k
-        if k not in data:
-            raise NameError(f"variable '{k}' not found")
-        args.append(data[k])
-
-    globs = {
-        "isflagged": partial(_dslIsFlagged, flagger),
-        "ismissing": lambda var: ((var == nodata) | pd.isnull(var)),
-        "mask": lambda cond: data[cond.name].mask(cond),
-        "this": field,
-        "NODATA": nodata,
-        "GOOD": flagger.GOOD,
-        "BAD": flagger.BAD,
-        "UNFLAGGED": flagger.UNFLAGGED,
-        **ENVIRONMENT,
-    }
-    func.__globals__.update(globs)
-    return func(*args)
-
-
-@register(masking='all')
-def procGeneric(data, field, flagger, func, nodata=np.nan, **kwargs):
-    """
-    generate/process data with generically defined functions.
-
-    The functions can depend on on any of the fields present in data.
-
-    Formally, what the function does, is the following:
-
-    1.  Let F be a Callable, depending on fields f_1, f_2,...f_K, (F = F(f_1, f_2,...f_K))
-        Than, for every timestamp t_i that occurs in at least one of the timeseries data[f_j] (outer join),
-        The value v_i is computed via:
-        v_i = data([f_1][t_i], data[f_2][t_i], ..., data[f_K][t_i]), if all data[f_j][t_i] do exist
-        v_i = `nodata`, if at least one of the data[f_j][t_i] is missing.
-    2.  The result is stored to data[field] (gets generated if not present)
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, where you want the result from the generic expressions processing to be written to.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    func : Callable
-        The data processing function with parameter names that will be
-        interpreted as data column entries.
-        See the examples section to learn more.
-    nodata : any, default np.nan
-        The value that indicates missing/invalid data
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        The shape of the data may have changed relatively to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        The flags shape may have changed relatively to the input flagger.
-
-    Examples
-    --------
-    Some examples on what to pass to the func parameter:
-    To compute the sum of the variables "temperature" and "uncertainty", you would pass the function:
-
-    >>> lambda temperature, uncertainty: temperature + uncertainty
-
-    You also can pass numpy and pandas functions:
-
-    >>> lambda temperature, uncertainty: np.round(temperature) * np.sqrt(uncertainty)
-
-    """
-    data[field] = _execGeneric(flagger, data, func, field, nodata).squeeze()
-    # NOTE:
-    # The flags to `field` will be (re-)set to UNFLAGGED
-    # That leads to the following problem:
-    # flagger.merge merges the given flaggers, if
-    # `field` did already exist before the call to `procGeneric`
-    # but with a differing index, we end up with:
-    # len(data[field]) != len(flagger.getFlags(field))
-    # see: test/funcs/test_generic_functions.py::test_procGenericMultiple
-
-    # TODO:
-    # We need a way to simply overwrite a given flagger column, maybe
-    # an optional keyword to merge ?
-    flagger = flagger.merge(flagger.initFlags(data[field]))
-    return data, flagger
-
-
-@register(masking='all')
-def flagGeneric(data, field, flagger, func, nodata=np.nan, **kwargs):
-    """
-    a function to flag a data column by evaluation of a generic expression.
-
-    The expression can depend on any of the fields present in data.
-
-    Formally, what the function does, is the following:
-
-    Let X be an expression, depending on fields f_1, f_2,...f_K, (X = X(f_1, f_2,...f_K))
-    Than for every timestamp t_i in data[field]:
-    data[field][t_i] is flagged if X(data[f_1][t_i], data[f_2][t_i], ..., data[f_K][t_i]) is True.
-
-    Note, that all value series included in the expression to evaluate must be labeled identically to field.
-
-    Note, that the expression is passed in the form of a Callable and that this callables variable names are
-    interpreted as actual names in the data header. See the examples section to get an idea.
-
-    Note, that all the numpy functions are available within the generic expressions.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, where you want the result from the generic expressions evaluation to be projected
-        to.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    func : Callable
-        The expression that is to be evaluated is passed in form of a callable, with parameter names that will be
-        interpreted as data column entries. The Callable must return an boolen array like.
-        See the examples section to learn more.
-    nodata : any, default np.nan
-        The value that indicates missing/invalid data
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
-
-    Examples
-    --------
-    Some examples on what to pass to the func parameter:
-    To flag the variable `field`, if the sum of the variables
-    "temperature" and "uncertainty" is below zero, you would pass the function:
-
-    >>> lambda temperature, uncertainty: temperature + uncertainty < 0
-
-    There is the reserved name 'This', that always refers to `field`. So, to flag field if field is negative, you can
-    also pass:
-
-    >>> lambda this: this < 0
-
-    If you want to make dependent the flagging from flags already present in the data, you can use the built-in
-    ``isflagged`` method. For example, to flag the 'temperature', if 'level' is flagged, you would use:
-
-    >>> lambda level: isflagged(level)
-
-    You can furthermore specify a flagging level, you want to compare the flags to. For example, for flagging
-    'temperature', if 'level' is flagged at a level named 'doubtfull' or worse, use:
-
-    >>> lambda level: isflagged(level, flag='doubtfull', comparator='<=')
-
-    If you are unsure about the used flaggers flagging level names, you can use the reserved key words BAD, UNFLAGGED
-    and GOOD, to refer to the worst (BAD), best(GOOD) or unflagged (UNFLAGGED) flagging levels. For example.
-
-    >>> lambda level: isflagged(level, flag=UNFLAGGED, comparator='==')
-
-    Your expression also is allowed to include pandas and numpy functions
-
-    >>> lambda level: np.sqrt(level) > 7
-    """
-    # NOTE:
-    # The naming of the func parameter is pretty confusing
-    # as it actually holds the result of a generic expression
-    mask = _execGeneric(flagger, data, func, field, nodata).squeeze()
-    if np.isscalar(mask):
-        raise TypeError(f"generic expression does not return an array")
-    if not np.issubdtype(mask.dtype, np.bool_):
-        raise TypeError(f"generic expression does not return a boolean array")
-
-    if field not in flagger.getFlags():
-        flagger = flagger.merge(flagger.initFlags(data=pd.Series(index=mask.index, name=field)))
-
-    # if flagger.getFlags(field).empty:
-    #     flagger = flagger.merge(
-    #         flagger.initFlags(
-    #             data=pd.Series(name=field, index=mask.index, dtype=np.float64)))
-    flagger = flagger.setFlags(field=field, loc=mask, **kwargs)
-    return data, flagger
-
-
-@register(masking='field')
-def flagRange(data, field, flagger, min=-np.inf, max=np.inf, **kwargs):
-    """
-    Function flags values not covered by the closed interval [`min`, `max`].
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    min : float
-        Lower bound for valid data.
-    max : float
-        Upper bound for valid data.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
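-
-    Examples
-    --------
-    A minimal, hypothetical call (field name and bounds are illustrative only):
-
-    >>> data, flagger = flagRange(data, "temp", flagger, min=-40.0, max=60.0)
-    >>> flagger.isFlagged("temp")  # True wherever values fall outside [-40, 60]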
-    """
-
-    # using .values is very much faster
-    datacol = data[field].values
-    mask = (datacol < min) | (datacol > max)
-    flagger = flagger.setFlags(field, mask, **kwargs)
-    return data, flagger
-
-
-
-@register(masking='field')
-def flagMissing(data, field, flagger, nodata=np.nan, **kwargs):
-    """
-    The function flags all values indicating missing data.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    nodata : any, default np.nan
-        A value that defines missing data.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
-    """
-
-    datacol = data[field]
-    if np.isnan(nodata):
-        mask = datacol.isna()
-    else:
-        mask = datacol == nodata
-
-    flagger = flagger.setFlags(field, loc=mask, **kwargs)
-    return data, flagger
-
-
-@register(masking='field')
-def flagSesonalRange(
-        data, field, flagger, min, max, startmonth=1, endmonth=12, startday=1, endday=31, **kwargs,
-):
-    """
-    Function applies a range check onto data chunks (seasons).
-
-    The data chunks to be tested are defined by annual seasons that range from a starting date
-    to an ending date, whereas the dates are defined by month and day number.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    min : float
-        Lower bound for valid data.
-    max : float
-        Upper bound for valid data.
-    startmonth : int
-        Starting month of the season to flag.
-    endmonth : int
-        Ending month of the season to flag.
-    startday : int
-        Starting day of the season to flag.
-    endday : int
-        Ending day of the season to flag.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
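-
-    Examples
-    --------
-    A hypothetical call (field name and bounds are illustrative only), flagging "temp" values
-    outside [0, 30] within the season from June 1st to August 31st of every year:
-
-    >>> data, flagger = flagSesonalRange(data, "temp", flagger, min=0, max=30,
-    ...                                  startmonth=6, endmonth=8, startday=1, endday=31)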
-    """
-    if data[field].empty:
-        return data, flagger
-
-    newfield = f"{field}_masked"
-    start = f"{startmonth:02}-{startday:02}T00:00:00"
-    end = f"{endmonth:02}-{endday:02}T00:00:00"
-
-    data, flagger = proc_fork(data, field, flagger, suffix="_masked")
-    data, flagger = modelling_mask(data, newfield, flagger, mode='seasonal', season_start=start, season_end=end,
-                                   include_bounds=True)
-    data, flagger = flagRange(data, newfield, flagger, min=min, max=max, **kwargs)
-    data, flagger = proc_projectFlags(data, field, flagger, method='match', source=newfield)
-    data, flagger = proc_drop(data, newfield, flagger)
-    return data, flagger
-
-
-@register(masking='field')
-def clearFlags(data, field, flagger, **kwargs):
-    flagger = flagger.clearFlags(field, **kwargs)
-    return data, flagger
-
-
-@register(masking='field')
-def forceFlags(data, field, flagger, flag, **kwargs):
-    flagger = flagger.clearFlags(field).setFlags(field, flag=flag, inplace=True, **kwargs)
-    return data, flagger
-
-
-@register(masking='field')
-def flagIsolated(
-        data, field, flagger, gap_window, group_window, **kwargs,
-):
-    """
-    The function flags arbitrarily large groups of values, if they are surrounded by sufficiently
-    large data gaps. A gap is defined as a group of missing and/or flagged values.
-
-    A series of values x_k,x_(k+1),...,x_(k+n), with associated timestamps t_k,t_(k+1),...,t_(k+n),
-    is considered to be isolated, if:
-
-    1. t_(k+n) - t_k < `group_window`
-    2. None of the x_j with 0 < t_k - t_j < `gap_window` is valid or unflagged (preceding gap).
-    3. None of the x_j with 0 < t_j - t_(k+n) < `gap_window` is valid or unflagged (succeeding gap).
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional informations related to `data`.
-    gap_window :
-        The minimum size of the gap before and after a group of valid values for that group to be considered
-        isolated. See conditions (2) and (3).
-    group_window :
-        The maximum temporal extension a group that is isolated by gaps of size 'gap_window' may have
-        in order to actually be flagged as isolated. See condition (1).
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
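-
-    Examples
-    --------
-    A hypothetical sketch (field name and windows are illustrative only): with ``gap_window='2D'``
-    and ``group_window='1D'``, a group of valid values spanning at most one day that is enclosed
-    by at least two days of missing or flagged data on both sides gets flagged as isolated:
-
-    >>> data, flagger = flagIsolated(data, "temp", flagger, gap_window="2D", group_window="1D")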
-    """
-
-    gap_window = pd.tseries.frequencies.to_offset(gap_window)
-    group_window = pd.tseries.frequencies.to_offset(group_window)
-
-    col = data[field].mask(flagger.isFlagged(field))
-    mask = col.isnull()
-
-    flags = pd.Series(data=0, index=col.index, dtype=bool)
-    for srs in groupConsecutives(mask):
-        if np.all(~srs):
-            start = srs.index[0]
-            stop = srs.index[-1]
-            if stop - start <= group_window:
-                left = mask[start - gap_window: start].iloc[:-1]
-                if left.all():
-                    right = mask[stop: stop + gap_window].iloc[1:]
-                    if right.all():
-                        flags[start:stop] = True
-
-    flagger = flagger.setFlags(field, flags, **kwargs)
-
-    return data, flagger
-
-
-@register(masking='field')
-def flagDummy(data, field, flagger, **kwargs):
-    """
-    Function does nothing but returning data and flagger.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional informations related to `data`.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-    """
-    return data, flagger
-
-
-@register(masking='field')
-def flagForceFail(data, field, flagger, **kwargs):
-    """
-    Function raises a runtime error.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional informations related to `data`.
-
-    """
-    raise RuntimeError("Works as expected :D")
-
-
-@register(masking='field')
-def flagUnflagged(data, field, flagger, **kwargs):
-    """
-    Function sets the flagger.GOOD flag to all values flagged better than flagger.GOOD.
-    If there is an entry 'flag' in the kwargs dictionary passed, the
-    function sets the kwargs['flag'] flag to all values flagged better than kwargs['flag'].
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional informations related to `data`.
-    kwargs : Dict
-        If kwargs contains a 'flag' entry, kwargs['flag'] is set; if no 'flag' entry is present,
-        'flagger.GOOD' is set.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-    """
-
-    flag = kwargs.pop('flag', flagger.GOOD)
-    flagger = flagger.setFlags(field, flag=flag, **kwargs)
-    return data, flagger
-
-
-@register(masking='field')
-def flagGood(data, field, flagger, **kwargs):
-    """
-    Function sets the flagger.GOOD flag to all values flagged better than flagger.GOOD.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional informations related to `data`.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-
-    """
-    kwargs.pop('flag', None)
-    return flagUnflagged(data, field, flagger, **kwargs)
-
-
-@register(masking='field')
-def flagManual(data, field, flagger, mdata, mflag: Any = 1, method="plain", **kwargs):
-    """
-    Flag data by given, "manually generated" data.
-
-    The data is flagged at locations where `mdata` is equal to a provided flag (`mflag`).
-    The format of mdata can be an indexed object, like pd.Series, pd.Dataframe or dios.DictOfSeries,
-    but also can be a plain list- or array-like.
-    How indexed mdata is aligned to data is specified via the `method` parameter.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional informations related to `data`.
-    mdata : {pd.Series, pd.Dataframe, DictOfSeries, str}
-        The "manually generated" data
-    mflag : scalar
-        The flag that indicates data points in `mdata`, whose projection onto data should be flagged.
-    method : {'plain', 'ontime', 'left-open', 'right-open'}, default plain
-        Defines how mdata is projected on data. Except for the 'plain' method, the methods assume mdata to have an
-        index.
-
-        * 'plain': mdata must have the same length as data and is projected one-to-one on data.
-        * 'ontime': works only with indexed mdata. mdata entries are matched with data entries that have the same index.
-        * 'right-open': mdata defines intervals, onto which values are projected.
-          The intervals are defined by any two consecutive timestamps t_1 and t_2 in mdata.
-          The value at t_1 gets projected onto all data timestamps t with t_1 <= t < t_2.
-        * 'left-open': like 'right-open', but the projected interval now covers all t with t_1 < t <= t_2.
-
-    Returns
-    -------
-    data, flagger: original data, modified flagger
-    
-    Examples
-    --------
-    An example for mdata
-    >>> mdata = pd.Series([1,0,1], index=pd.to_datetime(['2000-02', '2000-03', '2001-05']))
-    >>> mdata
-    2000-02-01    1
-    2000-03-01    0
-    2001-05-01    1
-    dtype: int64
-
-    On *daily* data, with the 'ontime' method, only the provided timestamps are used.
-    Bear in mind that only exact timestamps apply; any offset will result in ignoring
-    the timestamp.
-    >>> _, fl = flagManual(data, field, flagger, mdata, mflag=1, method='ontime')
-    >>> fl.isFlagged(field)
-    2000-01-31    False
-    2000-02-01    True
-    2000-02-02    False
-    2000-02-03    False
-    ..            ..
-    2000-02-29    False
-    2000-03-01    True
-    2000-03-02    False
-    Freq: D, dtype: bool
-
-    With the 'right-open' method, the mdata is forward filled:
-    >>> _, fl = flagManual(data, field, flagger, mdata, mflag=1, method='right-open')
-    >>> fl.isFlagged(field)
-    2000-01-31    False
-    2000-02-01    True
-    2000-02-02    True
-    ..            ..
-    2000-02-29    True
-    2000-03-01    False
-    2000-03-02    False
-    Freq: D, dtype: bool
-
-    With the 'left-open' method, backward filling is used:
-    >>> _, fl = flagManual(data, field, flagger, mdata, mflag=1, method='left-open')
-    >>> fl.isFlagged(field)
-    2000-01-31    False
-    2000-02-01    False
-    2000-02-02    True
-    ..            ..
-    2000-02-29    True
-    2000-03-01    True
-    2000-03-02    False
-    Freq: D, dtype: bool
-    """
-    dat = data[field]
-    if isinstance(mdata, str):
-        # todo import path type in mdata, use
-        #  s = pd.read_csv(mdata, index_col=N, usecol=[N,N,..]) <- use positional
-        #  use a list-arg in config to get the columns
-        #  at last, fall throug to next checks
-        raise NotImplementedError("giving a path is currently not supported")
-
-    if isinstance(mdata, (pd.DataFrame, DictOfSeries)):
-        mdata = mdata[field]
-
-    hasindex = isinstance(mdata, (pd.Series, pd.DataFrame, DictOfSeries))
-    if not hasindex and method != "plain":
-        raise ValueError("mdata has no index")
-
-    if method == "plain":
-        if hasindex:
-            mdata = mdata.to_numpy()
-        if len(mdata) != len(dat):
-            raise ValueError("mdata must have same length then data")
-        mdata = pd.Series(mdata, index=dat.index)
-    elif method == "ontime":
-        pass  # reindex will do the job later
-    elif method in ["left-open", "right-open"]:
-        mdata = mdata.reindex(dat.index.union(mdata.index))
-
-        # -->)[t0-->)[t1--> (ffill)
-        if method == "right-open":
-            mdata = mdata.ffill()
-
-        # <--t0](<--t1](<-- (bfill)
-        if method == "left-open":
-            mdata = mdata.bfill()
-    else:
-        raise ValueError(method)
-
-    mask = mdata == mflag
-    mask = mask.reindex(dat.index).fillna(False)
-    flagger = flagger.setFlags(field=field, loc=mask, **kwargs)
-    return data, flagger
-
-
-@register(masking='all')
-def flagCrossScoring(data, field, flagger, fields, thresh, cross_stat='modZscore', **kwargs):
-    """
-    Function checks for outliers relative to the "horizontal" input data axis.
-
-    For `fields` :math:`=[f_1,f_2,...,f_N]` and timestamps :math:`[t_1,t_2,...,t_K]`, the following steps are taken
-    for outlier detection:
-
-    1. All timestamps :math:`t_i`, for which there is some :math:`f_k` with :math:`data[f_k]` having no entry at
-       :math:`t_i`, are excluded from the following process (inner join of the :math:`f_i` fields).
-    2. For every :math:`1 <= i <= K`, the value
-       :math:`m_i = median(\\{data[f_1][t_i], data[f_2][t_i], ..., data[f_N][t_i]\\})` is calculated.
-    3. For every :math:`1 <= i <= K`, the set
-       :math:`\\{data[f_1][t_i] - m_i, data[f_2][t_i] - m_i, ..., data[f_N][t_i] - m_i\\}` is tested for outliers with the
-       specified method (`cross_stat` parameter).
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        A dummy parameter.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional informations related to `data`.
-    fields : str
-        List of fieldnames in data, determining which variables are to be included into the flagging process.
-    thresh : float
-        Threshold which the outlier score of a value must exceed in order to be flagged an outlier.
-    cross_stat : {'modZscore', 'Zscore'}, default 'modZscore'
-        Method used for calculating the outlier scores.
-
-        * ``'modZscore'``: Median based "sigma"-ish approach. See References [1].
-        * ``'Zscore'``: Score values by how many standard deviations they differ from the mean.
-          See References [1].
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the input flagger.
-
-    References
-    ----------
-    [1] https://www.itl.nist.gov/div898/handbook/eda/section3/eda35h.htm
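-
-    Examples
-    --------
-    An illustrative sketch of the default ``'modZscore'`` scoring for a single timestamp
-    (the numbers are made up); the resulting scores are what gets compared against `thresh`:
-
-    >>> row = np.array([10.1, 10.3, 9.9, 25.0])  # values of all fields at one timestamp
-    >>> mad = np.median(np.abs(row - np.median(row)))
-    >>> scores = np.abs(0.6745 * (row - np.median(row)) / mad)  # last value scores far above the others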
-    """
-
-    df = data[fields].loc[data[fields].index_of('shared')].to_df()
-
-    if isinstance(cross_stat, str):
-        if cross_stat == 'modZscore':
-            MAD_series = df.subtract(df.median(axis=1), axis=0).abs().median(axis=1)
-            diff_scores = ((0.6745 * (df.subtract(df.median(axis=1), axis=0))).divide(MAD_series, axis=0)).abs()
-        elif cross_stat == 'Zscore':
-            diff_scores = (df.subtract(df.mean(axis=1), axis=0)).divide(df.std(axis=1), axis=0).abs()
-        else:
-            raise ValueError(cross_stat)
-    else:
-        try:
-            stat = getattr(df, cross_stat.__name__)(axis=1)
-        except AttributeError:
-            stat = df.aggregate(cross_stat, axis=1)
-        diff_scores = df.subtract(stat, axis=0).abs()
-
-    mask = diff_scores > thresh
-    for var in fields:
-        flagger = flagger.setFlags(var, mask[var], **kwargs)
-
-    return data, flagger
-
-@register(masking='all')
-def flagDriftFromNorm(data, field, flagger, fields, segment_freq, norm_spread, norm_frac=0.5,
-                      metric=lambda x, y: scipy.spatial.distance.pdist(np.array([x, y]),
-                                                                       metric='cityblock') / len(x),
-                      linkage_method='single', **kwargs):
-    """
-    The function flags value courses that significantly deviate from a group of normal value courses.
-
-    "Normality" is determined in terms of a maximum spreading distance, that members of a normal group must not exceed.
-    In addition, only a group is considered "normal" if it contains more then `norm_frac` percent of the
-    variables in "fields".
-
-    See the Notes section for a more detailed presentation of the algorithm
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        A dummy parameter.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional informations related to `data`.
-    fields : str
-        List of fieldnames in data, determining which variables are to be included into the flagging process.
-    segment_freq : str
-        An offset string, determining the size of the separate data chunks that the algorithm is applied on
-        piecewise.
-    norm_spread : float
-        A parameter limiting the maximum "spread" of the timeseries allowed in the "normal" group. See the Notes section
-        for more details.
-    norm_frac : float, default 0.5
-        Has to be in [0,1]. Determines the minimum percentage of variables the "normal" group has to comprise in order
-        to actually be the normal group. The higher that value, the more stable the algorithm will be with respect to false
-        positives. Also, nobody knows what happens if this value is below 0.5.
-    metric : Callable[[numpy.array, numpy.array], float]
-        A distance function. It should be a function of two 1-dimensional arrays and return a float scalar value.
-        This value is interpreted as the distance of the two input arrays. The default is the averaged Manhattan metric.
-        See the Notes section to get an idea of why this could be a good choice.
-    linkage_method : {"single", "complete", "average", "weighted", "centroid", "median", "ward"}, default "single"
-        The linkage method used for hierarchical (agglomerative) clustering of the timeseries.
-        See the Notes section for more details.
-        The keyword gets passed on to scipy.hierarchy.linkage. See its documentation to learn more about the different
-        keywords (References [1]).
-        See wikipedia for an introduction to hierarchical clustering (References [2]).
-    kwargs
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the input flagger.
-
-    Notes
-    -----
-    The following steps are performed for every data "segment" of length `segment_freq` in order to find the
-    "abnormal" data:
-
-    1. Calculate the distances :math:`d(x_i,x_j)` for all :math:`x_i` in parameter `fields` (with :math:`d`
-       denoting the distance function passed to the parameter `metric`).
-    2. Calculate a dendrogram with a hierarchical linkage algorithm, specified by the parameter `linkage_method`.
-    3. Flatten the dendrogram at the level where the agglomeration costs exceed the value given by the parameter `norm_spread`.
-    4. Check if there is a cluster containing more than `norm_frac` percent of the variables in fields.
-
-        1. if yes: flag all the variables that are not in that cluster (inside the segment)
-        2. if no: flag nothing
-
-    The main parameter giving control over the algorithm's behavior is the `norm_spread` parameter, which determines
-    the maximum spread of a normal group by limiting the costs a cluster agglomeration must not exceed in any
-    linkage step.
-    For singleton clusters, these costs just equal half the distance the timeseries in the clusters have to
-    each other. So no two timeseries can be clustered together that are more than
-    2*`norm_spread` distant from each other.
-    When timeseries get clustered together, the new cluster's distance to all the other timeseries/clusters is
-    calculated according to the linkage method specified by `linkage_method`. By default, it is the minimum distance
-    the members of the clusters have to each other.
-    Having that in mind, it is advisable to choose a distance function that can be well interpreted in the unit
-    dimension of the measurement and whose interpretation is invariant over the length of the timeseries.
-    That is why the "averaged Manhattan metric" is set as the metric default, since it corresponds to the
-    averaged value distance two timeseries have (as opposed to the Euclidean distance, for example).
-
-    References
-    ----------
-    Documentation of the underlying hierarchical clustering algorithm:
-        [1] https://docs.scipy.org/doc/scipy/reference/generated/scipy.cluster.hierarchy.linkage.html
-    Introduction to Hierarchical clustering:
-        [2] https://en.wikipedia.org/wiki/Hierarchical_clustering
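-
-    Examples
-    --------
-    The default metric, written out as an illustrative sketch; it returns the average absolute
-    difference of two equally long value series:
-
-    >>> metric = lambda x, y: scipy.spatial.distance.pdist(np.array([x, y]), metric='cityblock') / len(x)
-    >>> metric(np.array([1., 2., 3.]), np.array([2., 3., 4.]))
-    array([1.])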
-    """
-
-    data_to_flag = data[fields].to_df()
-    data_to_flag.dropna(inplace=True)
-    segments = data_to_flag.groupby(pd.Grouper(freq=segment_freq))
-    for segment in segments:
-        if segment[1].shape[0] <= 1:
-            continue
-        drifters = detectDeviants(segment[1], metric, norm_spread, norm_frac, linkage_method, 'variables')
-
-        for var in drifters:
-            flagger = flagger.setFlags(fields[var], loc=segment[1].index, **kwargs)
-
-    return data, flagger
-
-@register(masking='all')
-def flagDriftFromReference(data, field, flagger, fields, segment_freq, thresh,
-                      metric=lambda x, y: scipy.spatial.distance.pdist(np.array([x, y]),
-                                                                    metric='cityblock')/len(x),
-                       **kwargs):
-    """
-    The function flags value courses that deviate from a reference course by a margin exceeding a certain threshold.
-
-    The deviation is measured by the distance function passed to parameter metric.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The reference variable, the deviation from which determines the flagging.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional informations related to `data`.
-    fields : str
-        List of fieldnames in data, determining which variables are to be included into the flagging process.
-    segment_freq : str
-        An offset string, determining the size of the separate data chunks that the algorithm is applied on
-        piecewise.
-    thresh : float
-        The maximum distance by which normal variables may deviate from the reference variable.
-    metric : Callable[[numpy.array, numpy.array], float]
-        A distance function. It should be a function of two 1-dimensional arrays and return a float scalar value.
-        This value is interpreted as the distance of the two input arrays. The default is the averaged Manhattan metric.
-        See the Notes section to get an idea of why this could be a good choice.
-    kwargs
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the input flagger.
-
-    Notes
-    -----
-    It is advisable to choose a distance function that can be well interpreted in the unit
-    dimension of the measurement and whose interpretation is invariant over the length of the timeseries.
-    That is why the "averaged Manhattan metric" is set as the metric default, since it corresponds to the
-    averaged value distance two timeseries have (as opposed to the Euclidean distance, for example).
-    """
-
-    data_to_flag = data[fields].to_df()
-    data_to_flag.dropna(inplace=True)
-    if field not in fields:
-        fields.append(field)
-    var_num = len(fields)
-    segments = data_to_flag.groupby(pd.Grouper(freq=segment_freq))
-
-    for segment in segments:
-
-        if segment[1].shape[0] <= 1:
-            continue
-        for i in range(var_num):
-            dist = metric(segment[1].iloc[:, i].values, segment[1].loc[:, field].values)
-            if dist > thresh:
-                flagger = flagger.setFlags(fields[i], loc=segment[1].index, **kwargs)
-
-    return data, flagger
-
-
-@register(masking='all')
-def flagDriftScale(data, field, flagger, fields_scale1, fields_scale2, segment_freq, norm_spread, norm_frac=0.5,
-                      metric=lambda x, y: scipy.spatial.distance.pdist(np.array([x, y]),
-                                                                                    metric='cityblock')/len(x),
-                      linkage_method='single', **kwargs):
-
-
-    """
-    The function linearly rescales one set of variables to another set of variables with a different scale and then
-    flags value courses that significantly deviate from a group of normal value courses.
-
-    The two sets of variables can be linearly scaled one to another and hence the scaling transformation is performed
-    via linear regression: a linear regression is performed on each pair of variables, giving a slope and an intercept.
-    The transformation is then calculated from the median of all the calculated slopes and intercepts.
-
-    Once the transformation is performed, the function flags those values that deviate from a group of normal values.
-    "Normality" is determined in terms of a maximum spreading distance that members of a normal group must not exceed.
-    In addition, a group is only considered "normal" if it contains more than `norm_frac` percent of the
-    variables in "fields".
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        A dummy parameter.
-    flagger : saqc.flagger
-        A flagger object, holding flags and additional informations related to `data`.
-    fields_scale1 : str
-        List of fieldnames in data to be included into the flagging process which are scaled according to scaling
-        scheme 1.
-    fields_scale2 : str
-        List of fieldnames in data to be included into the flagging process which are scaled according to scaling
-        scheme 2.
-    segment_freq : str
-        An offset string, determining the size of the separate data chunks that the algorithm is applied on
-        piecewise.
-    norm_spread : float
-        A parameter limiting the maximum "spread" of the timeseries allowed in the "normal" group. See the Notes section
-        for more details.
-    norm_frac : float, default 0.5
-        Has to be in [0,1]. Determines the minimum percentage of variables the "normal" group has to comprise in order
-        to actually be the normal group. The higher that value, the more stable the algorithm will be with respect to false
-        positives. Also, nobody knows what happens if this value is below 0.5.
-    metric : Callable[[numpy.array, numpy.array], float]
-        A distance function. It should be a function of two 1-dimensional arrays and return a float scalar value.
-        This value is interpreted as the distance of the two input arrays. The default is the averaged Manhattan metric.
-        See the Notes section to get an idea of why this could be a good choice.
-    linkage_method : {"single", "complete", "average", "weighted", "centroid", "median", "ward"}, default "single"
-        The linkage method used for hierarchical (agglomerative) clustering of the timeseries.
-        See the Notes section for more details.
-        The keyword gets passed on to scipy.hierarchy.linkage. See its documentation to learn more about the different
-        keywords (References [1]).
-        See wikipedia for an introduction to hierarchical clustering (References [2]).
-    kwargs
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the input flagger.
-
-    References
-    ----------
-    Documentation of the underlying hierarchical clustering algorithm:
-        [1] https://docs.scipy.org/doc/scipy/reference/generated/scipy.cluster.hierarchy.linkage.html
-    Introduction to Hierarchical clustering:
-        [2] https://en.wikipedia.org/wiki/Hierarchical_clustering
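-
-    Examples
-    --------
-    An illustrative sketch of the rescaling step (mirroring the implementation below): a regression
-    line is fitted for every pair of variables from the two scale groups, and the median slope and
-    intercept define the transformation applied to the first group:
-
-    >>> slopes, intercepts = [], []
-    >>> for f1 in fields_scale1:
-    ...     for f2 in fields_scale2:
-    ...         s, i, *_ = stats.linregress(data[f1], data[f2])
-    ...         slopes.append(s)
-    ...         intercepts.append(i)
-    >>> rescaled_f1 = np.median(intercepts) + np.median(slopes) * data[f1]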
-    """
-
-    fields = fields_scale1 + fields_scale2
-    data_to_flag = data[fields].to_df()
-    data_to_flag.dropna(inplace=True)
-
-    convert_slope = []
-    convert_intercept = []
-
-    for field1 in fields_scale1:
-        for field2 in fields_scale2:
-            slope, intercept, r_value, p_value, std_err = stats.linregress(data_to_flag[field1], data_to_flag[field2])
-            convert_slope.append(slope)
-            convert_intercept.append(intercept)
-
-    factor_slope = np.median(convert_slope)
-    factor_intercept = np.median(convert_intercept)
-
-    dat = dios.DictOfSeries()
-    for field1 in fields_scale1:
-        dat[field1] = factor_intercept + factor_slope * data_to_flag[field1]
-    for field2 in fields_scale2:
-        dat[field2] = data_to_flag[field2]
-
-    dat_to_flag = dat[fields].to_df()
-
-    segments = dat_to_flag.groupby(pd.Grouper(freq=segment_freq))
-    for segment in segments:
-        if segment[1].shape[0] <= 1:
-            continue
-        drifters = detectDeviants(segment[1], metric, norm_spread, norm_frac, linkage_method, 'variables')
-        for var in drifters:
-            flagger = flagger.setFlags(fields[var], loc=segment[1].index, **kwargs)
-
-    return data, flagger
diff --git a/saqc/funcs/generic.py b/saqc/funcs/generic.py
new file mode 100644
index 0000000000000000000000000000000000000000..99e460bc7d95b3af4146f8cc30e5f061d1774907
--- /dev/null
+++ b/saqc/funcs/generic.py
@@ -0,0 +1,255 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from functools import partial
+from inspect import signature
+from typing import Tuple, Union, Callable
+
+import numpy as np
+import pandas as pd
+
+from dios import DictOfSeries
+
+from saqc.constants import GOOD, BAD, UNFLAGGED
+from saqc.core.flags import initFlagsLike, Flags
+from saqc.core.register import flagging, processing, _maskData, _isflagged
+from saqc.core.visitor import ENVIRONMENT
+
+import operator as op
+
+_OP = {"<": op.lt, "<=": op.le, "==": op.eq, "!=": op.ne, ">": op.gt, ">=": op.ge}
+
+
+def _dslIsFlagged(
+    flags: Flags, var: pd.Series, flag: float = None, comparator: str = None
+) -> Union[pd.Series, DictOfSeries]:
+    """
+    helper function for `flag`
+
+    Param Combinations
+    ------------------
+    - ``isflagged('var')``              : show me (anything) flagged
+    - ``isflagged('var', DOUBT)``       : show me ``flags >= DOUBT``
+    - ``isflagged('var', DOUBT, '==')`` : show me ``flags == DOUBT``
+
+    Raises
+    ------
+    ValueError: if `comparator` is passed but no `flag` value, e.g. ``isflagged('var', comparator='>=')``
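+
+    Examples
+    --------
+    A hypothetical call as it would appear inside a generic expression (``flags`` is bound
+    via ``functools.partial`` in ``_execGeneric``, so users only pass the remaining arguments):
+
+    >>> isflagged(level)                         # flags['level'] > UNFLAGGED
+    >>> isflagged(level, BAD, comparator='==')   # flags['level'] == BAD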
+    """
+    if flag is None:
+        if comparator is not None:
+            raise ValueError("if `comparator` is used, explicitly pass a `flag` level.")
+        flag = UNFLAGGED
+        comparator = ">"
+
+    # default
+    if comparator is None:
+        comparator = ">="
+
+    _op = _OP[comparator]
+    return _op(flags[var.name], flag)
+
+
+def _execGeneric(
+    flags: Flags,
+    data: DictOfSeries,
+    func: Callable[[pd.Series], pd.Series],
+    field: str,
+) -> pd.Series:
+    # TODO:
+    # - check series.index compatibility
+    # - field is only needed to translate 'this' parameters
+    #    -> maybe we could do the translation on the tree instead
+
+    sig = signature(func)
+    args = []
+    for k, v in sig.parameters.items():
+        k = field if k == "this" else k
+        if k not in data:
+            raise NameError(f"variable '{k}' not found")
+        args.append(data[k])
+
+    globs = {
+        "isflagged": partial(_dslIsFlagged, flags),
+        "ismissing": lambda var: pd.isnull(var),
+        "this": field,
+        "GOOD": GOOD,
+        "BAD": BAD,
+        "UNFLAGGED": UNFLAGGED,
+        **ENVIRONMENT,
+    }
+    func.__globals__.update(globs)
+    return func(*args)
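+
+# A hypothetical illustration of what `_execGeneric` does: for
+# ``func = lambda temperature, uncertainty: temperature + uncertainty < 0`` the parameter
+# names are resolved as data column names, so the call effectively becomes
+# ``func(data["temperature"], data["uncertainty"])``, after ``isflagged``, ``ismissing``
+# and the flag constants have been injected into the function's globals.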
+
+
+@processing()
+def genericProcess(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    func: Callable[[pd.Series], pd.Series],
+    to_mask: float = UNFLAGGED,
+    **kwargs,
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Generate/process data with generically defined functions.
+
+    The functions can depend on any of the fields present in data.
+
+    Formally, what the function does, is the following:
+
+    1.  Let F be a Callable, depending on fields f_1, f_2,...f_K (F = F(f_1, f_2,...f_K)).
+        Then, for every timestamp t_i that occurs in at least one of the timeseries data[f_j] (outer join),
+        the value v_i is computed via:
+        v_i = F(data[f_1][t_i], data[f_2][t_i], ..., data[f_K][t_i]), if all data[f_j][t_i] exist,
+        v_i = `np.nan`, if at least one of the data[f_j][t_i] is missing.
+    2.  The result is stored to data[field] (which gets generated if not present).
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, where you want the result from the generic expressions processing to be written to.
+    flags : saqc.Flags
+        Container to store quality flags to data.
+    func : Callable
+        The data processing function with parameter names that will be
+        interpreted as data column entries.
+        See the examples section to learn more.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        The shape of the data may have changed relatively to the data input.
+    flags : saqc.Flags
+        The quality flags of data.
+        The flags shape may have changed relative to the input flags.
+
+    Examples
+    --------
+    Some examples on what to pass to the func parameter:
+    To compute the sum of the variables "temperature" and "uncertainty", you would pass the function:
+
+    >>> lambda temperature, uncertainty: temperature + uncertainty
+
+    You also can pass numpy and pandas functions:
+
+    >>> lambda temperature, uncertainty: np.round(temperature) * np.sqrt(uncertainty)
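+
+    A complete, hypothetical call that derives a new column "sum" from two existing columns:
+
+    >>> data, flags = genericProcess(data, "sum", flags, func=lambda temperature, uncertainty: temperature + uncertainty)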
+    """
+
+    data_masked, _ = _maskData(data.copy(), flags, data.columns, to_mask)
+    data[field] = _execGeneric(flags, data_masked, func, field).squeeze()
+
+    if field in flags:
+        flags.drop(field)
+
+    flags[field] = initFlagsLike(data[field])[field]
+
+    return data, flags
+
+
+@flagging(masking="all")
+def genericFlag(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    func: Callable[[pd.Series], pd.Series],
+    flag: float = BAD,
+    to_mask: float = UNFLAGGED,
+    **kwargs,
+) -> Tuple[DictOfSeries, Flags]:
+    # TODO : fix docstring, check if all still works
+    """
+    A function to flag a data column by evaluating a generic expression.
+
+    The expression can depend on any of the fields present in data.
+
+    Formally, what the function does, is the following:
+
+    Let X be an expression, depending on fields f_1, f_2,...f_K (X = X(f_1, f_2,...f_K)).
+    Then, for every timestamp t_i in data[field]:
+    data[field][t_i] is flagged if X(data[f_1][t_i], data[f_2][t_i], ..., data[f_K][t_i]) is True.
+
+    Note that all value series included in the expression to evaluate must be labeled identically to field.
+
+    Note that the expression is passed in the form of a Callable and that this callable's parameter names are
+    interpreted as actual column names in the data. See the examples section to get an idea.
+
+    Note that all numpy functions are available within the generic expressions.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, where you want the result from the generic expressions evaluation to be projected
+        to.
+    flags : saqc.Flags
+        Container to store flags of the data.
+    func : Callable
+        The expression to be evaluated is passed in the form of a callable, with parameter names that will be
+        interpreted as data column entries. The callable must return a boolean array-like.
+        See the examples section to learn more.
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The quality flags of data.
+        Flag values may have changed relative to the flags input.
+
+    Examples
+    --------
+    Some examples on what to pass to the func parameter:
+    To flag the variable `field`, if the sum of the variables
+    "temperature" and "uncertainty" is below zero, you would pass the function:
+
+    >>> lambda temperature, uncertainty: temperature + uncertainty < 0
+
+    There is the reserved name 'this', which always refers to `field`. So, to flag `field` if `field` is negative, you can
+    also pass:
+
+    >>> lambda this: this < 0
+
+    If you want to make the flagging dependent on flags already present in the data, you can use the built-in
+    ``isflagged`` method. For example, to flag 'temperature' if 'level' is flagged, you would use:
+
+    >>> lambda level: isflagged(level)
+
+    You can furthermore specify a flagging level to compare the flags against. For example, to flag
+    'temperature' if 'level' is flagged at a level named DOUBTFUL or worse, use:
+
+    >>> lambda level: isflagged(level, flag=DOUBTFUL, comparator='>')
+
+    If you are unsure about the flagging level names in use, you can use the reserved keywords BAD, UNFLAGGED
+    and GOOD to refer to the worst (BAD), best (GOOD) or unflagged (UNFLAGGED) flagging levels. For example:
+
+    >>> lambda level: isflagged(level, flag=UNFLAGGED, comparator='==')
+
+    Your expression may also include pandas and numpy functions:
+
+    >>> lambda level: np.sqrt(level) > 7
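+
+    A complete, hypothetical call, flagging "temperature" wherever the expression evaluates to True:
+
+    >>> data, flags = genericFlag(data, "temperature", flags, func=lambda this: this < 0, flag=BAD)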
+    """
+    # we get the data unmasked, in order to also receive flags,
+    # so let's do to the masking manually
+    # data_masked, _ = _maskData(data, flags, data.columns, to_mask)
+
+    mask = _execGeneric(flags, data, func, field).squeeze()
+    if np.isscalar(mask):
+        raise TypeError(f"generic expression does not return an array")
+    if not np.issubdtype(mask.dtype, np.bool_):
+        raise TypeError(f"generic expression does not return a boolean array")
+
+    if field not in flags:
+        flags[field] = pd.Series(data=UNFLAGGED, index=mask.index, name=field)
+
+    mask = ~_isflagged(flags[field], to_mask) & mask
+
+    flags[mask, field] = flag
+
+    return data, flags
diff --git a/saqc/funcs/harm_functions.py b/saqc/funcs/harm_functions.py
deleted file mode 100644
index 49762412c2e0473d0604b59209739cd7641f0ce5..0000000000000000000000000000000000000000
--- a/saqc/funcs/harm_functions.py
+++ /dev/null
@@ -1,351 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-
-import numpy as np
-import logging
-from saqc.core.register import register
-from saqc.funcs.proc_functions import (
-    proc_interpolateGrid,
-    proc_shift,
-    proc_fork,
-    proc_resample,
-    proc_projectFlags,
-    proc_drop,
-    proc_rename,
-    ORIGINAL_SUFFIX,
-)
-
-logger = logging.getLogger("SaQC")
-
-
-@register(masking='none')
-def harm_shift2Grid(data, field, flagger, freq, method="nshift", to_drop=None, **kwargs):
-    """
-    A method to "regularize" data by shifting data points forward/backward to a regular timestamp.
-
-    A series of data is considered "regular", if it is sampled regularly (= having uniform sampling rate).
-
-    Method keywords:
-
-    * ``'nshift'``:  every grid point gets assigned the nearest value in its range (*range = +/-(freq/2)*)
-    * ``'bshift'``:  every grid point gets assigned its first succeeding value - if there is one available in the
-      succeeding sampling interval.
-    * ``'fshift'``:  every grid point gets assigned its last preceding value - if there is one available in
-      the preceding sampling interval.
-
-    Note: the flags associated with every datapoint will just get shifted with them.
-
-    Note: if there is no valid data (existing and not-na) available in a sampling interval assigned to a regular
-    timestamp by the selected method, nan gets assigned to this timestamp. The associated flag will be of value
-    ``flagger.UNFLAGGED``.
-
-    Note: all data NaNs get excluded from shifting by default. If to_drop is None, all *BAD* flagged values get
-    excluded as well.
-
-    Note: the method will likely and significantly alter values and shape of ``data[field]``. The original data is kept
-    in the data dios and assigned to the fieldname ``field + '_original'``.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The field name of the column, holding the data-to-be-regularized.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    freq : str
-        The frequency of the grid you want to shift your data to.
-    method : {'nshift', 'bshift', 'fshift'}, default 'nshift'
-        Specifies if datapoints get propagated forwards, backwards or to the nearest grid timestamp.
-        See description above for details
-    to_drop : {List[str], str}, default None
-        Flag types you want to drop before shifting - effectively excluding values that are flagged
-        with a flag in to_drop from the shifting process. Default - results in flagger.BAD
-        values being dropped initially.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data values and shape may have changed relatively to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values and shape may have changed relatively to the flagger input.
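-
-    Examples
-    --------
-    A hypothetical call (field name and frequency are illustrative only), shifting "temp"
-    onto a regular 15 minute grid using nearest-neighbour shifting:
-
-    >>> data, flagger = harm_shift2Grid(data, "temp", flagger, freq="15Min", method="nshift")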
-    """
-
-    data, flagger = proc_fork(data, field, flagger)
-    data, flagger = proc_shift(
-        data, field, flagger, freq, method, to_drop=to_drop, empty_intervals_flag=flagger.UNFLAGGED, **kwargs
-    )
-    return data, flagger
-
-
-@register(masking='none')
-def harm_aggregate2Grid(
-        data, field, flagger, freq, value_func, flag_func=np.nanmax, method="nagg", to_drop=None, **kwargs
-):
-    """
-    A method to "regularize" data by aggregating (resampling) data at a regular timestamp.
-
-    A series of data is considered "regular", if it is sampled regularly (= having uniform sampling rate).
-
-    The data will therefore get aggregated with a function specified by the `value_func` parameter, and
-    the result gets projected onto the new timestamps with a method specified by `method`.
-
-    The following method (keywords) are available:
-
-    * ``'nagg'``: (aggregation to nearest) - all values in the range (+/- freq/2) of a grid point get aggregated with
-      `agg_func` and assigned to it. Flags get aggregated by `flag_func` and assigned the same way.
-    * ``'bagg'``: (backwards aggregation) - all values in a sampling interval get aggregated with agg_func and the
-      result gets assigned to the last regular timestamp. Flags get aggregated by `flag_func` and assigned the same way.
-    * ``'fagg'``: (forward aggregation) - all values in a sampling interval get aggregated with agg_func and the result
-      gets assigned to the next regular timestamp. Flags get aggregated by `flag_func` and assigned the same way.
-
-    Note, that, if there is no valid data (existing and not-na) available in a sampling interval assigned to a regular
-    timestamp by the selected method, nan gets assigned to this timestamp. The associated flag will be of value
-    ``flagger.UNFLAGGED``.
-
-    Note: the method will likely and significantly alter values and shape of ``data[field]``. The original data is kept
-    in the data dios and assigned to the fieldname ``field + '_original'``.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-regularized.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    freq : str
-        The sampling frequency the data is to be aggregated (resampled) at.
-    value_func : Callable
-        The function you want to use for aggregation.
-    flag_func : Callable
-        The function you want to aggregate the flags with. It should be capable of operating on the flags dtype
-        (usually ordered categorical).
-    method : {'fagg', 'bagg', 'nagg'}, default 'nagg'
-        Specifies which intervals to be aggregated for a certain timestamp. (preceeding, succeeding or
-        "surrounding" interval). See description above for more details.
-    to_drop : {List[str], str}, default None
-        Flagtypes you want to drop before aggregation - effectively excluding values that are flagged
-        with a flag in to_drop from the aggregation process. Default results in flagger.BAD
-        values being dropped initially.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data values and shape may have changed relatively to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values and shape may have changed relatively to the flagger input.
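-
-    Examples
-    --------
-    A hypothetical call (field name, frequency and functions are illustrative only), resampling
-    "precip" to hourly sums while keeping the worst flag of each interval:
-
-    >>> data, flagger = harm_aggregate2Grid(data, "precip", flagger, freq="1H",
-    ...                                     value_func=np.nansum, flag_func=np.nanmax, method="nagg")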
-    """
-
-    data, flagger = proc_fork(data, field, flagger)
-    data, flagger = proc_resample(
-        data,
-        field,
-        flagger,
-        freq,
-        agg_func=value_func,
-        flag_agg_func=flag_func,
-        method=method,
-        empty_intervals_flag=flagger.UNFLAGGED,
-        to_drop=to_drop,
-        all_na_2_empty=True,
-        **kwargs,
-    )
-    return data, flagger
-
-
-@register(masking='none')
-def harm_linear2Grid(data, field, flagger, freq, to_drop=None, **kwargs):
-    """
-    A method to "regularize" data by interpolating linearly the data at regular timestamp.
-
-    A series of data is considered "regular", if it is sampled regularly (= having uniform sampling rate).
-
-    Interpolated values will get assigned the worst flag within freq-range.
-
-    Note: the method will likely and significantly alter values and shape of ``data[field]``. The original data is kept
-    in the data dios and assigned to the fieldname ``field + '_original'``.
-
-    Note, that the data only gets interpolated at those (regular) timestamps, that have a valid (existing and
-    not-na) datapoint preceding them and one succeeding them within freq range.
-    Regular timestamps that do not satisfy this condition get NaN assigned, and the associated flag will be of value
-    ``flagger.UNFLAGGED``.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-regularized.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    freq : str
-        An offset string. The frequency of the grid you want to interpolate your data at.
-    to_drop : {List[str], str}, default None
-        Flagtypes you want to drop before interpolation - effectively excluding values that are flagged
-        with a flag in to_drop from the interpolation process. Default results in flagger.BAD
-        values being dropped initially.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data values and shape may have changed relatively to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values and shape may have changed relatively to the flagger input.
-    """
-
-    data, flagger = proc_fork(data, field, flagger)
-    data, flagger = proc_interpolateGrid(
-        data, field, flagger, freq, "time", to_drop=to_drop, empty_intervals_flag=flagger.UNFLAGGED, **kwargs
-    )
-    return data, flagger
-
-
-@register(masking='none')
-def harm_interpolate2Grid(data, field, flagger, freq, method, order=1, to_drop=None, **kwargs,):
-    """
-    A method to "regularize" data by interpolating the data at regular timestamp.
-
-    A series of data is considered "regular", if it is sampled regularly (= having uniform sampling rate).
-
-    Interpolated values will get assigned the worst flag within freq-range.
-
-    All the interpolation methods from pandas.Series.interpolate are available and are called by
-    the very same keywords.
-
-    Note, that, to perform a timestamp aware, linear interpolation, you have to pass ``'time'`` as `method`,
-    and NOT ``'linear'``.
-
-    Note: the `method` will likely and significantly alter values and shape of ``data[field]``. The original data is
-    kept in the data dios and assigned to the fieldname ``field + '_original'``.
-
-    Note, that the data only gets interpolated at those (regular) timestamps, that have a valid (existing and
-    not-na) datapoint preceeding them and one succeeding them within freq range.
-    Regular timestamp that do not suffice this condition get nan assigned AND The associated flag will be of value
-    ``flagger.UNFLAGGED``.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-regularized.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    freq : str
-        An offset string. The frequency of the grid you want to interpolate your data at.
-    method : {"linear", "time", "nearest", "zero", "slinear", "quadratic", "cubic", "spline", "barycentric",
-        "polynomial", "krogh", "piecewise_polynomial", "spline", "pchip", "akima"}: string
-        The interpolation method you want to apply.
-    order : int, default 1
-        If your selected interpolation method can be performed at different *orders* - here you pass the desired
-        order.
-    to_drop : {List[str], str}, default None
-        Flagtypes you want to drop before interpolation - effectively excluding values that are flagged
-        with a flag in `to_drop` from the interpolation process. Default results in ``flagger.BAD``
-        values being dropped initially.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data values and shape may have changed relatively to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values and shape may have changed relatively to the flagger input.
-    """
-
-    data, flagger = proc_fork(data, field, flagger)
-    data, flagger = proc_interpolateGrid(
-        data,
-        field,
-        flagger,
-        freq,
-        method=method,
-        inter_order=order,
-        to_drop=to_drop,
-        empty_intervals_flag=flagger.UNFLAGGED,
-        **kwargs,
-    )
-    return data, flagger
-
-
-@register(masking='none')
-def harm_deharmonize(data, field, flagger, method, to_drop=None, **kwargs):
-    """
-    The function "undoes" regularization by regaining the original data and projecting the
-    flags calculated for the regularized data onto the original ones.
-
-    Afterwards the regularized data is removed from the data dios and ``'field'`` will be associated
-    with the original data "again".
-
-    Wherever the flags in the original data are "better" than the regularized flags projected on them,
-    they get overridden with the regularized flags value.
-
-    Which regularized flags are to be projected on which original flags, is controlled by the "method" parameters.
-
-    Generally, if you regularized with the method "X", you should pass the method "inverse_X" to the deharmonization.
-    If you regularized with an interpolation, the method "inverse_interpolation" would be the appropriate choice.
-    Also you should pass the same drop flags keyword.
-
-    The deharm methods in detail:
-    ("original_flags" are associated with the original data that is to be regained,
-    "regularized_flags" are associated with the regularized data that is to be "deharmonized",
-    "freq" refers to the regularized datas sampling frequencie)
-
-    * ``'inverse_nagg'``: all original_flags within the range *+/- freq/2* of a regularized_flag, get assigned this
-      regularized flags value. (if regularized_flags > original_flag)
-    * ``'inverse_bagg'``: all original_flags succeeding a regularized_flag within the range of "freq", get assigned this
-      regularized flags value. (if regularized_flag > original_flag)
-    * ``'inverse_fagg'``: all original_flags preceding a regularized_flag within the range of "freq", get assigned this
-      regularized flags value. (if regularized_flag > original_flag)
-
-    * ``'inverse_interpolation'``: all original_flags within the range *+/- freq* of a regularized_flag, get assigned this
-      regularized flags value (if regularized_flag > original_flag).
-
-    * ``'inverse_nshift'``: That original_flag within the range +/- *freq/2*, that is nearest to a regularized_flag,
-      gets the regularized flags value. (if regularized_flag > original_flag)
-    * ``'inverse_bshift'``: That original_flag succeeding a source flag within the range freq, that is nearest to a
-      regularized_flag, gets assigned this regularized flags value. (if regularized_flag > original_flag)
-    * ``'inverse_fshift'``: That original_flag preceding a regularized flag within the range freq, that is nearest to a
-      regularized_flag, gets assigned this regularized flags value. (if regularized_flag > original_flag)
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-deharmonized.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    method : {'inverse_fagg', 'inverse_bagg', 'inverse_nagg', 'inverse_fshift', 'inverse_bshift', 'inverse_nshift',
-            'inverse_interpolation'}
-        The method used for projection of regularized flags onto original flags. See description above for more
-        details.
-    to_drop : {List[str], str}, default None
-        Flagtypes you want to drop before interpolation - effectively excluding values that are flagged
-        with a flag in to_drop from the interpolation process. Default results in flagger.BAD
-        values being dropped initially.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data values and shape may have changed relatively to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values and shape may have changed relatively to the flagger input.
-    """
-
-    newfield = str(field) + ORIGINAL_SUFFIX
-    data, flagger = proc_projectFlags(data, newfield, flagger, method, source=field, to_drop=to_drop, **kwargs)
-    data, flagger = proc_drop(data, field, flagger)
-    data, flagger = proc_rename(data, newfield, flagger, field)
-    return data, flagger
diff --git a/saqc/funcs/interpolation.py b/saqc/funcs/interpolation.py
new file mode 100644
index 0000000000000000000000000000000000000000..53e4f1e6e341c896d65a2dd6bc482fd5052dc237
--- /dev/null
+++ b/saqc/funcs/interpolation.py
@@ -0,0 +1,297 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from typing import Tuple, Union, Callable
+from typing_extensions import Literal
+import numpy as np
+import pandas as pd
+from dios import DictOfSeries
+
+from saqc.constants import *
+from saqc.core import flagging, processing, Flags
+from saqc.core.register import _isflagged
+from saqc.lib.ts_operators import interpolateNANs
+
+_SUPPORTED_METHODS = Literal[
+    "linear",
+    "time",
+    "nearest",
+    "zero",
+    "slinear",
+    "quadratic",
+    "cubic",
+    "spline",
+    "barycentric",
+    "polynomial",
+    "krogh",
+    "piecewise_polynomial",
+    "spline",
+    "pchip",
+    "akima",
+]
+
+
+@flagging(masking="field")
+def interpolateByRolling(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    window: Union[str, int],
+    func: Callable[[pd.Series], float] = np.median,
+    center: bool = True,
+    min_periods: int = 0,
+    flag: float = UNFLAGGED,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Interpolates nan-values in the data by assigning them the aggregation result of the window surrounding them.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        The data container.
+
+    field : str
+        Name of the column, holding the data-to-be-interpolated.
+
+    flags : saqc.Flags
+        A flags object, holding flags and additional Information related to `data`.
+
+    window : int, str
+        The size of the window the aggregation is computed from. An integer defines the number of periods to be used,
+        a string is interpreted as an offset (see `pandas.rolling` for more information).
+        Integer windows may result in erroneous aggregations if applied to non-harmonized or irregular data.
+
+    func : Callable
+        The function used for aggregation.
+
+    center : bool, default True
+        Center the window around the value. Can only be used with integer windows, otherwise it is silently ignored.
+
+    min_periods : int
+        Minimum number of valid (not np.nan) values that have to be available in a window for its aggregation to be
+        computed.
+
+    flag : float or None, default UNFLAGGED
+        Flag that is to be inserted for the interpolated values. If ``None`` no flags are set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        Data values may have changed relatively to the data input.
+    flags : saqc.Flags
+        The quality flags of data
+    """
+
+    data = data.copy()
+    datcol = data[field]
+    roller = datcol.rolling(window=window, center=center, min_periods=min_periods)
+    try:
+        func_name = func.__name__
+        if func_name[:3] == "nan":
+            func_name = func_name[3:]
+        rolled = getattr(roller, func_name)()
+    except AttributeError:
+        rolled = roller.apply(func)
+
+    na_mask = datcol.isna()
+    interpolated = na_mask & rolled.notna()
+    datcol[na_mask] = rolled[na_mask]
+    data[field] = datcol
+
+    if flag is not None:
+        flags[interpolated, field] = flag
+
+    return data, flags
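# A minimal sketch of the rolling-aggregation fill idea above, using plain pandas on
# assumed toy data (not the saqc call itself): NaNs are replaced by the window median.
import numpy as np
import pandas as pd

s = pd.Series(
    [1.0, np.nan, 3.0, np.nan, np.nan, 6.0],
    index=pd.date_range("2021-01-01", periods=6, freq="10min"),
)
rolled = s.rolling(window=3, center=True, min_periods=1).median()
na_mask = s.isna()
filled = s.copy()
filled[na_mask] = rolled[na_mask]  # only the NaN positions are replaced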
+
+
+@flagging(masking="field")
+def interpolateInvalid(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    method: _SUPPORTED_METHODS,
+    order: int = 2,
+    limit: int = 2,
+    downgrade: bool = False,
+    flag: float = UNFLAGGED,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Function to interpolate nan values in the data.
+
+    All the interpolation methods of ``pd.Series.interpolate`` are available and are selected by the very same
+    keywords you would pass to its ``method`` parameter.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        The data container.
+
+    field : str
+        Name of the column, holding the data-to-be-interpolated.
+
+    flags : saqc.Flags
+        A flags object, holding flags and additional Information related to `data`.
+
+    method : {"linear", "time", "nearest", "zero", "slinear", "quadratic", "cubic", "spline", "barycentric",
+        "polynomial", "krogh", "piecewise_polynomial", "spline", "pchip", "akima"}
+        The interpolation method to use.
+
+    order : int, default 2
+        If your selected interpolation method can be performed at different 'orders', pass the desired
+        order here.
+
+    limit : int, default 2
+        Maximum number of consecutive 'nan' values allowed for a gap to be interpolated. This restricts the
+        interpolation to gaps containing no more than `limit` successive nan entries.
+
+    flag : float or None, default UNFLAGGED
+        Flag that is set for interpolated values. If ``None``, no flags are set at all.
+
+    downgrade : bool, default False
+        If `True` and the interpolation cannot be performed at the current order, retry with a lower order.
+        This can happen because the chosen ``method`` does not support the passed ``order``, or
+        simply because not enough values are present in an interval.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        Data values may have changed relatively to the data input.
+    flags : saqc.Flags
+        The quality flags of data
+    """
+    inter_data = interpolateNANs(
+        data[field],
+        method,
+        order=order,
+        inter_limit=limit,
+        downgrade_interpolation=downgrade,
+    )
+    interpolated = data[field].isna() & inter_data.notna()
+
+    if flag is not None:
+        flags[interpolated, field] = flag
+
+    data[field] = inter_data
+    return data, flags
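# A minimal sketch of the gap-limited interpolation described above, approximated with
# plain pandas on assumed toy data: gaps longer than `limit` are left untouched.
import numpy as np
import pandas as pd

s = pd.Series([0.0, np.nan, np.nan, 3.0, np.nan, np.nan, np.nan, 7.0])
limit = 2

na = s.isna()
run_id = (~na).cumsum()                        # label each run of consecutive NaNs
run_len = na.groupby(run_id).transform("sum")  # length of the NaN run each value sits in

out = s.interpolate(method="linear")
out[na & (run_len > limit)] = np.nan           # revert gaps longer than `limit`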
+
+
+def _resampleOverlapping(data: pd.Series, freq: str, fill_value):
+    """TODO: docstring needed"""
+    dtype = data.dtype
+    end = data.index[-1].ceil(freq)
+    data = data.resample(freq).max()
+    data = data.combine(data.shift(1, fill_value=fill_value), max)
+    if end not in data:
+        data.loc[end] = fill_value
+    return data.fillna(fill_value).astype(dtype)
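# A small sketch of how the helper above can be used: a flag-like series is projected
# onto a regular grid so that every original timestamp influences the grid points on
# either side of it (toy values assumed; -inf stands in for an "unflagged" placeholder).
import numpy as np
import pandas as pd

flags_like = pd.Series(
    [10.0, 255.0, 10.0],
    index=pd.to_datetime(["2021-01-01 00:07", "2021-01-01 00:21", "2021-01-01 00:34"]),
)
on_grid = _resampleOverlapping(flags_like, "15min", fill_value=-np.inf)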
+
+
+@processing()
+def interpolateIndex(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    freq: str,
+    method: _SUPPORTED_METHODS,
+    order: int = 2,
+    limit: int = 2,
+    downgrade: bool = False,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Function to interpolate the data at regular (equidistant) timestamps (or Grid points).
+
+    Note that the interpolation will only be calculated for grid timestamps that have a preceding AND a succeeding
+    valid data value within "freq" range.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        The data container.
+
+    field : str
+        Name of the column, holding the data-to-be-interpolated.
+
+    flags : saqc.Flags
+        A flags object, holding flags and additional Information related to `data`.
+
+    freq : str
+        An Offset String, interpreted as the frequency of
+        the grid you want to interpolate your data at.
+
+    method : {"linear", "time", "nearest", "zero", "slinear", "quadratic", "cubic", "spline", "barycentric",
+        "polynomial", "krogh", "piecewise_polynomial", "spline", "pchip", "akima"}: string
+        The interpolation method you want to apply.
+
+    order : int, default 2
+        If your selected interpolation method can be performed at different 'orders', pass the desired
+        order here.
+
+    limit : int, default 2
+        Maximum number of consecutive 'nan' values allowed for a gap to be interpolated. This restricts the
+        interpolation to gaps containing no more than `limit` successive nan entries.
+
+    downgrade : bool, default False
+        If `True` and the interpolation cannot be performed at the current order, retry with a lower order.
+        This can happen because the chosen ``method`` does not support the passed ``order``, or
+        simply because not enough values are present in an interval.
+
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        Data values and shape may have changed relatively to the data input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values and shape may have changed relatively to the flags input.
+    """
+    if data[field].empty:
+        return data, flags
+
+    datcol = data[field].copy()
+
+    start, end = datcol.index[0].floor(freq), datcol.index[-1].ceil(freq)
+    grid_index = pd.date_range(start=start, end=end, freq=freq, name=datcol.index.name)
+
+    flagged = _isflagged(flags[field], kwargs["to_mask"])
+
+    # drop all points that hold no relevant grid information
+    datcol = datcol[~flagged].dropna()
+
+    # account for annoying case of subsequent frequency aligned values,
+    # that differ exactly by the margin of 2*freq
+    gaps = datcol.index[1:] - datcol.index[:-1] == 2 * pd.Timedelta(freq)
+    gaps = datcol.index[1:][gaps]
+    gaps = gaps.intersection(grid_index).shift(-1, freq)
+
+    # prepare grid interpolation:
+    datcol = datcol.reindex(datcol.index.union(grid_index))
+
+    # do the grid interpolation
+    inter_data = interpolateNANs(
+        data=datcol,
+        method=method,
+        order=order,
+        inter_limit=limit,
+        downgrade_interpolation=downgrade,
+    )
+
+    # override falsely interpolated values:
+    inter_data[gaps] = np.nan
+
+    # store interpolated grid
+    data[field] = inter_data[grid_index]
+
+    history = flags.history[field].apply(
+        index=data[field].index,
+        func=_resampleOverlapping,
+        func_kws=dict(freq=freq, fill_value=UNFLAGGED),
+        copy=False,
+    )
+
+    flags.history[field] = history
+    return data, flags
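# A reduced sketch of the grid construction and reindex-then-interpolate idea used in
# interpolateIndex, done with plain pandas on an assumed, irregularly sampled toy series
# (flag handling and the 2*freq gap correction are omitted here).
import pandas as pd

raw = pd.Series(
    [1.0, 2.0, 4.0],
    index=pd.to_datetime(["2021-01-01 00:03", "2021-01-01 00:17", "2021-01-01 00:26"]),
)
freq = "10min"
start, end = raw.index[0].floor(freq), raw.index[-1].ceil(freq)
grid = pd.date_range(start, end, freq=freq)

# union the grid with the original timestamps, interpolate, then keep grid points only
on_grid = raw.reindex(raw.index.union(grid)).interpolate(method="time").reindex(grid)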
diff --git a/saqc/funcs/modelling.py b/saqc/funcs/modelling.py
deleted file mode 100644
index 59f169c521583b41b83c5781741ae1efa5836f05..0000000000000000000000000000000000000000
--- a/saqc/funcs/modelling.py
+++ /dev/null
@@ -1,576 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import pandas as pd
-import numpy as np
-import numba
-from saqc.core.register import register
-from saqc.lib.ts_operators import (
-    polyRoller,
-    polyRollerNoMissing,
-    polyRollerNumba,
-    polyRollerNoMissingNumba,
-    polyRollerIrregular,
-    count
-)
-from saqc.lib.tools import seasonalMask, customRoller
-import logging
-
-logger = logging.getLogger("SaQC")
-
-
-@register(masking='field')
-def modelling_polyFit(data, field, flagger, winsz, polydeg, numba="auto", eval_flags=True, min_periods=0, **kwargs):
-    """
-    Function fits a polynomial model to the data and returns the residues.
-
-    The residue for value x is calculated by fitting a polynomial of degree "polydeg" to a data slice
-    of size "winsz", wich has x at its center.
-
-    Note, that the residues will be stored to the `field` field of the input data, so that the original data, the
-    polynomial is fitted to, gets overridden.
-
-    Note that, if data[field] is not aligned to an equidistant frequency grid, the window size passed
-    has to be an offset string. Also, numba boost options don't apply for irregularly sampled
-    timeseries.
-
-    Note that calculating the residues tends to be quite costly, because a function fit is performed for every
-    sample. To improve performance, consider the following possibilities:
-
-    In case your data is sampled at an equidistant frequency grid:
-
-    (1) If you know your data to have no significant number of missing values, or if you do not want to
-        calculate residues for windows containing missing values any way, performance can be increased by setting
-        min_periods=winsz.
-
-    (2) If your data consists of more than around 200000 samples, setting numba=True will boost the
-        calculations up to a factor of 5 (for samplesize > 300000) - however, for lower sample sizes
-        (sample_size < 50000), numba will slow down the calculations, also up to a factor of 5.
-        By default (numba='auto'), numba is set to true, if the data sample size exceeds 200000.
-
-    in case your data is not sampled at an equidistant frequency grid:
-
-    (1) Harmonization/resampling of your data will have a noticeable impact on the polyfitting's performance - since
-        numba_boost doesn't apply for irregularly sampled data in the current implementation.
-
-    Note, that in the current implementation, the initial and final winsz/2 values do not get fitted.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-modelled.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    winsz : {str, int}
-        The size of the window you want to use for fitting. If an integer is passed, the size
-        refers to the number of periods for every fitting window. If an offset string is passed,
-        the size refers to the total temporal extension. The window will be centered around the value-to-be-fitted.
-        For regularly sampled timeseries the period number will be casted down to an odd number if
-        even.
-    polydeg : int
-        The degree of the polynomial used for fitting
-    numba : {True, False, "auto"}, default "auto"
-        Whether or not to apply numba's just-in-time compilation onto the poly fit function. This will noticeably
-        increase the speed of calculation, if the sample size is sufficiently high.
-        If "auto" is selected, numba compatible fit functions get applied for data consisting of > 200000 samples.
-    eval_flags : bool, default True
-        Whether or not to assign new flags to the calculated residuals. If True, a residual gets assigned the worst
-        flag present in the interval, the data for its calculation was obtained from.
-    min_periods : {int, np.nan}, default 0
-        The minimum number of periods, that has to be available in every values fitting surrounding for the polynomial
-        fit to be performed. If there are not enough values, np.nan gets assigned. Default (0) results in fitting
-        regardless of the number of values present (results in overfitting for too sparse intervals). To automatically
-        set the minimum number of periods to the number of values in an offset defined window size, pass np.nan.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data values may have changed relatively to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
-
-    """
-    if data[field].empty:
-        return data, flagger
-    data = data.copy()
-    to_fit = data[field]
-    flags = flagger.getFlags(field)
-    if not to_fit.index.freqstr:
-        if isinstance(winsz, int):
-            raise NotImplementedError("Integer based window size is not supported for not-harmonized" "sample series.")
-        # get interval centers
-        centers = np.floor((to_fit.rolling(pd.Timedelta(winsz) / 2, closed="both", min_periods=min_periods).count()))
-        centers = centers.drop(centers[centers.isna()].index)
-        centers = centers.astype(int)
-        residues = to_fit.rolling(pd.Timedelta(winsz), closed="both", min_periods=min_periods).apply(
-            polyRollerIrregular, args=(centers, polydeg)
-        )
-
-        def center_func(x, y=centers):
-            pos = x.index[int(len(x) - y[x.index[-1]])]
-            return y.index.get_loc(pos)
-
-        centers_iloc = centers.rolling(winsz, closed="both").apply(center_func, raw=False).astype(int)
-        temp = residues.copy()
-        for k in centers_iloc.iteritems():
-            residues.iloc[k[1]] = temp[k[0]]
-        residues[residues.index[0] : residues.index[centers_iloc[0]]] = np.nan
-        residues[residues.index[centers_iloc[-1]] : residues.index[-1]] = np.nan
-    else:
-        if isinstance(winsz, str):
-            winsz = int(np.floor(pd.Timedelta(winsz) / pd.Timedelta(to_fit.index.freqstr)))
-        if winsz % 2 == 0:
-            winsz = int(winsz - 1)
-        if numba == "auto":
-            if to_fit.shape[0] < 200000:
-                numba = False
-            else:
-                numba = True
-
-        val_range = np.arange(0, winsz)
-        center_index = int(np.floor(winsz / 2))
-        if min_periods < winsz:
-            if min_periods > 0:
-                to_fit = to_fit.rolling(winsz, min_periods=min_periods, center=True).apply(
-                    lambda x, y: x[y], raw=True, args=(center_index,)
-                )
-
-            # we need a missing value marker that is not nan, because nan values dont get passed by pandas rolling
-            # method
-            miss_marker = to_fit.min()
-            miss_marker = np.floor(miss_marker - 1)
-            na_mask = to_fit.isna()
-            to_fit[na_mask] = miss_marker
-            if numba:
-                residues = to_fit.rolling(winsz).apply(
-                    polyRollerNumba,
-                    args=(miss_marker, val_range, center_index, polydeg),
-                    raw=True,
-                    engine="numba",
-                    engine_kwargs={"no_python": True},
-                )
-                # due to a tiny bug - rolling with center=True doesnt work when using numba engine.
-                residues = residues.shift(-int(center_index))
-            else:
-                residues = to_fit.rolling(winsz, center=True).apply(
-                    polyRoller, args=(miss_marker, val_range, center_index, polydeg), raw=True
-                )
-            residues[na_mask] = np.nan
-        else:
-            # we only fit fully populated intervals:
-            if numba:
-                residues = to_fit.rolling(winsz).apply(
-                    polyRollerNoMissingNumba,
-                    args=(val_range, center_index, polydeg),
-                    engine="numba",
-                    engine_kwargs={"no_python": True},
-                    raw=True,
-                )
-                # due to a tiny bug - rolling with center=True doesnt work when using numba engine.
-                residues = residues.shift(-int(center_index))
-            else:
-                residues = to_fit.rolling(winsz, center=True).apply(
-                    polyRollerNoMissing, args=(val_range, center_index, polydeg), raw=True
-                )
-
-    residues = residues - to_fit
-    data[field] = residues
-    if eval_flags:
-        num_cats, codes = flags.factorize()
-        num_cats = pd.Series(num_cats, index=flags.index).rolling(winsz, center=True, min_periods=min_periods).max()
-        nan_samples = num_cats[num_cats.isna()]
-        num_cats.drop(nan_samples.index, inplace=True)
-        to_flag = pd.Series(codes[num_cats.astype(int)], index=num_cats.index)
-        to_flag = to_flag.align(nan_samples)[0]
-        to_flag[nan_samples.index] = flags[nan_samples.index]
-        flagger = flagger.setFlags(field, to_flag.values, **kwargs)
-
-    return data, flagger
-
-
-@register(masking='field')
-def modelling_rollingMean(data, field, flagger, winsz, eval_flags=True, min_periods=0, center=True, **kwargs):
-    """
-    Models the data with the rolling mean and returns the residues.
-
-    Note, that the residues will be stored to the `field` field of the input data, so that the data that is modelled
-    gets overridden.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-modelled.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    winsz : {int, str}
-        The size of the window you want to roll with. If an integer is passed, the size
-        refers to the number of periods for every fitting window. If an offset string is passed,
-        the size refers to the total temporal extension.
-        For regularly sampled timeseries, the period number will be casted down to an odd number if
-        center = True.
-    eval_flags : bool, default True
-        Whether or not to assign new flags to the calculated residuals. If True, a residual gets assigned the worst
-        flag present in the interval, the data for its calculation was obtained from.
-        Currently not implemented in combination with not-harmonized timeseries.
-    min_periods : int, default 0
-        The minimum number of periods, that has to be available in every values fitting surrounding for the mean
-        fitting to be performed. If there are not enough values, np.nan gets assigned. Default (0) results in fitting
-        regardless of the number of values present.
-    center : bool, default True
-        Whether or not to center the window the mean is calculated over around the reference value. If False,
-        the reference value is placed to the right of the window (classic rolling mean with lag).
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data values may have changed relatively to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
-    """
-
-    data = data.copy()
-    to_fit = data[field]
-    flags = flagger.getFlags(field)
-    if to_fit.empty:
-        return data, flagger
-
-    # starting with the annoying case: finding the rolling interval centers of not-harmonized input time series:
-    if (to_fit.index.freqstr is None) and center:
-        if isinstance(winsz, int):
-            raise NotImplementedError(
-                "Integer based window size is not supported for not-harmonized"
-                'sample series when rolling with "center=True".'
-            )
-        # get interval centers
-        centers = np.floor((to_fit.rolling(pd.Timedelta(winsz) / 2, closed="both", min_periods=min_periods).count()))
-        centers = centers.drop(centers[centers.isna()].index)
-        centers = centers.astype(int)
-        means = to_fit.rolling(pd.Timedelta(winsz), closed="both", min_periods=min_periods).mean()
-
-        def center_func(x, y=centers):
-            pos = x.index[int(len(x) - y[x.index[-1]])]
-            return y.index.get_loc(pos)
-
-        centers_iloc = centers.rolling(winsz, closed="both").apply(center_func, raw=False).astype(int)
-        temp = means.copy()
-        for k in centers_iloc.iteritems():
-            means.iloc[k[1]] = temp[k[0]]
-        # last values are false, due to structural reasons:
-        means[means.index[centers_iloc[-1]] : means.index[-1]] = np.nan
-
-    # everything is more easy if data[field] is harmonized:
-    else:
-        if isinstance(winsz, str):
-            winsz = int(np.floor(pd.Timedelta(winsz) / pd.Timedelta(to_fit.index.freqstr)))
-        if (winsz % 2 == 0) & center:
-            winsz = int(winsz - 1)
-
-        means = to_fit.rolling(window=winsz, center=center, closed="both").mean()
-
-    residues = means - to_fit
-    data[field] = residues
-    if eval_flags:
-        num_cats, codes = flags.factorize()
-        num_cats = pd.Series(num_cats, index=flags.index).rolling(winsz, center=True, min_periods=min_periods).max()
-        nan_samples = num_cats[num_cats.isna()]
-        num_cats.drop(nan_samples.index, inplace=True)
-        to_flag = pd.Series(codes[num_cats.astype(int)], index=num_cats.index)
-        to_flag = to_flag.align(nan_samples)[0]
-        to_flag[nan_samples.index] = flags[nan_samples.index]
-        flagger = flagger.setFlags(field, to_flag.values, **kwargs)
-
-    return data, flagger
-
-
-def modelling_mask(data, field, flagger, mode, mask_var=None, season_start=None, season_end=None,
-                   include_bounds=True):
-    """
-    This function realizes masking within saqc.
-
-    Due to some inner saqc mechanics, it is not straightforwardly possible to exclude
-    values or datachunks from flagging routines. This function replaces flags with the np.nan
-    value, wherever values are to get masked. Furthermore, the masked values get replaced by
-    np.nan, so that they don't affect calculations.
-
-    Here comes a recipe on how to apply a flagging function only on a masked chunk of the variable field:
-
-    1. dublicate "field" in the input data (proc_fork)
-    2. mask the dublicated data (modelling_mask)
-    3. apply the tests you only want to be applied onto the masked data chunks (saqc_tests)
-    4. project the flags, calculated on the dublicated and masked data onto the original field data
-        (proc_projectFlags or flagGeneric)
-    5. drop the dublicated data (proc_drop)
-
-    To see an implemented example, checkout flagSeasonalRange in the saqc.functions module
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-masked.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    mode : {"seasonal", "mask_var"}
-        The masking mode.
-        - "seasonal": parameters "season_start", "season_end" are evaluated to generate a seasonal (periodical) mask
-        - "mask_var": data[mask_var] is expected to be a boolean valued timeseries and is used as mask.
-    mask_var : {None, str}, default None
-        Only effective if mode == "mask_var"
-        Fieldname of the column, holding the data that is to be used as mask. (must be a boolean series)
-        Neither the series` length nor its labels have to match data[field]`s index and length. An inner join of the
-        indices will be calculated and values get masked where the values of the inner join are "True".
-    season_start : {None, str}, default None
-        Only effective if mode == "seasonal"
-        String denoting starting point of every period. Formally, it has to be a truncated instance of "mm-ddTHH:MM:SS".
-        Has to be of same length as `season_end` parameter.
-        See examples section below for some examples.
-    season_end : {None, str}, default None
-        Only effective if mode == "seasonal"
-        String denoting the ending point of every period. Formally, it has to be a truncated instance of "mm-ddTHH:MM:SS".
-        Has to be of same length as the `season_start` parameter.
-        See examples section below for some examples.
-    include_bounds : boolean
-        Whether or not to include the bounds defining the mask in the mask itself.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data values may have changed relatively to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
-
-
-    Examples
-    --------
-    The `season_start` and `season_end` parameters provide a convenient way to generate seasonal / date-periodic masks.
-    They have to be strings of the forms: "mm-ddTHH:MM:SS", "ddTHH:MM:SS" , "HH:MM:SS", "MM:SS" or "SS"
-    (mm=month, dd=day, HH=hour, MM=minute, SS=second)
-    Single digit specifications have to be given with leading zeros.
-    `season_start` and `season_end` strings have to be of same length (refer to the same periodicity).
-    The highest date unit gives the period.
-    For example:
-
-    >>> season_start = "01T15:00:00"
-    >>> season_end = "13T17:30:00"
-
-    Will result in all values sampled between 15:00 on the 1st and 17:30 on the 13th of every month getting masked.
-
-    >>> season_start = "01:00"
-    >>> season_end = "04:00"
-
-    All the values between the first and 4th minute of every hour get masked.
-
-    >>> season_start = "01-01T00:00:00"
-    >>> season_end = "01-03T00:00:00"
-
-    Mask January and February of every year. Masking is always inclusive, so in this case the mask will
-    include 00:00:00 at the first of March. To exclude this one, pass:
-
-    >>> season_start = "01-01T00:00:00"
-    >>> season_end = "02-28T23:59:59"
-
-    To mask intervals that wrap around the end of a period, like nights or winter, exchange the order of season start
-    and season end. For example, to mask night hours between 22:00:00 in the evening and 06:00:00 in the morning, pass:
-
-    >>> season_start = "22:00:00"
-    >>> season_end = "06:00:00"
-
-    When inclusive_selection="season", all above examples work the same way, only that you now
-    determine which values NOT to mask (i.e. which values are to constitute the "seasons").
-    """
-    data = data.copy()
-    datcol_idx = data[field].index
-
-    if mode == 'seasonal':
-        to_mask = seasonalMask(datcol_idx, season_start, season_end, include_bounds)
-    elif mode == 'mask_var':
-        idx = data[mask_var].index.intersection(datcol_idx)
-        to_mask = data.loc[idx, mask_var]
-    else:
-        raise ValueError("Keyword passed as masking mode is unknown ({})!".format(mode))
-
-    data.aloc[to_mask, field] = np.nan
-    flagger = flagger.setFlags(field, loc=to_mask, flag=np.nan, force=True)
-
-    return data, flagger
-
-
-@numba.jit(parallel=True, nopython=True)
-def _slidingWindowSearchNumba(data_arr, bwd_start, fwd_end, split, stat_func, thresh_func, num_val):
-    stat_arr = np.zeros(num_val)
-    thresh_arr = np.zeros(num_val)
-    for win_i in numba.prange(0, num_val-1):
-        x = data_arr[bwd_start[win_i]:split[win_i]]
-        y = data_arr[split[win_i]:fwd_end[win_i]]
-        stat_arr[win_i] = stat_func(x, y)
-        thresh_arr[win_i] = thresh_func(x, y)
-    return stat_arr, thresh_arr
-
-
-def _slidingWindowSearch(data_arr, bwd_start, fwd_end, split, stat_func, thresh_func, num_val):
-    stat_arr = np.zeros(num_val)
-    thresh_arr = np.zeros(num_val)
-    for win_i in range(0, num_val-1):
-        x = data_arr[bwd_start[win_i]:split[win_i]]
-        y = data_arr[split[win_i]:fwd_end[win_i]]
-        stat_arr[win_i] = stat_func(x, y)
-        thresh_arr[win_i] = thresh_func(x, y)
-    return stat_arr, thresh_arr
-
-
-def _reduceCPCluster(stat_arr, thresh_arr, start, end, obj_func, num_val):
-    out_arr = np.zeros(shape=num_val, dtype=bool)
-    for win_i in numba.prange(0, num_val):
-        s, e = start[win_i], end[win_i]
-        x = stat_arr[s:e]
-        y = thresh_arr[s:e]
-        pos = s + obj_func(x, y) + 1
-        out_arr[s:e] = False
-        out_arr[pos] = True
-    return out_arr
-
-
-@register(masking='field')
-def modelling_changePointCluster(data, field, flagger, stat_func, thresh_func, bwd_window, min_periods_bwd,
-                                 fwd_window=None, min_periods_fwd=None, closed='both', try_to_jit=True,
-                                 reduce_window=None, reduce_func=lambda x, y: x.argmax(), flag_changepoints=False,
-                                 model_by_resids=False, **kwargs):
-    """
-    Assigns labels to the data, aiming to reflect continuous regimes of the processes the data is assumed to be
-    generated by.
-    The regime change points detection is based on a sliding window search.
-
-    Note, that the cluster labels will be stored to the `field` field of the input data, so that the data that is
-    clustered gets overridden.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data that is to be clustered.
-    flagger : saqc.flagger
-        A flagger object, holding flags and additional informations related to `data`.
-    stat_func : Callable[numpy.array, numpy.array]
-        A function that assigns a value to every twin window. Left window content will be passed to first variable,
-        right window content will be passed to the second.
-    thresh_func : Callable[numpy.array, numpy.array]
-        A function that determines the value level, exceeding which qualifies a timestamp's stat_func value as denoting a
-        changepoint.
-    bwd_window : str
-        The left (backwards facing) windows temporal extension (freq-string).
-    min_periods_bwd : {str, int}
-        Minimum number of periods that have to be present in a backwards facing window, for a changepoint test to be
-        performed.
-    fwd_window : {None, str}, default None
-        The right (forward facing) windows temporal extension (freq-string).
-    min_periods_fwd : {None, str, int}, default None
-        Minimum number of periods that have to be present in a forward facing window, for a changepoint test to be
-        performed.
-    closed : {'right', 'left', 'both', 'neither'}, default 'both'
-        Determines the closure of the sliding windows.
-    reduce_window : {None, False, str}, default None
-        The sliding window search method is not an exact changepoint search method: usually not a single changepoint,
-        but a "region" of change around a changepoint gets detected.
-        If `reduce_window` is not False, for every window of size `reduce_window`, the value with
-        index `reduce_func(x, y)` is selected and the others are dropped.
-        If `reduce_window` is None, the reduction window size equals the
-        twin window size the changepoints have been detected with.
-    reduce_func : Callable[numpy.array, numpy.array], default lambda x, y: x.argmax()
-        A function that must return an index value upon input of two arrays x and y.
-        First input parameter will hold the result from the stat_func evaluation for every
-        reduction window. Second input parameter holds the result from the thresh_func evaluation.
-        The default reduction function just selects the value that maximizes the stat_func.
-    flag_changepoints : bool, default False
-        If True, the points where a change in the data modelling regime is detected get flagged bad.
-    model_by_resids : bool, default False
-        If True, the data is replaced by the stat_func's results instead of regime labels.
-
-    Returns
-    -------
-
-    """
-    data = data.copy()
-    data_ser = data[field].dropna()
-    center = False
-    var_len = data_ser.shape[0]
-    if fwd_window is None:
-        fwd_window = bwd_window
-    if min_periods_fwd is None:
-        min_periods_fwd = min_periods_bwd
-    if reduce_window is None:
-        reduce_window = f"{int(pd.Timedelta(bwd_window).total_seconds() + pd.Timedelta(fwd_window).total_seconds())}s"
-
-    roller = customRoller(data_ser, window=bwd_window)
-    bwd_start, bwd_end = roller.window.get_window_bounds(len(data_ser), min_periods=min_periods_bwd, closed=closed)
-
-    roller = customRoller(data_ser, window=fwd_window, forward=True)
-    fwd_start, fwd_end = roller.window.get_window_bounds(len(data_ser), min_periods=min_periods_fwd, closed=closed)
-
-    min_mask = ~((fwd_end - fwd_start <= min_periods_fwd) | (bwd_end - bwd_start <= min_periods_bwd))
-    fwd_end = fwd_end[min_mask]
-    split = bwd_end[min_mask]
-    bwd_start = bwd_start[min_mask]
-    masked_index = data_ser.index[min_mask]
-    check_len = len(fwd_end)
-    data_arr = data_ser.values
-
-    if try_to_jit:
-        jit_sf = numba.jit(stat_func, nopython=True)
-        jit_tf = numba.jit(thresh_func, nopython=True)
-        try:
-            jit_sf(data_arr[bwd_start[0]:bwd_end[0]], data_arr[fwd_start[0]:fwd_end[0]])
-            jit_tf(data_arr[bwd_start[0]:bwd_end[0]], data_arr[fwd_start[0]:fwd_end[0]])
-            stat_func = jit_sf
-            thresh_func = jit_tf
-            try_to_jit = True
-        except numba.core.errors.TypingError:
-            try_to_jit = False
-            logging.warning('Could not jit passed statistic - omitting jitting!')
-
-    if try_to_jit:
-        stat_arr, thresh_arr = _slidingWindowSearchNumba(data_arr, bwd_start, fwd_end, split, stat_func, thresh_func,
-                                                    check_len)
-    else:
-        stat_arr, thresh_arr = _slidingWindowSearch(data_arr, bwd_start, fwd_end, split, stat_func, thresh_func,
-                                                    check_len)
-    result_arr = stat_arr > thresh_arr
-
-    if model_by_resids:
-        residues = pd.Series(np.nan, index=data[field].index)
-        residues[masked_index] = stat_arr
-        data[field] = residues
-        flagger = flagger.setFlags(field, flag=flagger.UNFLAGGED, force=True, **kwargs)
-        return data, flagger
-
-    det_index = masked_index[result_arr]
-    detected = pd.Series(True, index=det_index)
-    if reduce_window is not False:
-        l = detected.shape[0]
-        roller = customRoller(detected, window=reduce_window)
-        start, end = roller.window.get_window_bounds(num_values=l, min_periods=1, closed='both', center=True)
-
-        detected = _reduceCPCluster(stat_arr[result_arr], thresh_arr[result_arr], start, end, reduce_func, l)
-        det_index = det_index[detected]
-
-    cluster = pd.Series(False, index=data[field].index)
-    cluster[det_index] = True
-    cluster = cluster.cumsum()
-    # (better to start cluster labels with number one)
-    cluster += 1
-    data[field] = cluster
-    flagger = flagger.setFlags(field, flag=flagger.UNFLAGGED, force=True, **kwargs)
-    if flag_changepoints:
-        flagger = flagger.setFlags(field, loc=det_index)
-    return data, flagger
diff --git a/saqc/funcs/noise.py b/saqc/funcs/noise.py
new file mode 100644
index 0000000000000000000000000000000000000000..fb70835d98f8e64b56e1ffe4e0a0839159f81c36
--- /dev/null
+++ b/saqc/funcs/noise.py
@@ -0,0 +1,73 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+import pandas as pd
+import numpy as np
+import operator
+from dios import DictOfSeries
+from typing import Callable
+from saqc.constants import *
+from saqc.core import flagging, Flags
+from saqc.lib.types import FreqString
+from saqc.lib.tools import statPass
+
+
+@flagging(masking="field")
+def flagByStatLowPass(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    func: Callable[[np.array, pd.Series], float],
+    window: FreqString,
+    thresh: float,
+    sub_window: FreqString = None,
+    sub_thresh: float = None,
+    min_periods: int = None,
+    flag: float = BAD,
+    **kwargs
+):
+    """
+    Flag *chunks* of length `window`, if:
+
+    1. they exceed `thresh` with regard to the statistic computed by `func` and
+    2. all (possibly overlapping) *sub-chunks* of the *chunk*, with length `sub_window`,
+       exceed `sub_thresh` with regard to that statistic.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, holding the data-to-be-flagged.
+    flags : saqc.Flags
+        Container to store quality flags to data.
+    func: Callable[[np.array, pd.Series], float]
+        Function to aggregate chunk content with.
+    window: FreqString
+        Temporal extension of the chunks to test
+    thresh: float
+        Threshold, that triggers flagging, if exceeded by stat value.
+    sub_window: FreqString, default None
+        Window size of the sub chunks, that are additionally tested for exceeding
+        `sub_thresh` with respect to the statistic computed by `func`.
+    sub_thresh: float, default None
+        Threshold the sub chunks have to exceed. Defaults to `thresh` if not given.
+    min_periods: int, default None
+        Minimum number of values a chunk has to contain for it to be tested. Defaults to 0 if not given.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The quality flags of data. Flags values may have changed relatively to the flags input.
+    """
+
+    datcol = data[field]
+    if not min_periods:
+        min_periods = 0
+    if not sub_thresh:
+        sub_thresh = thresh
+    window = pd.Timedelta(window)
+
+    if sub_window:
+        sub_window = pd.Timedelta(sub_window)
+
+    to_set = statPass(
+        datcol, func, window, thresh, operator.gt, sub_window, sub_thresh, min_periods
+    )
+    flags[to_set, field] = flag
+    return data, flags
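# A plain-pandas sketch of the windowed "statistic exceeds threshold" test described
# above, with an assumed toy series and rolling standard deviation as the statistic.
import numpy as np
import pandas as pd

s = pd.Series(
    np.random.default_rng(0).normal(size=288),
    index=pd.date_range("2021-01-01", periods=288, freq="5min"),
)
stat = s.rolling("1h").std()  # statistic per (trailing) 1h chunk
noisy = stat > 1.5            # candidate positions whose chunk statistic exceeds the threshold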
diff --git a/saqc/funcs/outliers.py b/saqc/funcs/outliers.py
new file mode 100644
index 0000000000000000000000000000000000000000..667fcc81a9811629308bcdb845ab0c4c5e065e2d
--- /dev/null
+++ b/saqc/funcs/outliers.py
@@ -0,0 +1,1298 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from typing import Optional, Union, Tuple, Sequence, Callable
+from typing_extensions import Literal
+
+import numba
+import numpy as np
+import numpy.polynomial.polynomial as poly
+import pandas as pd
+
+from dios import DictOfSeries
+from outliers import smirnov_grubbs
+from scipy.optimize import curve_fit
+
+from saqc.constants import *
+from saqc.core import flagging, Flags
+from saqc.lib.types import FreqString
+from saqc.lib.tools import customRoller, findIndex, getFreqDelta
+from saqc.funcs.scores import assignKNNScore
+from saqc.funcs.tools import copyField, dropField
+from saqc.funcs.transformation import transform
+import saqc.lib.ts_operators as ts_ops
+
+
+@flagging(masking="field")
+def flagByStray(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    freq: Optional[Union[int, FreqString]] = None,
+    min_periods: int = 11,
+    iter_start: float = 0.5,
+    alpha: float = 0.05,
+    flag: float = BAD,
+    **kwargs,
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Flag outliers in 1-dimensional (score) data with the STRAY Algorithm.
+
+    Find more information on the algorithm in References [1].
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, holding the data-to-be-flagged.
+    flags : saqc.Flags
+        Container to store quality flags to data.
+
+    freq : str, int, or None, default None
+        Determines the segmentation of the data into partitions, which the kNN algorithm is
+        applied to individually.
+
+        * ``np.inf``: Apply Scoring on whole data set at once
+        * ``x`` > 0 : Apply scoring on successive data chunks of periods length ``x``
+        * Offset String : Apply scoring on successive partitions of temporal extension
+          matching the passed offset string
+
+    min_periods : int, default 11
+        Minimum number of periods per partition that have to be present for a valid
+        outlier detection to be made in this partition. (Only in effect, if `freq`
+        is an integer.) Partition min value must always be greater than the
+        nn_neighbors value.
+
+    iter_start : float, default 0.5
+        Float in [0,1] that determines which percentage of data is considered
+        "normal". 0.5 results in the stray algorithm to search only the upper 50 % of
+        the scores for the cut off point. (See reference section for more information)
+
+    alpha : float, default 0.05
+        Level of significance by which it is tested whether a score might be drawn from
+        another distribution than the majority of the data.
+
+    flag : float, default BAD
+        flag to set.
+
+    References
+    ----------
+    [1] Talagala, P. D., Hyndman, R. J., & Smith-Miles, K. (2019). Anomaly detection in
+        high dimensional data. arXiv preprint arXiv:1908.04000.
+    """
+    scores = data[field].dropna()
+
+    if scores.empty:
+        flags[:, field] = UNTOUCHED
+        return data, flags
+
+    if not freq:
+        freq = scores.shape[0]
+
+    if isinstance(freq, str):
+        partitions = scores.groupby(pd.Grouper(freq=freq))
+
+    else:
+        grouper_series = pd.Series(
+            data=np.arange(0, scores.shape[0]), index=scores.index
+        )
+        grouper_series = grouper_series.transform(lambda x: int(np.floor(x / freq)))
+        partitions = scores.groupby(grouper_series)
+
+    # calculate flags for every partition
+    for _, partition in partitions:
+
+        if partition.empty | (partition.shape[0] < min_periods):
+            continue
+
+        sample_size = partition.shape[0]
+
+        sorted_i = partition.values.argsort()
+        resids = partition.values[sorted_i]
+        gaps = np.append(0, np.diff(resids))
+
+        tail_size = int(max(min(50, np.floor(sample_size / 4)), 2))
+        tail_indices = np.arange(2, tail_size + 1)
+
+        i_start = int(max(np.floor(sample_size * iter_start), 1) + 1)
+        ghat = np.array([np.nan] * sample_size)
+
+        for i in range(i_start - 1, sample_size):
+            ghat[i] = sum((tail_indices / (tail_size - 1)) * gaps[i - tail_indices + 1])
+
+        log_alpha = np.log(1 / alpha)
+        for iter_index in range(i_start - 1, sample_size):
+            if gaps[iter_index] > log_alpha * ghat[iter_index]:
+                index = partition.index[sorted_i[iter_index:]]
+                flags[index, field] = flag
+                break
+
+    return data, flags
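# A compact sketch of the gap-based cut-off used in the STRAY step above, applied to an
# assumed, already sorted array of toy scores (alpha and iter_start as in the signature).
import numpy as np

scores = np.sort(np.array([0.10, 0.20, 0.25, 0.30, 0.32, 0.35, 5.0, 6.0]))
alpha, iter_start = 0.05, 0.5
n = scores.size
gaps = np.append(0, np.diff(scores))

tail_size = int(max(min(50, np.floor(n / 4)), 2))
tail_idx = np.arange(2, tail_size + 1)
start = int(max(np.floor(n * iter_start), 1) + 1)

cutoff = None
for i in range(start - 1, n):
    ghat = np.sum((tail_idx / (tail_size - 1)) * gaps[i - tail_idx + 1])
    if gaps[i] > np.log(1 / alpha) * ghat:
        cutoff = i  # every score from this sorted position onward counts as an outlier
        break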
+
+
+def _evalStrayLabels(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    fields: Sequence[str],
+    reduction_range: Optional[str] = None,
+    reduction_drop_flagged: bool = False,  # TODO: still a case ?
+    reduction_thresh: float = 3.5,
+    reduction_min_periods: int = 1,
+    at_least_one: bool = True,
+    flag: float = BAD,
+    **kwargs,
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    The function "reduces" an observations flag to components of it, by applying MAD
+    (See references) test onto every components temporal surrounding.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+
+    field : str
+        The fieldname of the column, holding the labels to be evaluated.
+
+    flags : saqc.Flags
+        Container to store quality flags to data.
+
+    fields : list[str]
+        A list of strings, holding the column names of the variables, the stray labels
+        shall be projected onto.
+
+    val_frame : (N,M) pd.DataFrame
+        Input NxM DataFrame of observations, where N is the number of observations and
+        M the number of components per observation.
+
+    to_flag_frame : pandas.DataFrame
+        Input dataframe of observations to be tested, where N is the number of
+        observations and M the number of components per observation.
+
+    reduction_range : {None, str}
+        An offset string, denoting the range of the temporal surrounding to include
+        into the MAD testing. If ``None`` is passed, no testing will be performed and
+        all fields will have the stray flag projected.
+
+    reduction_drop_flagged : bool, default False
+        Whether or not to drop flagged values other than the value under test from the
+        temporal surrounding before checking the value with MAD.
+
+    reduction_thresh : float, default 3.5
+        The `critical` value, controlling whether the MAD score is considered
+        referring to an outlier or not. Higher values result in less rigid flagging.
+        The default value is widely used in the literature. See references section
+        for more details ([1]).
+
+    at_least_one : bool, default True
+        If none of the variables the outlier label shall be reduced to is an outlier
+        with regard to the test, all (True) or none (False) of the variables are flagged.
+
+    flag : float, default BAD
+        flag to set.
+
+    References
+    ----------
+    [1] https://www.itl.nist.gov/div898/handbook/eda/section3/eda35h.htm
+    """
+    val_frame = data[fields].to_df()
+    stray_detects = flags[field] > UNFLAGGED
+    stray_detects = stray_detects[stray_detects]
+    to_flag_frame = pd.DataFrame(False, columns=fields, index=stray_detects.index)
+
+    if reduction_range is None:
+        for field in to_flag_frame.columns:
+            flags[to_flag_frame.index, field] = flag
+        return data, flags
+
+    for var in fields:
+        for index in enumerate(to_flag_frame.index):
+
+            index_slice = slice(
+                index[1] - pd.Timedelta(reduction_range),
+                index[1] + pd.Timedelta(reduction_range),
+            )
+            test_slice = val_frame[var][index_slice].dropna()
+
+            # check, wheather value under test is sufficiently centered:
+            first = test_slice.first_valid_index()
+            last = test_slice.last_valid_index()
+            min_range = pd.Timedelta(reduction_range) / 4
+
+            if (
+                pd.Timedelta(index[1] - first) < min_range
+                or pd.Timedelta(last - index[1]) < min_range
+            ):
+                polydeg = 0
+            else:
+                polydeg = 2
+
+            if reduction_drop_flagged:
+                test_slice = test_slice.drop(to_flag_frame.index, errors="ignore")
+
+            if test_slice.shape[0] < reduction_min_periods:
+                to_flag_frame.loc[index[1], var] = True
+                continue
+
+            x = test_slice.index.values.astype(float)
+            x_0 = x[0]
+            x = (x - x_0) / 10 ** 12
+
+            polyfitted = poly.polyfit(y=test_slice.values, x=x, deg=polydeg)
+
+            testval = poly.polyval(
+                (float(index[1].to_numpy()) - x_0) / 10 ** 12, polyfitted
+            )
+            testval = val_frame[var][index[1]] - testval
+
+            resids = test_slice.values - poly.polyval(x, polyfitted)
+            med_resids = np.median(resids)
+            MAD = np.median(np.abs(resids - med_resids))
+            crit_val = 0.6745 * (abs(med_resids - testval)) / MAD
+
+            if crit_val > reduction_thresh:
+                to_flag_frame.loc[index[1], var] = True
+
+    if at_least_one:
+        to_flag_frame[~to_flag_frame.any(axis=1)] = True
+
+    for field in to_flag_frame.columns:
+        col = to_flag_frame[field]
+        flags[col[col].index, field] = flag
+
+    return data, flags
+
+
+def _expFit(
+    val_frame,
+    scoring_method="kNNMaxGap",
+    n_neighbors=10,
+    iter_start=0.5,
+    alpha=0.05,
+    bin_frac=10,
+):
+    """
+    Find outliers in multi dimensional observations.
+
+    The general idea is to assign scores to every observation based on the observation's neighborhood in the space
+    of observations. Then, the gaps between the (greatest) scores are tested for being drawn from the same
+    distribution as the majority of the scores.
+
+    Note, that no normalizations/transformations are applied to the different components (data columns)
+    - those are expected to be applied previously, if necessary.
+
+    Parameters
+    ----------
+    val_frame : (N,M) ndarray
+        Input NxM array of observations, where N is the number of observations and M the number of components per
+        observation.
+    scoring_method : {'kNNSum', 'kNNMaxGap'}, default 'kNNMaxGap'
+        Scoring method applied.
+        `'kNNSum'`: Assign to every point the sum of the distances to its 'n' nearest neighbors.
+        `'kNNMaxGap'`: Assign to every point the distance to the neighbor with the "maximum gap" to its predecessor
+        in the hierarchy of the `n` nearest neighbors. (see reference section for further descriptions)
+    n_neighbors : int, default 10
+        Number of neighbors included in the scoring process for every datapoint.
+    iter_start : float, default 0.5
+        Float in [0,1] that determines which percentage of data is considered "normal". 0.5 results in the expfit
+        algorithm searching only the upper 50 % of the scores for the cut-off point. (See reference section for more
+        information.)
+    alpha : float, default 0.05
+        Level of significance by which it is tested whether a score might be drawn from another distribution than the
+        majority of the data.
+    bin_frac : {int, str}, default 10
+        Controls the binning for the histogram in the fitting step. If an integer is passed, the residues will
+        equidistantly be covered by `bin_frac` bins, ranging from the minimum to the maximum of the residues.
+        If a string is passed, it will be passed on to the ``numpy.histogram_bin_edges`` method.
+    """
+
+    kNNfunc = getattr(ts_ops, scoring_method)
+    resids = kNNfunc(val_frame.values, n_neighbors=n_neighbors, algorithm="ball_tree")
+    data_len = resids.shape[0]
+
+    # sorting
+    sorted_i = resids.argsort()
+    resids = resids[sorted_i]
+    iter_index = int(np.floor(resids.size * iter_start))
+    # initialize condition variables:
+    crit_val = np.inf
+    test_val = 0
+    neg_log_alpha = -np.log(alpha)
+
+    # define exponential dist density function:
+    def fit_function(x, lambd):
+        return lambd * np.exp(-lambd * x)
+
+    # initialise sampling bins
+    if isinstance(bin_frac, int):
+        binz = np.linspace(
+            resids[0], resids[-1], 10 * int(np.ceil(data_len / bin_frac))
+        )
+    elif bin_frac in [
+        "auto",
+        "fd",
+        "doane",
+        "scott",
+        "stone",
+        "rice",
+        "sturges",
+        "sqrt",
+    ]:
+        binz = np.histogram_bin_edges(resids, bins=bin_frac)
+    else:
+        raise ValueError(f"Can't interpret {bin_frac} as an binning technique.")
+
+    binzenters = np.array([0.5 * (binz[i] + binz[i + 1]) for i in range(len(binz) - 1)])
+    # inititialize full histogram:
+    full_hist, binz = np.histogram(resids, bins=binz)
+    # check if start index is sufficiently high (pointing at resids value beyond histogram maximum at least):
+    hist_argmax = full_hist.argmax()
+
+    if hist_argmax >= findIndex(binz, resids[iter_index - 1], 0):
+        raise ValueError(
+            "Either the data histogram is too strangely shaped for oddWater OD detection - "
+            "or a too low value for 'offset' was passed "
+            "(offset better be much greater 0.5)"
+        )
+    # GO!
+    iter_max_bin_index = findIndex(binz, resids[iter_index - 1], 0)
+    upper_tail_index = int(np.floor(0.5 * hist_argmax + 0.5 * iter_max_bin_index))
+    resids_tail_index = findIndex(resids, binz[upper_tail_index], 0)
+    upper_tail_hist, bins = np.histogram(
+        resids[resids_tail_index:iter_index],
+        bins=binz[upper_tail_index : iter_max_bin_index + 1],
+    )
+
+    while (test_val < crit_val) & (iter_index < resids.size - 1):
+        iter_index += 1
+        new_iter_max_bin_index = findIndex(binz, resids[iter_index - 1], 0)
+        # the following if/else block "manually" expands the data histogram and avoids recalculating the
+        # complete histogram in every new iteration.
+        if new_iter_max_bin_index == iter_max_bin_index:
+            upper_tail_hist[-1] += 1
+        else:
+            upper_tail_hist = np.append(
+                upper_tail_hist, np.zeros([new_iter_max_bin_index - iter_max_bin_index])
+            )
+            upper_tail_hist[-1] += 1
+            iter_max_bin_index = new_iter_max_bin_index
+            upper_tail_index_new = int(
+                np.floor(0.5 * hist_argmax + 0.5 * iter_max_bin_index)
+            )
+            upper_tail_hist = upper_tail_hist[upper_tail_index_new - upper_tail_index :]
+            upper_tail_index = upper_tail_index_new
+
+        # fitting
+
+        lambdA, _ = curve_fit(
+            fit_function,
+            xdata=binzenters[upper_tail_index:iter_max_bin_index],
+            ydata=upper_tail_hist,
+            p0=[-np.log(alpha / resids[iter_index])],
+        )
+
+        crit_val = neg_log_alpha / lambdA
+        test_val = resids[iter_index]
+
+    return val_frame.index[sorted_i[iter_index:]]
+
+
+@flagging(masking="all")
+def flagMVScores(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    fields: Sequence[str],
+    trafo: Callable[[pd.Series], pd.Series] = lambda x: x,
+    alpha: float = 0.05,
+    n: int = 10,
+    func: Callable[[pd.Series], float] = np.sum,
+    iter_start: float = 0.5,
+    partition: Optional[Union[int, FreqString]] = None,
+    partition_min: int = 11,
+    stray_range: Optional[FreqString] = None,
+    drop_flagged: bool = False,  # TODO: still a case ?
+    thresh: float = 3.5,
+    min_periods: int = 1,
+    flag: float = BAD,
+    **kwargs,
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    The algorithm implements a 3-step outlier detection procedure for the simultaneous
+    flagging of higher dimensional data (dimensions > 3).
+
+    In reference [1], the procedure is introduced and exemplified with an
+    application to hydrological data. See the notes section for an overview of the
+    algorithm's basic steps.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+
+    field : str
+        The fieldname of the column, holding the data-to-be-flagged. (Here a dummy,
+        for structural reasons)
+
+    flags : saqc.Flags
+        Container to store quality flags to data.
+
+    fields : List[str]
+        List of fieldnames, corresponding to the variables that are to be included
+        into the flagging process.
+
+    trafo : callable, default lambda x:x
+        Transformation to be applied to every column before scoring. Will likely
+        be deprecated soon. It is better to transform the data in a processing step
+        preceding the call to ``flagMVScores``.
+
+    alpha : float, default 0.05
+        Level of significance by which it is tested whether an observation's score might
+        be drawn from another distribution than the majority of the observations.
+
+    n : int, default 10
+        Number of neighbors included in the scoring process for every datapoint.
+
+    func : Callable[numpy.array, float], default np.sum
+        The function that maps every point's set of k-nearest-neighbor distances
+        onto a scalar score.
+
+    iter_start : float, default 0.5
+        Float in [0, 1] that determines which fraction of the data is considered
+        "normal". A value of 0.5 makes the threshing algorithm search only the upper 50
+        % of the scores for the cut-off point. (See the reference section for more
+        information.)
+
+    partition : {None, str, int}, default None
+        Determines the size of the data partitions, the data is decomposed into. Each
+        partition is checked separately for outliers. If an offset string is passed,
+        the data is partitioned into parts of according temporal length. If an
+        integer is passed, the data is simply split up into continuous chunks of
+        `partition` periods. If ``None`` is passed (default), all the data will be
+        tested in one run.
+
+    partition_min : int, default 11
+        Minimum number of periods per partition that have to be present for a valid
+        outlier detection to be made in this partition. (Only of effect, if
+        `partition` is an integer.)
+
+    stray_range : {None, str}, default None
+        If not None, an attempt is made to reduce the stray result to single outlying
+        components of the input fields. An offset string, denoting the range of the
+        temporal surrounding to include into the MAD testing while trying to reduce
+        flags.
+
+    drop_flagged : bool, default False
+        Only effective when `stray_range` is not ``None``. Whether or not to drop flagged
+        values other than the value under test from the temporal surrounding before
+        checking the value with MAD.
+
+    thresh : float, default 3.5
+        Only effective when `stray_range` is not ``None``. The `critical` value,
+        controlling whether the MAD score is considered to refer to an outlier or
+        not. Higher values result in less rigid flagging. The default value is widely
+        considered appropriate in the literature.
+
+    min_periods : int, default 1
+        Only effective when `stray_range` is not ``None``. Minimum number of measurements
+        that have to be present in a reduction interval for the reduction actually to be
+        performed.
+
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed, relatively to the flags input.
+
+    Notes
+    -----
+    The basic steps are:
+
+    1. transforming
+
+    The different data columns are transformed via timeseries transformations to
+    (a) make them comparable and
+    (b) make outliers more stand out.
+
+    This step is usually subject to a phase of research and trial and error. See [1]
+    for more details.
+
+    Note that the data transformation as a built-in step of the algorithm
+    will likely be deprecated soon. It is better to transform the data in a processing
+    step preceding the multivariate flagging process. Also, by doing so, one gets
+    much more control and variety in the transformations applied, since the `trafo`
+    parameter only allows for applying the same transformation to all of the
+    variables involved.
+
+    2. scoring
+
+    Every observation gets assigned a score depending on its k nearest neighbors. See
+    the `scoring_method` parameter description for details on the different scoring
+    methods. Furthermore [1], [2] may give some insight in the pro and cons of the
+    different methods.
+
+    3. threshing
+
+    The gaps between the (greatest) scores are tested for being drawn from the same
+    distribution as the majority of the scores. If a gap is encountered that,
+    with sufficient significance, can be said not to be drawn from the same
+    distribution as the one all the smaller gaps are drawn from, then the observation
+    belonging to this gap, and all the observations belonging to gaps larger than
+    this gap, get flagged as outliers. [2] gives a fully detailed overview of the
+    `stray` algorithm.
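+
+    Examples
+    --------
+    A minimal, hypothetical call (the variable names ``"x"``, ``"y"`` and ``"z"`` are
+    illustrative and assumed to be present in `data` and `flags`):
+
+    >>> data, flags = flagMVScores(
+    ...     data, "dummy", flags,
+    ...     fields=["x", "y", "z"],
+    ...     trafo=np.log,
+    ...     alpha=0.05,
+    ...     n=10,
+    ...     partition="30D",
+    ... )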
+    """
+
+    for f in fields:
+        data, flags = copyField(data, f, flags, f"trafo_{f}")
+        data, flags = transform(data, f"trafo_{f}", flags, func=trafo, freq=partition)
+
+    data, flags = assignKNNScore(
+        data,
+        "dummy",
+        flags,
+        fields=[f"trafo_{f}" for f in fields],
+        target="kNN",
+        n=n,
+        func=func,
+        freq=partition,
+        method="ball_tree",
+        min_periods=partition_min,
+        **kwargs,
+    )
+
+    data, flags = flagByStray(
+        data,
+        "kNN",
+        flags,
+        freq=partition,
+        min_periods=partition_min,
+        iter_start=iter_start,
+        alpha=alpha,
+        flag=flag,
+        **kwargs,
+    )
+
+    data, flags = _evalStrayLabels(
+        data,
+        "kNN",
+        flags,
+        fields=fields,
+        reduction_range=stray_range,
+        reduction_drop_flagged=drop_flagged,
+        reduction_thresh=thresh,
+        reduction_min_periods=min_periods,
+        flag=flag,
+        **kwargs,
+    )
+    data, flags = dropField(data, "kNN", flags)
+    for f in fields:
+        data, flags = dropField(data, f"trafo_{f}", flags)
+
+    return data, flags
+
+
+@flagging(masking="field")
+def flagRaise(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    thresh: float,
+    raise_window: FreqString,
+    freq: FreqString,
+    average_window: Optional[FreqString] = None,
+    raise_factor: float = 2.0,
+    slope: Optional[float] = None,
+    weight: float = 0.8,
+    flag: float = BAD,
+    **kwargs,
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    The function flags raises and drops in value courses that exceed a certain threshold
+    within a certain timespan.
+
+    The parameter variety of the function is owed to the intriguing
+    case of values that "return" from outlierish or anomalous value levels and
+    thus exceed the threshold, while actually being usual values.
+
+    NOTE: the dataset does NOT need to be harmonized to a time series with an
+    equidistant frequency grid.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, holding the data-to-be-flagged.
+    flags : saqc.Flags
+        Container to store flags of the data.
+    thresh : float
+        The threshold, for the total rise (thresh > 0), or total drop (thresh < 0),
+        value courses must not exceed within a timespan of length `raise_window`.
+    raise_window : str
+        An offset string, determining the timespan that the rise/drop thresholding
+        refers to. The window is inclusively defined.
+    freq : str
+        An offset string, determining the frequency the timeseries to-be-flagged is
+        supposed to be sampled at. The window is inclusively defined.
+    average_window : {None, str}, default None
+        See the second condition listed in the notes below. The window is inclusively
+        defined. It defaults to 1.5 times the size of `raise_window`.
+    raise_factor : float, default 2
+        See second condition listed in the notes below.
+    slope : {None, float}, default None
+        See third condition listed in the notes below.
+    weight : float, default 0.8
+        See third condition listed in the notes below.
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed, relatively to the flags input.
+
+    Notes
+    -----
+    The value :math:`x_{k}` of a time series :math:`x` with associated
+    timestamps :math:`t_i`, is flagged a raise, if:
+
+    * There is any value :math:`x_{s}`, preceding :math:`x_{k}` within `raise_window`
+      range, so that:
+
+      * :math:`M = |x_k - x_s | >`  `thresh` :math:`> 0`
+
+    * The weighted average :math:`\\mu^{*}` of the values, preceding :math:`x_{k}`
+      within `average_window`
+      range indicates, that :math:`x_{k}` does not return from an "outlierish" value
+      course, meaning that:
+
+      * :math:`x_k > \\mu^* + ( M` / `raise_factor` :math:`)`
+
+    * Additionally, if `slope` is not `None`, :math:`x_{k}` is checked for being
+      sufficiently divergent from its very predecessor :math:`x_{k-1}`, meaning that it
+      is additionally checked if:
+
+      * :math:`x_k - x_{k-1} >` `slope`
+      * :math:`t_k - t_{k-1} >` `weight` :math:`\\times` `freq`
+
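+    Examples
+    --------
+    A minimal, hypothetical call (the field name ``"temperature"`` is illustrative):
+    flag rises of more than 5 units within two hours, on a series sampled roughly
+    every 10 minutes.
+
+    >>> data, flags = flagRaise(
+    ...     data, "temperature", flags, thresh=5.0, raise_window="2h", freq="10min"
+    ... )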
+    """
+
+    # prepare input args
+    dataseries = data[field].dropna()
+    raise_window = pd.Timedelta(raise_window)
+    freq = pd.Timedelta(freq)
+    if slope is not None:
+        slope = np.abs(slope)
+
+    if average_window is None:
+        average_window = 1.5 * pd.Timedelta(raise_window)
+
+    if thresh < 0:
+        dataseries *= -1
+        thresh *= -1
+
+    def raise_check(x, thresh):
+        test_set = x[-1] - x[0:-1]
+        max_val = np.max(test_set)
+        if max_val >= thresh:
+            return max_val
+        else:
+            return np.nan
+
+    def custom_rolling_mean(x):
+        return np.sum(x[:-1])
+
+    # get invalid-raise/drop mask:
+    raise_series = dataseries.rolling(raise_window, min_periods=2, closed="both")
+
+    numba_boost = True
+    if numba_boost:
+        raise_check = numba.jit(raise_check, nopython=True)
+        raise_series = raise_series.apply(
+            raise_check, args=(thresh,), raw=True, engine="numba"
+        )
+    else:
+        raise_series = raise_series.apply(raise_check, args=(thresh,), raw=True)
+
+    if raise_series.isna().all():
+        flags[:, field] = UNTOUCHED
+        return data, flags
+
+    # "unflag" values of insufficient deviation to their predecessors
+    if slope is not None:
+        w_mask = (
+            pd.Series(dataseries.index).diff().dt.total_seconds() / freq.total_seconds()
+        ) > weight
+        slope_mask = np.abs(dataseries.diff()) < slope
+        to_unflag = raise_series.notna() & w_mask.values & slope_mask
+        raise_series[to_unflag] = np.nan
+
+    # calculate and apply the weighted mean weights (pseudo-harmonization):
+    weights = (
+        pd.Series(dataseries.index).diff(periods=2).shift(-1).dt.total_seconds()
+        / freq.total_seconds()
+        / 2
+    )
+
+    weights.iloc[0] = 0.5 + (
+        dataseries.index[1] - dataseries.index[0]
+    ).total_seconds() / (freq.total_seconds() * 2)
+
+    weights.iloc[-1] = 0.5 + (
+        dataseries.index[-1] - dataseries.index[-2]
+    ).total_seconds() / (freq.total_seconds() * 2)
+
+    weights[weights > 1.5] = 1.5
+    weights.index = dataseries.index
+    weighted_data = dataseries.mul(weights)
+
+    # rolling weighted mean calculation
+    weighted_rolling_mean = weighted_data.rolling(
+        average_window, min_periods=2, closed="both"
+    )
+    weights_rolling_sum = weights.rolling(average_window, min_periods=2, closed="both")
+    if numba_boost:
+        custom_rolling_mean = numba.jit(custom_rolling_mean, nopython=True)
+        weighted_rolling_mean = weighted_rolling_mean.apply(
+            custom_rolling_mean, raw=True, engine="numba"
+        )
+        weights_rolling_sum = weights_rolling_sum.apply(
+            custom_rolling_mean, raw=True, engine="numba"
+        )
+    else:
+        weighted_rolling_mean = weighted_rolling_mean.apply(
+            custom_rolling_mean, raw=True
+        )
+        weights_rolling_sum = weights_rolling_sum.apply(custom_rolling_mean, raw=True)
+
+    weighted_rolling_mean = weighted_rolling_mean / weights_rolling_sum
+    # check means against critical raise value:
+    to_flag = dataseries >= weighted_rolling_mean + (raise_series / raise_factor)
+    to_flag &= raise_series.notna()
+    flags[to_flag[to_flag].index, field] = flag
+
+    return data, flags
+
+
+@flagging(masking="field")
+def flagMAD(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    window: FreqString,
+    z: float = 3.5,
+    flag: float = BAD,
+    **kwargs,
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    The function implements the modified Z-score outlier detection method.
+
+    See references [1] for more details on the algorithm.
+
+    Note, that the test needs the input data to be sampled regularly (fixed sampling rate).
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, holding the data-to-be-flagged. (Here a dummy, for structural reasons)
+    flags : saqc.Flags
+        Container to store flags of the data.
+    window : str
+        Offset string, denoting the window size that the "Z-scored" values have to lie in.
+    z: float, default 3.5
+        The value the Z-score is tested against. Defaulting to 3.5 (Recommendation of [1])
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed, relatively to the flags input.
+
+    References
+    ----------
+    [1] https://www.itl.nist.gov/div898/handbook/eda/section3/eda35h.htm
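+
+    Examples
+    --------
+    A value is flagged when :math:`0.6745 \\cdot |x - m| > z \\cdot MAD`, with :math:`m`
+    and :math:`MAD` the rolling median and rolling median absolute deviation over
+    `window` (this mirrors the masking done below). A minimal, hypothetical call (the
+    field name ``"x"`` is illustrative):
+
+    >>> data, flags = flagMAD(data, "x", flags, window="1D", z=3.5)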
+    """
+    d = data[field]
+    if d.empty:
+        flags[:, field] = UNTOUCHED
+        return data, flags
+
+    median = d.rolling(window=window, closed="both").median()
+    diff = (d - median).abs()
+    mad = diff.rolling(window=window, closed="both").median()
+    mask = (mad > 0) & (0.6745 * diff > z * mad)
+    # NOTE:
+    # In pandas <= 0.25.3, the window size is not fixed if the
+    # window-argument to rolling is a frequency. That implies
+    # that during the first iterations the window has a size of
+    # 1, 2, 3, ... until it eventually covers the desired time
+    # span. For the calculation of medians, that is rather
+    # unfortunate, as the size of the calculation base might differ
+    # heavily. So don't flag anything until the window reaches
+    # its target size.
+    if not isinstance(window, int):
+        index = mask.index
+        mask.loc[index < index[0] + pd.to_timedelta(window)] = False
+
+    flags[mask, field] = flag
+    return data, flags
+
+
+@flagging(masking="field")
+def flagOffset(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    thresh: float,
+    tolerance: float,
+    window: Union[int, FreqString],
+    thresh_relative: Optional[float] = None,
+    flag: float = BAD,
+    **kwargs,
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    A basic outlier test that works on regularly and irregularly sampled data.
+
+    The test classifies values/value courses as outliers by detecting not only a rise
+    in value, but also checking for a return to the initial value level.
+
+    Values :math:`x_n, x_{n+1}, .... , x_{n+k}` of a timeseries :math:`x` with
+    associated timestamps :math:`t_n, t_{n+1}, .... , t_{n+k}` are considered spikes, if
+
+    1. :math:`|x_{n-1} - x_{n + s}| >` `thresh`, for all :math:`s \\in [0,1,2,...,k]`
+
+    2. :math:`|x_{n-1} - x_{n+k+1}| <` `tolerance`
+
+    3. :math:`|t_{n-1} - t_{n+k+1}| <` `window`
+
+    Note that this definition of a "spike" includes not only one-value outliers, but
+    also plateau-ish value courses.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The field in data.
+    flags : saqc.Flags
+        Container to store flags of the data.
+    thresh : float
+        Minimum difference between two values for the latter one to be considered a spike. See condition (1).
+    tolerance : float
+        Maximum difference between pre-spike and post-spike values. See condition (2)
+    window : {str, int}
+        Maximum length of "spiky" value courses. See condition (3). Integer defined window lengths are only allowed
+        for regularly sampled timeseries.
+    thresh_relative : {float, None}, default None
+        Relative threshold.
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed, relatively to the flags input.
+
+    References
+    ----------
+    The implementation is a time-window based version of an outlier test from the UFZ Python library,
+    that can be found here:
+
+    https://git.ufz.de/chs/python/blob/master/ufz/level1/spike.py
+
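+    Examples
+    --------
+    A minimal, hypothetical call (the field name ``"x"`` is illustrative): flag spiky
+    courses of at most one hour length that jump by more than 10 units and return to
+    within 1 unit of the pre-spike level.
+
+    >>> data, flags = flagOffset(data, "x", flags, thresh=10, tolerance=1, window="1h")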
+    """
+    dataseries = data[field].dropna()
+    if dataseries.empty:
+        flags[:, field] = UNTOUCHED
+        return data, flags
+
+    # using reverted series - because ... long story.
+    ind = dataseries.index
+    rev_ind = ind[0] + ((ind[-1] - ind)[::-1])
+    map_i = pd.Series(ind, index=rev_ind)
+    dataseries = pd.Series(dataseries.values, index=rev_ind)
+
+    if isinstance(window, int):
+        delta = getFreqDelta(dataseries.index)
+        if not delta:
+            raise TypeError(
+                "Only offset string defined window sizes allowed for irregularly sampled timeseries"
+            )
+        window = delta * window
+
+    # get all the entries preceding a significant jump
+    if thresh:
+        post_jumps = dataseries.diff().abs() > thresh
+
+    if thresh_relative:
+        s = np.sign(thresh_relative)
+        rel_jumps = s * (dataseries.shift(1) - dataseries).div(dataseries.abs()) > abs(
+            thresh_relative
+        )
+        if thresh:
+            post_jumps = rel_jumps & post_jumps
+        else:
+            post_jumps = rel_jumps
+
+    post_jumps = post_jumps[post_jumps]
+    if post_jumps.empty:
+        flags[:, field] = UNTOUCHED
+        return data, flags
+
+    # get all the entries preceding a significant jump
+    # and their successors within `window` range
+    to_roll = post_jumps.reindex(
+        dataseries.index, method="bfill", tolerance=window, fill_value=False
+    ).dropna()
+    to_roll = dataseries[to_roll]
+
+    if thresh_relative:
+
+        def spikeTester(chunk, thresh=abs(thresh_relative), tol=tolerance):
+            jump = chunk[-2] - chunk[-1]
+            thresh = thresh * abs(jump)
+            chunk_stair = (np.sign(jump) * (chunk - chunk[-1]) < thresh)[::-1].cumsum()
+            initial = np.searchsorted(chunk_stair, 2)
+            if initial == len(chunk):
+                return 0
+            if np.abs(chunk[-initial - 1] - chunk[-1]) < tol:
+                return initial - 1
+            return 0
+
+    else:
+
+        # define spike testing function to roll with (no relative check):
+        def spikeTester(chunk, thresh=thresh, tol=tolerance):
+            # signum change!!!
+            chunk_stair = (
+                np.sign(chunk[-2] - chunk[-1]) * (chunk - chunk[-1]) < thresh
+            )[::-1].cumsum()
+            initial = np.searchsorted(chunk_stair, 2)
+            if initial == len(chunk):
+                return 0
+            if np.abs(chunk[-initial - 1] - chunk[-1]) < tol:
+                return initial - 1
+            return 0
+
+    roller = customRoller(to_roll, window=window, min_periods=2, closed="both")
+    engine = None if len(to_roll) < 200000 else "numba"
+    result = roller.apply(spikeTester, raw=True, engine=engine)
+
+    ignore = pd.Series(True, index=to_roll.index)
+    ignore[post_jumps.index] = False
+    result[ignore] = np.nan
+
+    result.index = map_i[result.index]
+
+    # correct the result: only those values define plateaus that do not have
+    # values at their left starting point that belong to other plateaus themselves:
+    def calcResult(result):
+        var_num = result.shape[0]
+        flag_scopes = np.zeros(var_num, dtype=bool)
+        for k in range(var_num):
+            if result[k] > 0:
+                k_r = int(result[k])
+                # validity check: the plateau's start isn't another plateau's end:
+                if not flag_scopes[k - k_r - 1]:
+                    flag_scopes[(k - k_r) : k] = True
+        return pd.Series(flag_scopes, index=result.index)
+
+    cresult = calcResult(result)
+    cresult = cresult[cresult].index
+    flags[cresult, field] = flag
+    return data, flags
+
+
+@flagging(masking="field")
+def flagByGrubbs(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    window: Union[FreqString, int],
+    alpha: float = 0.05,
+    min_periods: int = 8,
+    pedantic: bool = False,
+    flag: float = BAD,
+    **kwargs,
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    The function flags values that are regarded outliers according to the Grubbs test.
+
+    See reference [1] for more information on the Grubbs test's definition.
+
+    The (two-sided) test gets applied to data chunks of size "window". The test's
+    application is iterated on each data chunk under test, until no more
+    outliers are detected in that chunk.
+
+    Note that the test performs poorly for small data chunks (resulting in heavy
+    overflagging). Therefore you should select "window" so that every window contains
+    at least 8 values and also adjust the min_periods value accordingly.
+
+    Note that the data to be tested by the Grubbs test are expected to be distributed
+    approximately normally ("normalish").
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, holding the data-to-be-flagged.
+    flags : saqc.Flags
+        Container to store flags of the data.
+    window : {int, str}
+        The size of the window you want to use for outlier testing. If an integer is
+        passed, the size refers to the number of periods of every testing window. If a
+        string is passed, it has to be an offset string, and will denote the total
+        temporal extension of every window.
+    alpha : float, default 0.05
+        The level of significance the Grubbs test is to be performed at (between 0 and 1).
+    min_periods : int, default 8
+        The minimum number of values that have to be present in an interval under test,
+        for a Grubbs test result to be accepted. Only makes sense in case `window` is
+        an offset string.
+    pedantic : bool, default False
+        If True, every value gets checked twice for being an outlier: once in the
+        initial rolling window and once more in a rolling window that is lagged
+        by half the window size (window/2). Recommended for avoiding false
+        positives at the window edges. Only available when rolling with integer
+        defined window size.
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed relatively to the flags input.
+
+    References
+    ----------
+    introduction to the grubbs test:
+
+    [1] https://en.wikipedia.org/wiki/Grubbs%27s_test_for_outliers
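+
+    Examples
+    --------
+    A minimal, hypothetical call (the field name ``"x"`` is illustrative), testing
+    day-long chunks:
+
+    >>> data, flags = flagByGrubbs(data, "x", flags, window="1D", alpha=0.05)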
+    """
+    data = data.copy()
+    datcol = data[field]
+    rate = getFreqDelta(datcol.index)
+
+    # if the timeseries being analyzed is regular,
+    # the window size can be transformed to a number of periods:
+    if rate and isinstance(window, str):
+        window = pd.Timedelta(window) // rate
+
+    to_group = pd.DataFrame(data={"ts": datcol.index, "data": datcol})
+    to_flag = pd.Series(False, index=datcol.index)
+
+    # period number defined test intervals
+    if isinstance(window, int):
+        grouper_series = pd.Series(
+            data=np.arange(0, datcol.shape[0]), index=datcol.index
+        )
+        grouper_series_lagged = grouper_series + (window / 2)
+        grouper_series = grouper_series.transform(lambda x: x // window)
+        grouper_series_lagged = grouper_series_lagged.transform(lambda x: x // window)
+        partitions = to_group.groupby(grouper_series)
+        partitions_lagged = to_group.groupby(grouper_series_lagged)
+
+    # offset defined test intervals:
+    else:
+        partitions = to_group.groupby(pd.Grouper(freq=window))
+        partitions_lagged = []
+
+    for _, partition in partitions:
+        if partition.shape[0] > min_periods:
+            detected = smirnov_grubbs.two_sided_test_indices(
+                partition["data"].values, alpha=alpha
+            )
+            detected = partition["ts"].iloc[detected]
+            to_flag[detected.index] = True
+
+    if isinstance(window, int) and pedantic:
+        to_flag_lagged = pd.Series(False, index=datcol.index)
+
+        for _, partition in partitions_lagged:
+            if partition.shape[0] > min_periods:
+                detected = smirnov_grubbs.two_sided_test_indices(
+                    partition["data"].values, alpha=alpha
+                )
+                detected = partition["ts"].iloc[detected]
+                to_flag_lagged[detected.index] = True
+
+        to_flag &= to_flag_lagged
+
+    flags[to_flag, field] = flag
+    return data, flags
+
+
+@flagging(masking="field")
+def flagRange(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    min: float = -np.inf,
+    max: float = np.inf,
+    flag: float = BAD,
+    **kwargs,
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Function flags values not covered by the closed interval [`min`, `max`].
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, holding the data-to-be-flagged.
+    flags : saqc.Flags
+        Container to store flags of the data.
+    min : float
+        Lower bound for valid data.
+    max : float
+        Upper bound for valid data.
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The quality flags of data
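+
+    Examples
+    --------
+    A minimal, hypothetical call (the field name ``"x"`` and the bounds are
+    illustrative):
+
+    >>> data, flags = flagRange(data, "x", flags, min=0.0, max=100.0)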
+    """
+
+    # using .values is much faster
+    datacol = data[field].values
+    mask = (datacol < min) | (datacol > max)
+    flags[mask, field] = flag
+    return data, flags
+
+
+@flagging(masking="all")
+def flagCrossStatistic(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    fields: Sequence[str],
+    thresh: float,
+    method: Literal["modZscore", "Zscore"] = "modZscore",
+    flag: float = BAD,
+    **kwargs,
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Function checks for outliers relative to the "horizontal" input data axis.
+
+    For `fields` :math:`=[f_1,f_2,...,f_N]` and timestamps :math:`[t_1,t_2,...,t_K]`, the following steps are taken
+    for outlier detection:
+
+    1. All timestamps :math:`t_i`, for which there is some :math:`f_k` with :math:`data[f_k]` having no entry at
+       :math:`t_i`, are excluded from the following process (inner join of the :math:`f_i` fields).
+    2. For every :math:`0 <= i <= K`, the value
+       :math:`m_i = median(\\{data[f_1][t_i], data[f_2][t_i], ..., data[f_N][t_i]\\})` is calculated.
+    3. For every :math:`0 <= i <= K`, the set
+       :math:`\\{data[f_1][t_i] - m_i, data[f_2][t_i] - m_i, ..., data[f_N][t_i] - m_i\\}` is tested for outliers with the
+       specified method (`method` parameter).
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        A dummy parameter.
+    flags : saqc.Flags
+        A flags object, holding flags and additional informations related to `data`.
+    fields : List[str]
+        List of fieldnames in data, determining which variables are to be included into the flagging process.
+    thresh : float
+        Threshold which the outlier score of a value must exceed, for it to be flagged an outlier.
+    method : {'modZscore', 'Zscore'}, default 'modZscore'
+        Method used for calculating the outlier scores.
+
+        * ``'modZscore'``: Median based "sigma"-ish approach. See References [1].
+        * ``'Zscore'``: Score values by how many standard deviations they differ from the mean.
+          See References [1].
+
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed relatively to the input flags.
+
+    References
+    ----------
+    [1] https://www.itl.nist.gov/div898/handbook/eda/section3/eda35h.htm
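+
+    Examples
+    --------
+    A minimal, hypothetical call (the field names are illustrative), flagging values
+    whose modified Z-score across the three variables exceeds 3.5:
+
+    >>> data, flags = flagCrossStatistic(
+    ...     data, "dummy", flags, fields=["x", "y", "z"], thresh=3.5, method="modZscore"
+    ... )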
+    """
+
+    df = data[fields].loc[data[fields].index_of("shared")].to_df()
+
+    if isinstance(method, str):
+
+        if method == "modZscore":
+            MAD_series = df.subtract(df.median(axis=1), axis=0).abs().median(axis=1)
+            diff_scores = (
+                (0.6745 * (df.subtract(df.median(axis=1), axis=0)))
+                .divide(MAD_series, axis=0)
+                .abs()
+            )
+
+        elif method == "Zscore":
+            diff_scores = (
+                df.subtract(df.mean(axis=1), axis=0)
+                .divide(df.std(axis=1), axis=0)
+                .abs()
+            )
+
+        else:
+            raise ValueError(method)
+
+    else:
+
+        try:
+            stat = getattr(df, method.__name__)(axis=1)
+        except AttributeError:
+            stat = df.aggregate(method, axis=1)
+
+        diff_scores = df.subtract(stat, axis=0).abs()
+
+    mask = diff_scores > thresh
+    if mask.empty:
+        flags[:, field] = UNTOUCHED
+        return data, flags
+
+    for var in fields:
+        flags[mask[var], var] = flag
+
+    return data, flags
diff --git a/saqc/funcs/pattern.py b/saqc/funcs/pattern.py
new file mode 100644
index 0000000000000000000000000000000000000000..a5c23e0323dcebc4c88cc4c735b7d20c7d292b49
--- /dev/null
+++ b/saqc/funcs/pattern.py
@@ -0,0 +1,286 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import numpy as np
+import pandas as pd
+import dtw
+import pywt
+from mlxtend.evaluate import permutation_test
+
+from saqc.constants import *
+from saqc.core.register import flagging
+from saqc.lib.tools import customRoller
+
+
+@flagging(masking="field")
+def flagPatternByWavelet(
+    data,
+    field,
+    flags,
+    reference,
+    widths=(1, 2, 4, 8),
+    waveform="mexh",
+    flag=BAD,
+    **kwargs
+):
+    """
+    Pattern recognition via wavelets.
+
+    The steps are:
+     1. work on chunks returned by a moving window
+     2. each chunk is compared to the given pattern, using the wavelet algorithm as
+        presented in [1]
+     3. if the compared chunk is equal to the given pattern it gets flagged
+
+    Parameters
+    ----------
+
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+
+    field : str
+        The fieldname of the data column you want to flag.
+
+    flags : saqc.Flags
+        The flags belonging to `data`.
+
+    reference : str
+        The fieldname in `data` which holds the pattern.
+
+    widths : tuple of int, default (1, 2, 4, 8)
+        Widths for the wavelet decomposition. [1] recommends a dyadic scale.
+
+    waveform : str, default 'mexh'
+        Wavelet to be used for the decomposition. See [2] for a list.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        Data values may have changed relatively to the data input.
+
+    flags : saqc.Flags
+        The flags belonging to `data`.
+
+    References
+    ----------
+
+    The underlying pattern recognition algorithm using wavelets is documented here:
+    [1] Maharaj, E.A. (2002): Pattern Recognition of Time Series using Wavelets. In: Härdle W., Rönz B. (eds) Compstat. Physica, Heidelberg, 978-3-7908-1517-7.
+
+    The documentation of the python package used for the wavelet decomposition can be found here:
+    [2] https://pywavelets.readthedocs.io/en/latest/ref/cwt.html#continuous-wavelet-families
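+
+    Examples
+    --------
+    A minimal, hypothetical call (the field names are illustrative): ``"pattern"`` is a
+    variable in `data` holding the reference course to search for in ``"x"``.
+
+    >>> data, flags = flagPatternByWavelet(data, "x", flags, reference="pattern")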
+    """
+
+    dat = data[field]
+    ref = data[reference].to_numpy()
+    cwtmat_ref, _ = pywt.cwt(ref, widths, waveform)
+    wavepower_ref = np.power(cwtmat_ref, 2)
+    len_width = len(widths)
+    sz = len(ref)
+
+    assert len_width
+    assert sz
+
+    def func(x, y):
+        return x.sum() / y.sum()
+
+    def pvalue(chunk):
+        cwtmat_chunk, _ = pywt.cwt(chunk, widths, waveform)
+        wavepower_chunk = np.power(cwtmat_chunk, 2)
+
+        # Permutation test on Powersum of matrix
+        for i in range(len_width):
+            x = wavepower_ref[i]
+            y = wavepower_chunk[i]
+            pval = permutation_test(
+                x, y, method="approximate", num_rounds=200, func=func, seed=0
+            )
+            pval = min(pval, 1 - pval)
+        return pval  # noqa # existence ensured by assert
+
+    rolling = customRoller(dat, window=sz, min_periods=sz, forward=True)
+    pvals = rolling.apply(pvalue, raw=True)
+    markers = pvals > 0.01  # nans -> False
+
+    # the markers are set on the left edge of the window. thus we must propagate
+    # `sz`-many True's to the right of every marker.
+    rolling = customRoller(markers, window=sz, min_periods=sz)
+    mask = rolling.sum().fillna(0).astype(bool)
+
+    flags[mask, field] = flag
+    return data, flags
+
+
+def calculateDistanceByDTW(
+    data: pd.Series, reference: pd.Series, forward=True, normalize=True
+):
+    """
+    Calculate the DTW-distance of data to pattern in a rolling calculation.
+
+    The data is compared to pattern in a rolling window.
+    The size of the rolling window is determined by the timespan defined
+    by the first and last timestamp of the reference data's datetime index.
+
+    For details see the linked functions in the `See Also` section.
+
+    Parameters
+    ----------
+    data : pd.Series
+        Data series. Must have datetime-like index, and must be regularly sampled.
+
+    reference : pd.Series
+        Reference series. Must have datetime-like index, must not contain NaNs
+        and must not be empty.
+
+    forward: bool, default True
+        If `True`, the distance value is set on the left edge of the data chunk. This
+        means, with a perfect match, `0.0` marks the beginning of the pattern in
+        the data. If `False`, `0.0` would mark the end of the pattern.
+
+    normalize : bool, default True
+        If `False`, return unmodified distances.
+        If `True`, normalize distances by the number of observations in the reference.
+        This helps to make it easier to find a good cutoff threshold for further
+        processing. The distances then refer to the mean distance per datapoint,
+        expressed in the data's units.
+
+    Returns
+    -------
+    distance : pd.Series
+
+    Notes
+    -----
+    The data must be regularly sampled, otherwise a ValueError is raised.
+    NaNs in the data will be dropped before dtw distance calculation.
+
+    See Also
+    --------
+    flagPatternByDTW : flag data by DTW
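+
+    Examples
+    --------
+    A small, self-contained sketch on synthetic data (the values are illustrative):
+
+    >>> import numpy as np
+    >>> import pandas as pd
+    >>> idx = pd.date_range("2021-01-01", periods=144, freq="10min")
+    >>> series = pd.Series(np.sin(np.linspace(0, 12, 144)), index=idx)
+    >>> pattern = series.iloc[30:42]
+    >>> distances = calculateDistanceByDTW(series, pattern, forward=True, normalize=True)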
+    """
+    if reference.hasnans or reference.empty:
+        raise ValueError("reference must not have nan's and must not be empty.")
+
+    winsz = reference.index.max() - reference.index.min()
+    reference = reference.to_numpy()
+
+    def isPattern(chunk):
+        return dtw.accelerated_dtw(chunk, reference, "euclidean")[0]
+
+    # generate distances, excluding NaNs
+    rolling = customRoller(
+        data.dropna(), window=winsz, forward=forward, expand=False, closed="both"
+    )
+    distances: pd.Series = rolling.apply(isPattern, raw=True)
+
+    if normalize:
+        distances /= len(reference)
+
+    return distances.reindex(index=data.index)  # reinsert NaNs
+
+
+@flagging(masking="field")
+def flagPatternByDTW(
+    data,
+    field,
+    flags,
+    reference,
+    max_distance=0.0,
+    normalize=True,
+    plot=False,
+    flag=BAD,
+    **kwargs
+):
+    """Pattern Recognition via Dynamic Time Warping.
+
+    The steps are:
+     1. work on a moving window
+     2. for each data chunk extracted from each window, a distance to the given pattern
+        is calculated, by the dynamic time warping algorithm [1]
+     3. if the distance is below the threshold, all the data in the window gets flagged
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+
+    field : str
+        The name of the data column
+
+    flags : saqc.Flags
+        The flags belonging to `data`.
+
+    reference : str
+        The name in `data` which holds the pattern. The pattern must not contain NaNs,
+        must have a datetime index, and must not be empty.
+
+    max_distance : float, default 0.0
+        Maximum dtw-distance between chunk and pattern, if the distance is lower than
+        ``max_distance`` the data gets flagged. With default, ``0.0``, only exact
+        matches are flagged.
+
+    normalize : bool, default True
+        If `False`, return unmodified distances.
+        If `True`, normalize distances by the number of observations of the reference.
+        This helps to make it easier to find a good cutoff threshold for further
+        processing. The distances then refer to the mean distance per datapoint,
+        expressed in the data's units.
+
+    plot: bool, default False
+        Show a calibration plot, which can be quite helpful to find the right threshold
+        for `max_distance`. It works best with `normalize=True`. Do not use in automatic
+        setups / pipelines. The plot show three lines:
+            - data: the data the function was called on
+            - distances: the calculated distances by the algorithm
+            - indicator: have to distinct levels: `0` and the value of `max_distance`.
+              If `max_distance` is `0.0` it defaults to `1`. Everywhere where the
+              indicator is not `0` the data will be flagged.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        Data values may have changed relatively to the data input.
+
+    flags : saqc.Flags
+        The flags belonging to `data`.
+
+    Notes
+    -----
+    The window size of the moving window is set to equal the temporal extension of the
+    reference data's datetime index.
+
+    References
+    ----------
+    Find a nice description of the underlying Dynamic Time Warping algorithm here:
+
+    [1] https://cran.r-project.org/web/packages/dtw/dtw.pdf
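+
+    Examples
+    --------
+    A minimal, hypothetical call (the field names are illustrative): ``"pattern"`` is a
+    variable in `data` holding the reference course to search for in ``"x"``.
+
+    >>> data, flags = flagPatternByDTW(
+    ...     data, "x", flags, reference="pattern", max_distance=0.5, normalize=True
+    ... )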
+    """
+    ref = data[reference]
+    dat = data[field]
+
+    distances = calculateDistanceByDTW(dat, ref, forward=True, normalize=normalize)
+    winsz = ref.index.max() - ref.index.min()
+
+    # prevent nan propagation
+    distances = distances.fillna(max_distance + 1)
+
+    # find minima filter by threshold
+    fw = customRoller(distances, window=winsz, forward=True, closed="both")
+    bw = customRoller(distances, window=winsz, closed="both")
+    minima = (fw.min() == bw.min()) & (distances <= max_distance)
+
+    # Propagate True's to size of pattern.
+    rolling = customRoller(minima, window=winsz, closed="both")
+    mask = rolling.sum() > 0
+
+    if plot:
+        df = pd.DataFrame()
+        df["data"] = dat
+        df["distances"] = distances
+        df["indicator"] = mask.astype(float) * (max_distance or 1)
+        df.plot()
+
+    flags[mask, field] = flag
+    return data, flags
diff --git a/saqc/funcs/pattern_rec.py b/saqc/funcs/pattern_rec.py
deleted file mode 100644
index 83f392df76a9c0dfc5a1a1af2e7fce108c92caf9..0000000000000000000000000000000000000000
--- a/saqc/funcs/pattern_rec.py
+++ /dev/null
@@ -1,152 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import numpy as np
-import pandas as pd
-import dtw
-import pywt
-from mlxtend.evaluate import permutation_test
-
-from saqc.core.register import register
-from saqc.lib.tools import customRoller
-
-
-@register(masking='field')
-def flagPattern_wavelet(data, field, flagger, ref_field, widths=(1, 2, 4, 8), waveform='mexh', **kwargs):
-    """
-    Pattern recognition via wavelets.
-
-    The steps are:
-     1. work on chunks returned by a moving window
-     2. each chunk is compared to the given pattern, using the wavelet algorithm as presented in [1]
-     3. if the compared chunk is equal to the given pattern it gets flagged
-
-    Parameters
-    ----------
-
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the data column, you want to correct.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    ref_field: str
-        The fieldname in `data' which holds the pattern.
-    widths: tuple of int
-        Widths for wavelet decomposition. [1] recommends a dyadic scale. Default: (1,2,4,8)
-    waveform: str.
-        Wavelet to be used for decomposition. Default: 'mexh'. See [2] for a list.
-
-    kwargs
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data values may have changed relatively to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
-
-
-    References
-    ----------
-
-    The underlying pattern recognition algorithm using wavelets is documented here:
-    [1] Maharaj, E.A. (2002): Pattern Recognition of Time Series using Wavelets. In: Härdle W., Rönz B. (eds) Compstat. Physica, Heidelberg, 978-3-7908-1517-7.
-
-    The documentation of the python package used for the wavelt decomposition can be found here:
-    [2] https://pywavelets.readthedocs.io/en/latest/ref/cwt.html#continuous-wavelet-families
-    """
-
-    ref = data[ref_field].to_numpy()
-    cwtmat_ref, _ = pywt.cwt(ref, widths, waveform)
-    wavepower_ref = np.power(cwtmat_ref, 2)
-    len_width = len(widths)
-
-    def func(x, y):
-        return x.sum() / y.sum()
-
-    def isPattern(chunk):
-        cwtmat_chunk, _ = pywt.cwt(chunk, widths, waveform)
-        wavepower_chunk = np.power(cwtmat_chunk, 2)
-
-        # Permutation test on Powersum of matrix
-        for i in range(len_width):
-            x = wavepower_ref[i]
-            y = wavepower_chunk[i]
-            pval = permutation_test(x, y, method='approximate', num_rounds=200, func=func, seed=0)
-            if min(pval, 1 - pval) > 0.01:
-                return True
-        return False
-
-    dat = data[field]
-    sz = len(ref)
-    mask = customRoller(dat, window=sz, min_periods=sz).apply(isPattern, raw=True)
-
-    flagger = flagger.setFlags(field, loc=mask, **kwargs)
-    return data, flagger
-
-
-@register(masking='field')
-def flagPattern_dtw(data, field, flagger, ref_field, max_distance=0.03, normalize=True, **kwargs):
-    """ Pattern Recognition via Dynamic Time Warping.
-
-    The steps are:
-     1. work on chunks returned by a moving window
-     2. each chunk is compared to the given pattern, using the dynamic time warping algorithm as presented in [1]
-     3. if the compared chunk is equal to the given pattern it gets flagged
-
-    Parameters
-    ----------
-
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the data column, you want to correct.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    ref_field: str
-        The fieldname in `data` which holds the pattern.
-    max_distance: float
-        Maximum dtw-distance between partition and pattern, so that partition is recognized as pattern. Default: 0.03
-    normalize: boolean.
-        Normalizing dtw-distance (see [1]). Default: True
-
-
-    kwargs
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data values may have changed relatively to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
-
-
-    References
-    ----------
-    Find a nice description of underlying the Dynamic Time Warping Algorithm here:
-
-    [1] https://cran.r-project.org/web/packages/dtw/dtw.pdf
-    """
-    ref = data[ref_field]
-    ref_var = ref.var()
-
-    def func(a, b):
-        return np.linalg.norm(a - b)
-
-    def isPattern(chunk):
-        dist, *_ = dtw.dtw(chunk, ref, func)
-        if normalize:
-            dist /= ref_var
-        return dist < max_distance
-
-    dat = data[field]
-    sz = len(ref)
-    mask = customRoller(dat, window=sz, min_periods=sz).apply(isPattern, raw=True)
-
-    flagger = flagger.setFlags(field, loc=mask, **kwargs)
-    return data, flagger
diff --git a/saqc/funcs/proc_functions.py b/saqc/funcs/proc_functions.py
deleted file mode 100644
index aa6974c781e90c49520c63300bfae6c15af426ae..0000000000000000000000000000000000000000
--- a/saqc/funcs/proc_functions.py
+++ /dev/null
@@ -1,1265 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import pandas as pd
-import numpy as np
-from saqc.core.register import register
-from saqc.lib.ts_operators import interpolateNANs, aggregate2Freq, shift2Freq, expModelFunc
-from saqc.funcs.breaks_detection import breaks_flagRegimeAnomaly
-from saqc.funcs.modelling import modelling_changePointCluster
-from saqc.lib.tools import toSequence, mergeDios, dropper, mutateIndex, detectDeviants, evalFreqStr
-import dios
-import functools
-from scipy.optimize import curve_fit
-from sklearn.linear_model import LinearRegression
-from sklearn.utils import resample
-
-
-ORIGINAL_SUFFIX = "_original"
-
-METHOD2ARGS = {
-    "inverse_fshift": ("backward", pd.Timedelta),
-    "inverse_bshift": ("forward", pd.Timedelta),
-    "inverse_nshift": ("nearest", lambda x: pd.Timedelta(x) / 2),
-    "inverse_fagg": ("bfill", pd.Timedelta),
-    "inverse_bagg": ("ffill", pd.Timedelta),
-    "inverse_nagg": ("nearest", lambda x: pd.Timedelta(x) / 2),
-    "match": (None, lambda x: "0min"),
-}
-
-
-@register(masking='field')
-def proc_rollingInterpolateMissing(
-    data, field, flagger, winsz, func=np.median, center=True, min_periods=0, interpol_flag="UNFLAGGED", **kwargs
-):
-    """
-    Interpolates missing values (nan values present in the data) by assigning them the aggregation result of
-    a window surrounding them.
-
-    Note, that in the current implementation, center=True can only be used with integer window sizes - furthermore
-    note, that integer window sizes can yield screwed aggregation results for not-harmonized or irregular data.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-interpolated.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    winsz : int, str
-        The size of the window, the aggregation is computed from. Either counted in periods number (Integer passed),
-        or defined by a total temporal extension (offset String passed).
-    func : Callable
-        The function used for aggregation.
-    center : bool, default True
-        Wheather or not the window, the aggregation is computed of, is centered around the value to be interpolated.
-    min_periods : int
-        Minimum number of valid (not np.nan) values that have to be available in a window for its aggregation to be
-        computed.
-    interpol_flag : {'GOOD', 'BAD', 'UNFLAGGED', str}, default 'UNFLAGGED'
-        Flag that is to be inserted for the interpolated values. You can either pass one of the three major flag-classes
-        or specify directly a certain flag from the passed flagger.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data values may have changed relatively to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
-    """
-
-    data = data.copy()
-    datcol = data[field]
-    roller = datcol.rolling(window=winsz, center=center, min_periods=min_periods)
-    try:
-        func_name = func.__name__
-        if func_name[:3] == "nan":
-            func_name = func_name[3:]
-        rolled = getattr(roller, func_name)()
-    except AttributeError:
-        rolled = roller.apply(func)
-
-    na_mask = datcol.isna()
-    interpolated = na_mask & ~rolled.isna()
-    datcol[na_mask] = rolled[na_mask]
-    data[field] = datcol
-
-    if interpol_flag:
-        if interpol_flag in ["BAD", "UNFLAGGED", "GOOD"]:
-            interpol_flag = getattr(flagger, interpol_flag)
-        flagger = flagger.setFlags(field, loc=interpolated, force=True, flag=interpol_flag, **kwargs)
-
-    return data, flagger
-
-
-@register(masking='field')
-def proc_interpolateMissing(
-    data,
-    field,
-    flagger,
-    method,
-    inter_order=2,
-    inter_limit=2,
-    interpol_flag="UNFLAGGED",
-    downgrade_interpolation=False,
-    not_interpol_flags=None,
-    **kwargs
-):
-
-    """
-    Function to interpolate nan values in the data.
-
-    There are available all the interpolation methods from the pandas.interpolate method and they are applicable by
-    the very same key words, that you would pass to the ``pd.Series.interpolate``'s method parameter.
-
-    Note, that the `inter_limit` keyword really restricts the interpolation to chunks, not containing more than
-    `inter_limit` successive nan entries.
-
-    Note, that the function differs from ``proc_interpolateGrid``, in its behaviour to ONLY interpolate nan values that
-    were already present in the data passed.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-interpolated.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    method : {"linear", "time", "nearest", "zero", "slinear", "quadratic", "cubic", "spline", "barycentric",
-        "polynomial", "krogh", "piecewise_polynomial", "spline", "pchip", "akima"}: string
-        The interpolation method you want to apply.
-    inter_order : int, default 2
-        If there your selected interpolation method can be performed at different 'orders' - here you pass the desired
-        order.
-    inter_limit : int, default 2
-        Maximum number of consecutive 'nan' values allowed for a gap to be interpolated.
-    interpol_flag : {'GOOD', 'BAD', 'UNFLAGGED', str}, default 'UNFLAGGED'
-        Flag that is to be inserted for the interpolated values. You can either pass one of the three major flag-classes
-        or specify directly a certain flag from the passed flagger.
-    downgrade_interpolation : bool, default False
-        If interpolation can not be performed at `inter_order` - (not enough values or not implemented at this order) -
-        automaticalyy try to interpolate at order `inter_order` :math:`- 1`.
-    not_interpol_flags : {None, str, List[str]}, default None
-        A list of flags or a single Flag, marking values, you want NOT to be interpolated.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data values may have changed relative to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional information related to `data`.
-        Flag values may have changed relative to the flagger input.
-    """
-
-    data = data.copy()
-    inter_data = interpolateNANs(
-        data[field],
-        method,
-        order=inter_order,
-        inter_limit=inter_limit,
-        downgrade_interpolation=downgrade_interpolation,
-        return_chunk_bounds=False,
-    )
-    interpolated = data[field].isna() & inter_data.notna()
-
-    if not_interpol_flags:
-        for f in toSequence(not_interpol_flags):
-            if f in ["BAD", "UNFLAGGED", "GOOD"]:
-                f = getattr(flagger, f)
-            is_flagged = flagger.isFlagged(flag=f)[field]
-            cond = is_flagged & interpolated
-            inter_data.mask(cond, np.nan, inplace=True)
-        interpolated &= inter_data.notna()
-
-    if interpol_flag:
-        if interpol_flag in ["BAD", "UNFLAGGED", "GOOD"]:
-            interpol_flag = getattr(flagger, interpol_flag)
-        flagger = flagger.setFlags(field, loc=interpolated, force=True, flag=interpol_flag, **kwargs)
-
-    data[field] = inter_data
-    return data, flagger
-
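-# Illustrative sketch (not part of saqc): interpolating only gaps that span at
-# most `inter_limit` consecutive nan values, with plain pandas. Example values
-# and the helper name are hypothetical.
-def _sketch_limited_interpolation(inter_limit=2):
-    import numpy as np
-    import pandas as pd
-
-    s = pd.Series([1.0, np.nan, np.nan, 4.0, np.nan, np.nan, np.nan, 8.0])
-    na = s.isna()
-    # label each nan-run and look up its length
-    gap_id = (~na).cumsum()[na]
-    gap_len = gap_id.map(gap_id.value_counts())
-    small_gap = (gap_len <= inter_limit).reindex(s.index, fill_value=False)
-    # long gaps (here: the run of three nans) stay untouched
-    s.loc[na & small_gap] = s.interpolate(method="linear")
-    return s
-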
-
-@register(masking='field')
-def proc_interpolateGrid(
-        data,
-        field,
-        flagger,
-        freq,
-        method,
-        inter_order=2,
-        to_drop=None,
-        downgrade_interpolation=False,
-        empty_intervals_flag=None,
-        grid_field=None,
-        inter_limit=2,
-        freq_check=None,
-        **kwargs):
-
-    """
-    Function to interpolate the data at regular (equidistant) timestamps (or Grid points).
-
-    Note, that the interpolation will only be calculated for grid timestamps that have a preceding AND a succeeding
-    valid data value within "freq" range.
-
-    Note, that the function differs from proc_interpolateMissing by returning a whole new data set, only containing
-    samples at the interpolated, equidistant timestamps (of frequency "freq").
-
-    Note, it is possible to interpolate irregular "grids" (with no frequencies). In fact, any date index
-    can be the target of the interpolation. Just pass the field name of the variable holding the index
-    you want to interpolate to "grid_field". 'freq' is then used to determine the maximum gap size for
-    a grid point to be interpolated.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-interpolated.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    freq : str
-        An Offset String, interpreted as the frequency of
-        the grid you want to interpolate your data at.
-    method : {"linear", "time", "nearest", "zero", "slinear", "quadratic", "cubic", "spline", "barycentric",
-        "polynomial", "krogh", "piecewise_polynomial", "spline", "pchip", "akima"}: string
-        The interpolation method you want to apply.
-    inter_order : int, default 2
-        If your selected interpolation method can be performed at different 'orders', pass the desired
-        order here.
-    to_drop : {None, str, List[str]}, default None
-        Flags that refer to values you want to drop before interpolation - effectively excluding grid points from
-        interpolation that are only surrounded by values carrying a flag listed in `to_drop`. By default, the
-        flagger's *BAD* flag is used as the drop flag.
-    downgrade_interpolation : bool, default False
-        If interpolation cannot be performed at `inter_order` (not enough values or not implemented at this order),
-        automatically try to interpolate at order `inter_order` :math:`- 1`.
-    empty_intervals_flag : str, default None
-        A Flag, that you want to assign to those values in the resulting equidistant sample grid, that were not
-        surrounded by valid data in the original dataset, and thus were not interpolated. Default automatically assigns
-        ``flagger.BAD`` flag to those values.
-    grid_field : str, default None
-        Use the timestamp of another variable as (not necessarily regular) "grid" to be interpolated.
-    inter_limit : int, default 2
-        Maximum number of consecutive grid values allowed for interpolation. If set
-        to *n*, chunks of *n* and more consecutive grid values, with no data value in between, won't be
-        interpolated.
-    freq_check : {None, 'check', 'auto'}, default None
-
-        * ``None``: do not validate frequency-string passed to `freq`
-        * ``'check'``: estimate frequency and log a warning if the estimate mismatches the frequency string passed to 'freq', or
-          if no uniform sampling rate could be estimated
-        * ``'auto'``: estimate frequency and use estimate. (Ignores `freq` parameter.)
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data values and shape may have changed relative to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional information related to `data`.
-        Flag values and shape may have changed relative to the flagger input.
-    """
-
-    datcol = data[field]
-    datcol = datcol.copy()
-    flagscol = flagger.getFlags(field)
-    freq = evalFreqStr(freq, freq_check, datcol.index)
-    if empty_intervals_flag is None:
-        empty_intervals_flag = flagger.BAD
-
-    drop_mask = dropper(field, to_drop, flagger, flagger.BAD)
-    drop_mask |= flagscol.isna()
-    drop_mask |= datcol.isna()
-    datcol[drop_mask] = np.nan
-    datcol.dropna(inplace=True)
-    freq = evalFreqStr(freq, freq_check, datcol.index)
-    if datcol.empty:
-        data[field] = datcol
-        reshaped_flagger = flagger.initFlags(datcol).setFlags(field, flag=flagscol, force=True, inplace=True, **kwargs)
-        flagger = flagger.slice(drop=field).merge(reshaped_flagger, subset=[field], inplace=True)
-        return data, flagger
-    # account for annoying case of subsequent frequency aligned values, differing exactly by the margin
-    # 2*freq:
-    spec_case_mask = datcol.index.to_series()
-    spec_case_mask = spec_case_mask - spec_case_mask.shift(1)
-    spec_case_mask = spec_case_mask == 2 * pd.Timedelta(freq)
-    spec_case_mask = spec_case_mask[spec_case_mask]
-    spec_case_mask = spec_case_mask.resample(freq).asfreq().dropna()
-
-    if not spec_case_mask.empty:
-        spec_case_mask = spec_case_mask.tshift(-1, freq)
-
-    # prepare grid interpolation:
-    if grid_field is None:
-        grid_index = pd.date_range(start=datcol.index[0].floor(freq), end=datcol.index[-1].ceil(freq), freq=freq,
-                                   name=datcol.index.name)
-    else:
-        grid_index = data[grid_field].index
-
-
-    aligned_start = datcol.index[0] == grid_index[0]
-    aligned_end = datcol.index[-1] == grid_index[-1]
-    datcol = datcol.reindex(datcol.index.join(grid_index, how="outer",))
-
-    # do the interpolation
-    inter_data, chunk_bounds = interpolateNANs(
-        datcol, method, order=inter_order, inter_limit=inter_limit, downgrade_interpolation=downgrade_interpolation,
-        return_chunk_bounds=True
-    )
-
-    if grid_field is None:
-        # override falsely interpolated values:
-        inter_data[spec_case_mask.index] = np.nan
-
-    # store interpolated grid
-    inter_data = inter_data[grid_index]
-    data[field] = inter_data
-
-    # flags reshaping (dropping data drops):
-    flagscol.drop(flagscol[drop_mask].index, inplace=True)
-
-    if grid_field is not None:
-        # only basic flag propagation supported for custom grids (take worst from preceding/succeeding)
-        preceeding = flagscol.reindex(grid_index, method='ffill', tolerance=freq)
-        succeeding = flagscol.reindex(grid_index, method='bfill', tolerance=freq)
-        # check for too big gaps in the source data and drop the values interpolated in those too big gaps
-        na_mask = preceeding.isna() | succeeding.isna()
-        na_mask = na_mask[na_mask]
-        preceeding.drop(na_mask.index, inplace=True)
-        succeeding.drop(na_mask.index, inplace=True)
-        inter_data.drop(na_mask.index, inplace=True)
-        data[field] = inter_data
-        mask = succeeding > preceeding
-        preceeding.loc[mask] = succeeding.loc[mask]
-        flagscol = preceeding
-        flagger_new = flagger.initFlags(inter_data).setFlags(field, flag=flagscol, force=True, **kwargs)
-        flagger = flagger.slice(drop=field).merge(flagger_new)
-        return data, flagger
-
-    # for freq defined grids, max-aggregate flags of every grid points freq-ranged surrounding
-    # hack ahead! Resampling with overlapping intervals:
-    # 1. -> no rolling over categories allowed in pandas, so we translate manually:
-    cats = pd.CategoricalIndex(flagger.dtype.categories, ordered=True)
-    cats_dict = {cats[i]: i for i in range(0, len(cats))}
-    flagscol = flagscol.replace(cats_dict)
-    # 3. -> combine resample+rolling to resample with overlapping intervals:
-    flagscol = flagscol.resample(freq).max()
-    initial = flagscol[0]
-    flagscol = flagscol.rolling(2, center=True, closed="neither").max()
-    flagscol[0] = initial
-    cats_dict = {num: key for (key, num) in cats_dict.items()}
-    flagscol = flagscol.astype(int, errors="ignore").replace(cats_dict)
-    flagscol[flagscol.isna()] = empty_intervals_flag
-    # ...hack done
-
-    # we might miss the flag for interpolated data grids last entry (if we miss it - the datapoint is always nan
-    # - just settling a convention here(resulting GRID should start BEFORE first valid data entry and range to AFTER
-    # last valid data)):
-    if inter_data.shape[0] > flagscol.shape[0]:
-        flagscol = flagscol.append(pd.Series(empty_intervals_flag, index=[datcol.index[-1]]))
-
-    # Additional consistency operation: we have to block first/last interpolated datas flags - since they very
-    # likely represent chunk starts/ends (except data start and or end timestamp were grid-aligned before Grid
-    # interpolation already.)
-    if np.isnan(inter_data[0]) and not aligned_start:
-        chunk_bounds = chunk_bounds.insert(0, inter_data.index[0])
-    if np.isnan(inter_data[-1]) and not aligned_end:
-        chunk_bounds = chunk_bounds.append(pd.DatetimeIndex([inter_data.index[-1]]))
-    chunk_bounds = chunk_bounds.unique()
-    flagger_new = flagger.initFlags(inter_data).setFlags(field, flag=flagscol, force=True, inplace=True, **kwargs)
-
-    # block chunk ends of interpolation
-    flags_to_block = pd.Series(np.nan, index=chunk_bounds).astype(flagger_new.dtype)
-    flagger_new = flagger_new.setFlags(field, loc=chunk_bounds, flag=flags_to_block, force=True, inplace=True)
-
-    flagger = flagger.slice(drop=field).merge(flagger_new, subset=[field], inplace=True)
-    return data, flagger
-
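-# Illustrative sketch (not part of saqc): the core of the grid interpolation
-# above in plain pandas - interpolate on the union of data and grid timestamps,
-# then keep only the grid timestamps. Example values are hypothetical.
-def _sketch_grid_interpolation():
-    import pandas as pd
-
-    s = pd.Series(
-        [1.0, 2.0, 4.0],
-        index=pd.to_datetime(["2021-01-01 00:03", "2021-01-01 00:12", "2021-01-01 00:21"]),
-    )
-    grid = pd.date_range("2021-01-01 00:00", "2021-01-01 00:20", freq="10min")
-    union = s.reindex(s.index.union(grid))
-    return union.interpolate(method="time")[grid]
-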
-
-@register(masking='field')
-def proc_resample(
-    data,
-    field,
-    flagger,
-    freq,
-    agg_func=np.mean,
-    method="bagg",
-    max_invalid_total_d=np.inf,
-    max_invalid_consec_d=np.inf,
-    max_invalid_consec_f=np.inf,
-    max_invalid_total_f=np.inf,
-    flag_agg_func=max,
-    empty_intervals_flag=None,
-    to_drop=None,
-    all_na_2_empty=False,
-    freq_check=None,
-    **kwargs
-):
-    """
-    Function to resample the data. Afterwards the data will be sampled at regular (equidistant) timestamps
-    (or grid points). For this, the data in each sampling interval gets aggregated with a function specified by the
-    'agg_func' parameter, and the result gets projected onto the new timestamps with a method specified by "method".
-    The following methods (keywords) are available:
-
-    * ``'nagg'``: all values in the range (+/- `freq`/2) of a grid point get aggregated with agg_func and assigned to it.
-    * ``'bagg'``: all values in a sampling interval get aggregated with agg_func and the result gets assigned to the last
-      grid point.
-    * ``'fagg'``: all values in a sampling interval get aggregated with agg_func and the result gets assigned to the next
-      grid point.
-
-
-    Note that, if possible, functions passed to agg_func will get mapped internally onto pandas resample methods,
-    which results in a reasonable performance boost - however, for this to work, you should pass functions that have
-    the ``__name__`` attribute initialised with the name of the according resample method.
-    Furthermore, you shouldn't pass numpy's nan-functions
-    (``nansum``, ``nanmean``, ...) because those, for example, have ``__name__ == 'nansum'`` and will thus not
-    trigger the fast resample method, but the slower ``resample.apply(nanfunc)``. Also, internally, no nans get passed
-    to the functions anyway, so there is no point in passing the nan-functions.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-resampled.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    freq : str
-        An Offset String, that will be interpreted as the frequency you want to resample your data with.
-    agg_func : Callable
-        The function you want to use for aggregation.
-    method: {'fagg', 'bagg', 'nagg'}, default 'bagg'
-        Specifies which intervals to be aggregated for a certain timestamp. (preceding, succeeding or
-        "surrounding" interval). See description above for more details.
-    max_invalid_total_d : {np.inf, int}, default np.inf
-        Maximum number of invalid (nan) datapoints, allowed per resampling interval. If max_invalid_total_d is
-        exceeded, the interval gets resampled to nan. By default (``np.inf``), there is no bound to the number of nan
-        values in an interval and only intervals containing ONLY nan values or those, containing no values at all,
-        get projected onto nan
-    max_invalid_consec_d : {np.inf, int}, default np.inf
-        Maximum number of consecutive invalid (nan) data points, allowed per resampling interval.
-        If max_invalid_consec_d is exceeded, the interval gets resampled to nan. By default (np.inf),
-        there is no bound to the number of consecutive nan values in an interval and only intervals
-        containing ONLY nan values, or those containing no values at all, get projected onto nan.
-    max_invalid_total_f : {np.inf, int}, default np.inf
-        Same as `max_invalid_total_d`, only applying for the flags. The flag regarded as "invalid" value,
-        is the one passed to empty_intervals_flag (default=``flagger.BAD``).
-        Also this is the flag assigned to invalid/empty intervals.
-    max_invalid_consec_f : {np.inf, int}, default np.inf
-        Same as `max_invalid_consec_d`, only applying onto the flags. The flag regarded as "invalid" value, is the one
-        passed to empty_intervals_flag (default=flagger.BAD). Also this is the flag assigned to invalid/empty intervals.
-    flag_agg_func : Callable, default: max
-        The function you want to aggregate the flags with. It should be capable of operating on the flags dtype
-        (usually ordered categorical).
-    empty_intervals_flag : {None, str}, default None
-        A Flag, that you want to assign to invalid intervals. Invalid are those intervals, that contain nan values only,
-        or no values at all. Furthermore the empty_intervals_flag is the flag, serving as "invalid" identifier when
-        checking for the `max_invalid_total_f` and `max_invalid_consec_f` patterns. Default triggers ``flagger.BAD`` to
-        be assigned.
-    to_drop : {None, str, List[str]}, default None
-        Flags that refer to values you want to drop before resampling - effectively excluding values that are flagged
-        with a flag in `to_drop` from the resampling process - this means that they also will not be counted in the
-        `max_invalid_consec`/`max_invalid_total` evaluation. `to_drop` = ``None`` results in NO flags being dropped initially.
-    freq_check : {None, 'check', 'auto'}, default None
-
-        * ``None``: do not validate frequency-string passed to `freq`
-        * ``'check'``: estimate frequency and log a warning if the estimate mismatches the frequency string passed to 'freq', or
-          if no uniform sampling rate could be estimated
-        * ``'auto'``: estimate frequency and use estimate. (Ignores `freq` parameter.)
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data values and shape may have changed relative to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional information related to `data`.
-        Flag values and shape may have changed relative to the flagger input.
-    """
-
-    data = data.copy()
-    datcol = data[field]
-    flagscol = flagger.getFlags(field)
-    if empty_intervals_flag is None:
-        empty_intervals_flag = flagger.BAD
-
-    drop_mask = dropper(field, to_drop, flagger, [])
-    datcol.drop(datcol[drop_mask].index, inplace=True)
-    freq = evalFreqStr(freq, freq_check, datcol.index)
-    flagscol.drop(flagscol[drop_mask].index, inplace=True)
-    if all_na_2_empty:
-        if datcol.dropna().empty:
-            datcol = pd.Series([], index=pd.DatetimeIndex([]), name=field)
-
-    if datcol.empty:
-        # for consistency reasons - return empty data/flags column when there is no valid data left
-        # after filtering.
-        data[field] = datcol
-        reshaped_flagger = flagger.initFlags(datcol).setFlags(field, flag=flagscol, force=True, inplace=True, **kwargs)
-        flagger = flagger.slice(drop=field).merge(reshaped_flagger, subset=[field], inplace=True)
-        return data, flagger
-
-    datcol = aggregate2Freq(
-        datcol,
-        method,
-        freq,
-        agg_func,
-        fill_value=np.nan,
-        max_invalid_total=max_invalid_total_d,
-        max_invalid_consec=max_invalid_consec_d,
-    )
-    flagscol = aggregate2Freq(
-        flagscol,
-        method,
-        freq,
-        flag_agg_func,
-        fill_value=empty_intervals_flag,
-        max_invalid_total=max_invalid_total_f,
-        max_invalid_consec=max_invalid_consec_f,
-    )
-
-    # data/flags reshaping:
-    data[field] = datcol
-    reshaped_flagger = flagger.initFlags(datcol).setFlags(field, flag=flagscol, force=True, inplace=True, **kwargs)
-    flagger = flagger.slice(drop=field).merge(reshaped_flagger, subset=[field], inplace=True)
-    return data, flagger
-
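-# Illustrative sketch (not part of saqc): a rough pandas-only rendering of the
-# three projection methods described above; the exact interval-closure
-# conventions of the real implementation may differ. Example values are hypothetical.
-def _sketch_resample_methods():
-    import numpy as np
-    import pandas as pd
-
-    s = pd.Series(
-        np.arange(6.0),
-        index=pd.date_range("2021-01-01 00:00", periods=6, freq="7min"),
-    )
-    # 'bagg': values in [t, t + freq) are aggregated onto the preceding grid point t
-    bagg = s.resample("15min", closed="left", label="left").mean()
-    # 'fagg': values in (t - freq, t] are aggregated onto the succeeding grid point t
-    fagg = s.resample("15min", closed="right", label="right").mean()
-    # 'nagg': values within +/- freq/2 of a grid point are aggregated onto it
-    nagg = s.resample("15min", offset=pd.Timedelta(minutes=-7.5)).mean()
-    nagg.index = nagg.index + pd.Timedelta(minutes=7.5)
-    return bagg, fagg, nagg
-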
-
-@register(masking='field')
-def proc_shift(data, field, flagger, freq, method, to_drop=None, empty_intervals_flag=None, freq_check=None, **kwargs):
-    """
-    Function to shift data points to regular (equidistant) timestamps.
-    Values get shifted according to the keyword passed to the `method` parameter.
-
-    * ``'nshift'``: every grid point gets assigned the nearest value in its range. (range = +/- 0.5 * `freq`)
-    * ``'bshift'``:  every grid point gets assigned its first succeeding value - if there is one available in the
-      succeeding sampling interval.
-    * ``'fshift'``:  every grid point gets assigned its ultimately preceding value - if there is one available in
-      the preceding sampling interval.
-
-    Note: all data nans get excluded from shifting by default. If `to_drop` is ``None``, all *BAD* flagged values get
-    excluded as well.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-shifted.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    freq : str
-        A frequency offset string that will be interpreted as the sampling rate you want the data to be shifted to.
-    method: {'fshift', 'bshift', 'nshift'}, default 'nshift'
-        Specifies if datapoints get propagated forwards, backwards or to the nearest grid timestamp. See function
-        description for more details.
-    empty_intervals_flag : {None, str}, default None
-        A Flag, that you want to assign to grid points, where no values are available to be shifted to.
-        Default triggers flagger.BAD to be assigned.
-    to_drop : {None, str, List[str]}, default None
-        Flags that refer to values you want to drop before shifting - effectively, excluding values that are flagged
-        with a flag in to_drop from the shifting process. Default - to_drop = None  - results in flagger.BAD
-        values being dropped initially.
-    freq_check : {None, 'check', 'auto'}, default None
-
-        * ``None``: do not validate frequency-string passed to `freq`
-        * ``'check'``: estimate frequency and log a warning if the estimate mismatches the frequency string passed to `freq`,
-          or if no uniform sampling rate could be estimated
-        * ``'auto'``: estimate frequency and use estimate. (Ignores `freq` parameter.)
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data values and shape may have changed relative to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional information related to `data`.
-        Flag values and shape may have changed relative to the flagger input.
-    """
-    data = data.copy()
-    datcol = data[field]
-    flagscol = flagger.getFlags(field)
-
-    if empty_intervals_flag is None:
-        empty_intervals_flag = flagger.BAD
-
-    drop_mask = dropper(field, to_drop, flagger, flagger.BAD)
-    drop_mask |= datcol.isna()
-    datcol[drop_mask] = np.nan
-    datcol.dropna(inplace=True)
-    freq = evalFreqStr(freq, freq_check, datcol.index)
-    if datcol.empty:
-        data[field] = datcol
-        reshaped_flagger = flagger.initFlags(datcol).setFlags(field, flag=flagscol, force=True, inplace=True, **kwargs)
-        flagger = flagger.slice(drop=field).merge(reshaped_flagger, subset=[field], inplace=True)
-        return data, flagger
-
-    flagscol.drop(drop_mask[drop_mask].index, inplace=True)
-
-    datcol = shift2Freq(datcol, method, freq, fill_value=np.nan)
-    flagscol = shift2Freq(flagscol, method, freq, fill_value=empty_intervals_flag)
-    data[field] = datcol
-    reshaped_flagger = flagger.initFlags(datcol).setFlags(field, flag=flagscol, force=True, inplace=True, **kwargs)
-    flagger = flagger.slice(drop=field).merge(reshaped_flagger, subset=[field], inplace=True)
-    return data, flagger
-
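-# Illustrative sketch (not part of saqc): the three shift variants above
-# expressed as plain pandas reindexing; the tolerance keeps far-away grid
-# points empty. Example values are hypothetical.
-def _sketch_shift_to_grid():
-    import pandas as pd
-
-    s = pd.Series(
-        [1.0, 2.0, 3.0],
-        index=pd.to_datetime(["2021-01-01 00:04", "2021-01-01 00:11", "2021-01-01 00:27"]),
-    )
-    grid = pd.date_range("2021-01-01 00:00", periods=4, freq="10min")
-    nshift = s.reindex(grid, method="nearest", tolerance=pd.Timedelta("5min"))
-    bshift = s.reindex(grid, method="bfill", tolerance=pd.Timedelta("10min"))
-    fshift = s.reindex(grid, method="ffill", tolerance=pd.Timedelta("10min"))
-    return nshift, bshift, fshift
-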
-
-@register(masking='field')
-def proc_transform(data, field, flagger, func, **kwargs):
-    """
-    Function to transform data columns with a transformation that maps series onto series of the same length.
-
-    Note, that flags get preserved.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-transformed.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    func : Callable
-        Function to transform data[field] with.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data values may have changed relative to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional information related to `data`.
-    """
-    data = data.copy()
-    # NOTE: avoiding pd.Series.transform() in the line below, because transform processes columns element-wise
-    # (so interpolations wouldn't work)
-    new_col = pd.Series(func(data[field]), index=data[field].index)
-    data[field] = new_col
-    return data, flagger
-
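-# Illustrative sketch (not part of saqc): an example of the kind of whole-series
-# transformation expected above - a z-score scaling that keeps index and length.
-def _sketch_series_transform():
-    import pandas as pd
-
-    s = pd.Series([1.0, 2.0, 3.0, 4.0])
-    func = lambda x: (x - x.mean()) / x.std()
-    return pd.Series(func(s), index=s.index)
-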
-
-@register(masking='field')
-def proc_projectFlags(data, field, flagger, method, source, freq=None, to_drop=None, freq_check=None, **kwargs):
-
-    """
-    The function projects flags of "source" onto flags of "field". Wherever the "field" flags are "better" than the
-    source flags projected on them, they get overridden with the associated source flag value.
-
-    Which "field" flags are to be projected on which source flags, is controlled by the "method" and "freq"
-    parameters.
-
-    method: (field_flags associated with "field", source_flags associated with "source")
-
-    'inverse_nagg' - all field_flags within the range +/- freq/2 of a source_flag, get assigned this source flags value.
-        (if source_flag > field_flag)
-    'inverse_bagg' - all field_flags succeeding a source_flag within the range of "freq", get assigned this source flags
-        value. (if source_flag > field_flag)
-    'inverse_fagg' - all field_flags preceding a source_flag within the range of "freq", get assigned this source flags
-        value. (if source_flag > field_flag)
-
-    'inverse_interpolation' - all field_flags within the range +/- freq of a source_flag, get assigned this source flags value.
-        (if source_flag > field_flag)
-
-    'inverse_nshift' - That field_flag within the range +/- freq/2, that is nearest to a source_flag, gets the source
-        flags value. (if source_flag > field_flag)
-    'inverse_bshift' - That field_flag succeeding a source flag within the range freq, that is nearest to a
-        source_flag, gets assigned this source flags value. (if source_flag > field_flag)
-    'inverse_fshift' - That field_flag preceding a source flag within the range freq, that is nearest to a
-        source_flag, gets assigned this source flags value. (if source_flag > field_flag)
-
-    'match' - any field_flag with a timestamp matching a source_flags timestamp gets this source_flags value
-    (if source_flag > field_flag)
-
-    Note, to undo or backtrack a resampling/shifting/interpolation that has been performed with a certain method,
-    you can just pass the associated "inverse" method. Also you should pass the same drop flags keyword.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the data column, you want to project the source-flags onto.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    method : {'inverse_fagg', 'inverse_bagg', 'inverse_nagg', 'inverse_fshift', 'inverse_bshift', 'inverse_nshift'}
-        The method used for projection of source flags onto field flags. See description above for more details.
-    source : str
-        The source variable of the flags projection.
-    freq : {None, str}, default None
-        The freq determines the projection range for the projection method. See above description for more details.
-        By default (None), the sampling frequency of source is used.
-    to_drop : {None, str, List[str]}, default None
-        Flags referring to values that are to be dropped before the flags projection. Relevant only when projecting
-        with an inverted shift method. By default, flagger.BAD is listed.
-    freq_check : {None, 'check', 'auto'}, default None
-        - None: do not validate frequency-string passed to `freq`
-        - 'check': estimate frequency and log a warning if the estimate mismatches the frequency string passed to 'freq', or
-            if no uniform sampling rate could be estimated
-        - 'auto': estimate frequency and use estimate. (Ignores `freq` parameter.)
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional information related to `data`.
-        Flag values and shape may have changed relative to the flagger input.
-    """
-    flagscol, metacols = flagger.getFlags(source, full=True)
-    if flagscol.empty:
-        return data, flagger
-    target_datcol = data[field]
-    target_flagscol, target_metacols = flagger.getFlags(field, full=True)
-
-    if (freq is None) and (method != "match"):
-        freq_check = 'auto'
-
-    freq = evalFreqStr(freq, freq_check, flagscol.index)
-
-    if method[-13:] == "interpolation":
-        backprojected = flagscol.reindex(target_flagscol.index, method="bfill", tolerance=freq)
-        fwrdprojected = flagscol.reindex(target_flagscol.index, method="ffill", tolerance=freq)
-        b_replacement_mask = (backprojected > target_flagscol) & (backprojected >= fwrdprojected)
-        f_replacement_mask = (fwrdprojected > target_flagscol) & (fwrdprojected > backprojected)
-        target_flagscol.loc[b_replacement_mask] = backprojected.loc[b_replacement_mask]
-        target_flagscol.loc[f_replacement_mask] = fwrdprojected.loc[f_replacement_mask]
-
-        backprojected_meta = {}
-        fwrdprojected_meta = {}
-        for meta_key in target_metacols.keys():
-            backprojected_meta[meta_key] = metacols[meta_key].reindex(target_metacols[meta_key].index, method='bfill',
-                                                                      tolerance=freq)
-            fwrdprojected_meta[meta_key] = metacols[meta_key].reindex(target_metacols[meta_key].index, method='ffill',
-                                                                      tolerance=freq)
-            target_metacols[meta_key].loc[b_replacement_mask] = backprojected_meta[meta_key].loc[b_replacement_mask]
-            target_metacols[meta_key].loc[f_replacement_mask] = fwrdprojected_meta[meta_key].loc[f_replacement_mask]
-
-    if method[-3:] == "agg" or method == "match":
-        # Aggregation - Inversion
-        projection_method = METHOD2ARGS[method][0]
-        tolerance = METHOD2ARGS[method][1](freq)
-        flagscol = flagscol.reindex(target_flagscol.index, method=projection_method, tolerance=tolerance)
-        replacement_mask = flagscol > target_flagscol
-        target_flagscol.loc[replacement_mask] = flagscol.loc[replacement_mask]
-        for meta_key in target_metacols.keys():
-            metacols[meta_key] = metacols[meta_key].reindex(target_metacols[meta_key].index, method=projection_method,
-                                                            tolerance=tolerance)
-            target_metacols[meta_key].loc[replacement_mask] = metacols[meta_key].loc[replacement_mask]
-
-    if method[-5:] == "shift":
-        # NOTE: although inverting a simple shift seems to be a less complex operation, it has quite some
-        # code assigned to it and appears to be more verbose than inverting aggregation -
-        # that owes itself to the problem of BAD/invalid values blocking a proper
-        # shift inversion and having to be outsorted before shift inversion and re-inserted afterwards.
-        #
-        # starting with the dropping and its memorization:
-
-        drop_mask = dropper(field, to_drop, flagger, flagger.BAD)
-        drop_mask |= target_datcol.isna()
-        target_flagscol_drops = target_flagscol[drop_mask]
-        target_flagscol.drop(drop_mask[drop_mask].index, inplace=True)
-
-        # shift inversion
-        projection_method = METHOD2ARGS[method][0]
-        tolerance = METHOD2ARGS[method][1](freq)
-        flags_merged = pd.merge_asof(
-            flagscol,
-            pd.Series(target_flagscol.index.values, index=target_flagscol.index, name="pre_index"),
-            left_index=True,
-            right_index=True,
-            tolerance=tolerance,
-            direction=projection_method,
-        )
-        flags_merged.dropna(subset=["pre_index"], inplace=True)
-        flags_merged = flags_merged.set_index(["pre_index"]).squeeze()
-
-        # write flags to target
-        replacement_mask = flags_merged > target_flagscol.loc[flags_merged.index]
-        target_flagscol.loc[replacement_mask[replacement_mask].index] = flags_merged.loc[replacement_mask]
-
-        # reinsert drops
-        target_flagscol = target_flagscol.reindex(target_flagscol.index.join(target_flagscol_drops.index, how="outer"))
-        target_flagscol.loc[target_flagscol_drops.index] = target_flagscol_drops.values
-
-        for meta_key in target_metacols.keys():
-            target_metadrops = target_metacols[meta_key][drop_mask]
-            target_metacols[meta_key].drop(drop_mask[drop_mask].index, inplace=True)
-            meta_merged = pd.merge_asof(
-                metacols[meta_key],
-                pd.Series(target_metacols[meta_key].index.values, index=target_metacols[meta_key].index,
-                          name="pre_index"),
-                left_index=True,
-                right_index=True,
-                tolerance=tolerance,
-                direction=projection_method,
-            )
-            meta_merged.dropna(subset=["pre_index"], inplace=True)
-            meta_merged = meta_merged.set_index(["pre_index"]).squeeze()
-            # reinsert drops
-            target_metacols[meta_key][replacement_mask[replacement_mask].index] = meta_merged[replacement_mask]
-            target_metacols[meta_key] = target_metacols[meta_key].reindex(
-                target_metacols[meta_key].index.join(target_metadrops.index, how="outer"))
-            target_metacols[meta_key].loc[target_metadrops.index] = target_metadrops.values
-
-    flagger = flagger.setFlags(field, flag=target_flagscol, with_extra=True, **target_metacols)
-    return data, flagger
-
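-# Illustrative sketch (not part of saqc): the shift inversion above builds on
-# pandas.merge_asof; a minimal standalone illustration of that matching step,
-# with hypothetical timestamps and flag values.
-def _sketch_asof_projection():
-    import pandas as pd
-
-    source = pd.Series(
-        [1, 2],
-        index=pd.to_datetime(["2021-01-01 00:00", "2021-01-01 00:10"]),
-        name="flag",
-    )
-    target_index = pd.to_datetime(["2021-01-01 00:02", "2021-01-01 00:09", "2021-01-01 00:30"])
-    target = pd.Series(target_index.values, index=target_index, name="pre_index")
-    # every source timestamp gets matched to the nearest target timestamp in the
-    # given direction, or stays unmatched if none lies within the tolerance
-    return pd.merge_asof(
-        source,
-        target,
-        left_index=True,
-        right_index=True,
-        tolerance=pd.Timedelta("10min"),
-        direction="forward",
-    )
-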
-
-@register(masking='none')
-def proc_fork(data, field, flagger, suffix=ORIGINAL_SUFFIX, **kwargs):
-    """
-    The function generates a copy of the data "field" and inserts it under the name field + suffix into the existing
-    data.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the data column, you want to fork (copy).
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    suffix: str
-        Substring to append to the forked data variables name.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data shape may have changed relative to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional information related to `data`.
-        Flags shape may have changed relative to the flagger input.
-    """
-    return proc_copy(data, field, flagger, newfield=str(field) + suffix, **kwargs)
-
-
-@register(masking='none')
-def proc_copy(data, field, flagger, newfield, **kwargs):
-    """
-    The function generates a copy of the data "field" and inserts it under the name field + suffix into the existing
-    data.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the data column you want to copy.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    newfield : str
-        The fieldname the copy is to be inserted under.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data shape may have changed relative to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional information related to `data`.
-        Flags shape may have changed relative to the flagger input.
-    """
-
-    if newfield in flagger.flags.columns.union(data.columns):
-        raise ValueError(f"{newfield}: field already exists")
-
-    flags, extras = flagger.getFlags(field, full=True)
-    newflagger = flagger.replaceField(newfield, flags=flags, **extras)
-    newdata = data.copy()
-    newdata[newfield] = data[field].copy()
-    return newdata, newflagger
-
-
-@register(masking='none')
-def proc_drop(data, field, flagger, **kwargs):
-    """
-    The function drops field from the data dios and the flagger.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the data column, you want to drop.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data shape may have changed relative to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional information related to `data`.
-        Flags shape may have changed relative to the flagger input.
-    """
-
-    data = data.copy()
-    del data[field]
-    flagger = flagger.replaceField(field, flags=None)
-    return data, flagger
-
-
-@register(masking='none')
-def proc_rename(data, field, flagger, new_name, **kwargs):
-    """
-    The function renames field to new name (in both, the flagger and the data).
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the data column, you want to rename.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    new_name : str
-        The new name `field` is to be renamed to.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional information related to `data`.
-    """
-    # store
-    s = data[field]
-    f, e = flagger.getFlags(field, full=True)
-
-    # delete
-    data = data.copy()
-    del data[field]
-    flagger = flagger.replaceField(field, flags=None)
-
-    # insert
-    data[new_name] = s
-    flagger = flagger.replaceField(new_name, inplace=True, flags=f, **e)
-
-    return data, flagger
-
-
-def _drift_fit(x, shift_target, cal_mean):
-    x_index = x.index - x.index[0]
-    x_data = x_index.total_seconds().values
-    x_data = x_data / x_data[-1]
-    y_data = x.values
-    origin_mean = np.mean(y_data[:cal_mean])
-    target_mean = np.mean(y_data[-cal_mean:])
-
-    def modelWrapper(x, c, a=origin_mean, target_mean=target_mean):
-        # final fitted curves val = target mean
-        b = (target_mean - a) / (np.exp(c) - 1)
-        return expModelFunc(x, a, b, c)
-
-    dataFitFunc = functools.partial(modelWrapper, a=origin_mean, target_mean=target_mean)
-
-    try:
-        fitParas, _ = curve_fit(dataFitFunc, x_data, y_data, bounds=([0], [np.inf]))
-        dataFit = dataFitFunc(x_data, fitParas[0])
-        b_val = (shift_target - origin_mean) / (np.exp(fitParas[0]) - 1)
-        dataShiftFunc = functools.partial(expModelFunc, a=origin_mean, b=b_val, c=fitParas[0])
-        dataShift = dataShiftFunc(x_data)
-    except RuntimeError:
-        dataFit = np.array([0] * len(x_data))
-        dataShift = np.array([0] * len(x_data))
-
-    return dataFit, dataShift
-
-
-@register(masking='all')
-def proc_seefoExpDriftCorrecture(data, field, flagger, maint_data_field, cal_mean=5, flag_maint_period=False,
-                                 check_maint='1h', **kwargs):
-    """
-    The function fits an exponential model to chunks of data[field].
-    It is assumed that, between maintenance events, there is a drift effect shifting the measurements in a way that
-    can be described by the model M:
-
-    M(t, a, b, c) = a + b * (exp(c * t) - 1)
-
-    The values y_0 and y_1, describing the mean value directly after the last maintenance event (y_0) and
-    directly before the next maintenance event (y_1), impose the following additional conditions on the drift model:
-
-    M(0, a, b, c) = y0
-    M(1, a, b, c) = y1
-
-    Solving these equations, one obtains the one-parameter model
-
-    M_drift(t, c) = y0 + [(y1 - y0) / (exp(c) - 1)] * (exp(c * t) - 1)
-
-    for every data chunk in between maintenance events.
-
-    After having found the optimal parameter c*, the correction is performed by bending the fitted curve M_drift(t, c*)
-    in a way that it matches y2 at t=1 (with y2 being the mean value observed directly after the end of the next
-    maintenance event).
-    This bent curve is given by:
-
-    M_shift(t, c*) = M(t, y0, [(y2 - y0) / (exp(c*) - 1)], c*)
-
-    And the new values at t are computed via:
-
-    new_vals(t) = old_vals(t) + M_shift(t) - M_drift(t)
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the data column, you want to correct.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    maint_data_field : str
-        The fieldname of the datacolumn holding the maintenance information.
-        The maintenance data is expected to have the following form:
-        The series' timestamps themselves represent the beginnings of the
-        maintenance events, whereas the values represent the endings of the maintenance intervals.
-    cal_mean : int, default 5
-        The number of values the mean is computed over, for obtaining the value level directly after and
-        directly before a maintenance event. These values are needed for the shift calibration. (see above description)
-    flag_maint_period : bool, default False
-        Whether or not to flag the values obtained during maintenance periods as BAD.
-    check_maint : str, default '1h'
-        Whether or not to check if the reported maintenance intervals are plausible.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data values may have changed relative to the data input.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional information related to `data`.
-        Flag values may have changed relative to the flagger input.
-    """
-
-
-    # 1: extract fit intervals:
-    if data[maint_data_field].empty:
-        return data, flagger
-    data = data.copy()
-    to_correct = data[field]
-    maint_data = data[maint_data_field]
-    drift_frame = pd.DataFrame({"drift_group": np.nan, to_correct.name: to_correct.values}, index=to_correct.index)
-
-    # group the drift frame
-    for k in range(0, maint_data.shape[0] - 1):
-        # assign group numbers for the timespans in between one maintenance ending and the beginning of the next
-        # maintenance time itself remains np.nan assigned
-        drift_frame.loc[maint_data.values[k] : pd.Timestamp(maint_data.index[k + 1]), "drift_group"] = k
-    drift_grouper = drift_frame.groupby("drift_group")
-    # define target values for correction
-    shift_targets = drift_grouper.aggregate(lambda x: x[:cal_mean].mean()).shift(-1)
-
-    for k, group in drift_grouper:
-        dataSeries = group[to_correct.name]
-        dataFit, dataShiftTarget = _drift_fit(dataSeries, shift_targets.loc[k, :][0], cal_mean)
-        dataFit = pd.Series(dataFit, index=group.index)
-        dataShiftTarget = pd.Series(dataShiftTarget, index=group.index)
-        dataShiftVektor = dataShiftTarget - dataFit
-        shiftedData = dataSeries + dataShiftVektor
-        to_correct[shiftedData.index] = shiftedData
-
-    if flag_maint_period:
-        to_flag = drift_frame["drift_group"]
-        to_flag = to_flag.drop(to_flag[: maint_data.index[0]].index)
-        to_flag = to_flag[to_flag.isna()]
-        flagger = flagger.setFlags(field, loc=to_flag, **kwargs)
-
-    data[field] = to_correct
-
-    return data, flagger
-
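-# Illustrative sketch (not part of saqc): fitting the one-parameter drift model
-# M_drift(t, c) = y0 + ((y1 - y0) / (exp(c) - 1)) * (exp(c * t) - 1) with scipy
-# on synthetic data; all values here are hypothetical.
-def _sketch_exp_drift_fit():
-    import numpy as np
-    from scipy.optimize import curve_fit
-
-    t = np.linspace(0, 1, 50)
-    y0, y1, true_c = 10.0, 12.0, 3.0
-    y = y0 + ((y1 - y0) / (np.exp(true_c) - 1)) * (np.exp(true_c * t) - 1)
-
-    def m_drift(t, c):
-        return y0 + ((y1 - y0) / (np.exp(c) - 1)) * (np.exp(c * t) - 1)
-
-    (c_fit,), _ = curve_fit(m_drift, t, y, p0=[1.0], bounds=(1e-6, np.inf))
-    return c_fit  # should land close to true_c
-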
-
-@register
-def proc_seefoLinearDriftCorrecture(data, field, flagger, x_field, y_field, **kwargs):
-    """
-    Train a linear model that predicts data[y_field] by x_1*(data[x_field]) + x_0. (Least squares fit)
-
-    Then correct the data[field] via:
-
-    data[field] = data[field]*x_1 + x_0
-
-    Note, that data[x_field] and data[y_field] must be of equal length.
-    (Also, you might want them to be sampled at same timestamps.)
-
-    Parameters
-    ----------
-    x_field : String
-        Field name of x - data.
-    y_field : String
-        Field name of y - data.
-
-    """
-    data = data.copy()
-    datcol = data[field]
-    reg = LinearRegression()
-    reg.fit(data[x_field].values.reshape(-1,1), data[y_field].values)
-    datcol = (datcol * reg.coef_[0]) + reg.intercept_
-    data[field] = datcol
-    return data, flagger
-
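-# Illustrative sketch (not part of saqc): the same least-squares correction idea
-# with numpy only - fit y ~ x_1 * x + x_0, then apply that mapping to the data.
-# Example values are hypothetical.
-def _sketch_linear_correction():
-    import numpy as np
-
-    x = np.array([0.0, 1.0, 2.0, 3.0])
-    y = np.array([0.4, 2.6, 4.4, 6.6])
-    x_1, x_0 = np.polyfit(x, y, deg=1)  # slope and intercept of the fit
-    return x * x_1 + x_0
-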
-
-@register(masking='all')
-def proc_correctRegimeAnomaly(data, field, flagger, cluster_field, model, regime_transmission=None, x_date=False):
-    """
-    Function fits the passed model to the different regimes in data[field] and tries to correct
-    those values that have been assigned a negative label by data[cluster_field].
-
-    Currently, the only correction mode supported is the "parameter propagation."
-
-    This means, any regime :math:`z`, labeled negatively and being modeled by the parameters p, gets corrected via:
-
-    :math:`z_{correct} = z + (m(p^*) - m(p))`,
-
-    where :math:`p^*` denotes the parameter set belonging to the fit of the nearest not-negatively labeled cluster.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the data column, you want to correct.
-    flagger : saqc.flagger
-        A flagger object, holding flags and additional information related to `data`.
-    cluster_field : str
-        A string denoting the field in data, holding the cluster label for the data you want to correct.
-    model : Callable
-        The model function to be fitted to the regimes.
-        It must be a function of the form :math:`f(x, *p)`, where :math:`x` is the ``numpy.array`` holding the
-        independent variables and :math:`p` are the model parameters that are to be obtained by fitting.
-        Depending on the `x_date` parameter, independent variable x will either be the timestamps
-        of every regime transformed to seconds from epoch, or it will be just seconds, counting the regimes length.
-    regime_transmission : {None, str}, default None
-        If an offset string is passed, a data chunk of length `regime_transmission` right at the
-        start and right at the end is ignored when fitting the model. This is to account for the
-        unreliability of data near the changepoints of regimes.
-    x_date : bool, default False
-        If True, use "seconds from epoch" as x input to the model func, instead of "seconds from regime start".
-
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data values may have changed relative to the data input.
-    flagger : saqc.flagger
-        The flagger object, holding flags and additional information related to `data`.
-    """
-
-    cluster_ser = data[cluster_field]
-    unique_successive = pd.unique(cluster_ser.values)
-    data_ser = data[field]
-    regimes = data_ser.groupby(cluster_ser)
-    para_dict = {}
-    x_dict = {}
-    x_mask = {}
-    if regime_transmission is not None:
-        # get seconds
-        regime_transmission = pd.Timedelta(regime_transmission).total_seconds()
-    for label, regime in regimes:
-        if x_date is False:
-            # get seconds data:
-            xdata = (regime.index - regime.index[0]).to_numpy(dtype=float)*10**(-9)
-        else:
-            # get seconds from epoch data
-            xdata = regime.index.to_numpy(dtype=float)*10**(-9)
-        ydata = regime.values
-        valid_mask = ~np.isnan(ydata)
-        if regime_transmission is not None:
-            valid_mask &= (xdata > xdata[0] + regime_transmission)
-            valid_mask &= (xdata < xdata[-1] - regime_transmission)
-        try:
-            p, pcov = curve_fit(model, xdata[valid_mask], ydata[valid_mask])
-        except (RuntimeError, ValueError):
-            p = np.array([np.nan])
-        para_dict[label] = p
-        x_dict[label] = xdata
-        x_mask[label] = valid_mask
-
-    first_normal = unique_successive > 0
-    first_valid = np.array([~pd.isna(para_dict[unique_successive[i]]).any() for i in range(0, unique_successive.shape[0])])
-    first_valid = np.where(first_normal & first_valid)[0][0]
-    last_valid = 1
-
-    for k in range(0, unique_successive.shape[0]):
-        if (unique_successive[k] < 0) and (not pd.isna(para_dict[unique_successive[k]]).any()):
-            ydata = data_ser[regimes.groups[unique_successive[k]]].values
-            xdata = x_dict[unique_successive[k]]
-            ypara = para_dict[unique_successive[k]]
-            if k > 0:
-                target_para = para_dict[unique_successive[k-last_valid]]
-            else:
-                # first regime has no "last valid" to its left, so we use first valid to the right:
-                target_para = para_dict[unique_successive[k + first_valid]]
-            y_shifted = ydata + (model(xdata, *target_para) - model(xdata, *ypara))
-            data_ser[regimes.groups[unique_successive[k]]] = y_shifted
-            if k > 0:
-                last_valid += 1
-        elif pd.isna(para_dict[unique_successive[k]]).any() & (k > 0):
-            last_valid += 1
-        else:
-            last_valid = 1
-
-    data[field] = data_ser
-    return data, flagger
-
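-# Illustrative sketch (not part of saqc): "parameter propagation" on a toy
-# example - an offset regime is corrected by the difference of two fitted
-# constant models. Example values are hypothetical.
-def _sketch_parameter_propagation():
-    import numpy as np
-    from scipy.optimize import curve_fit
-
-    def model(x, p1):
-        return np.full_like(x, p1, dtype=float)
-
-    x = np.arange(10, dtype=float)
-    normal = np.full(10, 5.0)
-    anomalous = np.full(10, 8.0)  # negatively labeled regime, offset by +3
-    p_normal, _ = curve_fit(model, x, normal)
-    p_anom, _ = curve_fit(model, x, anomalous)
-    # z_correct = z + (m(p*) - m(p))
-    return anomalous + (model(x, *p_normal) - model(x, *p_anom))
-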
-
-@register(masking='all')
-def proc_offsetCorrecture(data, field, flagger, max_mean_jump, normal_spread, search_winsz, min_periods,
-                          regime_transmission=None):
-    """
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the data column, you want to correct.
-    flagger : saqc.flagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    max_mean_jump : float
-        When searching for changepoints in mean - this is the threshold a mean difference in the
-        sliding window search must exceed to trigger changepoint detection.
-    normal_spread : float
-        Threshold denoting the maximum amount by which regimes are allowed to absolutely differ in their means
-        to form the "normal group" of values.
-    search_winsz : str
-        Size of the adjacent windows that are used to search for the mean changepoints.
-    min_periods : int
-        Minimum number of periods a search window has to contain, for the result of the changepoint
-        detection to be considered valid.
-    regime_transmission : {None, str}, default None
-        If an offset string is passed, a data chunk of length `regime_transmission` right after the
-        start and right before the end of any regime is ignored when calculating a regime's mean for the data
-        correction. This is to account for the unreliability of data near the changepoints of regimes.
-
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-        Data values may have changed relative to the data input.
-    flagger : saqc.flagger
-        The flagger object, holding flags and additional information related to `data`.
-
-    """
-
-    data, flagger = proc_fork(data, field, flagger, '_CPcluster')
-    data, flagger = modelling_changePointCluster(data, field + '_CPcluster', flagger,
-                                                 lambda x, y: np.abs(np.mean(x) - np.mean(y)),
-                                                 lambda x, y: max_mean_jump,
-                                                 bwd_window=search_winsz,
-                                                 min_periods_bwd=min_periods)
-    data, flagger = breaks_flagRegimeAnomaly(data, field, flagger, field + '_CPcluster', normal_spread, set_flags=False)
-    data, flagger = proc_correctRegimeAnomaly(data, field, flagger, field + '_CPcluster',
-                                              lambda x, p1: np.array([p1] * x.shape[0]),
-                                              regime_transmission=regime_transmission)
-    data, flagger = proc_drop(data, field + '_CPcluster', flagger)
-
-    return data, flagger
diff --git a/saqc/funcs/resampling.py b/saqc/funcs/resampling.py
new file mode 100644
index 0000000000000000000000000000000000000000..d617798384dac1b6bef059c6e59c3d0d3cd47871
--- /dev/null
+++ b/saqc/funcs/resampling.py
@@ -0,0 +1,547 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from typing import Callable, Tuple, Optional, Union
+from typing_extensions import Literal
+import numpy as np
+import pandas as pd
+from dios import DictOfSeries
+
+from saqc.constants import *
+from saqc.core import processing, Flags
+from saqc.core.register import _isflagged
+from saqc.lib.tools import evalFreqStr, getFreqDelta
+from saqc.lib.ts_operators import shift2Freq, aggregate2Freq
+from saqc.funcs.interpolation import interpolateIndex, _SUPPORTED_METHODS
+import saqc.funcs.tools as tools
+
+
+METHOD2ARGS = {
+    "inverse_fshift": ("backward", pd.Timedelta),
+    "inverse_bshift": ("forward", pd.Timedelta),
+    "inverse_nshift": ("nearest", lambda x: pd.Timedelta(x) / 2),
+    "inverse_fagg": ("bfill", pd.Timedelta),
+    "inverse_bagg": ("ffill", pd.Timedelta),
+    "inverse_nagg": ("nearest", lambda x: pd.Timedelta(x) / 2),
+    "match": (None, lambda _: "0min"),
+}
+
+
+@processing()
+def linear(
+    data: DictOfSeries, field: str, flags: Flags, freq: str, **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    A method to "regularize" data by interpolating linearly the data at regular timestamp.
+
+    A series of data is considered "regular", if it is sampled regularly (= having uniform sampling rate).
+
+    Interpolated values will get assigned the worst flag within freq-range.
+
+    Note, that the data only gets interpolated at those (regular) timestamps that have a valid (existing and
+    not-na) datapoint preceding them and one succeeding them within freq range.
+    Regular timestamps that do not satisfy this condition get nan assigned AND the associated flag will be of value
+    ``UNFLAGGED``.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+
+    field : str
+        The fieldname of the column, holding the data-to-be-regularized.
+
+    flags : saqc.Flags
+        Container to store flags of the data.
+
+    freq : str
+        An offset string. The frequency of the grid you want to interpolate your data at.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        Data values and shape may have changed relative to the data input.
+    flags : saqc.Flags
+        The quality flags of data.
+        Flag values and shape may have changed relative to the flags input.
+    """
+
+    return interpolateIndex(data, field, flags, freq, "time", **kwargs)
+
+
+@processing()
+def interpolate(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    freq: str,
+    method: _SUPPORTED_METHODS,
+    order: int = 1,
+    **kwargs,
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    A method to "regularize" data by interpolating the data at regular timestamp.
+
+    A series of data is considered "regular", if it is sampled regularly (= having uniform sampling rate).
+
+    Interpolated values will get assigned the worst flag within freq-range.
+
+    All the interpolation methods of ``pandas.Series.interpolate`` are available and are selected by
+    the very same keywords.
+
+    Note, that, to perform a timestamp aware, linear interpolation, you have to pass ``'time'`` as `method`,
+    and NOT ``'linear'``.
+
+    Note, that the data only gets interpolated at those (regular) timestamps that have a valid (existing and
+    not-na) datapoint preceding them and one succeeding them within freq range.
+    Regular timestamps that do not satisfy this condition get nan assigned AND the associated flag will be of value
+    ``UNFLAGGED``.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+
+    field : str
+        The fieldname of the column, holding the data-to-be-regularized.
+
+    flags : saqc.Flags
+        Container to store flags of the data.
+
+    freq : str
+        An offset string. The frequency of the grid you want to interpolate your data at.
+
+    method : {"linear", "time", "nearest", "zero", "slinear", "quadratic", "cubic", "spline", "barycentric",
+        "polynomial", "krogh", "piecewise_polynomial", "pchip", "akima"}
+        The interpolation method you want to apply.
+
+    order : int, default 1
+        If your selected interpolation method can be performed at different *orders*, pass the desired order here.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        Data values and shape may have changed relatively to the data input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values and shape may have changed relatively to the flags input.
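+
+    Examples
+    --------
+    The distinction between ``method="time"`` and ``method="linear"`` follows the
+    pandas convention (a plain pandas sketch, illustrative only):
+
+    >>> import numpy as np
+    >>> import pandas as pd
+    >>> s = pd.Series(
+    ...     [1.0, np.nan, 4.0],
+    ...     index=pd.to_datetime(
+    ...         ["2021-01-01 00:00", "2021-01-01 00:01", "2021-01-01 00:10"]
+    ...     ),
+    ... )
+    >>> by_time = s.interpolate(method="time")        # respects the uneven timestamps
+    >>> by_position = s.interpolate(method="linear")  # treats values as equidistant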
+    """
+
+    return interpolateIndex(
+        data, field, flags, freq, method=method, order=order, **kwargs
+    )
+
+
+@processing()
+def shift(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    freq: str,
+    method: Literal["fshift", "bshift", "nshift"] = "nshift",
+    freq_check: Optional[Literal["check", "auto"]] = None,
+    **kwargs,
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Function to shift data and flags to a regular (equidistant) timestamp grid, according to ``method``.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+
+    field : str
+        The fieldname of the column, holding the data-to-be-shifted.
+
+    flags : saqc.Flags
+        Container to store flags of the data.
+
+    freq : str
+        A frequency offset string that will be interpreted as the sampling rate you want the data to be shifted to.
+
+    method : {'fshift', 'bshift', 'nshift'}, default 'nshift'
+        Specifies how misaligned data-points get propagated to a grid timestamp.
+        Following choices are available:
+
+        * 'nshift' : every grid point gets assigned the nearest value in its range (range = +/- 0.5 * `freq`).
+          A conceptual sketch is given in the Examples section below.
+        * 'bshift' : every grid point gets assigned its first succeeding value, if one is available in
+          the succeeding sampling interval.
+        * 'fshift' : every grid point gets assigned its last preceding value, if one is available in
+          the preceding sampling interval.
+
+    freq_check : {None, 'check', 'auto'}, default None
+
+        * ``None`` : do not validate frequency-string passed to `freq`
+        * 'check' : estimate frequency and log a warning if the estimate mismatches the frequency string passed to `freq`,
+          or if no uniform sampling rate could be estimated
+        * 'auto' : estimate frequency and use estimate. (Ignores `freq` parameter.)
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        Data values and shape may have changed relatively to the data input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values and shape may have changed relatively to the flags input.
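+
+    Examples
+    --------
+    Conceptually, ``'nshift'`` corresponds to a nearest-neighbour reindexing onto the
+    target grid with a tolerance of half the grid spacing (illustrative sketch only,
+    not the internal implementation):
+
+    >>> import pandas as pd
+    >>> s = pd.Series(
+    ...     [1.0, 2.0, 3.0],
+    ...     index=pd.to_datetime(
+    ...         ["2021-01-01 00:01", "2021-01-01 00:09", "2021-01-01 00:21"]
+    ...     ),
+    ... )
+    >>> grid = pd.date_range("2021-01-01 00:00", periods=3, freq="10min")
+    >>> shifted = s.reindex(grid, method="nearest", tolerance=pd.Timedelta("10min") / 2)
+    >>> # -> 1.0 at 00:00, 2.0 at 00:10, 3.0 at 00:20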
+    """
+    datcol = data[field]
+    if datcol.empty:
+        return data, flags
+
+    flagged = _isflagged(flags[field], kwargs["to_mask"])
+    datcol[flagged] = np.nan
+    freq = evalFreqStr(freq, freq_check, datcol.index)
+
+    # do the shift
+    datcol = shift2Freq(datcol, method, freq, fill_value=np.nan)
+
+    # do the shift on the history
+    history = flags.history[field]
+
+    kws = dict(method=method, freq=freq)
+    history = history.apply(
+        index=datcol.index,
+        func_handle_df=True,
+        copy=False,
+        func=shift2Freq,
+        func_kws={**kws, "fill_value": UNTOUCHED},
+    )
+
+    flags.history[field] = history
+    data[field] = datcol
+    return data, flags
+
+
+@processing()
+def resample(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    freq: str,
+    func: Callable[[pd.Series], pd.Series] = np.mean,
+    method: Literal["fagg", "bagg", "nagg"] = "bagg",
+    maxna: Optional[int] = None,
+    maxna_group: Optional[int] = None,
+    maxna_flags: Optional[int] = None,  # TODO: still a case ??
+    maxna_group_flags: Optional[int] = None,
+    flag_func: Callable[[pd.Series], float] = max,
+    freq_check: Optional[Literal["check", "auto"]] = None,
+    **kwargs,
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Function to resample the data.
+
+    The data will be sampled at regular (equidistant) timestamps (aka. grid points).
+    Sampling intervals therefore get aggregated with a function, specified by the
+    `func` parameter, and the result gets projected onto the new timestamps with a
+    method, specified by `method`. The following method keywords are available:
+
+    * ``'nagg'``: all values in the range (+/- `freq`/2) of a grid point get
+        aggregated with `func` and assigned to it.
+    * ``'bagg'``: all values in a sampling interval get aggregated with `func` and
+        the result gets assigned to the last grid point.
+    * ``'fagg'``: all values in a sampling interval get aggregated with `func` and
+        the result gets assigned to the next grid point.
+
+
+    Note, that, if possible, functions passed to `func` will get projected
+    internally onto pandas.resample methods, which results in a reasonable
+    performance boost - however, for this to work, you should pass functions that
+    have the ``__name__`` attribute initialised and the according method's name assigned
+    to it. Furthermore, you should not pass numpy's nan-functions (``nansum``,
+    ``nanmean``, ...), because those, for example, have ``__name__ == 'nansum'`` and
+    will thus not trigger ``resample.func()``, but the slower ``resample.apply(
+    nanfunc)``. Also, internally, no NaNs get passed to the functions anyway,
+    so there is no point in passing the nan-functions. (See also the Examples section below.)
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+
+    field : str
+        The fieldname of the column, holding the data-to-be-resampled.
+
+    flags : saqc.Flags
+        Container to store flags of the data.
+
+    freq : str
+        An Offset String, that will be interpreted as the frequency you want to
+        resample your data with.
+
+    func : Callable
+        The function you want to use for aggregation.
+
+    method: {'fagg', 'bagg', 'nagg'}, default 'bagg'
+        Specifies which intervals are to be aggregated for a certain timestamp (preceding,
+        succeeding or "surrounding" interval). See description above for more details.
+
+    maxna : {None, int}, default None
+        Maximum number of NaNs in a resampling interval. If maxna is exceeded, the interval
+        is set entirely to NaN.
+
+    maxna_group : {None, int}, default None
+        Same as `maxna` but for consecutive NaNs.
+
+    maxna_flags : {None, int}, default None
+        Same as `maxna`, but applied onto the flags of the resampling intervals.
+
+    maxna_group_flags : {None, int}, default None
+        Same as `maxna_group`, but applied onto the flags of the resampling intervals.
+
+    flag_func : Callable, default: max
+        The function you want to aggregate the flags with. It should be capable of
+        operating on the flags dtype (usually ordered categorical).
+
+    freq_check : {None, 'check', 'auto'}, default None
+
+        * ``None``: do not validate frequency-string passed to `freq`
+        * ``'check'``: estimate frequency and log a warning if the estimate mismatches the
+            frequency string passed to `freq`, or if no uniform sampling rate could be
+            estimated
+        * ``'auto'``: estimate frequency and use estimate. (Ignores `freq` parameter.)
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        Data values and shape may have changed relatively to the data input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values and shape may have changed relatively to the flags input.
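+
+    Examples
+    --------
+    A plain pandas sketch of interval aggregation onto a regular grid (the exact
+    interval bounds and label conventions of 'fagg'/'bagg'/'nagg' are handled
+    internally):
+
+    >>> import numpy as np
+    >>> import pandas as pd
+    >>> s = pd.Series(
+    ...     [1.0, 2.0, 10.0],
+    ...     index=pd.to_datetime(
+    ...         ["2021-01-01 00:02", "2021-01-01 00:08", "2021-01-01 00:13"]
+    ...     ),
+    ... )
+    >>> aggregated = s.resample("10min").mean()  # -> 1.5 at 00:00, 10.0 at 00:10
+
+    Regarding the performance note above: the fast path is taken based on the
+    ``__name__`` of the passed function, so prefer ``np.mean`` over ``np.nanmean``:
+
+    >>> np.mean.__name__
+    'mean'
+    >>> np.nanmean.__name__
+    'nanmean'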
+    """
+    flagged = _isflagged(flags[field], kwargs["to_mask"])
+    datcol = data[field]
+    datcol[flagged] = np.nan
+    freq = evalFreqStr(freq, freq_check, datcol.index)
+
+    datcol = aggregate2Freq(
+        datcol,
+        method,
+        freq,
+        func,
+        fill_value=np.nan,
+        max_invalid_total=maxna,
+        max_invalid_consec=maxna_group,
+    )
+
+    kws = dict(
+        method=method,
+        freq=freq,
+        agg_func=flag_func,
+        fill_value=UNTOUCHED,
+        max_invalid_total=maxna_flags,
+        max_invalid_consec=maxna_group_flags,
+    )
+
+    history = flags.history[field].apply(
+        index=datcol.index,
+        func=aggregate2Freq,
+        func_kws=kws,
+        copy=False,
+    )
+
+    data[field] = datcol
+    flags.history[field] = history
+    return data, flags
+
+
+def _getChunkBounds(target: pd.Series, flagscol: pd.Series, freq: str):
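+    # for every source-flag timestamp, look up the next and the previous target value
+    # within ``freq`` range; timestamps lacking either neighbour lie in a data gap
+    # ("chunk bound") and are excluded from the inverse interpolation projection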
+    chunk_end = target.reindex(flagscol.index, method="bfill", tolerance=freq)
+    chunk_start = target.reindex(flagscol.index, method="ffill", tolerance=freq)
+    ignore_flags = chunk_end.isna() | chunk_start.isna()
+    return ignore_flags
+
+
+def _inverseInterpolation(
+    source: pd.Series, target: pd.Series, freq: str, chunk_bounds
+) -> pd.Series:
+    source = source.copy()
+    if len(chunk_bounds) > 0:
+        source[chunk_bounds] = np.nan
+    backprojected = source.reindex(target.index, method="bfill", tolerance=freq)
+    fwrdprojected = source.reindex(target.index, method="ffill", tolerance=freq)
+    return pd.concat([backprojected, fwrdprojected], axis=1).max(axis=1)
+
+
+def _inverseAggregation(
+    source: Union[pd.Series, pd.DataFrame],
+    target: Union[pd.Series, pd.DataFrame],
+    freq: str,
+    method: str,
+):
+    return source.reindex(target.index, method=method, tolerance=freq)
+
+
+def _inverseShift(
+    source: pd.Series,
+    target: pd.Series,
+    drop_mask: pd.Series,
+    freq: str,
+    method: str,
+    fill_value,
+) -> pd.Series:
+    dtype = source.dtype
+
+    target_drops = target[drop_mask]
+    target = target[~drop_mask]
+    flags_merged = pd.merge_asof(
+        source,
+        target.index.to_series(name="pre_index"),
+        left_index=True,
+        right_index=True,
+        tolerance=freq,
+        direction=method,
+    )
+    flags_merged.dropna(subset=["pre_index"], inplace=True)
+    flags_merged = flags_merged.set_index(["pre_index"]).squeeze()
+    target[flags_merged.index] = flags_merged.values
+
+    # reinsert drops
+    source = target.reindex(target.index.union(target_drops.index))
+    source.loc[target_drops.index] = target_drops.values
+
+    return source.fillna(fill_value).astype(dtype, copy=False)
+
+
+@processing()
+def reindexFlags(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    method: Literal[
+        "inverse_fagg",
+        "inverse_bagg",
+        "inverse_nagg",
+        "inverse_fshift",
+        "inverse_bshift",
+        "inverse_nshift",
+        "inverse_interpolation",
+    ],
+    source: str,
+    freq: Optional[str] = None,
+    drop: Optional[bool] = False,
+    **kwargs,
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    The Function projects flags of "source" onto flags of "field". Wherever the "field" flags are "better" than the
+    source flags projected on them, they get overridden with the associated source flag value.
+
+    Which "field"-flags are to be projected on which source flags, is controlled by the "method" and "freq"
+    parameters.
+
+    method: (field_flag associated with "field", source_flag associated with "source")
+
+    'inverse_nagg' - all field_flags within the range +/- freq/2 of a source_flag, get assigned this source flags value.
+        (if source_flag > field_flag)
+    'inverse_bagg' - all field_flags succeeding a source_flag within the range of "freq", get assigned this source flags
+        value. (if source_flag > field_flag)
+    'inverse_fagg' - all field_flags preceding a source_flag within the range of "freq", get assigned this source flags
+        value. (if source_flag > field_flag)
+
+    'inverse_interpolation' - all field_flags within the range +/- freq of a source_flag, get assigned this source flags value.
+        (if source_flag > field_flag)
+
+    'inverse_nshift' - That field_flag within the range +/- freq/2, that is nearest to a source_flag, gets the source
+        flags value. (if source_flag > field_flag)
+    'inverse_bshift' - That field_flag succeeding a source flag within the range freq, that is nearest to a
+        source_flag, gets assigned this source flags value. (if source_flag > field_flag)
+    'inverse_fshift' - That field_flag preceding a source flag within the range freq, that is nearest to a
+        source_flag, gets assigned this source flags value. (if source_flag > field_flag)
+
+    'match' - any field_flag with a timestamp matching a source_flags timestamp gets this source_flags value
+    (if source_flag > field_flag)
+
+    Note, that to undo or backtrack a resampling/shifting/interpolation that has been performed with a certain
+    method, you can just pass the associated "inverse" method. Also, you should pass the same drop-flags keyword.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+
+    field : str
+        The fieldname of the data column, you want to project the source-flags onto.
+
+    flags : saqc.Flags
+        Container to store flags of the data.
+
+    method : {'inverse_fagg', 'inverse_bagg', 'inverse_nagg', 'inverse_fshift', 'inverse_bshift', 'inverse_nshift'}
+        The method used for projection of source flags onto field flags. See description above for more details.
+
+    source : str
+        The source variable of the flags projection.
+
+    freq : {None, str}, default None
+        The freq determines the projection range for the projection method. See above description for more details.
+        By default (None), the sampling frequency of `source` is used.
+
+    drop : bool, default False
+        If set to `True`, the `source` column will be removed after the projection.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values and shape may have changed relatively to the flags input.
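+
+    Examples
+    --------
+    Conceptually, the aggregation-type back-projections reindex the source flags onto
+    the original timestamps within a tolerance window (illustrative sketch of the
+    'inverse_nagg' case only, not the internal implementation):
+
+    >>> import pandas as pd
+    >>> source_flags = pd.Series([255.0], index=pd.to_datetime(["2021-01-01 00:10"]))
+    >>> orig_index = pd.to_datetime(
+    ...     ["2021-01-01 00:07", "2021-01-01 00:12", "2021-01-01 00:16"]
+    ... )
+    >>> projected = source_flags.reindex(
+    ...     orig_index, method="nearest", tolerance=pd.Timedelta("10min") / 2
+    ... )  # 00:07 and 00:12 receive 255.0, 00:16 is outside the +/- freq/2 range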
+    """
+    flagscol = flags[source]
+    target_datcol = data[field]
+    target_flagscol = flags[field]
+
+    if target_datcol.empty or flagscol.empty:
+        return data, flags
+
+    dummy = pd.Series(np.nan, target_flagscol.index, dtype=float)
+
+    if freq is None:
+        freq = getFreqDelta(flagscol.index)
+        if freq is None and not method == "match":
+            raise ValueError(
+                'To project irregularly sampled data, either use method="match", or '
+                "pass custom projection range to freq parameter."
+            )
+
+    if method[-13:] == "interpolation":
+        ignore = _getChunkBounds(target_datcol, flagscol, freq)
+        func = _inverseInterpolation
+        func_kws = dict(freq=freq, chunk_bounds=ignore, target=dummy)
+
+    elif method[-3:] == "agg" or method == "match":
+        projection_method = METHOD2ARGS[method][0]
+        tolerance = METHOD2ARGS[method][1](freq)
+        func = _inverseAggregation
+        func_kws = dict(freq=tolerance, method=projection_method, target=dummy)
+
+    elif method[-5:] == "shift":
+        drop_mask = target_datcol.isna() | _isflagged(
+            target_flagscol, kwargs["to_mask"]
+        )
+        projection_method = METHOD2ARGS[method][0]
+        tolerance = METHOD2ARGS[method][1](freq)
+        func = _inverseShift
+        kws = dict(
+            freq=tolerance, method=projection_method, drop_mask=drop_mask, target=dummy
+        )
+        func_kws = {**kws, "fill_value": UNTOUCHED}
+
+    else:
+        raise ValueError(f"unknown method {method}")
+
+    history = flags.history[source].apply(dummy.index, func, func_kws, copy=False)
+    flags.history[field] = flags.history[field].append(history)
+
+    if drop:
+        data, flags = tools.dropField(data=data, flags=flags, field=source)
+
+    return data, flags
diff --git a/saqc/funcs/residues.py b/saqc/funcs/residues.py
new file mode 100644
index 0000000000000000000000000000000000000000..bc21514671c6a6bb3bbf9c57aace11b484ac5c91
--- /dev/null
+++ b/saqc/funcs/residues.py
@@ -0,0 +1,138 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from typing import Tuple, Union, Optional, Callable
+from typing_extensions import Literal
+import numpy as np
+from dios import DictOfSeries
+
+from saqc.constants import *
+from saqc.core import flagging, Flags
+from saqc.funcs.rolling import roll
+from saqc.funcs.curvefit import fitPolynomial
+
+
+@flagging(masking="field")
+def calculatePolynomialResidues(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    window: Union[str, int],
+    order: int,
+    set_flags: bool = True,  # TODO, not valid anymore, if still needed, maybe assign user-passed ``flag``?
+    min_periods: Optional[int] = 0,
+    flag: float = BAD,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Function fits a polynomial model to the data and returns the residues.
+
+    The residue for value x is calculated by fitting a polynomial of degree "order" to a data slice
+    of size "window", which has x at its center.
+
+    Note, that the residues will be stored to the `field` column of the input data, so that the original data
+    the polynomial is fitted to gets overridden.
+
+    Note, that, if data[field] is not aligned to an equidistant frequency grid, the window size passed
+    has to be an offset string. Also, numba boost options don't apply for irregularly sampled
+    timeseries.
+
+    Note, that calculating the residues tends to be quite costly, because a function fit is performed for every
+    sample. To improve performance, consider the following possibilities:
+
+    In case your data is sampled at an equidistant frequency grid:
+
+    (1) If you know your data to have no significant number of missing values, or if you do not want to
+        calculate residues for windows containing missing values anyway, performance can be increased by setting
+        min_periods=window.
+
+    (2) If your data consists of more than around 200000 samples, setting numba=True will boost the
+        calculations up to a factor of 5 (for sample sizes > 300000) - however, for lower sample sizes,
+        numba will slow down the calculations, also up to a factor of 5, for sample sizes < 50000.
+        By default (numba='auto'), numba is set to True if the data sample size exceeds 200000.
+
+    In case your data is not sampled at an equidistant frequency grid:
+
+    (1) Harmonization/resampling of your data will have a noticeable impact on the polyfit's performance - since
+        the numba boost doesn't apply for irregularly sampled data in the current implementation.
+
+    Note, that in the current implementation, the initial and final window/2 values do not get fitted.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, holding the data-to-be-modelled.
+    flags : saqc.Flags
+        Container to store quality flags to data.
+    window : {str, int}
+        The size of the window you want to use for fitting. If an integer is passed, the size
+        refers to the number of periods for every fitting window. If an offset string is passed,
+        the size refers to the total temporal extension. The window will be centered around the value-to-be-fitted.
+        For regularly sampled timeseries the period number will be cast down to an odd number if
+        even.
+    order : int
+        The degree of the polynomial used for fitting
+    set_flags : bool, default True
+        Whether or not to assign new flags to the calculated residuals. If True, a residual gets assigned the worst
+        flag present in the interval the data for its calculation was obtained from.
+    min_periods : {int, None}, default 0
+        The minimum number of periods that have to be available in the fitting window of every value for the polynomial
+        fit to be performed. If there are not enough values, np.nan gets assigned. Default (0) results in fitting
+        regardless of the number of values present (results in overfitting for too sparse intervals). To automatically
+        set the minimum number of periods to the number of values in an offset-defined window size, pass np.nan.
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        Data values may have changed relatively to the data input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed relatively to the flags input.
+
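+    Examples
+    --------
+    What the residue of a single value amounts to, sketched with plain numpy on one
+    centered window (illustrative only, not the vectorized internal implementation):
+
+    >>> import numpy as np
+    >>> y = np.array([1.0, 2.2, 2.9, 4.1, 5.0])    # window of 5 samples, centered on y[2]
+    >>> x = np.arange(len(y))
+    >>> coeffs = np.polyfit(x, y, deg=2)           # fit a polynomial of degree ``order``
+    >>> residue = y[2] - np.polyval(coeffs, x[2])  # residue of the center value
+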
+    """
+    return fitPolynomial(
+        data,
+        field,
+        flags,
+        window=window,
+        order=order,
+        set_flags=set_flags,
+        min_periods=min_periods,
+        return_residues=True,
+        flag=flag,
+        **kwargs
+    )
+
+
+@flagging(masking="field")
+def calculateRollingResidues(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    window: Union[str, int],
+    func: Callable[[np.ndarray], np.ndarray] = np.mean,
+    set_flags: bool = True,
+    min_periods: Optional[int] = 0,
+    center: bool = True,
+    flag: float = BAD,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """TODO: docstring needed"""
+    return roll(
+        data,
+        field,
+        flags,
+        window=window,
+        func=func,
+        set_flags=set_flags,
+        min_periods=min_periods,
+        center=center,
+        return_residues=True,
+        flag=flag,
+        **kwargs
+    )
diff --git a/saqc/funcs/rolling.py b/saqc/funcs/rolling.py
new file mode 100644
index 0000000000000000000000000000000000000000..5eb13520115314e2735234c1aeb2feccee23bb92
--- /dev/null
+++ b/saqc/funcs/rolling.py
@@ -0,0 +1,145 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from typing import Union, Callable
+import numpy as np
+import pandas as pd
+from dios import DictOfSeries
+
+from saqc.constants import *
+from saqc.core import flagging, Flags
+from saqc.lib.tools import getFreqDelta
+
+
+@flagging(masking="field")
+def roll(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    window: Union[str, int],
+    func: Callable[[pd.Series], float] = np.mean,
+    set_flags: bool = True,  # TODO: not applicable anymore
+    min_periods: int = 0,
+    center: bool = True,
+    return_residues=False,  # TODO: this should not be public, a wrapper would be better
+    flag: float = BAD,
+    **kwargs
+):
+    """
+    Models the data with a rolling aggregation (the mean, by default) and optionally returns the residues.
+
+    Note, that the residues will be stored to the `field` field of the input data, so that the data that is modelled
+    gets overridden.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, holding the data-to-be-modelled.
+    flags : saqc.Flags
+        Container to store quality flags to data.
+    window : {int, str}
+        The size of the window you want to roll with. If an integer is passed, the size
+        refers to the number of periods for every fitting window. If an offset string is passed,
+        the size refers to the total temporal extension.
+        For regularly sampled timeseries, the period number will be cast down to an odd number if
+        center = True.
+    func : Callable[np.array, float], default np.mean
+        Function to apply on the rolling window and obtain the curve fit value.
+    set_flags : bool, default True
+        Whether or not to assign new flags to the calculated residuals. If True, a residual gets assigned the worst
+        flag present in the interval the data for its calculation was obtained from.
+        Currently not implemented in combination with not-harmonized timeseries.
+    min_periods : int, default 0
+        The minimum number of periods that have to be available in the fitting window of every value for the mean
+        fitting to be performed. If there are not enough values, np.nan gets assigned. Default (0) results in fitting
+        regardless of the number of values present.
+    center : bool, default True
+        Whether or not to center the window the mean is calculated over around the reference value. If False,
+        the reference value is placed to the right of the window (classic rolling mean with lag).
+    flag : float, default BAD
+        flag to set.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        Data values may have changed relatively to the data input.
+    flags : saqc.Flags
+        The quality flags of data
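+
+    Examples
+    --------
+    For data on a regular grid, the modelling boils down to a centered pandas rolling
+    aggregation; the residues (if requested) are the distance of the data to that model
+    (conceptual sketch only, not the internal implementation):
+
+    >>> import pandas as pd
+    >>> s = pd.Series(
+    ...     [1.0, 2.0, 9.0, 2.0, 1.0],
+    ...     index=pd.date_range("2021-01-01", periods=5, freq="10min"),
+    ... )
+    >>> model = s.rolling(window=3, center=True, min_periods=1).mean()
+    >>> residues = s - model  # the spike at the third timestamp sticks out of the local mean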
+    """
+    data = data.copy()
+    to_fit = data[field]
+    if to_fit.empty:
+        flags[:, field] = UNTOUCHED
+        return data, flags
+
+    regular = getFreqDelta(to_fit.index)
+    # starting with the annoying case: finding the rolling interval centers of not-harmonized input time series:
+    if center and not regular:
+        if isinstance(window, int):
+            raise NotImplementedError(
+                "Integer based window size is not supported for not-harmonized "
+                'sample series when rolling with "center=True".'
+            )
+        # get interval centers
+        centers = np.floor(
+            (
+                to_fit.rolling(
+                    pd.Timedelta(window) / 2, closed="both", min_periods=min_periods
+                ).count()
+            )
+        )
+        centers = centers.drop(centers[centers.isna()].index)
+        centers = centers.astype(int)
+        roller = to_fit.rolling(
+            pd.Timedelta(window), closed="both", min_periods=min_periods
+        )
+        try:
+            means = getattr(roller, func.__name__)()
+        except AttributeError:
+            means = to_fit.rolling(
+                pd.Timedelta(window), closed="both", min_periods=min_periods
+            ).apply(func)
+
+        def center_func(x, y=centers):
+            pos = x.index[int(len(x) - y[x.index[-1]])]
+            return y.index.get_loc(pos)
+
+        centers_iloc = (
+            centers.rolling(window, closed="both")
+            .apply(center_func, raw=False)
+            .astype(int)
+        )
+        temp = means.copy()
+        for k in centers_iloc.iteritems():
+            means.iloc[k[1]] = temp[k[0]]
+        # last values are false, due to structural reasons:
+        means[means.index[centers_iloc[-1]] : means.index[-1]] = np.nan
+
+    # everything is more easy if data[field] is harmonized:
+    else:
+        if isinstance(window, str):
+            window = pd.Timedelta(window) // regular
+        if (window % 2 == 0) & center:
+            window = int(window - 1)
+
+        roller = to_fit.rolling(window=window, center=center, closed="both")
+        try:
+            means = getattr(roller, func.__name__)()
+        except AttributeError:
+            means = to_fit.rolling(window=window, center=center, closed="both").apply(
+                func
+            )
+
+    if return_residues:
+        means = to_fit - means
+
+    data[field] = means
+    if set_flags:
+        # TODO: we do not get any flags here, because of masking=field
+        worst = flags[field].rolling(window, center=True, min_periods=min_periods).max()
+        flags[field] = worst
+
+    return data, flags
diff --git a/saqc/funcs/scores.py b/saqc/funcs/scores.py
new file mode 100644
index 0000000000000000000000000000000000000000..dba04b3302cad157a5d489f7fdeb9212c503c9d2
--- /dev/null
+++ b/saqc/funcs/scores.py
@@ -0,0 +1,152 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+from typing import Union, Tuple, Callable, Sequence, Optional
+from typing_extensions import Literal
+import numpy as np
+import pandas as pd
+from dios import DictOfSeries
+
+from saqc.constants import *
+from saqc.core import flagging, Flags
+from saqc.lib.tools import toSequence
+import saqc.lib.ts_operators as ts_ops
+
+
+@flagging(masking="all")
+def assignKNNScore(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    fields: Sequence[str],
+    target: str = "kNNscores",
+    n: int = 10,
+    func: Callable[[pd.Series], float] = np.sum,
+    freq: Union[float, str] = np.inf,
+    min_periods: int = 2,
+    method: Literal["ball_tree", "kd_tree", "brute", "auto"] = "ball_tree",
+    metric: str = "minkowski",
+    p: int = 2,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    TODO: docstring needs a rework
+    Score datapoints by an aggregation of the distances to their k nearest neighbors.
+
+    The function is a wrapper around the NearestNeighbors method from python's sklearn library (see reference [1]).
+
+    The steps taken to calculate the scores are as follows:
+
+    1. All the timeseries, named in `fields`, are combined to one feature space by an *inner* join on their date time
+       indexes. Thus, only samples that share timestamps across all fields will be included in the feature space.
+    2. Any datapoint/sample, where one or more of the features is invalid (=np.nan), will get excluded.
+    3. For every data point, the distance to its `n` nearest neighbors is calculated by applying the
+       metric `metric` at grade `p` onto the feature space. The defaults result in the euclidean metric.
+       If `radius` is not None, it sets the upper bound of distance for a neighbor to be considered one of the
+       `n` nearest neighbors. Furthermore, the `freq` argument determines which samples can be
+       included into a datapoint's nearest neighbors list, by segmenting the data into chunks of specified temporal
+       extension and feeding those chunks to the kNN algorithm separately.
+    4. For every datapoint, the calculated nearest neighbors' distances get aggregated to a score by the function
+       passed to `func`. The default, ``sum``, just sums up the distances.
+    5. The resulting timeseries of scores gets assigned to the field `target`.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        Dummy variable.
+    flags : saqc.Flags
+        A flags object, holding flags and additional information related to `data`.
+    fields : list of str
+        The fieldnames of the columns that are combined to the feature space the scoring is performed on.
+    target : str, default "kNNscores"
+        The fieldname the resulting score series is written to.
+    n : int, default 10
+        The number of nearest neighbors to which the distance is comprised in every datapoints scoring calculation.
+    func : Callable[numpy.array, float], default np.sum
+        A function that assigns a score to every one dimensional array, containing the distances
+        to every datapoints `n` nearest neighbors.
+    freq : {np.inf, float, str}, default np.inf
+        Determines the segmentation of the data into partitions, the kNN algorithm is
+        applied onto individually.
+
+        * ``np.inf``: Apply Scoring on whole data set at once
+        * ``x`` > 0 : Apply scoring on successive data chunks of periods length ``x``
+        * Offset String : Apply scoring on successive partitions of temporal extension matching the passed offset
+          string
+
+    min_periods : int, default 2
+        The minimum number of periods that have to be present in a partition for the kNN scoring
+        to be applied. If the number of periods present is below `min_periods`, the score for the
+        datapoints in that partition will be np.nan.
+    method : {'ball_tree', 'kd_tree', 'brute', 'auto'}, default 'ball_tree'
+        The search algorithm to find each datapoints k nearest neighbors.
+        The keyword just gets passed on to the underlying sklearn method.
+        See reference [1] for more information on the algorithm.
+    metric : str, default 'minkowski'
+        The metric the distances to any datapoint's neighbors are computed with. The default of `metric`
+        together with the default of `p` result in the euclidean metric.
+        The keyword just gets passed on to the underlying sklearn method.
+        See reference [1] for more information on the algorithm.
+    p : int, default 2
+        The grade of the metric specified by parameter `metric`.
+        The keyword just gets passed on to the underlying sklearn method.
+        See reference [1] for more information on the algorithm.
+    radius : {None, float}, default None
+        If the radius is not None, only the distances to neighbors that lie within the range specified by `radius`
+        are comprised in the scoring aggregation.
+        The scoring method passed must be capable of handling np.nan values - since, for every point missing
+        within the `radius` range to complete the list of the distances to the `n` nearest neighbors,
+        one np.nan value gets appended to the list passed to the scoring method.
+        The keyword just gets passed on to the underlying sklearn method.
+        See reference [1] for more information on the algorithm.
+
+    References
+    ----------
+    [1] https://scikit-learn.org/stable/modules/generated/sklearn.neighbors.NearestNeighbors.html
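+
+    Examples
+    --------
+    The distance and aggregation steps sketched directly with scikit-learn
+    (illustrative only; note that with plain scikit-learn each point counts itself
+    as its nearest neighbour with distance 0):
+
+    >>> import numpy as np
+    >>> from sklearn.neighbors import NearestNeighbors
+    >>> X = np.array([[0.0], [0.1], [0.2], [5.0]])  # one feature, one obvious outlier
+    >>> nn = NearestNeighbors(n_neighbors=3, algorithm="ball_tree", metric="minkowski", p=2)
+    >>> dist, _ = nn.fit(X).kneighbors(X)
+    >>> scores = dist.sum(axis=1)  # the outlier at 5.0 gets by far the largest score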
+    """
+    data = data.copy()
+    fields = toSequence(fields)
+
+    val_frame = data[fields]
+    score_index = val_frame.index_of("shared")
+    score_ser = pd.Series(np.nan, index=score_index, name=field)
+
+    val_frame = val_frame.loc[val_frame.index_of("shared")].to_df()
+    val_frame.dropna(inplace=True)
+
+    if val_frame.empty:
+        flags[:, target] = UNTOUCHED
+        return data, flags
+
+    # partitioning
+    if not freq:
+        freq = val_frame.shape[0]
+
+    if isinstance(freq, str):
+        grouper = pd.Grouper(freq=freq)
+    else:
+        grouper = pd.Series(
+            data=np.arange(0, val_frame.shape[0]), index=val_frame.index
+        )
+        grouper = grouper.transform(lambda x: int(np.floor(x / freq)))
+
+    partitions = val_frame.groupby(grouper)
+
+    for _, partition in partitions:
+        if partition.empty or (partition.shape[0] < min_periods):
+            continue
+
+        sample_size = partition.shape[0]
+        nn_neighbors = min(n, max(sample_size, 2) - 1)
+        dist, *_ = ts_ops.kNN(
+            partition.values, nn_neighbors, algorithm=method, metric=metric, p=p
+        )
+        try:
+            resids = getattr(dist, func.__name__)(axis=1)
+        except AttributeError:
+            resids = np.apply_along_axis(func, 1, dist)
+
+        score_ser[partition.index] = resids
+
+    flags[target] = pd.Series(UNFLAGGED, index=score_ser.index, dtype=float)
+
+    data[target] = score_ser
+
+    return data, flags
diff --git a/saqc/funcs/soil_moisture_tests.py b/saqc/funcs/soil_moisture_tests.py
deleted file mode 100644
index ecbe911a6a76f47e4ab6bf41b30dd95718d4c45f..0000000000000000000000000000000000000000
--- a/saqc/funcs/soil_moisture_tests.py
+++ /dev/null
@@ -1,620 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import numpy as np
-import pandas as pd
-import joblib
-import dios
-from scipy.signal import savgol_filter
-
-from saqc.funcs.breaks_detection import breaks_flagSpektrumBased
-from saqc.funcs.spikes_detection import spikes_flagSpektrumBased
-from saqc.funcs.constants_detection import constants_flagVarianceBased
-from saqc.core.register import register
-from saqc.lib.tools import retrieveTrustworthyOriginal
-
-
-@register(masking='field')
-def sm_flagSpikes(
-    data,
-    field,
-    flagger,
-    raise_factor=0.15,
-    deriv_factor=0.2,
-    noise_func="CoVar",
-    noise_window="12h",
-    noise_thresh=1,
-    smooth_window="3h",
-    smooth_poly_deg=2,
-    **kwargs,
-):
-
-    """
-    The Function provides just a call to ``flagSpikes_spektrumBased``, with parameter defaults,
-    that refer to References [1].
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    raise_factor : float, default 0.15
-        Minimum relative value difference between two values to consider the latter as a spike candidate.
-        See condition (1) (or reference [2]).
-    deriv_factor : float, default 0.2
-        See condition (2) (or reference [2]).
-    noise_func : {'CoVar', 'rVar'}, default 'CoVar'
-        Function to calculate noisiness of the data surrounding potential spikes.
-        ``'CoVar'``: Coefficient of Variation
-        ``'rVar'``: Relative Variance
-    noise_window : str, default '12h'
-        An offset string that determines the range of the time window of the "surrounding" data of a potential spike.
-        See condition (3) (or reference [2]).
-    noise_thresh : float, default 1
-        Upper threshold for noisiness of data surrounding potential spikes. See condition (3) (or reference [2]).
-    smooth_window : {None, str}, default None
-        Size of the smoothing window of the Savitsky-Golay filter.
-        The default value ``None`` results in a window of two times the sampling rate (i.e. containing three values).
-    smooth_poly_deg : int, default 2
-        Degree of the polynomial used for fitting with the Savitsky-Golay filter.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
-
-    References
-    ----------
-    This Function is a generalization of the Spectrum based Spike flagging mechanism as presented in:
-
-    [1] Dorigo, W. et al: Global Automated Quality Control of In Situ Soil Moisture
-        Data from the international Soil Moisture Network. 2013. Vadoze Zone J.
-        doi:10.2136/vzj2012.0097.
-
-    [2] https://git.ufz.de/rdm-software/saqc/-/blob/testfuncDocs/docs/funcs/FormalDescriptions.md#spikes_flagspektrumbased
-
-    """
-
-    return spikes_flagSpektrumBased(
-        data,
-        field,
-        flagger,
-        raise_factor=raise_factor,
-        deriv_factor=deriv_factor,
-        noise_func=noise_func,
-        noise_window=noise_window,
-        noise_thresh=noise_thresh,
-        smooth_window=smooth_window,
-        smooth_poly_deg=smooth_poly_deg,
-        **kwargs,
-    )
-
-
-@register(masking='field')
-def sm_flagBreaks(
-    data,
-    field,
-    flagger,
-    thresh_rel=0.1,
-    thresh_abs=0.01,
-    first_der_factor=10,
-    first_der_window="12h",
-    scnd_der_ratio_range=0.05,
-    scnd_der_ratio_thresh=10,
-    smooth=False,
-    smooth_window="3h",
-    smooth_poly_deg=2,
-    **kwargs,
-):
-
-    """
-    The Function provides just a call to flagBreaks_spektrumBased, with parameter defaults that refer to references [1].
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    thresh_rel : float, default 0.1
-        Float in [0,1]. See (1) of function description above to learn more
-    thresh_abs : float, default 0.01
-        Float > 0. See (2) of function descritpion above to learn more.
-    first_der_factor : float, default 10
-        Float > 0. See (3) of function descritpion above to learn more.
-    first_der_window_range : str, default '12h'
-        Offset string. See (3) of function description to learn more.
-    scnd_der_ratio_margin_1 : float, default 0.05
-        Float in [0,1]. See (4) of function descritpion above to learn more.
-    scnd_der_ratio_margin_2 : float, default 10
-        Float in [0,1]. See (5) of function descritpion above to learn more.
-    smooth : bool, default True
-        Method for obtaining dataseries' derivatives.
-        * False: Just take series step differences (default)
-        * True: Smooth data with a Savitzky Golay Filter before differentiating.
-    smooth_window : {None, str}, default 2
-        Effective only if `smooth` = True
-        Offset string. Size of the filter window, used to calculate the derivatives.
-    smooth_poly_deg : int, default 2
-        Effective only, if `smooth` = True
-        Polynomial order, used for smoothing with savitzk golay filter.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional informations related to `data`.
-        Flags values may have changed, relatively to the flagger input.
-
-    References
-    ----------
-    [1] Dorigo,W. et al.: Global Automated Quality Control of In Situ Soil Moisture
-        Data from the international Soil Moisture Network. 2013. Vadoze Zone J.
-        doi:10.2136/vzj2012.0097.
-
-    Find a brief mathematical description of the function here:
-
-    [2] https://git.ufz.de/rdm-software/saqc/-/blob/testfuncDocs/docs/funcs
-        /FormalDescriptions.md#breaks_flagspektrumbased
-
-    """
-    return breaks_flagSpektrumBased(
-        data,
-        field,
-        flagger,
-        thresh_rel=thresh_rel,
-        thresh_abs=thresh_abs,
-        first_der_factor=first_der_factor,
-        first_der_window=first_der_window,
-        scnd_der_ratio_range=scnd_der_ratio_range,
-        scnd_der_ratio_thresh=scnd_der_ratio_thresh,
-        smooth=smooth,
-        smooth_window=smooth_window,
-        smooth_poly_deg=smooth_poly_deg,
-        **kwargs,
-    )
-
-
-@register(masking='all')
-def sm_flagFrost(data, field, flagger, soil_temp_variable, window="1h", frost_thresh=0, **kwargs):
-
-    """
-    This Function is an implementation of the soil temperature based Soil Moisture flagging, as presented in
-    references [1]:
-
-    All parameters default to the values, suggested in this publication.
-
-    Function flags Soil moisture measurements by evaluating the soil-frost-level in the moment of measurement.
-    Soil temperatures below "frost_level" are regarded as denoting frozen soil state.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    soil_temp_variable : str,
-        An offset string, denoting the fields name in data, that holds the data series of soil temperature values,
-        the to-be-flagged values shall be checked against.
-    window : str
-        An offset string denoting the maximal temporal deviation, the soil frost states timestamp is allowed to have,
-        relative to the data point to-be-flagged.
-    frost_thresh : float
-        Value level, the flagger shall check against, when evaluating soil frost level.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional informations related to `data`.
-        Flags values may have changed, relatively to the flagger input.
-
-    References
-    ----------
-    [1] Dorigo,W. et al.: Global Automated Quality Control of In Situ Soil Moisture
-        Data from the international Soil Moisture Network. 2013. Vadoze Zone J.
-        doi:10.2136/vzj2012.0097.
-    """
-
-    # retrieve reference series
-    refseries = data[soil_temp_variable].copy()
-    ref_use = flagger.isFlagged(soil_temp_variable, flag=flagger.GOOD, comparator="==") | flagger.isFlagged(
-        soil_temp_variable, flag=flagger.UNFLAGGED, comparator="=="
-    )
-    # drop flagged values:
-    refseries = refseries[ref_use.values]
-    # drop nan values from reference series, since those are values you dont want to refer to.
-    refseries = refseries.dropna()
-    # skip further processing if reference series is empty:
-    if refseries.empty:
-        return data, flagger
-
-    refseries = refseries.reindex(data[field].dropna().index, method="nearest", tolerance=window)
-    refseries = refseries[refseries < frost_thresh].index
-
-    flagger = flagger.setFlags(field, refseries, **kwargs)
-    return data, flagger
-
-
-@register(masking='all')
-def sm_flagPrecipitation(
-    data,
-    field,
-    flagger,
-    prec_variable,
-    raise_window=None,
-    sensor_depth=0,
-    sensor_accuracy=0,
-    soil_porosity=0,
-    std_factor=2,
-    std_window="24h",
-    ignore_missing=False,
-    **kwargs,
-):
-
-    """
-    This Function is an implementation of the precipitation based Soil Moisture flagging, as presented in
-    references [1].
-
-    All parameters default to the values, suggested in this publication. (excluding porosity,sensor accuracy and
-    sensor depth)
-
-
-    Function flags Soil moisture measurements by flagging moisture rises that do not follow up a sufficient
-    precipitation event. If measurement depth, sensor accuracy of the soil moisture sensor and the porosity of the
-    surrounding soil is passed to the function, an inferior level of precipitation, that has to preceed a significant
-    moisture raise within 24 hours, can be estimated. If those values are not delivered, this inferior bound is set
-    to zero. In that case, any non zero precipitation count will justify any soil moisture raise.
-
-    A data point y_t is flagged an invalid soil moisture raise, if:
-
-    (1) y_t > y_(t-`raise_window`)
-    (2) y_t - y_(t-`std_factor_range`) > `std_factor` * std(y_(t-`std_factor_range`),...,y_t)
-    (3) sum(prec(t-24h),...,prec(t)) > `sensor_depth` * `sensor_accuracy` * `soil_porosity`
-
-    NOTE1: np.nan entries in the input precipitation series will be regarded as susipicious and the test will be
-    omited for every 24h interval including a np.nan entrie in the original precipitation sampling rate.
-    Only entry "0" will be regarded as denoting "No Rainfall".
-
-    NOTE2: The function wont test any values that are flagged suspicious anyway - this may change in a future version.
-
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional informations related to `data`.
-    prec_variable : str
-        Fieldname of the precipitation meassurements column in data.
-    raise_window: {None, str}, default None
-        Denotes the distance to the datapoint, relatively to witch
-        it is decided if the current datapoint is a raise or not. Equation [1].
-        It defaults to None. When None is passed, raise_window is just the sample
-        rate of the data. Any raise reference must be a multiple of the (intended)
-        sample rate and below std_factor_range.
-    sensor_depth : float, default 0
-        Measurement depth of the soil moisture sensor, [m].
-    sensor_accuracy : float, default 0
-        Accuracy of the soil moisture sensor, [-].
-    soil_porosity : float, default 0
-        Porosity of moisture sensors surrounding soil, [-].
-    std_factor : int, default 2
-        The value determines by which rule it is decided, weather a raise in soil
-        moisture is significant enough to trigger the flag test or not:
-        Significance is assumed, if the raise is  greater then "std_factor" multiplied
-        with the last 24 hours standart deviation.
-    std_window: str, default '24h'
-        An offset string that denotes the range over witch the standart deviation is obtained,
-        to test condition [2]. (Should be a multiple of the sampling rate)
-    raise_window: str
-        Denotes the distance to the datapoint, relatively to witch
-        it is decided if the current datapoint is a raise or not. Equation [1].
-        It defaults to None. When None is passed, raise_window is just the sample
-        rate of the data. Any raise reference must be a multiple of the (intended)
-        sample rate and below std_factor_range.
-    ignore_missing: bool, default False
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional informations related to `data`.
-        Flags values may have changed, relatively to the flagger input.
-
-    References
-    ----------
-    [1] Dorigo,W. et al.: Global Automated Quality Control of In Situ Soil Moisture
-        Data from the international Soil Moisture Network. 2013. Vadoze Zone J.
-        doi:10.2136/vzj2012.0097.
-    """
-
-    dataseries, moist_rate = retrieveTrustworthyOriginal(data, field, flagger)
-
-    # data not hamronized:
-    refseries = data[prec_variable].dropna()
-    # abort processing if any of the measurement series has no valid entries!
-    if moist_rate is np.nan:
-        return data, flagger
-    if refseries.empty:
-        return data, flagger
-
-    refseries = refseries.reindex(refseries.index.join(dataseries.index, how="outer"))
-    # get 24 h prec. monitor
-    prec_count = refseries.rolling(window="1D").sum()
-    # exclude data not signifying a raise::
-    if raise_window is None:
-        raise_window = 1
-    else:
-        raise_window = int(np.ceil(pd.Timedelta(raise_window) / moist_rate))
-
-    # first raise condition:
-    raise_mask = dataseries > dataseries.shift(raise_window)
-
-    # second raise condition:
-    std_window = int(np.ceil(pd.Timedelta(std_window) / moist_rate))
-    if ignore_missing:
-        std_mask = dataseries.dropna().rolling(std_window).std() < (
-            (dataseries - dataseries.shift(std_window)) / std_factor
-        )
-    else:
-        std_mask = dataseries.rolling(std_window).std() < ((dataseries - dataseries.shift(std_window)) / std_factor)
-
-    dataseries = dataseries[raise_mask & std_mask]
-    invalid_indices = prec_count[dataseries.index] <= sensor_depth * sensor_accuracy * soil_porosity
-
-    flagger = flagger.setFlags(field, loc=invalid_indices, **kwargs)
-    return data, flagger
-
-
-@register(masking='field')
-def sm_flagConstants(
-    data,
-    field,
-    flagger,
-    window="12h",
-    thresh=0.0005,
-    precipitation_window="12h",
-    tolerance=0.95,
-    deriv_max=0.0025,
-    deriv_min=0,
-    max_missing=None,
-    max_consec_missing=None,
-    smooth_window=None,
-    smooth_poly_deg=2,
-    **kwargs,
-):
-
-    """
-    This function flags plateaus/series of constant values in soil moisture data.
-
-    Mentionings of "conditions" in the following explanations refer to references [2].
-
-    The function represents a stricter version of
-    constants_flagVarianceBased.
-
-    The additional constraints (3)-(5), are designed to match the special cases of constant
-    values in soil moisture measurements and basically for preceding precipitation events
-    (conditions (3) and (4)) and certain plateau level (condition (5)).
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    window : str, default '12h'
-        Minimum duration during which values need to identical to become plateau candidates. See condition (1)
-    thresh : float, default 0.0005
-        Maximum variance of a group of values to still consider them constant. See condition (2)
-    precipitation_window : str, default '12h'
-        See condition (3) and (4)
-    tolerance : float, default 0.95
-        Tolerance factor, see condition (5)
-    deriv_max : float, default 0
-        See condition (4)
-    deriv_min : float, default 0.0025
-        See condition (3)
-    max_missing : {None, int}, default None
-        Maximum number of missing values allowed in window, by default this condition is ignored
-    max_consec_missing : {None, int}, default None
-        Maximum number of consecutive missing values allowed in window, by default this condition is ignored
-    smooth_window : {None, str}, default None
-        Size of the smoothing window of the Savitsky-Golay filter. The default value None results in a window of two
-        times the sampling rate (i.e. three values)
-    smooth_poly_deg : int, default 2
-        Degree of the polynomial used for smoothing with the Savitsky-Golay filter
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional informations related to `data`.
-        Flags values may have changed, relatively to the flagger input.
-
-    References
-    ----------
-    [1] Dorigo,W. et al.: Global Automated Quality Control of In Situ Soil Moisture
-        Data from the international Soil Moisture Network. 2013. Vadoze Zone J.
-        doi:10.2136/vzj2012.0097.
-
-    [2] https://git.ufz.de/rdm-software/saqc/-/edit/testfuncDocs/docs/funcs/FormalDescriptions.md#sm_flagconstants
-    """
-
-    # get plateaus:
-    _, comp_flagger = constants_flagVarianceBased(
-        data,
-        field,
-        flagger,
-        window=window,
-        thresh=thresh,
-        max_missing=max_missing,
-        max_consec_missing=max_consec_missing,
-    )
-
-    new_plateaus = (comp_flagger.getFlags(field)).eq(flagger.getFlags(field))
-    # get dataseries at its sampling freq:
-    dataseries, moist_rate = retrieveTrustworthyOriginal(data, field, flagger)
-    # get valuse referring to dataseries:
-    new_plateaus.resample(pd.Timedelta(moist_rate)).asfreq()
-    # cut out test_slices for min/max derivatives condition check:
-    # offset 2 periods:
-    precipitation_window = int(np.ceil(pd.Timedelta(precipitation_window) / moist_rate))
-    window = int(np.ceil(pd.Timedelta(window) / moist_rate))
-    period_diff = precipitation_window - window
-    # we cast plateua series to int - because replace has problems with replacing bools by "method".
-    new_plateaus = new_plateaus.astype(int)
-    # get plateau groups:
-    group_counter = new_plateaus.cumsum()
-    group_counter = group_counter[group_counter.diff() == 0]
-    group_counter.name = "group_counter"
-    plateau_groups = pd.merge(group_counter, dataseries, left_index=True, right_index=True, how="inner")
-    # test mean-condition on plateau groups:
-    test_barrier = tolerance * dataseries.max()
-    plateau_group_drops = plateau_groups.groupby("group_counter").filter(lambda x: x[field].mean() <= test_barrier)
-    # discard values that didnt pass the test from plateau candidate series:
-    new_plateaus[plateau_group_drops.index] = 1
-
-    # we extend the plateaus to cover condition testing sets
-    # 1: extend backwards (with a technical "one" added):
-    cond1_sets = new_plateaus.replace(1, method="bfill", limit=(precipitation_window + window))
-    # 2. extend forwards:
-    if period_diff > 0:
-        cond1_sets = cond1_sets.replace(1, method="ffill", limit=period_diff)
-
-    # get first derivative
-    if smooth_window is None:
-        smooth_window = 3 * pd.Timedelta(moist_rate)
-    else:
-        smooth_window = pd.Timedelta(smooth_window)
-    filter_window_seconds = smooth_window.seconds
-    smoothing_periods = int(np.ceil((filter_window_seconds / moist_rate.n)))
-    first_derivate = savgol_filter(dataseries, window_length=smoothing_periods, polyorder=smooth_poly_deg, deriv=1,)
-    first_derivate = pd.Series(data=first_derivate, index=dataseries.index, name=dataseries.name)
-    # cumulative sum to separate contiguous plateau groups from each other:
-    group_counter = cond1_sets.cumsum()
-    group_counter = group_counter[group_counter.diff() == 0]
-    group_counter.name = "group_counter"
-    group_frame = pd.merge(group_counter, first_derivate, left_index=True, right_index=True, how="inner")
-    group_frame = group_frame.groupby("group_counter")
-    condition_passed = group_frame.filter(lambda x: (x[field].max() >= deriv_max) & (x[field].min() <= deriv_min))
-
-    flagger = flagger.setFlags(field, loc=condition_passed.index, **kwargs)
-
-    return data, flagger
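-
-
-# --------------------------------------------------------------------------
-# Editor's illustrative sketch (not part of the original module): the plateau
-# tests above rely on pandas' groupby/filter pattern - consecutive plateau
-# candidates share a group id, and whole groups are kept or dropped based on
-# an aggregate condition. All names below are hypothetical and the data is toy data.
-def _plateauMeanConditionSketch(tolerance=0.95):
-    s = pd.Series([1.0, 1.1, 1.0, 5.0, 5.1], name="soil_moisture")
-    groups = pd.Series([0, 0, 0, 1, 1], name="group_counter")
-    frame = pd.concat([groups, s], axis=1)
-    # collect the groups whose mean stays below `tolerance` times the series
-    # maximum - in the function above those rows are subsequently removed from
-    # the plateau candidates:
-    return frame.groupby("group_counter").filter(
-        lambda g: g["soil_moisture"].mean() <= tolerance * s.max()
-    )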
-
-
-@register(masking='all')
-def sm_flagRandomForest(data, field, flagger, references, window_values: int, window_flags: int, path: str, **kwargs):
-    """
-    This function uses pre-trained machine-learning model objects for flagging of a specific variable. The model is
-    supposed to be trained using the script provided in "ressources/machine_learning/train_machine_learning.py". For
-    flagging, inputs to the model are the timeseries of the respective target at one specific sensor, the automatic
-    flags that were assigned by SaQC, as well as multiple reference series. Internally, context information for each
-    point is gathered in the form of moving windows to improve the flagging algorithm according to user input during
-    model training. For the model to work, the parameters 'references', 'window_values' and 'window_flags' have to be
-    set to the same values as during training.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    references : {str, List[str]}
-        String or list of strings, denoting the fieldnames of the data series that should be used as reference variables
-    window_values : int
-        An integer, denoting the window size that is used to derive the gradients of both the field- and
-        reference-series inside the moving window
-    window_flags : int
-        An integer, denoting the window size that is used to count the surrounding automatic flags that have been set
-        before
-    path : str
-        A string giving the path to the respective model object, i.e. its name and
-        the respective value of the grouping variable. e.g. "models/model_0.2.pkl"
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional information related to `data`.
-        Flag values may have changed relative to the flagger input.
-    """
-
-    def _refCalc(reference, window_values):
-        """ Helper function for calculation of moving window values """
-        outdata = dios.DictOfSeries()
-        name = reference.name
-        # derive gradients from reference series
-        outdata[name + "_Dt_1"] = reference - reference.shift(1)  # gradient t vs. t-1
-        outdata[name + "_Dt1"] = reference - reference.shift(-1)  # gradient t vs. t+1
-        # moving mean of gradients var1 and var2 before/after
-        outdata[name + "_Dt_" + str(window_values)] = (
-            outdata[name + "_Dt_1"].rolling(window_values, center=False).mean()
-        )  # mean gradient t to t-window
-        outdata[name + "_Dt" + str(window_values)] = (
-            outdata[name + "_Dt_1"].iloc[::-1].rolling(window_values, center=False).mean()[::-1]
-        )  # mean gradient t to t+window
-        return outdata
-
-    # Function for moving window calculations
-    # Create custom df for easier processing
-    df = data.loc[:, [field] + references]
-    # Create binary column of BAD-Flags
-    df["flag_bin"] = flagger.isFlagged(field, flag=flagger.BAD, comparator="==").astype("int")
-
-    # Add context information of flags
-    # Flag at t +/-1
-    df["flag_bin_t_1"] = df["flag_bin"] - df["flag_bin"].shift(1)
-    df["flag_bin_t1"] = df["flag_bin"] - df["flag_bin"].shift(-1)
-    # n Flags in interval t to t-window_flags
-    df[f"flag_bin_t_{window_flags}"] = df["flag_bin"].rolling(window_flags + 1, center=False).sum()
-    # n Flags in interval t to t+window_flags
-    # forward-orientation not possible, so use right-orientation on reversed data and reverse the result
-    df[f"flag_bin_t{window_flags}"] = df["flag_bin"].iloc[::-1].rolling(window_flags + 1, center=False).sum()[::-1]
-
-    # TODO: dios.merge() / dios.join() ...
-    # replace the following version with its DictOfSeries -> DataFrame
-    # conversions as soon as merging/joining is available in dios
-
-    # Add context information for field+references
-    df = df.to_df()  # df is a dios
-    for i in [field] + references:
-        ref = _refCalc(reference=df[i], window_values=window_values).to_df()
-        df = pd.concat([df, ref], axis=1)
-    # all further actions work on pd.DataFrame. that's ok,
-    # because only the df.index is used to set the actual
-    # flags in the underlying dios.
-
-    # remove NAN-rows from predictor calculation
-    df = df.dropna(axis=0, how="any")
-    # drop column of automatic flags at time t
-    df = df.drop(columns="flag_bin")
-    # Load model and predict on df:
-    model = joblib.load(path)
-    preds = model.predict(df)
-
-    flag_indices = df[preds.astype("bool")].index
-    flagger = flagger.setFlags(field, loc=flag_indices, **kwargs)
-    return data, flagger
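-
-
-# Editor's illustrative sketch (not part of the original module): the gradient
-# features built by `_refCalc` boil down to first differences and their
-# backward/forward rolling means. A minimal pandas-only version with
-# hypothetical names:
-def _gradientFeaturesSketch(series, window_values=3):
-    grad = series.diff()                                            # gradient t vs. t-1
-    backward_mean = grad.rolling(window_values).mean()              # mean gradient over t-window .. t
-    forward_mean = grad[::-1].rolling(window_values).mean()[::-1]   # mean gradient over t .. t+window
-    return pd.DataFrame({"Dt_1": grad, "Dt_back": backward_mean, "Dt_fwd": forward_mean})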
diff --git a/saqc/funcs/spikes_detection.py b/saqc/funcs/spikes_detection.py
deleted file mode 100644
index 39e34bf1fa6a44e70c0bb395a9922090cae104c5..0000000000000000000000000000000000000000
--- a/saqc/funcs/spikes_detection.py
+++ /dev/null
@@ -1,1231 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-
-import numpy as np
-import pandas as pd
-from scipy.signal import savgol_filter
-from scipy.stats import zscore
-from scipy.optimize import curve_fit
-from saqc.core.register import register
-import numpy.polynomial.polynomial as poly
-import numba
-import saqc.lib.ts_operators as ts_ops
-from saqc.lib.tools import (
-    retrieveTrustworthyOriginal,
-    offset2seconds,
-    slidingWindowIndices,
-    findIndex,
-    toSequence,
-    customRoller
-)
-from outliers import smirnov_grubbs
-
-def _stray(
-    val_frame,
-    partition_freq=None,
-    partition_min=11,
-    scoring_method="kNNMaxGap",
-    n_neighbors=10,
-    iter_start=0.5,
-    alpha=0.05,
-    trafo=lambda x: x
-
-):
-    """
-    Find outliers in multi dimensional observations.
-
-    The general idea is to assign scores to every observation based on the observation's neighborhood in the space
-    of observations. Then, the gaps between the (greatest) scores are tested for being drawn from the same
-    distribution as the majority of the scores.
-
-    See the References section for a link to a detailed description of the algorithm.
-
-    Note, that the flagging result depends on the size of the partition under test and the distribution of the outliers
-    in it. For "normalish" and/or slightly "erratic" datasets, 5000 - 10000 periods turned out to be a good guess.
-
-    Note, that no normalizations/transformations are applied to the different components (data columns)
-    - those are expected to be applied previously, if necessary.
-
-    Parameters
-    ----------
-    val_frame : (N,M) ndarray
-        Input NxM array of observations, where N is the number of observations and M the number of components per
-        observation.
-    partition_freq : {None, str, int}, default None
-        Determines the size of the data partitions, the data is decomposed into. Each partition is checked separately
-        for outliers. If a string is passed, it has to be an offset string and it results in partitioning the data into
-        parts of according temporal length. If an integer is passed, the data is simply split up into contiguous chunks
-        of `partition_freq` periods. If ``None`` is passed (default), all the data will be tested in one run.
-    partition_min : int, default 11
-        Minimum number of periods per partition that have to be present for a valid outlier detection to be made in
-        this partition. (Only of effect, if `partition_freq` is an integer.) The partition_min value must always be
-        greater than the n_neighbors value.
-    scoring_method : {'kNNSum', 'kNNMaxGap'}, default 'kNNMaxGap'
-        Scoring method applied.
-        `'kNNSum'`: Assign to every point the sum of the distances to its 'n_neighbors' nearest neighbors.
-        `'kNNMaxGap'`: Assign to every point the distance to the neighbor with the "maximum gap" to its predecessor
-        in the hierarchy of the `n_neighbors` nearest neighbors. (see reference section for further descriptions)
-    n_neighbors : int, default 10
-        Number of neighbors included in the scoring process for every datapoint.
-    iter_start : float, default 0.5
-        Float in [0,1] that determines which percentage of data is considered "normal". 0.5 makes the stray
-        algorithm search only the upper 50 % of the scores for the cut-off point. (See the reference section for more
-        information.)
-    alpha : float, default 0.05
-        Level of significance by which it is tested whether a score might be drawn from a different distribution than
-        the majority of the data.
-
-    References
-    ----------
-    Detailed description of the Stray algorithm is covered here:
-
-    [1] Talagala, P. D., Hyndman, R. J., & Smith-Miles, K. (2019). Anomaly detection in high dimensional data.
-        arXiv preprint arXiv:1908.04000.
-    """
-
-    kNNfunc = getattr(ts_ops, scoring_method)
-    # partitioning
-    if not partition_freq:
-        partition_freq = val_frame.shape[0]
-
-    if isinstance(partition_freq, str):
-        partitions = val_frame.groupby(pd.Grouper(freq=partition_freq))
-    else:
-        grouper_series = pd.Series(data=np.arange(0, val_frame.shape[0]), index=val_frame.index)
-        grouper_series = grouper_series.transform(lambda x: int(np.floor(x / partition_freq)))
-        partitions = val_frame.groupby(grouper_series)
-
-    # calculate flags for every partition
-    to_flag = []
-    for _, partition in partitions:
-        if partition.empty | (partition.shape[0] < partition_min):
-            continue
-        partition = partition.apply(trafo)
-        sample_size = partition.shape[0]
-        nn_neighbors = min(n_neighbors, max(sample_size, 2))
-        resids = kNNfunc(partition.values, n_neighbors=nn_neighbors - 1, algorithm="ball_tree")
-        sorted_i = resids.argsort()
-        resids = resids[sorted_i]
-        gaps = np.append(0, np.diff(resids))
-
-        tail_size = int(max(min(50, np.floor(sample_size / 4)), 2))
-        tail_indices = np.arange(2, tail_size + 1)
-        i_start = int(max(np.floor(sample_size * iter_start), 1) + 1)
-        ghat = np.array([np.nan] * sample_size)
-        for i in range(i_start - 1, sample_size):
-            ghat[i] = sum((tail_indices / (tail_size - 1)) * gaps[i - tail_indices + 1])
-
-        log_alpha = np.log(1 / alpha)
-        for iter_index in range(i_start - 1, sample_size):
-            if gaps[iter_index] > log_alpha * ghat[iter_index]:
-                break
-
-        to_flag = np.append(to_flag, list(partition.index[sorted_i[iter_index:]]))
-
-    return to_flag
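-
-
-# Editor's illustrative sketch (not part of the original module): the core of
-# the stray threshing step above, applied to an already computed 1d score
-# vector. The helper name and the `scores` input are hypothetical; the gap
-# test mirrors the loop in `_stray`.
-def _strayGapTestSketch(scores, iter_start=0.5, alpha=0.05):
-    scores = np.sort(np.asarray(scores, dtype=float))
-    gaps = np.append(0, np.diff(scores))
-    n = scores.size
-    tail_size = int(max(min(50, np.floor(n / 4)), 2))
-    tail_indices = np.arange(2, tail_size + 1)
-    i_start = int(max(np.floor(n * iter_start), 1) + 1)
-    log_alpha = np.log(1 / alpha)
-    for i in range(i_start - 1, n):
-        # weighted mean of the preceding gaps, compared against the current gap:
-        ghat = np.sum((tail_indices / (tail_size - 1)) * gaps[i - tail_indices + 1])
-        if gaps[i] > log_alpha * ghat:
-            return i  # scores of rank i and above would be flagged
-    return n  # no sufficiently large gap found, nothing to flag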
-
-
-def _expFit(val_frame, scoring_method="kNNMaxGap", n_neighbors=10, iter_start=0.5, alpha=0.05, bin_frac=10):
-    """
-    Find outliers in multi dimensional observations.
-
-    The general idea is to assign scores to every observation based on the observation's neighborhood in the space
-    of observations. Then, the gaps between the (greatest) scores are tested for being drawn from the same
-    distribution as the majority of the scores.
-
-    Note, that no normalizations/transformations are applied to the different components (data columns)
-    - those are expected to be applied previously, if necessary.
-
-    Parameters
-    ----------
-    val_frame : (N,M) ndarray
-        Input NxM array of observations, where N is the number of observations and M the number of components per
-        observation.
-    scoring_method : {'kNNSum', 'kNNMaxGap'}, default 'kNNMaxGap'
-        Scoring method applied.
-        `'kNNSum'`: Assign to every point the sum of the distances to its 'n_neighbors' nearest neighbors.
-        `'kNNMaxGap'`: Assign to every point the distance to the neighbor with the "maximum gap" to its predecessor
-        in the hierarchy of the `n_neighbors` nearest neighbors. (see reference section for further descriptions)
-    n_neighbors : int, default 10
-        Number of neighbors included in the scoring process for every datapoint.
-    iter_start : float, default 0.5
-        Float in [0,1] that determines which percentage of data is considered "normal". 0.5 makes the expfit
-        algorithm search only the upper 50 % of the scores for the cut-off point. (See the reference section for more
-        information.)
-    alpha : float, default 0.05
-        Level of significance by which it is tested whether a score might be drawn from a different distribution than
-        the majority of the data.
-    bin_frac : {int, str}, default 10
-        Controls the binning for the histogram in the fitting step. If an integer is passed, the residues will
-        equidistantly be covered by `bin_frac` bins, ranging from the minimum to the maximum of the residues.
-        If a string is passed, it will be passed on to the ``numpy.histogram_bin_edges`` method.
-    """
-
-    kNNfunc = getattr(ts_ops, scoring_method)
-    resids = kNNfunc(val_frame.values, n_neighbors=n_neighbors, algorithm="ball_tree")
-    data_len = resids.shape[0]
-
-    # sorting
-    sorted_i = resids.argsort()
-    resids = resids[sorted_i]
-    iter_index = int(np.floor(resids.size * iter_start))
-    # initialize condition variables:
-    crit_val = np.inf
-    test_val = 0
-    neg_log_alpha = -np.log(alpha)
-
-    # define exponential dist density function:
-    def fit_function(x, lambd):
-        return lambd * np.exp(-lambd * x)
-
-    # initialise sampling bins
-    if isinstance(bin_frac, int):
-        binz = np.linspace(resids[0], resids[-1], 10 * int(np.ceil(data_len / bin_frac)))
-    elif bin_frac in ["auto", "fd", "doane", "scott", "stone", "rice", "sturges", "sqrt"]:
-        binz = np.histogram_bin_edges(resids, bins=bin_frac)
-    else:
-        raise ValueError(f"Can't interpret {bin_frac} as an binning technique.")
-
-    binzenters = np.array([0.5 * (binz[i] + binz[i + 1]) for i in range(len(binz) - 1)])
-    # initialize full histogram:
-    full_hist, binz = np.histogram(resids, bins=binz)
-    # check if start index is sufficiently high (pointing at resids value beyond histogram maximum at least):
-    hist_argmax = full_hist.argmax()
-
-    if hist_argmax >= findIndex(binz, resids[iter_index - 1], 0):
-        raise ValueError(
-            "Either the data histogram is too strangely shaped for oddWater OD detection - "
-            "or a too low value for 'iter_start' was passed "
-            "(iter_start better be much greater 0.5)"
-        )
-    # GO!
-    iter_max_bin_index = findIndex(binz, resids[iter_index - 1], 0)
-    upper_tail_index = int(np.floor(0.5 * hist_argmax + 0.5 * iter_max_bin_index))
-    resids_tail_index = findIndex(resids, binz[upper_tail_index], 0)
-    upper_tail_hist, bins = np.histogram(
-        resids[resids_tail_index:iter_index], bins=binz[upper_tail_index : iter_max_bin_index + 1]
-    )
-
-    while (test_val < crit_val) & (iter_index < resids.size - 1):
-        iter_index += 1
-        new_iter_max_bin_index = findIndex(binz, resids[iter_index - 1], 0)
-        # following if/else block "manually" expands the data histogram and circumvents calculation of the complete
-        # histogram in any new iteration.
-        if new_iter_max_bin_index == iter_max_bin_index:
-            upper_tail_hist[-1] += 1
-        else:
-            upper_tail_hist = np.append(upper_tail_hist, np.zeros([new_iter_max_bin_index - iter_max_bin_index]))
-            upper_tail_hist[-1] += 1
-            iter_max_bin_index = new_iter_max_bin_index
-            upper_tail_index_new = int(np.floor(0.5 * hist_argmax + 0.5 * iter_max_bin_index))
-            upper_tail_hist = upper_tail_hist[upper_tail_index_new - upper_tail_index :]
-            upper_tail_index = upper_tail_index_new
-
-        # fitting
-
-        lambdA, _ = curve_fit(
-            fit_function,
-            xdata=binzenters[upper_tail_index:iter_max_bin_index],
-            ydata=upper_tail_hist,
-            p0=[-np.log(alpha / resids[iter_index])],
-        )
-
-        crit_val = neg_log_alpha / lambdA
-        test_val = resids[iter_index]
-
-    return val_frame.index[sorted_i[iter_index:]]
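-
-
-# Editor's illustrative sketch (not part of the original module): the fitting
-# step above boils down to estimating the rate of an exponential density from
-# a histogram of the scores. This simplified version fits the whole score
-# histogram at once and uses a density-normalized histogram (unlike the
-# counting histogram above); the helper name is hypothetical.
-def _expRateFitSketch(scores, bins=20):
-    hist, edges = np.histogram(scores, bins=bins, density=True)
-    centers = 0.5 * (edges[:-1] + edges[1:])
-    # least squares fit of lambd * exp(-lambd * x) to the empirical density:
-    popt, _ = curve_fit(lambda x, lambd: lambd * np.exp(-lambd * x), centers, hist, p0=[1.0])
-    return popt[0]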
-
-
-def _reduceMVflags(
-    val_frame, fields, flagger, to_flag_frame, reduction_range, reduction_drop_flagged=False, reduction_thresh=3.5,
-        reduction_min_periods=1
-):
-    """
-    Function called by "spikes_flagMultivarScores" to reduce the number of false positives that result from
-    the algorithm's confinement to only flag complete observations (all of their variables/components).
-
-    The function "reduces" an observation's flag to components of it, by applying a MAD test (see references)
-    to every component's temporal surrounding.
-
-    Parameters
-    ----------
-    val_frame : (N,M) pd.DataFrame
-        Input NxM DataFrame of observations, where N is the number of observations and M the number of components per
-        observation.
-    fields : List[str]
-        Fieldnames of the components in `val_frame` that are to be tested for outlierishness.
-    to_flag_frame : (K,M) pd.DataFrame
-        Input dataframe of observations to be tested, where K is the number of observations to be tested and M the
-        number of components per observation.
-    reduction_range : str
-        An offset string, denoting the range of the temporal surrounding to include into the MAD testing.
-    reduction_drop_flagged : bool, default False
-        Whether or not to drop flagged values other than the value under test from the temporal surrounding
-        before checking the value with MAD.
-    reduction_thresh : float, default 3.5
-        The `critical` value, controlling whether the MAD score is considered referring to an outlier or not.
-        Higher values result in less rigid flagging. The default value is widely used in the literature. See references
-        section for more details ([1]).
-
-    References
-    ----------
-    [1] https://www.itl.nist.gov/div898/handbook/eda/section3/eda35h.htm
-    """
-
-    to_flag_frame[:] = False
-    to_flag_index = to_flag_frame.index
-    for var in fields:
-        for index in enumerate(to_flag_index):
-            index_slice = slice(index[1] - pd.Timedelta(reduction_range), index[1] + pd.Timedelta(reduction_range))
-
-            test_slice = val_frame[var][index_slice].dropna()
-            # check whether the value under test is sufficiently centered:
-            first_valid = test_slice.first_valid_index()
-            last_valid = test_slice.last_valid_index()
-            min_range = pd.Timedelta(reduction_range)/4
-            polydeg = 2
-            if ((pd.Timedelta(index[1] - first_valid) < min_range) |
-                (pd.Timedelta(last_valid - index[1]) < min_range)):
-                polydeg = 0
-            if reduction_drop_flagged:
-                test_slice = test_slice.drop(to_flag_index, errors='ignore')
-            if test_slice.shape[0] >= reduction_min_periods:
-                x = (test_slice.index.values.astype(float))
-                x_0 = x[0]
-                x = (x - x_0)/10**12
-                polyfitted = poly.polyfit(y=test_slice.values, x=x, deg=polydeg)
-                testval = poly.polyval((float(index[1].to_numpy()) - x_0)/10**12, polyfitted)
-                testval = val_frame[var][index[1]] - testval
-                resids = test_slice.values - poly.polyval(x, polyfitted)
-                med_resids = np.median(resids)
-                MAD = np.median(np.abs(resids - med_resids))
-                crit_val = 0.6745 * (abs(med_resids - testval)) / MAD
-                if crit_val > reduction_thresh:
-                    to_flag_frame.loc[index[1], var] = True
-            else:
-                to_flag_frame.loc[index[1], var] = True
-
-    return to_flag_frame
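-
-
-# Editor's illustrative sketch (not part of the original module): the modified
-# Z-score / MAD criterion used above, reduced to one residual vector and one
-# test value (hypothetical helper name, threshold default as in [1]):
-def _madCriterionSketch(residuals, test_value, thresh=3.5):
-    residuals = np.asarray(residuals, dtype=float)
-    med = np.median(residuals)
-    mad = np.median(np.abs(residuals - med))
-    if mad == 0:
-        return False
-    # 0.6745 rescales the MAD to be comparable to a standard deviation:
-    return 0.6745 * abs(test_value - med) / mad > thresh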
-
-
-@register(masking='all')
-def spikes_flagMultivarScores(
-    data,
-    field,
-    flagger,
-    fields,
-    trafo=np.log,
-    alpha=0.05,
-    n_neighbors=10,
-    scoring_method="kNNMaxGap",
-    iter_start=0.5,
-    threshing="stray",
-    expfit_binning="auto",
-    stray_partition=None,
-    stray_partition_min=11,
-    post_reduction=False,
-    reduction_range=None,
-    reduction_drop_flagged=False,
-    reduction_thresh=3.5,
-    reduction_min_periods=1,
-    **kwargs,
-):
-    """
-    The algorithm implements a 3-step outlier detection procedure for simultaneous flagging of higher dimensional
-    data (dimensions > 3).
-
-    In reference [1], the procedure is introduced and exemplified with an application on hydrological data.
-
-    See the notes section for an overview of the algorithm's basic steps.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged. (Here a dummy, for structural reasons)
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    fields : List[str]
-        List of fieldnames, corresponding to the variables that are to be included into the flagging process.
-    trafo : callable, default np.log
-        Transformation to be applied onto every column before scoring. Will likely get deprecated soon. It is better
-        to transform the data in a processing step preceding the call to ``flagMultivarScores``.
-    alpha : float, default 0.05
-        Level of significance by which it is tested whether an observation's score might be drawn from another
-        distribution than the majority of the observations.
-    n_neighbors : int, default 10
-        Number of neighbors included in the scoring process for every datapoint.
-    scoring_method : {'kNNSum', 'kNNMaxGap'}, default 'kNNMaxGap'
-        Scoring method applied.
-        ``'kNNSum'``: Assign to every point the sum of the distances to its 'n_neighbors' nearest neighbors.
-        ``'kNNMaxGap'``: Assign to every point the distance to the neighbor with the "maximum gap" to its predecessor
-        in the hierarchy of the `n_neighbors` nearest neighbors. (see reference section for further descriptions)
-    iter_start : float, default 0.5
-        Float in [0,1] that determines which percentage of data is considered "normal". 0.5 makes the threshing
-        algorithm search only the upper 50 % of the scores for the cut-off point. (See the reference section for more
-        information.)
-    threshing : {'stray', 'expfit'}, default 'stray'
-        A string, denoting the threshing algorithm to be applied on the observation scores.
-        See the documentation of the algorithms (``_stray``, ``_expFit``) and/or paragraph [2] of the references
-        section for more information on the algorithms.
-    expfit_binning : {int, str}, default 'auto'
-        Controls the binning for the histogram in the ``expfit`` algorithms fitting step.
-        If an integer is passed, the residues will equidistantly be covered by `bin_frac` bins, ranging from the
-        minimum to the maximum of the residues. If a string is passed, it will be passed on to the
-        ``numpy.histogram_bin_edges`` method.
-    stray_partition : {None, str, int}, default None
-        Only effective when `threshing` = 'stray'.
-        Determines the size of the data partitions, the data is decomposed into. Each partition is checked separately
-        for outliers. If a string is passed, it has to be an offset string and it results in partitioning the data into
-        parts of according temporal length. If an integer is passed, the data is simply split up into contiguous chunks
-        of `stray_partition` periods. If ``None`` is passed (default), all the data will be tested in one run.
-    stray_partition_min : int, default 11
-        Only effective when `threshing` = 'stray'.
-        Minimum number of periods per partition that have to be present for a valid outlier detection to be made in
-        this partition. (Only of effect, if `stray_partition` is an integer.)
-    post_reduction : bool, default False
-        Whether or not it should be tried to reduce the flag of an observation to one or more of its components. See
-        documentation of `_reduceMVflags` for more details.
-    reduction_range : {None, str}, default None
-        Only effective when `post_reduction` = True
-        An offset string, denoting the range of the temporal surrounding to include into the MAD testing while trying
-        to reduce flags.
-    reduction_drop_flagged : bool, default False
-        Only effective when `post_reduction` = True
-        Whether or not to drop flagged values other than the value under test from the temporal surrounding
-        before checking the value with MAD.
-    reduction_thresh : float, default 3.5
-        Only effective when `post_reduction` = True
-        The `critical` value, controlling whether the MAD score is considered referring to an outlier or not.
-        Higher values result in less rigid flagging. The default value is widely considered appropriate in the
-        literature.
-
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional information related to `data`.
-        Flag values may have changed relative to the flagger input.
-
-    Notes
-    -----
-    The basic steps are:
-
-    1. transforming
-
-    The different data columns are transformed via timeseries transformations to
-    (a) make them comparable and
-    (b) make outliers stand out more.
-
-    This step is usually subject to a phase of research/trial and error. See [1] for more details.
-
-    Note, that the data transformation as a built-in step of the algorithm will likely get deprecated soon. It is
-    better to transform the data in a processing step preceding the multivariate flagging process. Also, by doing so,
-    one gets much more control and variety in the transformation applied, since the `trafo` parameter only allows for
-    application of the same transformation to all of the variables involved.
-
-    2. scoring
-
-    Every observation gets assigned a score depending on its k nearest neighbors. See the `scoring_method` parameter
-    description for details on the different scoring methods. Furthermore [1], [2] may give some insight in the
-    pro and cons of the different methods.
-
-    3. threshing
-
-    The gaps between the (greatest) scores are tested for being drawn from the same
-    distribution as the majority of the scores. If a gap is encountered that, with sufficient significance, can be
-    said to not be drawn from the same distribution as the one all the smaller gaps are drawn from, then
-    the observation belonging to this gap, and all the observations belonging to gaps larger than this one, get
-    flagged as outliers. See the description of the `threshing` parameter for more details; [2] gives a fully
-    detailed overview of the `stray` algorithm.
-
-    References
-    ----------
-    Odd Water Algorithm:
-
-    [1] Talagala, P.D. et al (2019): A Feature-Based Procedure for Detecting Technical Outliers in Water-Quality Data
-        From In Situ Sensors. Water Resources Research, 55(11), 8547-8568.
-
-    A detailed description of the stray algorithm:
-
-    [2] Talagala, P. D., Hyndman, R. J., & Smith-Miles, K. (2019). Anomaly detection in high dimensional data.
-        arXiv preprint arXiv:1908.04000.
-
-    A detailed description of the MAD outlier scoring:
-
-    [3] https://www.itl.nist.gov/div898/handbook/eda/section3/eda35h.htm
-    """
-
-    # data transformation/extraction
-    data = data.copy()
-    fields = toSequence(fields)
-    val_frame = data[fields]
-    val_frame = val_frame.loc[val_frame.index_of("shared")].to_df()
-    val_frame.dropna(inplace=True)
-    val_frame = val_frame.apply(trafo)
-
-    if val_frame.empty:
-        return data, flagger
-
-    if threshing == "stray":
-        to_flag_index = _stray(
-            val_frame,
-            partition_freq=stray_partition,
-            partition_min=stray_partition_min,
-            scoring_method=scoring_method,
-            n_neighbors=n_neighbors,
-            iter_start=iter_start,
-            alpha=alpha
-        )
-
-    else:
-        to_flag_index = _expFit(val_frame,
-                                scoring_method=scoring_method,
-                                n_neighbors=n_neighbors,
-                                iter_start=iter_start,
-                                alpha=alpha,
-                                bin_frac=expfit_binning)
-
-    to_flag_frame = pd.DataFrame({var_name: True for var_name in fields}, index=to_flag_index)
-    if post_reduction:
-        val_frame = data[toSequence(fields)].to_df()
-        to_flag_frame = _reduceMVflags(val_frame, fields, flagger, to_flag_frame, reduction_range,
-                                       reduction_drop_flagged=reduction_drop_flagged,
-                                       reduction_thresh=reduction_thresh,
-                                       reduction_min_periods=reduction_min_periods)
-
-
-    for var in fields:
-        to_flag_ind = to_flag_frame.loc[:, var]
-        to_flag_ind = to_flag_ind[to_flag_ind].index
-        flagger = flagger.setFlags(var, to_flag_ind, **kwargs)
-
-    return data, flagger
-
-
-@register(masking='field')
-def spikes_flagRaise(
-    data,
-    field,
-    flagger,
-    thresh,
-    raise_window,
-    intended_freq,
-    average_window=None,
-    mean_raise_factor=2,
-    min_slope=None,
-    min_slope_weight=0.8,
-    numba_boost=True,
-    **kwargs,
-):
-    """
-    The function flags raises and drops in value courses, that exceed a certain threshold
-    within a certain timespan.
-
-    The parameter variety of the function is owed to the intriguing
-    case of values that "return" from outlierish or anomalous value levels and
-    thus exceed the threshold, while actually being usual values.
-
-    NOTE, the dataset is NOT supposed to be harmonized to a time series with an
-    equidistant frequency grid.
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    thresh : float
-        The threshold, for the total rise (thresh > 0), or total drop (thresh < 0), value courses must
-        not exceed within a timespan of length `raise_window`.
-    raise_window : str
-        An offset string, determining the timespan, the rise/drop thresholding refers to. Window is inclusively defined.
-    intended_freq : str
-        An offset string, determining the frequency the timeseries to-be-flagged is supposed to be sampled at.
-        The window is inclusively defined.
-    average_window : {None, str}, default None
-        See condition (2) of the description linked in the references. Window is inclusively defined.
-        The window defaults to 1.5 times the size of `raise_window`
-    mean_raise_factor : float, default 2
-        See second condition listed in the notes below.
-    min_slope : {None, float}, default None
-        See third condition listed in the notes below.
-    min_slope_weight : float, default 0.8
-        See third condition listed in the notes below.
-    numba_boost : bool, default True
-        Whether to use numba just-in-time compilation to speed up the rolling window computations.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional information related to `data`.
-        Flag values may have changed relative to the flagger input.
-
-    Notes
-    -----
-    The value :math:`x_{k}` of a time series :math:`x` with associated
-    timestamps :math:`t_i`, is flagged a raise, if:
-
-    * There is any value :math:`x_{s}`, preceeding :math:`x_{k}` within `raise_window` range, so that:
-
-      * :math:`M = |x_k - x_s | >`  `thresh` :math:`> 0`
-
-    * The weighted average :math:`\\mu^{*}` of the values, preceding :math:`x_{k}` within `average_window`
-      range indicates, that :math:`x_{k}` does not return from an "outlierish" value course, meaning that:
-
-      * :math:`x_k > \\mu^* + ( M` / `mean_raise_factor` :math:`)`
-
-    * Additionally, if `min_slope` is not `None`, :math:`x_{k}` is checked for being sufficiently divergent from its
-      very predecessor :math:`x_{k-1}`, meaning that it is additionally checked if:
-
-      * :math:`x_k - x_{k-1} >` `min_slope`
-      * :math:`t_k - t_{k-1} >` `min_slope_weight` :math:`\\times` `intended_freq`
-
-    """
-
-    # prepare input args
-    dataseries = data[field].dropna()
-    raise_window = pd.Timedelta(raise_window)
-    intended_freq = pd.Timedelta(intended_freq)
-    if min_slope is not None:
-        min_slope = np.abs(min_slope)
-
-    if average_window is None:
-        average_window = 1.5 * pd.Timedelta(raise_window)
-
-    if thresh < 0:
-        dataseries *= -1
-        thresh *= -1
-
-    def raise_check(x, thresh):
-        test_set = x[-1] - x[0:-1]
-        max_val = np.max(test_set)
-        if max_val >= thresh:
-            return max_val
-        else:
-            return np.nan
-
-    def custom_rolling_mean(x):
-        return np.sum(x[:-1])
-
-    # get invalid-raise/drop mask:
-    raise_series = dataseries.rolling(raise_window, min_periods=2, closed="both")
-
-    if numba_boost:
-        raise_check = numba.jit(raise_check, nopython=True)
-        raise_series = raise_series.apply(raise_check, args=(thresh,), raw=True, engine="numba")
-    else:
-        raise_series = raise_series.apply(raise_check, args=(thresh,), raw=True)
-
-    if raise_series.isna().all():
-        return data, flagger
-
-    # "unflag" values of insufficient deviation to their predecessors
-    if min_slope is not None:
-        w_mask = (
-            pd.Series(dataseries.index).diff().dt.total_seconds() / intended_freq.total_seconds()
-        ) > min_slope_weight
-        slope_mask = np.abs(dataseries.diff()) < min_slope
-        to_unflag = raise_series.notna() & w_mask.values & slope_mask
-        raise_series[to_unflag] = np.nan
-
-    # calculate and apply the weighted mean weights (pseudo-harmonization):
-    weights = (
-        pd.Series(dataseries.index).diff(periods=2).shift(-1).dt.total_seconds() / intended_freq.total_seconds() / 2
-    )
-
-    weights.iloc[0] = 0.5 + (dataseries.index[1] - dataseries.index[0]).total_seconds() / (
-        intended_freq.total_seconds() * 2
-    )
-
-    weights.iloc[-1] = 0.5 + (dataseries.index[-1] - dataseries.index[-2]).total_seconds() / (
-        intended_freq.total_seconds() * 2
-    )
-
-    weights[weights > 1.5] = 1.5
-    weights.index = dataseries.index
-    weighted_data = dataseries.mul(weights)
-
-    # rolling weighted mean calculation
-    weighted_rolling_mean = weighted_data.rolling(average_window, min_periods=2, closed="both")
-    weights_rolling_sum = weights.rolling(average_window, min_periods=2, closed="both")
-    if numba_boost:
-        custom_rolling_mean = numba.jit(custom_rolling_mean, nopython=True)
-        weighted_rolling_mean = weighted_rolling_mean.apply(custom_rolling_mean, raw=True, engine="numba")
-        weights_rolling_sum = weights_rolling_sum.apply(custom_rolling_mean, raw=True, engine="numba")
-    else:
-        weighted_rolling_mean = weighted_rolling_mean.apply(custom_rolling_mean, raw=True)
-        weights_rolling_sum = weights_rolling_sum.apply(custom_rolling_mean, raw=True)
-
-    weighted_rolling_mean = weighted_rolling_mean / weights_rolling_sum
-    # check means against critical raise value:
-    to_flag = dataseries >= weighted_rolling_mean + (raise_series / mean_raise_factor)
-    to_flag &= raise_series.notna()
-    flagger = flagger.setFlags(field, to_flag[to_flag].index, **kwargs)
-
-    return data, flagger
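-
-
-# Editor's illustrative sketch (not part of the original module): condition (1)
-# of the notes above - "is there a predecessor within `raise_window` that lies
-# more than `thresh` below the current value?" - expressed with a plain pandas
-# rolling window. Requires a datetime-indexed series; names are hypothetical.
-def _raiseConditionSketch(series, raise_window="1h", thresh=5.0):
-    def _check(x):
-        jump = x[-1] - x[:-1].min()
-        return jump if jump >= thresh else np.nan
-    return series.rolling(raise_window, min_periods=2, closed="both").apply(_check, raw=True)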
-
-
-@register(masking='field')
-def spikes_flagSlidingZscore(
-    data, field, flagger, window, offset, count=1, polydeg=1, z=3.5, method="modZ", **kwargs,
-):
-    """
-    An outlier detection in a sliding window. The method for detection can be a simple Z-score or the more robust
-    modified Z-score, as introduced here [1].
-
-    The steps are:
-    1.  a window of size `window` is cut from the data
-    2.  the data is fit by a polynomial of the given degree `polydeg`
-    3.  the outlier `method` detects potential outliers
-    4.  the window is moved by `offset` to the next data slot
-    5.  processing continues at 1. until the end of the data
-    6.  all potential outliers that are detected `count`-many times are promoted to real outliers and flagged by the `flagger`
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    window: {int, str}
-        Integer or offset string (see [2]). The size of the window the outlier detection is run in.
-    offset: {int, str}
-        Integer or offset string (see [2]). Step size by which the window is advanced.
-    count: int, default 1
-        Number of times a value has to be classified as an outlier in different windows to finally be flagged as an outlier.
-    polydeg : int, default 1
-        The degree for the polynomial that is fitted to the data in order to calculate the residuals.
-    z : float, default 3.5
-        The value the (mod.) Z-score is tested against. Defaulting to 3.5 (Recommendation of [1])
-    method: {'modZ', 'zscore'}, default  'modZ'
-        See section `Z-Scores and Modified Z-Scores` in [1].
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional information related to `data`.
-        Flag values may have changed relative to the flagger input.
-
-    References
-    ----------
-    [1] https://www.itl.nist.gov/div898/handbook/eda/section3/eda35h.htm
-    [2] https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#dateoffset-objects
-
-    """
-
-    use_offset = False
-    dx_s = offset
-    winsz_s = window
-    # check param consistency
-    if isinstance(window, str) or isinstance(offset, str):
-        if isinstance(window, str) and isinstance(offset, str):
-            use_offset = True
-            dx_s = offset2seconds(offset)
-            winsz_s = offset2seconds(window)
-        else:
-            raise TypeError(
-                f"`window` and `offset` must both be an offset or both be numeric, {window} and {offset} was passed"
-            )
-
-    # check params
-    if polydeg < 0:
-        raise ValueError("polydeg must be positive")
-    if z < 0:
-        raise ValueError("z must be positive")
-    if count <= 0:
-        raise ValueError("count must be positive and not zero")
-
-    if dx_s >= winsz_s and count == 1:
-        pass
-    elif dx_s >= winsz_s and count > 1:
-        raise ValueError("If the step size `offset` is bigger than the window size, every value is seen just once, so use count=1")
-    elif count > winsz_s // dx_s:
-        raise ValueError(
-            f"Adjust `offset`, `stepsize` or `window`. A single data point is "
-            f"seen `floor(window / offset) = {winsz_s // dx_s}` times, but count is set to {count}"
-        )
-
-    # prepare the method
-    method = method.lower()
-    if method == "modz":
-
-        def _calc(residual):
-            diff = np.abs(residual - np.median(residual))
-            mad = np.median(diff)
-            return (mad > 0) & (0.6745 * diff > z * mad)
-
-    elif method == "zscore":
-
-        def _calc(residual):
-            score = zscore(residual, ddof=1)
-            return np.abs(score) > z
-
-    else:
-        raise NotImplementedError
-    method = _calc
-
-    # prepare data, work on numpy arrays for the fulfilling pleasure of performance
-    d = data[field].dropna()
-    if d.empty:
-        return data, flagger
-    all_indices = np.arange(len(d.index))
-    x = (d.index - d.index[0]).total_seconds().values
-    y = d.values
-    counters = np.full(len(d.index), count)
-
-    if use_offset:
-        _loopfun = slidingWindowIndices
-    else:
-
-        def _loopfun(arr, wsz, step):
-            for i in range(0, len(arr) - wsz + 1, step):
-                yield i, i + wsz
-
-    for start, end in _loopfun(d.index, window, offset):
-        # mask points that have been already discarded
-        mask = counters[start:end] > 0
-        indices = all_indices[all_indices[start:end][mask]]
-        xchunk = x[indices]
-        ychunk = y[indices]
-
-        if xchunk.size == 0:
-            continue
-
-        # get residual
-        coef = poly.polyfit(xchunk, ychunk, polydeg)
-        model = poly.polyval(xchunk, coef)
-        residual = ychunk - model
-
-        score = method(residual)
-
-        # count`em in
-        goneMad = score.nonzero()[0]
-        counters[indices[goneMad]] -= 1
-
-    outlier = np.where(counters <= 0)[0]
-    loc = d[outlier].index
-    flagger = flagger.setFlags(field, loc=loc, **kwargs)
-    return data, flagger
-
-
-@register(masking='field')
-def spikes_flagMad(data, field, flagger, window, z=3.5, **kwargs):
-
-    """
-
-    The function represents an implementation of the modified Z-score outlier detection method.
-
-    See references [1] for more details on the algorithm.
-
-    Note, that the test needs the input data to be sampled regularly (fixed sampling rate).
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    window : str
-        Offset string. Denoting the window size that the "Z-scored" values have to lie in.
-    z: float, default 3.5
-        The value the Z-score is tested against. Defaulting to 3.5 (Recommendation of [1])
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional information related to `data`.
-        Flag values may have changed relative to the flagger input.
-
-    References
-    ----------
-    [1] https://www.itl.nist.gov/div898/handbook/eda/section3/eda35h.htm
-
-    """
-    d = data[field].copy().mask(flagger.isFlagged(field))
-    median = d.rolling(window=window, closed="both").median()
-    diff = (d - median).abs()
-    mad = diff.rolling(window=window, closed="both").median()
-    mask = (mad > 0) & (0.6745 * diff > z * mad)
-    # NOTE:
-    # In pandas <= 0.25.3, the window size is not fixed if the
-    # window-argument to rolling is a frequency. That implies,
-    # that during the first iterations the window has a size of
-    # 1, 2, 3, ... until it eventually covers the desired time
-    # span. For calculations like the median, that is rather
-    # unfortunate, as the size of the calculation base might differ
-    # heavily. So don't flag anything until the window reaches
-    # its target size.
-    if not isinstance(window, int):
-        index = mask.index
-        mask.loc[index < index[0] + pd.to_timedelta(window)] = False
-
-    flagger = flagger.setFlags(field, mask, **kwargs)
-    return data, flagger
-
-
-@register(masking='field')
-def spikes_flagBasic(data, field, flagger, thresh, tolerance, window, numba_kickin=200000, **kwargs):
-    """
-    A basic outlier test that is designed to work for harmonized as well as non-harmonized data.
-
-    The test classifies values/value courses as outliers by detecting not only a rise in value, but also
-    checking for a return to the initial value level.
-
-    Values :math:`x_n, x_{n+1}, .... , x_{n+k}` of a timeseries :math:`x` with associated timestamps
-    :math:`t_n, t_{n+1}, .... , t_{n+k}` are considered spikes, if
-
-    1. :math:`|x_{n-1} - x_{n + s}| >` `thresh`, for all :math:`s \\in [0,1,2,...,k]`
-
-    2. :math:`|x_{n-1} - x_{n+k+1}| <` `tolerance`
-
-    3. :math:`|t_{n-1} - t_{n+k+1}| <` `window`
-
-    Note, that this definition of a "spike" not only includes one-value outliers, but also plateau-ish value courses.
-
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    thresh : float
-        Minimum difference between two values, to consider the latter one as a spike. See condition (1)
-    tolerance : float
-        Maximum difference between pre-spike and post-spike values. See condition (2)
-    window : str
-        Maximum length of "spiky" value courses. See condition (3)
-    numba_kickin : int, default 200000
-        When more than `numba_kickin` potential spikes are detected,
-        the pandas.rolling part of the computation gets "jitted" with numba.
-        The default value has proven to be around the break-even point between "jit boost" and "jit costs".
-
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional information related to `data`.
-        Flag values may have changed relative to the flagger input.
-
-    References
-    ----------
-    The implementation is a time-window based version of an outlier test from the UFZ Python library,
-    that can be found here:
-
-    https://git.ufz.de/chs/python/blob/master/ufz/level1/spike.py
-
-    """
-
-    dataseries = data[field].dropna()
-    # get all the entries preceding a significant jump
-    post_jumps = dataseries.diff().abs() > thresh
-    post_jumps = post_jumps[post_jumps]
-    if post_jumps.empty:
-        return data, flagger
-    # get all the entries preceding a significant jump and their successors within `window` range
-    to_roll = post_jumps.reindex(dataseries.index, method="bfill", tolerance=window, fill_value=False).dropna()
-
-    # define spike testing function to roll with:
-    def spikeTester(chunk, thresh=thresh, tol=tolerance):
-        # signum change!!!
-        chunk_stair = (np.sign(chunk[-2] - chunk[-1])*(chunk - chunk[-1]) < thresh)[::-1].cumsum()
-        initial = np.searchsorted(chunk_stair, 2)
-        if initial == len(chunk):
-            return 0
-        if np.abs(chunk[- initial - 1] - chunk[-1]) < tol:
-            return initial - 1
-        else:
-            return 0
-
-    to_roll = dataseries[to_roll]
-    roll_mask = pd.Series(False, index=to_roll.index)
-    roll_mask[post_jumps.index] = True
-
-    roller = customRoller(to_roll, window=window, mask=roll_mask, min_periods=2, closed='both')
-    engine = None if roll_mask.sum() < numba_kickin else 'numba'
-    result = roller.apply(spikeTester, raw=True, engine=engine)
-
-    # correct the result: only those values define plateaus that do not have
-    # values at their left starting point that belong to other plateaus themselves:
-    def calcResult(result):
-        var_num = result.shape[0]
-        flag_scopes = np.zeros(var_num, dtype=bool)
-        for k in range(var_num):
-            if result[k] > 0:
-                k_r = int(result[k])
-                # validity check: the plateau's start isn't another plateau's end:
-                if not flag_scopes[k - k_r - 1]:
-                    flag_scopes[(k - k_r):k] = True
-        return pd.Series(flag_scopes, index=result.index)
-
-    cresult = calcResult(result)
-    cresult = cresult[cresult].index
-    flagger = flagger.setFlags(field, cresult, **kwargs)
-    return data, flagger
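-
-
-# Editor's illustrative sketch (not part of the original module): the three
-# spike conditions from the docstring above, checked naively for one candidate
-# course values[n:n+k+1] (`times` a pd.DatetimeIndex, 1 <= n and
-# n + k + 1 < len(values)). Meant for illustration only - the rolling
-# implementation above is the fast path.
-def _basicSpikeConditionsSketch(values, times, n, k, thresh, tolerance, window):
-    course = values[n:n + k + 1]
-    cond1 = np.all(np.abs(values[n - 1] - course) > thresh)            # raised/dropped level
-    cond2 = np.abs(values[n - 1] - values[n + k + 1]) < tolerance      # return to initial level
-    cond3 = (times[n + k + 1] - times[n - 1]) < pd.Timedelta(window)   # short enough course
-    return bool(cond1 and cond2 and cond3)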
-
-
-@register(masking='field')
-def spikes_flagSpektrumBased(
-    data,
-    field,
-    flagger,
-    raise_factor=0.15,
-    deriv_factor=0.2,
-    noise_func="CoVar",
-    noise_window="12h",
-    noise_thresh=1,
-    smooth_window=None,
-    smooth_poly_deg=2,
-    **kwargs,
-):
-    """
-
-    Function detects and flags spikes in input data series by evaluating its derivatives and applying some
-    conditions to it. A datapoint is considered a spike, if:
-
-    (1) the quotient to its preceding datapoint exceeds a certain bound
-    (controlled by param `raise_factor`)
-    (2) the quotient of the data's second derivative at the preceding and subsequent timestamps is close enough to 1.
-    (controlled by param `deriv_factor`)
-    (3) the surrounding data is not too noisy. (Coefficient of Variation [+/- noise_window] < 1)
-    (controlled by param `noise_thresh`)
-
-    Note, that the data-to-be-flagged is supposed to be sampled at an equidistant frequency grid
-
-    Note, that the derivative is calculated after applying a Savitzky-Golay filter to the data.
-
-    Parameters
-    ----------
-
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional information related to `data`.
-    raise_factor : float, default 0.15
-        Minimum relative value difference between two values to consider the latter as a spike candidate.
-        See condition (1) (or reference [2]).
-    deriv_factor : float, default 0.2
-        See condition (2) (or reference [2]).
-    noise_func : {'CoVar', 'rVar'}, default 'CoVar'
-        Function to calculate noisiness of the data surrounding potential spikes.
-
-        * ``'CoVar'``: Coefficient of Variation
-        * ``'rVar'``: Relative Variance
-
-    noise_window : str, default '12h'
-        An offset string that determines the range of the time window of the "surrounding" data of a potential spike.
-        See condition (3) (or reference [2]).
-    noise_thresh : float, default 1
-        Upper threshold for noisiness of data surrounding potential spikes. See condition (3) (or reference [2]).
-    smooth_window : {None, str}, default None
-        Size of the smoothing window of the Savitzky-Golay filter.
-        The default value ``None`` results in a window of two times the sampling rate (i.e. containing three values).
-    smooth_poly_deg : int, default 2
-        Degree of the polynomial used for fitting with the Savitzky-Golay filter.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional information related to `data`.
-        Flag values may have changed relative to the flagger input.
-
-    References
-    ----------
-    This Function is a generalization of the Spectrum based Spike flagging mechanism as presented in:
-
-    [1] Dorigo, W. et al: Global Automated Quality Control of In Situ Soil Moisture
-        Data from the international Soil Moisture Network. 2013. Vadose Zone J.
-        doi:10.2136/vzj2012.0097.
-
-    Notes
-    -----
-    A value is flagged a spike, if:
-
-    * The quotient to its preceding data point exceeds a certain bound:
-
-      * :math:`|\\frac{x_k}{x_{k-1}}| > 1 +` ``raise_factor``, or
-      * :math:`|\\frac{x_k}{x_{k-1}}| < 1 -` ``raise_factor``
-
-    * The quotient of the second derivative :math:`x''`, at the preceding
-      and subsequent timestamps is close enough to 1:
-
-      * :math:`|\\frac{x''_{k-1}}{x''_{k+1}} | > 1 -` ``deriv_factor``, and
-      * :math:`|\\frac{x''_{k-1}}{x''_{k+1}} | < 1 +` ``deriv_factor``
-
-    * The dataset :math:`X = x_i, ..., x_{k-1}, x_{k+1}, ..., x_j`, with
-      :math:`|t_{k-1} - t_i| = |t_j - t_{k+1}| =` ``noise_window`` fulfills the
-      following condition:
-
-      * ``noise_func``:math:`(X) <` ``noise_thresh``
-
-    """
-
-    dataseries, data_rate = retrieveTrustworthyOriginal(data, field, flagger)
-    noise_func_map = {"covar": pd.Series.var, "rvar": pd.Series.std}
-    noise_func = noise_func_map[noise_func.lower()]
-
-    if smooth_window is None:
-        smooth_window = 3 * pd.Timedelta(data_rate)
-    else:
-        smooth_window = pd.Timedelta(smooth_window)
-
-    quotient_series = dataseries / dataseries.shift(+1)
-    spikes = (quotient_series > (1 + raise_factor)) | (quotient_series < (1 - raise_factor))
-    spikes = spikes[spikes == True]
-
-    # loop through spikes: (loop may sound ugly - but since the number of spikes is supposed to not exceed the
-    # thousands for year data - a loop going through all the spikes instances is much faster than
-    # a rolling window, rolling all through a stacked year dataframe )
-
-    # calculate some values, repeatedly needed in the course of the loop:
-
-    filter_window_seconds = smooth_window.seconds
-    smoothing_periods = int(np.ceil((filter_window_seconds / data_rate.n)))
-    lower_dev_bound = 1 - deriv_factor
-    upper_dev_bound = 1 + deriv_factor
-
-    if smoothing_periods % 2 == 0:
-        smoothing_periods += 1
-
-    for spike in spikes.index:
-        start_slice = spike - smooth_window
-        end_slice = spike + smooth_window
-
-        scnd_derivate = savgol_filter(
-            dataseries[start_slice:end_slice], window_length=smoothing_periods, polyorder=smooth_poly_deg, deriv=2,
-        )
-
-        length = scnd_derivate.size
-        test_ratio_1 = np.abs(scnd_derivate[int(((length + 1) / 2) - 2)] / scnd_derivate[int(((length + 1) / 2))])
-
-        if lower_dev_bound < test_ratio_1 < upper_dev_bound:
-            # apply noise condition:
-            start_slice = spike - pd.Timedelta(noise_window)
-            end_slice = spike + pd.Timedelta(noise_window)
-            test_slice = dataseries[start_slice:end_slice].drop(spike)
-            test_ratio_2 = np.abs(noise_func(test_slice) / test_slice.mean())
-            # not a spike, we want to flag, if condition not satisfied:
-            if test_ratio_2 > noise_thresh:
-                spikes[spike] = False
-
-        # not a spike, we want to flag, if condition not satisfied
-        else:
-            spikes[spike] = False
-
-    spikes = spikes[spikes == True]
-
-    flagger = flagger.setFlags(field, spikes.index, **kwargs)
-    return data, flagger
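-
-
-# Editor's illustrative sketch (not part of the original module): condition (2)
-# above - the ratio of the smoothed second derivative just before and just
-# after the candidate - computed directly with scipy's savgol_filter. `values`
-# is assumed to be an odd-length array centered on the candidate and at least
-# `smoothing_periods` long; the helper name is hypothetical.
-def _derivRatioSketch(values, smoothing_periods=5, smooth_poly_deg=2, deriv_factor=0.2):
-    second_deriv = savgol_filter(values, window_length=smoothing_periods,
-                                 polyorder=smooth_poly_deg, deriv=2)
-    center = len(second_deriv) // 2
-    ratio = np.abs(second_deriv[center - 1] / second_deriv[center + 1])
-    return (1 - deriv_factor) < ratio < (1 + deriv_factor)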
-
-
-@register(masking='field')
-def spikes_flagGrubbs(data, field, flagger, winsz, alpha=0.05, min_periods=8, check_lagged=False, **kwargs):
-    """
-    The function flags values that are regarded outliers due to the grubbs test.
-
-    See reference [1] for more information on the grubbs tests definition.
-
-    The (two-sided) test gets applied onto data chunks of size "winsz". The tests application  will
-    be iterated on each data-chunk under test, till no more outliers are detected in that chunk.
-
-    Note, that the test performs poorely for small data chunks (resulting in heavy overflagging).
-    Therefor you should select "winsz" so that every window contains at least > 8 values and also
-    adjust the min_periods values accordingly.
-
-    Note, that the data to be tested by the grubbs test are expected to be distributed "normalish".
-
-    Parameters
-    ----------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    field : str
-        The fieldname of the column, holding the data-to-be-flagged.
-    flagger : saqc.flagger.BaseFlagger
-        A flagger object, holding flags and additional Informations related to `data`.
-    winsz : {int, str}
-        The size of the window you want to use for outlier testing. If an integer is passed, the size
-        refers to the number of periods of every testing window. If a string is passed, it has to be an offset string,
-        and will denote the total temporal extension of every window.
-    alpha : float, default 0.05
-        The level of significance, the grubbs test is to be performed at. (between 0 and 1)
-    min_periods : int, default 8
-        The minimum number of values that have to be present in an interval under test, for a grubbs test result to be
-        accepted. Only makes sence in case `winsz` is an offset string.
-    check_lagged: boolean, default False
-        If True, every value gets checked twice for being an outlier. Ones in the initial rolling window and one more
-        time in a rolling window that is lagged by half the windows delimeter (winsz/2). Recommended for avoiding false
-        positives at the window edges. Only available when rolling with integer defined window size.
-
-    Returns
-    -------
-    data : dios.DictOfSeries
-        A dictionary of pandas.Series, holding all the data.
-    flagger : saqc.flagger.BaseFlagger
-        The flagger object, holding flags and additional Informations related to `data`.
-        Flags values may have changed relatively to the flagger input.
-
-    References
-    ----------
-    introduction to the grubbs test:
-
-    [1] https://en.wikipedia.org/wiki/Grubbs%27s_test_for_outliers
-
-    """
-
-    data = data.copy()
-    datcol = data[field]
-    to_group = pd.DataFrame(data={"ts": datcol.index, "data": datcol})
-    to_flag = pd.Series(False, index=datcol.index)
-    if isinstance(winsz, int):
-        # period number defined test intervals
-        grouper_series = pd.Series(data=np.arange(0, datcol.shape[0]), index=datcol.index)
-        grouper_series_lagged = grouper_series + (winsz / 2)
-        grouper_series = grouper_series.transform(lambda x: int(np.floor(x / winsz)))
-        grouper_series_lagged = grouper_series_lagged.transform(lambda x: int(np.floor(x / winsz)))
-        partitions = to_group.groupby(grouper_series)
-        partitions_lagged = to_group.groupby(grouper_series_lagged)
-    else:
-        # offset defined test intervals:
-        partitions = to_group.groupby(pd.Grouper(freq=winsz))
-    for _, partition in partitions:
-        if partition.shape[0] > min_periods:
-            detected = smirnov_grubbs.two_sided_test_indices(partition["data"].values, alpha=alpha)
-            detected = partition["ts"].iloc[detected]
-            to_flag[detected.index] = True
-
-    if check_lagged & isinstance(winsz, int):
-        to_flag_lagged = pd.Series(False, index=datcol.index)
-        for _, partition in partitions_lagged:
-            if partition.shape[0] > min_periods:
-                detected = smirnov_grubbs.two_sided_test_indices(partition["data"].values, alpha=alpha)
-                detected = partition["ts"].iloc[detected]
-                to_flag_lagged[detected.index] = True
-        to_flag = to_flag & to_flag_lagged
-
-    flagger = flagger.setFlags(field, loc=to_flag, **kwargs)
-    return data, flagger
diff --git a/saqc/funcs/tools.py b/saqc/funcs/tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..c8cace05fc624445cd2a2e61f410780eefc5b2fd
--- /dev/null
+++ b/saqc/funcs/tools.py
@@ -0,0 +1,388 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from typing import Optional, Tuple
+
+from typing_extensions import Literal
+import numpy as np
+from dios import DictOfSeries
+
+import matplotlib as mpl
+import matplotlib.pyplot as plt
+import pickle
+
+from saqc.constants import *
+from saqc.lib.types import FreqString
+from saqc.core import processing, Flags
+from saqc.lib.tools import periodicMask
+from saqc.lib.plotting import makeFig
+
+_MPL_DEFAULT_BACKEND = mpl.get_backend()
+
+
+@processing()
+def copyField(
+    data: DictOfSeries, field: str, flags: Flags, new_field: str, **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    The function generates a copy of the data column "field" and inserts it under
+    the name given by "new_field" into the existing data.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the data column, you want to fork (copy).
+    flags : saqc.Flags
+        Container to store quality flags to data.
+    new_field: str
+        Target name.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        Data shape may have changed relative to the data input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags shape may have changed relative to the flags input.
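+
+    Examples
+    --------
+    A minimal usage sketch (the column name "SM1" is an assumption for illustration):
+
+    >>> data, flags = copyField(data, "SM1", flags, new_field="SM1_copy")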
+    """
+    if new_field in flags.columns.union(data.columns):
+        raise ValueError(f"{new_field}: field already exists")
+
+    data[new_field] = data[field].copy()
+    # implicit copy in history access
+    flags.history[new_field] = flags.history[field]
+    return data, flags
+
+
+@processing()
+def dropField(
+    data: DictOfSeries, field: str, flags: Flags, **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    The function drops field from the data dios and the flags.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the data column, you want to drop.
+    flags : saqc.Flags
+        Container to store quality flags to data.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        Data shape may have changed relative to the data input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags shape may have changed relative to the flags input.
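+
+    Examples
+    --------
+    A minimal usage sketch (the column name "SM1" is an assumption for illustration):
+
+    >>> data, flags = dropField(data, "SM1", flags)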
+    """
+    del data[field]
+    del flags[field]
+    return data, flags
+
+
+@processing()
+def renameField(
+    data: DictOfSeries, field: str, flags: Flags, new_name: str, **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    The function renames field to new_name (in both the flags and the data).
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the data column, you want to rename.
+    flags : saqc.Flags
+        Container to store flags of the data.
+    new_name : str
+        The new name, `field` is to be renamed to.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    flags : saqc.Flags
+        The quality flags of data
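+
+    Examples
+    --------
+    A minimal usage sketch (column names are assumptions for illustration):
+
+    >>> data, flags = renameField(data, "SM1", flags, new_name="soil_moisture_1")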
+    """
+    data[new_name] = data[field]
+    flags.history[new_name] = flags.history[field]
+    del data[field]
+    del flags[field]
+    return data, flags
+
+
+@processing()
+def maskTime(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    mode: Literal["periodic", "mask_field"],
+    mask_field: Optional[str] = None,
+    start: Optional[str] = None,
+    end: Optional[str] = None,
+    closed: bool = True,
+    **kwargs,
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    This function realizes masking within saqc.
+
+    Due to some inner saqc mechanics, it is not straightforwardly possible to exclude
+    values or datachunks from flagging routines. This function replaces flags with the UNFLAGGED
+    value wherever values are to get masked. Furthermore, the masked values get replaced by
+    np.nan, so that they don't affect calculations.
+
+    Here is a recipe for applying a flagging function only to a masked chunk of the variable field:
+
+    1. duplicate "field" in the input data (copy)
+    2. mask the duplicated data (mask)
+    3. apply the tests you only want to be applied onto the masked data chunks (saqc_tests)
+    4. project the flags, calculated on the duplicated and masked data, onto the original field data
+        (projectFlags or flagGeneric)
+    5. drop the duplicated data (drop)
+
+    To see an implemented example, check out flagSeasonalRange in the saqc.functions module
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, holding the data-to-be-masked.
+    flags : saqc.Flags
+        Container to store flags of the data.
+    mode : {"periodic", "mask_var"}
+        The masking mode.
+        - "periodic": parameters "period_start", "end" are evaluated to generate a periodical mask
+        - "mask_var": data[mask_var] is expected to be a boolean valued timeseries and is used as mask.
+    mask_field : {None, str}, default None
+        Only effective if mode == "mask_var"
+        Fieldname of the column, holding the data that is to be used as mask. (must be moolean series)
+        Neither the series` length nor its labels have to match data[field]`s index and length. An inner join of the
+        indices will be calculated and values get masked where the values of the inner join are "True".
+    start : {None, str}, default None
+        Only effective if mode == "seasonal"
+        String denoting starting point of every period. Formally, it has to be a truncated instance of "mm-ddTHH:MM:SS".
+        Has to be of same length as `end` parameter.
+        See examples section below for some examples.
+    end : {None, str}, default None
+        Only effective if mode == "periodic"
+        String denoting starting point of every period. Formally, it has to be a truncated instance of "mm-ddTHH:MM:SS".
+        Has to be of same length as `end` parameter.
+        See examples section below for some examples.
+    closed : boolean
+        Wheather or not to include the mask defining bounds to the mask.
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        Data values may have changed relative to the data input.
+    flags : saqc.Flags
+        The quality flags of data
+        Flags values may have changed relative to the flags input.
+
+
+    Examples
+    --------
+    The `start` and `end` parameters provide a convenient way to generate seasonal / date-periodic masks.
+    They have to be strings of the forms: "mm-ddTHH:MM:SS", "ddTHH:MM:SS", "HH:MM:SS", "MM:SS" or "SS"
+    (mm=month, dd=day, HH=hour, MM=minute, SS=second)
+    Single digit specifications have to be given with leading zeros.
+    `start` and `end` strings have to be of the same length (refer to the same periodicity).
+    The highest date unit gives the period.
+    For example:
+
+    >>> start = "01T15:00:00"
+    >>> end = "13T17:30:00"
+
+    Will result in all values sampled between 15:00 at the first and 17:30 at the 13th of every month getting masked.
+
+    >>> start = "01:00"
+    >>> end = "04:00"
+
+    All the values between the first and 4th minute of every hour get masked.
+
+    >>> start = "01-01T00:00:00"
+    >>> end = "03-01T00:00:00"
+
+    Mask January and February of every year. Masking is always inclusive, so in this case the mask will
+    include 00:00:00 at the first of March. To exclude this one, pass:
+
+    >>> start = "01-01T00:00:00"
+    >>> end = "02-28T23:59:59"
+
+    To mask intervals that wrap around a period's boundary, like nights or winters, exchange the sequence of start and
+    end. For example, to mask night hours between 22:00:00 in the evening and 06:00:00 in the morning, pass:
+
+    >>> start = "22:00:00"
+    >>> end = "06:00:00"
+
+    When inclusive_selection="season", all above examples work the same way, only that you now
+    determine which values NOT TO mask (=which values are to constitute the "seasons").
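+
+    A sketch of a full call, masking the night hours of a (hypothetical) variable "temp" via the periodic mode:
+
+    >>> data, flags = maskTime(data, "temp", flags, mode="periodic", start="22:00:00", end="06:00:00")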
+    """
+    data = data.copy()
+    datcol_idx = data[field].index
+
+    if mode == "periodic":
+        to_mask = periodicMask(datcol_idx, start, end, closed)
+    elif mode == "mask_field":
+        idx = data[mask_field].index.intersection(datcol_idx)
+        to_mask = data.loc[idx, mask_field]
+    else:
+        raise ValueError("Keyword passed as masking mode is unknown ({})!".format(mode))
+
+    data.aloc[to_mask, field] = np.nan
+    flags[to_mask, field] = UNFLAGGED
+    return data, flags
+
+
+@processing()
+def plot(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    path: Optional[str] = None,
+    max_gap: Optional[FreqString] = None,
+    stats: bool = False,
+    plot_kwargs: Optional[dict] = None,
+    fig_kwargs: Optional[dict] = None,
+    stats_dict: Optional[dict] = None,
+    store_kwargs: Optional[dict] = None,
+    **kwargs,
+):
+    """
+    Stores or shows a figure object, containing data graph with flag marks for field.
+
+    There are two modes, 'interactive' and 'store' mode, which is determined via the
+    ``path`` keyword. In interactive mode (default) the plot is shown at runtime
+    and the execution stops until the plot window is closed manually by a user. In
+    store mode the generated plot is stored to disk and no manual interaction is
+    needed.
+
+    Parameters
+    ----------
+    data : {pd.DataFrame, dios.DictOfSeries}
+        data
+
+    field : str
+        Name of the variable-to-plot
+
+    flags : {pd.DataFrame, dios.DictOfSeries, saqc.flagger}
+        Flags or flagger object
+
+    path : str, default None
+        If ``None`` is passed, interactive mode is entered; plots are shown immediately
+        and a user needs to close them manually before execution continues.
+        If a filepath is passed instead, store-mode is entered and
+        the plot is stored under the passed location.
+
+    max_gap : str, default None
+        If None, all the points in the data will be connected, resulting in long linear
+        lines, where continuous chunks of data are missing. NaNs in the data get dropped
+        before plotting. If an Offset string is passed, only points that have a distance
+        below `max_gap` get connected via the plotting line.
+
+    stats : bool, default False
+        Whether to include statistics table in plot.
+
+    plot_kwargs : dict, default None
+        Keyword arguments controlling plot generation. Will be passed on to the
+        ``Matplotlib.axes.Axes.set()`` property batch setter for the axes showing the
+        data plot. The most relevant of those properties might be "ylabel", "title" and
+        "ylim". In Addition, following options are available:
+
+        * {'slice': s} property, that determines a chunk of the data to be plotted /
+            processed. `s` can be anything, that is a valid argument to the
+            ``pandas.Series.__getitem__`` method.
+        * {'history': str}
+            * str="all": All the flags are plotted with colored dots, refering to the
+                tests they originate from
+            * str="valid": - same as 'all' - but only plots those flags, that are not
+                removed by later tests
+
+    fig_kwargs : dict, default None
+        Keyword arguments controlling figure generation. In store mode,
+        ``None`` defaults to ``{"figsize": (16, 9)}`` to ensure a proper figure size
+        of the stored plot.
+
+    store_kwargs : dict, default {}
+        Keywords to be passed on to the ``matplotlib.pyplot.savefig`` method, handling
+        the figure storing. To store a pickled version of the figure, use the option
+        ``{'pickle': True}``, but note that all other store_kwargs are ignored then.
+        Reopen with: ``pickle.load(open(savepath, 'rb')).show()``
+
+    stats_dict: dict, default None
+        (Only relevant if `stats`=True)
+        Dictionary of additional statistics to write to the statistics table
+        accompanying the data plot. An entry to the stats_dict has to be of the form:
+
+        * {"stat_name": lambda x, y, z: func(x, y, z)}
+
+        The lambda args ``x``,``y``,``z`` will be fed by:
+
+        * ``x``: the data (``data[field]``).
+        * ``y``: the flags (``flags[field]``).
+        * ``z``: The passed flags level (``kwargs[flag]``)
+
+        See examples section for examples
+
+    Examples
+    --------
+    Summary statistic function examples:
+
+    >>> func = lambda x, y, z: len(x)
+
+    Total number of nan-values:
+
+    >>> func = lambda x, y, z: x.isna().sum()
+
+    Percentage of values, flagged greater than passed flag (always round float results
+    to avoid table cell overflow):
+
+    >>> func = lambda x, y, z: round((y >= z).sum() / len(x), 2)
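+
+    A sketch of a store-mode call (the output path and variable name are assumptions
+    for illustration):
+
+    >>> data, flags = plot(data, "SM1", flags, path="/tmp/SM1.png", max_gap="4h")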
+    """
+    interactive = path is None
+
+    if store_kwargs is None:
+        store_kwargs = {}
+
+    if interactive:
+        mpl.use(_MPL_DEFAULT_BACKEND)
+
+    else:
+        mpl.use("Agg")
+        # ensure a proper size in stored plot
+        if fig_kwargs is None:
+            fig_kwargs = {"figsize": (16, 9)}
+
+    fig = makeFig(
+        data=data,
+        field=field,
+        flags=flags,
+        level=kwargs.get("flag", BAD),
+        max_gap=max_gap,
+        stats=stats,
+        plot_kwargs=plot_kwargs,
+        fig_kwargs=fig_kwargs,
+        stats_dict=stats_dict,
+    )
+
+    if interactive:
+        plt.show()
+
+    else:
+        if store_kwargs.pop("pickle", False):
+            with open(path, "wb") as f:
+                pickle.dump(fig, f)
+        else:
+            fig.savefig(path, **store_kwargs)
+
+    return data, flags
diff --git a/saqc/funcs/transformation.py b/saqc/funcs/transformation.py
new file mode 100644
index 0000000000000000000000000000000000000000..357e948cdbbf47a945b2a8f1e16ce5e1fa064197
--- /dev/null
+++ b/saqc/funcs/transformation.py
@@ -0,0 +1,73 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from typing import Optional, Callable, Tuple, Union
+import numpy as np
+import pandas as pd
+from dios import DictOfSeries
+
+from saqc.core import flagging, Flags
+
+
+@flagging(masking="field")
+def transform(
+    data: DictOfSeries,
+    field: str,
+    flags: Flags,
+    func: Callable[[pd.Series], pd.Series],
+    freq: Optional[Union[float, str]] = None,
+    **kwargs
+) -> Tuple[DictOfSeries, Flags]:
+    """
+    Function to transform data columns with a transformation that maps series onto series of the same length.
+
+    Note, that flags get preserved.
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+    field : str
+        The fieldname of the column, holding the data-to-be-transformed.
+    flags : saqc.Flags
+        Container to store quality flags to data.
+    func : Callable[[pd.Series], pd.Series]
+        Function to transform data[field] with.
+    freq : {None, float, str}, default None
+        Determines the segmentation of the data into partitions that the transformation is applied to individually.
+
+        * ``None`` / ``np.inf``: Apply transformation on whole data set at once
+        * ``x`` > 0 : Apply transformation on successive data chunks of periods length ``x``
+        * Offset String : Apply transformation on successive partitions of temporal extension matching the passed offset
+          string
+
+    Returns
+    -------
+    data : dios.DictOfSeries
+        A dictionary of pandas.Series, holding all the data.
+        Data values may have changed relative to the data input.
+    flags : saqc.Flags
+        The quality flags of data
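+
+    Examples
+    --------
+    A minimal sketch; the z-scoring function and the "30D" partitioning are
+    illustrative assumptions, not defaults:
+
+    >>> data, flags = transform(data, "SM1", flags, func=lambda x: (x - x.mean()) / x.std(), freq="30D")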
+    """
+
+    data = data.copy()
+    val_ser = data[field]
+    # partitioning
+    if not freq:
+        freq = val_ser.shape[0]
+
+    if isinstance(freq, str):
+        grouper = pd.Grouper(freq=freq)
+    else:
+        grouper = pd.Series(data=np.arange(0, val_ser.shape[0]), index=val_ser.index)
+        grouper = grouper.transform(lambda x: int(np.floor(x / freq)))
+
+    partitions = val_ser.groupby(grouper)
+
+    for _, partition in partitions:
+        if partition.empty:
+            continue
+        val_ser[partition.index] = func(partition)
+
+    data[field] = val_ser
+    return data, flags
diff --git a/saqc/lib/plotting.py b/saqc/lib/plotting.py
index 0a9ac3066895e89575a84232dbef171d85ca12f4..b9cab2c6bec2530fcc2bc4ac9a0263a7a0896e89 100644
--- a/saqc/lib/plotting.py
+++ b/saqc/lib/plotting.py
@@ -1,749 +1,221 @@
 #! /usr/bin/env python
 # -*- coding: utf-8 -*-
 
-import logging
+from typing import Optional
 
-import numpy as np
 import pandas as pd
-import dios
-import matplotlib.pyplot as plt
-from typing import List, Dict, Optional
-from saqc.flagger import BaseFlagger
-
-
-def __importHelper():
-    import matplotlib as mpl
-    from pandas.plotting import register_matplotlib_converters
-
-    # needed for datetime conversion
-    register_matplotlib_converters()
-
-    if not _interactive:
-        # Import plot libs without interactivity, if not needed.
-        # This ensures that we can produce an plot.png even if
-        # tkinter is not installed. E.g. if one want to run this
-        # on machines without X-Server aka. graphic interface.
-        mpl.use("Agg")
-
-
-# global switches - use is read-only
-_interactive = True
-_figsize = (16, 9)
-_layout_data_to_table_ratio = [5, 1]
-_show_info_table = True
-
-# order is important, because
-# latter may overwrite former
-_cols = [
-    # data - not mutually distinct
-    "data",
-    "data-nans",
-    # flags - mutually distinct
-    "unflagged",
-    "good",
-    "suspicious",
-    "bad",
-    "flag-nans",  # currently ignored
-    # special flags - mutually distinct
-    "unchanged",
-    "changed",
-]
-
-_plotstyle: Dict[str, dict] = {
-    # flags
-    "unflagged": dict(marker=".", ls="none", c="silver", label="UNFLAGGED"),
-    "good": dict(marker=".", fillstyle="none", ls="none", c="seagreen", label="GOOD"),
-    "bad": dict(marker=".", fillstyle="none", ls="none", c="firebrick", label="BAD"),
-    "suspicious": dict(marker=".", fillstyle="none", ls="none", c="gold", label="SUSPICIOUS"),
-    "old-flags": dict(marker=".", fillstyle="none", ls="none", c="black", label="old-flags"),
-    # data
-    "data": dict(c="silver", ls="-", label="data"),
-    "data-nans": dict(marker=".", fillstyle="none", ls="none", c="lightsteelblue", label="NaN"),
-}
-
-
-def _show():
-    if _interactive:
-        plt.show()
-
-
-def plotAllHook(
-    data, flagger, targets=None, show_info_table: bool = True, annotations: Optional[dios.DictOfSeries] = None,
-):
-    __importHelper()
-    targets = flagger.flags.columns if targets is None else targets
-    _plotMultipleVariables(
-        data_old=None,
-        flagger_old=None,
-        data_new=data,
-        flagger_new=flagger,
-        targets=targets,
-        show_info_table=show_info_table,
-        annotations=annotations,
-    )
-    plt.tight_layout()
-    _show()
-
-
-def plotHook(
-    data_old: Optional[dios.DictOfSeries],
-    data_new: dios.DictOfSeries,
-    flagger_old: Optional[BaseFlagger],
-    flagger_new: BaseFlagger,
-    sources: List[str],
-    targets: List[str],
-    plot_name: str = "",
-    annotations: Optional[dios.DictOfSeries] = None,
-):
-    assert len(targets) > 0
-    __importHelper()
-
-    args = dict(
-        data_old=data_old,
-        data_new=data_new,
-        flagger_old=flagger_old,
-        flagger_new=flagger_new,
-        targets=targets,
-        show_info_table=_show_info_table,
-        annotations=annotations,
-    )
-
-    if len(targets) == 1:
-        _plotSingleVariable(**args, sources=sources, show_reference_data=True, plot_name=plot_name)
-    else:
-        _plotMultipleVariables(**args)
-
-    _show()
-
-
-def _plotMultipleVariables(
-    data_old: Optional[dios.DictOfSeries],
-    data_new: dios.DictOfSeries,
-    flagger_old: Optional[BaseFlagger],
-    flagger_new: BaseFlagger,
-    targets: List[str],
-    show_info_table: bool = True,
-    annotations=None,
-):
-    """
-    Plot data and flags for a multiple target-variables.
-
-    For each variable specified in targets a own plot is generated.
-    If specified, a table with quantity information is shown on the
-    right of each plot. If more than 4 vars are specified always
-    four plots are combined and shown in a single window (figure).
-    Nevertheless the x-axis between all figures are joint together.
-    This allows to still zoom or scroll all plots simultaneously.
-
-    Parameters
-    ----------
-    data_old
-        data from the good old times
-    data_new
-        current state of data
-    flagger_old
-        flagger that hold flags corresponding to data_old
-    flagger_new
-        flagger that hold flags corresponding to data_new
-    targets
-        a list of strings, each indicating a column in flagger_new.flags
-    show_info_table
-        Show a info-table on the right of reference-data and data or not
-
-    Returns
-    -------
-    None
-    """
-    show_tab = show_info_table
-    tlen = len(targets)
-    tgen = (t for t in targets)
-
-    nfig, ncols_rest = divmod(tlen, 4)
-    ncols = [4] * nfig
-    if ncols_rest:
-        nfig += 1
-        ncols += [ncols_rest]
-
-    gs_kw = dict(width_ratios=_layout_data_to_table_ratio)
-    layout = dict(
-        figsize=_figsize,
-        sharex=True,
-        tight_layout=True,
-        squeeze=False,
-        gridspec_kw=gs_kw if show_tab else {}
-    )
-
-    # plot max. 4 plots per figure
-    allaxs = []
-    for n in range(nfig):
-
-        fig, axs = plt.subplots(nrows=ncols[n], ncols=2 if show_tab else 1, **layout)
-
-        for ax in axs:
-            var = next(tgen)
-            tar, _ = _getDataFromVar(data_old, data_new, flagger_old, flagger_new, var)
+import matplotlib as mpl
 
-            if show_tab:
-                plot_ax, tab_ax = ax
-                _plotInfoTable(tab_ax, tar, _plotstyle, len(tar["data"]))
-            else:
-                plot_ax = ax[0]
+from saqc.constants import *
+from saqc.core import Flags
+from saqc.lib.types import DiosLikeT, FreqString
 
-            _plotFromDicts(plot_ax, tar, _plotstyle)
 
-            if annotations is not None and var in annotations:
-                _annotate(plot_ax, tar, annotations[var])
-
-            plot_ax.set_title(str(var))
-            allaxs.append(plot_ax)
-
-    # we join all x-axis together. Surprisingly
-    # this also works between different figures :D
-    ax0 = allaxs[0]
-    for ax in allaxs:
-        ax.get_shared_x_axes().join(ax, ax0)
-        ax.autoscale()
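+# Default statistics shown in the plot's info table. Each callable receives
+# (data, flags, level) and returns a single summary value.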
+STATSDICT = {
+    "values total": lambda x, y, z: len(x),
+    "invalid total (=NaN)": lambda x, y, z: x.isna().sum(),
+    "invalid percentage": lambda x, y, z: round((x.isna().sum()) / len(x), 2),
+    "flagged total": lambda x, y, z: (y >= z).sum(),
+    "flagged percentage": lambda x, y, z: round(((y >= z).sum()) / len(x), 2),
+}
 
 
-def simplePlot(
-    data: dios.DictOfSeries,
-    flagger: BaseFlagger,
+def makeFig(
+    data: DiosLikeT,
     field: str,
-    plot_name=None,
-    show_info_table: bool = True,
-    annotations=None,
-):
-    __importHelper()
-    _plotSingleVariable(
-        data_old=None,
-        data_new=data,
-        flagger_old=None,
-        flagger_new=flagger,
-        sources=[],
-        targets=[field],
-        show_reference_data=False,
-        show_info_table=show_info_table,
-        plot_name=plot_name or str(field),
-        annotations=annotations,
-    )
-    _show()
-
-
-def _plotSingleVariable(
-    data_old: dios.DictOfSeries,
-    data_new: dios.DictOfSeries,
-    flagger_old: BaseFlagger,
-    flagger_new: BaseFlagger,
-    sources: List[str],
-    targets: List[str],
-    show_reference_data=True,
-    show_info_table: bool = True,
-    plot_name="current data",
-    annotations=None,
+    flags: Flags,
+    level: float,
+    max_gap: Optional[FreqString] = None,
+    stats: bool = False,
+    plot_kwargs: Optional[dict] = None,
+    fig_kwargs: Optional[dict] = None,
+    stats_dict: Optional[dict] = None,
 ):
     """
-    Plot data and flags for a single target-variable.
-
-    The resulting plot (the whole thing) can have up to 3 areas.
-
-    - The first **optional upper area** show up to 4 sources, if given.
-    - The **middle optional area** show the reference-plot, that show
-      the target variable in the state before the last test was run.
-      If specified, a table with quantity information is shown on the
-      right.
-    - The last **non-optional lower area**  shows the current data with
-      its flags. If specified, a table with quantity information is shown
-      on the right.
+    Returns a figure object, containing data graph with flag marks for field.
 
     Parameters
     ----------
-    data_old
-        data from the good old times
-    data_new
-        current state of data
-    flagger_old
-        flagger that hold flags corresponding to data_old
-    flagger_new
-        flagger that hold flags corresponding to data_new
-    sources
-        all sources that was used to change new to old
-    targets
-        a single(!) string that indicates flags in flagger_new.flags
-    show_reference_data
-        Show reference (aka. old) data, or not
-    show_info_table
-        Show a info-table on the right of reference-data and data or not
-    plot_name
-        The name of the data-plot
-
-    Returns
-    -------
-    None
-
-    """
-    assert len(targets) == 1
-    var = targets[0]
-    slen = len(sources)
-
-    curr, ref = _getDataFromVar(data_old, data_new, flagger_old, flagger_new, var)
-
-    show_ref = show_reference_data and ref is not None
-    show_tab = show_info_table
-    show_srces = slen > 0
-
-    nrows = 1
-    if show_ref:
-        nrows += 1
-    if show_srces:
-        nrows += 1
-        if slen > 4:
-            # possible future-fix: make own figure(s) with shared-x-axis for
-            # all sources. axis can be shared between figures !
-            logging.warning(f"plotting: only first 4 of {slen} sources are shown.")
-            slen = 4
-
-    fig = plt.figure(constrained_layout=True, figsize=_figsize,)
-    outer_gs = fig.add_gridspec(ncols=1, nrows=nrows)
-    gs_count = 0
-    allaxs = []
-
-    # plot srces
-    if show_srces:
-        srcs_gs_arr = outer_gs[gs_count].subgridspec(ncols=slen, nrows=1)
-        gs_count += 1
-        # NOTE: i implicit assume that all sources are available before the test run.
-        # if this ever fails, one could use data instead of ref. but i can't imagine
-        # any case, where this could happen -- bert.palm@ufz.de
-        for i, gs in enumerate(srcs_gs_arr):
-            ax = fig.add_subplot(gs)
-            v = sources[i]
-            _, src = _getDataFromVar(data_old, data_new, flagger_old, flagger_new, v)
-            _plotFromDicts(ax, src, _plotstyle)
-            ax.set_title(f"src{i + 1}: {v}")
-            allaxs.append(ax)
-
-    # plot reference data (the data as it was before the test)
-    if ref and show_ref:
-        ax = _plotDataWithTable(fig, outer_gs[gs_count], ref, show_tab=show_tab)
-        ax.set_title(f"Reference data (before the test)")
-        allaxs.append(ax)
-        gs_count += 1
-
-    # plot data
-    ax = _plotDataWithTable(fig, outer_gs[gs_count], curr, show_tab=show_tab)
-    ax.set_title(f"{plot_name}")
-    # also share y-axis with ref
-    if ref and show_ref:
-        ax.get_shared_y_axes().join(ax, allaxs[-1])
-    allaxs.append(ax)
-    gs_count += 1
-
-    if annotations is not None and var in annotations:
-        _annotate(ax, curr, annotations[var])
-
-    # share all x-axis
-    ax0 = allaxs[0]
-    for ax in allaxs:
-        ax.get_shared_x_axes().join(ax, ax0)
-        ax.autoscale()
-
-    # use all space
-    outer_gs.tight_layout(fig)
-
-
-def _getDataFromVar(
-    data_old: dios.DictOfSeries,
-    data_new: dios.DictOfSeries,
-    flagger_old: BaseFlagger,
-    flagger_new: BaseFlagger,
-    varname: str,
-):
-    """
-    Extract flag and data information and store them in separate pd.Series.
+    data : {pd.DataFrame, dios.DictOfSeries}
+        data
 
-    This is a helper that extract all relevant information from the flagger
-    and data and prepare those information, so it can be plotted easily.
-    This means, each information is stored in a separate pd.Series, whereby
-    its index is always a subset of the `data`-series index (which is always
-    be present). Also all info is projected to the y-coordinate of the data,
-    so plotting all info in the same plot, will result in a data-plot with
-    visible flags at the actual position.
+    flags : {pd.DataFrame, dios.DictOfSeries, saqc.flagger}
+        Flags or flagger object
 
-    Hard constrains:
-     0. var needs to be present in ``flagger_new.flags``
-     1. iff var is present in data_xxx, then var need to
-        be present in flags_xxx (``flagger_xxx.flags``)
+    field : str
+        Name of the variable-to-plot
 
-    Conditions:
-     2. if var is present in flags_new, but not in data_new, dummy-data is created
-     3. if var is present in data_old, (see also 1.) reference info is generated
+    level : float
+        Flag level above which flagged values should be displayed.
 
+    max_gap : str, default None
+        If None, all the points in the data will be connected, resulting in long linear
+        lines, where continuous chunks of data are missing. NaNs in the data get dropped
+        before plotting. If an Offset string is passed, only points that have a distance
+        below `max_gap` get connected via the plotting line.
 
-    Returns
-    -------
-    dict, {dict or None}
-        Returns two dictionaries, the first holds the infos corresponding
-        to the actual data and flags (from flagger_new), the second hold
-        the infos from the state before the last test run. The second is
-        ``None`` if condition 3. is not fulfilled.
-
-        Each dict have the following keys, and hold pd.Series as values:
-
-        - 'data': all data (with nan's if present) [3]
-        - 'data-nans': nan's projected on locations from interpolated data
-        - 'unflagged': flags that indicate unflagged [1][3]
-        - 'good':  flags that indicate good's [1][3]
-        - 'suspicious': flags that indicate suspicious'es [1][3]
-        - 'bad': flags that indicate bad's [1][3]
-        - 'flag-nans': nan's in flags [1][3]
-        - 'unchanged': flags that kept unchanged during the last test [2]
-        - 'changed': flags that did changed during the last test [2]
-
-        Series marked with [1] are completely distinct to others marked with [1],
-        and all [1]'s sum up to all flags, same apply for [2].
-        The series marked with [3] could be empty, if the infos are not present.
-        All infos are projected to the data locations.
-    """
-    var = varname
-    assert var in flagger_new.flags
-    flags_new: pd.Series = flagger_new.flags[var]
-    plotdict = _getPlotdict(data_new, flags_new, flagger_new, var)
-    ref_plotdict = None
-
-    # prepare flags
-    if flagger_old is not None and var in flagger_old.flags:
-        flags_old = flagger_old.flags[var]
-        ref_plotdict = _getPlotdict(data_old, flags_old, flagger_old, var)
-
-        # check flags-index changes:
-        # if we want to know locations, where the flags has changed between old and new,
-        # the index must match, otherwise, this could lead to wrong placed flags. Even
-        # though the calculations would work.
-        if flags_old.index.equals(flags_new.index):
-            unchanged, changed = _splitOldAndNew(flags_old, flags_new)
-            unchanged, changed = _projectFlagsOntoData([unchanged, changed], plotdict["data"])
-            plotdict["unchanged"] = unchanged
-            plotdict["changed"] = changed
-
-            # calculate old-flags and update flags, like BADs,
-            # to show only freshly new set values
-            unflagged = plotdict["unflagged"]
-            diff = unchanged.index.difference(unflagged.index)
-            plotdict["old-flags"] = unchanged.loc[diff]
-            for field in ["bad", "suspicious", "good"]:
-                data = plotdict[field]
-                isect = changed.index & data.index
-                plotdict[field] = data.loc[isect]
-
-    return plotdict, ref_plotdict
-
-
-def _getPlotdict(data: dios.DictOfSeries, flags: pd.Series, flagger, var):
-    """
-    Collect info and put them in a dict and creates dummy data if no data present.
-
-    The collected info include nan-data (projected to interpolated locations) and
-    flag-info for BAD, SUSP., GOOD, UNFLAGGED, and flag-nans. Except the flag-nans
-    all info is projected to the data-locations. E.g a BAD at the position N is
-    projected to the data's x- and y- location at the very same position.
-
-    Parameters
-    ----------
-    data: dios.DictOfSeries
-        holds the data. If data hold a series in `var` it is used,
-        otherwise a dummy series is created from flags.
+    stats : bool, default False
+        Whether to include statistics table in plot.
 
-    flags: pd.Series
-        hold the flags.
+    plot_kwargs : dict, default None
+        Keyword arguments controlling plot generation. Will be passed on to the
+        ``Matplotlib.axes.Axes.set()`` property batch setter for the axes showing the
+        data plot. The most relevant of those properties might be "ylabel",
+        "title" and "ylim".
+        In addition, the following options are available:
 
-    flagger: saqc.Flagger
-        flagger object, used for get flaginfo via ``flagger.isFlagged()``
+        * {'slice': s} property, that determines a chunk of the data to be plotted /
+            processed. `s` can be anything,
+            that is a valid argument to the ``pandas.Series.__getitem__`` method.
+        * {'history': str}
+            * str="all": All the flags are plotted with colored dots, refering to the
+                tests they originate from
+            * str="valid": - same as 'all' - but only plots those flags, that are not
+                removed by later tests
+    fig_kwargs : dict, default None
+        Keyword arguments controlling figure generation. ``None`` defaults to an
+        empty dict.
 
-    var: str
-        identifies the data-series in ``data`` that correspond to ``flags``
+    stats_dict: dict, default None
+        (Only relevant if `stats`=True).
+        Dictionary of additional statistics to write to the statistics table
+        accompanying the data plot. An entry to the stats_dict has to be of the form:
 
-    Returns
-    -------
-    dict
-        Returns a dictionary with the following keys, that hold pd.Series as values:
-
-        - 'data': all data (with nan's if present)
-        - 'data-nans': nan's projected on locations from interpolated data
-        - 'unflagged': flags that indicate unflagged [1]
-        - 'good':  flags that indicate good's [1]
-        - 'suspicious': flags that indicate suspicious'es [1]
-        - 'bad': flags that indicate bad's [1]
-        - 'flag-nans': nan's in flags [1]
-        - 'unchanged': flags that kept unchanged during the last test [2]
-        - 'changed': flags that did changed during the last test [2]
-
-        Flags marked with [1] are completely distinct, and sum up to all flags,
-        same apply for [2].
+        * {"stat_name": lambda x, y, z: func(x, y, z)}
 
-    """
-    pdict = dios.DictOfSeries(columns=_cols)
+        The lambda args ``x``,``y``,``z`` will be fed by:
 
-    # fill data
-    dat, nans = _getData(data, flags, var)
-    assert dat.index.equals(flags.index)
-    pdict["data"] = dat
-    pdict["data-nans"] = nans
+        * ``x``: the data (``data[field]``).
+        * ``y``: the flags (``flags[field]``).
+        * ``z``: The passed flags level (``kwargs[flag]``)
 
-    # fill flags
-    tup = _splitByFlag(flags, flagger, var)
-    assert sum(map(len, tup)) == len(flags)
-    g, s, b, u, n = _projectFlagsOntoData(list(tup), dat)
-    pdict["good"] = g
-    pdict["suspicious"] = s
-    pdict["bad"] = b
-    pdict["unflagged"] = u
-    pdict["flag-nans"] = n
+        See examples section for examples
 
-    return pdict
-
-
-def _getData(data: dios.DictOfSeries, flags: pd.Series, var: str):
-    """
-    Get data from a dios or create a dummy data.
-
-    A pd.Series is taken from `data` by `var`. If the
-    data does not hold such series, a dummy series is
-    created from flags, which have no y-information.
-    If the series indeed was present, also the nan-location
-    are extracted and projected to interpolated locations
-    in data.
-
-    Returns
-    -------
-    pd.Series, pd.Series
-        the data-series and nan-locations
-    """
-    if var in data:
-        dat = data[var]
-        nans = dat.interpolate().loc[dat.isna()]
-    else:
-        # create dummy data
-        dat = pd.Series(0, index=flags.index)
-        nans = pd.Series([], index=pd.DatetimeIndex([]))
-    return dat, nans
-
-
-def _splitOldAndNew(old: pd.Series, new: pd.Series):
-    """
-    Split new in two distinct series of equality and non-equality with old.
 
     Returns
     -------
-        Two distinct series, one with locations, where the old and new data(!)
-        are equal (including nans at same positions), the other with the rest
-        of locations seen from new. This means, the rest marks locations, that
-        are present(!) in new, but its data differs from old.
-    """
-    idx = old.index & new.index
-    both_nan = old.loc[idx].isna() & new.loc[idx].isna()
-    mask = (new.loc[idx] == old[idx]) | both_nan
-    old_idx = mask[mask].index
-    new_idx = new.index.difference(old_idx)
-    return new.loc[old_idx], new.loc[new_idx]
+    fig : matplotlib.pyplot.figure
+        figure object.
 
+    Examples
+    --------
+    Summary statistic function examples:
 
-def _splitByFlag(flags: pd.Series, flagger, var: str):
-    """
-    Splits flags in the five distinct bins: GOOD, SUSPICIOUS, BAD, UNFLAGGED and NaNs.
-    """
-    n = flags.isna()
-    loc = flags.dropna().index
-    g = flagger.isFlagged(field=var, loc=loc, flag=flagger.GOOD, comparator="==")
-    b = flagger.isFlagged(field=var, loc=loc, flag=flagger.BAD, comparator="==")
-    u = flagger.isFlagged(field=var, loc=loc, flag=flagger.UNFLAGGED, comparator="==")
-    s = flagger.isFlagged(field=var, loc=loc, flag=flagger.BAD, comparator="<")
-    s = flagger.isFlagged(field=var, loc=loc, flag=flagger.GOOD, comparator=">") & s
-    return g[g], s[s], b[b], u[u], n[n]
+    >>> func = lambda x, y, z: len(x)
 
+    Total number of nan-values:
 
-def _projectFlagsOntoData(idxlist: List[pd.Series], data: pd.Series):
-    """ Project flags to a xy-location, based on data. """
-    res = []
-    for item in idxlist:
-        res.append(data.loc[item.index])
-    return tuple(res)
+    >>> func = lambda x, y, z: x.isna().sum()
 
+    Percentage of values, flagged greater than passed flag (always round float results to avoid table cell overflow):
 
-def _plotDataWithTable(fig, gs, pdict, show_tab=True):
+    >>> func = lambda x, y, z: round((y >= z).sum() / len(x), 2)
     """
-    Plot multiple series from a dict and optionally create a info table
-
-    Parameters
-    ----------
-    fig : matplotlib.figure
-        figure object to place the plot and info-table in
+    if plot_kwargs is None:
+        plot_kwargs = {"history": False}
+    if fig_kwargs is None:
+        fig_kwargs = {}
+    if stats_dict is None:
+        stats_dict = {}
 
-    gs : matplotlib.GridSpec
-        gridspec object which is devided in two subgridspec's,
-        where the first will hold the plot the second the info-
-        table. If `show_tab` is False, the plot is directly
-        places in the given gridspec.
+    # data retrieval
+    d = data[field]
+    # data slicing:
+    s = plot_kwargs.pop("slice", slice(None))
+    d = d[s]
+    flags_vals = flags[field][s]
+    flags_hist = flags.history[field].hist.loc[s]
+    if stats:
+        stats_dict.update(STATSDICT)
+        stats_dict = _evalStatsDict(stats_dict, d, flags_vals, level)
 
-    pdict: dict or dict-like
-        holds pd.Series with plotting-data.
+    na_mask = d.isna()
+    d = d[~na_mask]
 
-    show_tab : bool, default True
-        if True, show a table with quantity information of the data
-        if False, no table is shown
+    # insert nans between values mutually spaced > max_gap
+    if max_gap:
+        d = _insertBlockingNaNs(d, max_gap)
 
-    Returns
-    -------
-    matplotlib.Axes
-        the axes object from the plot
-
-    See Also
-    --------
-        _plotFromDicts()
-        _plotInfoTable()
-    """
-    if show_tab:
-        plot_gs, tab_gs = gs.subgridspec(ncols=2, nrows=1, width_ratios=_layout_data_to_table_ratio)
+    # figure composition
+    fig = mpl.pyplot.figure(constrained_layout=True, **fig_kwargs)
+    grid = fig.add_gridspec()
+    if stats:
+        plot_gs, tab_gs = grid[0].subgridspec(ncols=2, nrows=1, width_ratios=[5, 1])
         ax = fig.add_subplot(tab_gs)
-        _plotInfoTable(ax, pdict, _plotstyle, len(pdict["data"]))
+        _plotStatsTable(ax, stats_dict)
         ax = fig.add_subplot(plot_gs)
     else:
-        ax = fig.add_subplot(gs)
-    _plotFromDicts(ax, pdict, _plotstyle)
-    return ax
+        ax = fig.add_subplot(grid[0])
 
+    _plotVarWithFlags(ax, d, flags_vals, flags_hist, level, plot_kwargs, na_mask)
+    return fig
 
-def _plotFromDicts(ax, plotdict, styledict):
-    """
-    Plot multiple data from a dict in the same plot.
-
-    Each data stored in the plot dict is added to
-    the very same axes (plot) with its own plot-
-    Parameters that come from the styledict. If a
-    key is not present in the styledict the
-    corresponding data is ignored.
 
-    Parameters
-    ----------
-    ax: matplotlib.Axes
-        axes object to add the plot to
+def _evalStatsDict(in_dict, datser, flagser, level):
+    out_dict = {}
+    for key in in_dict:
+        out_dict[key] = str(in_dict[key](datser, flagser, level))
+    return out_dict
 
-    plotdict: dict or dict-like
-        holds pd.Series with plotting-data.
 
-    styledict: dict
-        holds dicts of kwargs that will passed to plot.
-
-    Notes
-    -----
-     - changes the axes
-     - styledict and plotdict must have same keys
-
-    """
-    for field in plotdict:
-        data = plotdict[field]
-        style = styledict.get(field, False)
-        if style and len(data) > 0:
-            ax.plot(data, **style)
-
-
-def _annotate(ax, plotdict, txtseries: pd.Series):
-    for x, txt in txtseries.iteritems():
-        try:
-            y = plotdict['data'].loc[x]
-            if np.isnan(y):
-                y = plotdict['data-nans'].loc[x]
-        except KeyError:
-            continue
-        ax.annotate(txt, xy=(x, y), rotation=45)
-
-
-def _plotInfoTable(ax, plotdict, styledict, total):
-    """
-    Make a nice table with information about the quantity of elements.
-
-    Makes a table from data in plotdict, which indicated, how many
-    elements each series in data have. The count is show as number
-    and in percent from total.
-
-    Parameters
-    ----------
-    ax: matplotlib.Axes
-        axes object to add the table to
-
-    plotdict: dict or dict-like
-        holds pd.Series with plotting-data. only the length of the
-        series is evaluated.
-
-    styledict: dict
-        holds dicts of kwargs that can passed to plot. currently only
-        the `color`-kw (or just `c`) is evaluated.
-
-    total: int/float
-        total count used to calculate percentage
-
-    Returns
-    -------
-        instance of matplotlib.table
-
-    Notes
-    -----
-     changes the axes object
-
-    """
-    cols = ["color", "name", "[#]", "[%]"]
-    tab = pd.DataFrame(columns=cols)
-
-    # extract counts and color
-    for field in plotdict:
-        data = plotdict[field]
-        style = styledict.get(field, {})
-        color = style.get("color", None) or style.get("c", "white")
-        if total == 0:
-            length = percent = 0
-        else:
-            length = len(data)
-            percent = length / total * 100
-        tab.loc[len(tab), :] = [color, field, length, round(percent, 2)]
-
-    # nested list of cell-colors
-    ccs = np.full([len(tab.columns) - 1, len(tab)], fill_value="white", dtype=object)
-    ccs[0] = tab["color"]
-    del tab["color"]
-
-    # disable the plot as we just
-    # want to have the table
+def _plotStatsTable(ax, stats_dict):
     ax.axis("tight")
     ax.axis("off")
-
-    # create and format layout
     tab_obj = ax.table(
-        cellColours=ccs.transpose(),
-        cellText=tab.iloc[:, :].values,
-        colLabels=tab.columns[:],
-        colWidths=[0.4, 0.3, 0.3],
+        cellText=[[a, b] for a, b in stats_dict.items()],
         in_layout=True,
         loc="center",
         # make the table a bit smaller than the plot
         bbox=[0.0, 0.1, 0.95, 0.8],
     )
-
-    # Somehow the automatic font resizing doesen't work - the
-    # font only can ahrink, not rise. There was a issue [1] in
-    # matplotlib, but it is closed in favor of a new project [2].
-    # Nevertheless i wasn't able to integrate it. Also it seems
-    # that it also does **not** fix the problem, even though the
-    # Readme promise else. See here:
-    # [1] https://github.com/matplotlib/matplotlib/pull/14344
-    # [2] https://github.com/swfiua/blume/
-    # As a suitable workaround, we use a fixed font size.
     tab_obj.auto_set_column_width(False)
     tab_obj.auto_set_font_size(False)
     tab_obj.set_fontsize(10)
 
-    # color fix - use white text color if background is dark
-    # sa: https://www.w3.org/TR/WCAG20/#relativeluminancedef
-    thresh = 0.5
-    for k, cell in tab_obj.get_celld().items():
-        r, g, b, a = cell.get_facecolor()
-        if 0.2126 * r + 0.7152 * g + 0.0722 * b < thresh:
-            cell.set_text_props(c="white")
+
+def _plotVarWithFlags(ax, datser, flags_vals, flags_hist, level, plot_kwargs, na_mask):
+    ax.set_title(datser.name)
+    ax.plot(datser)
+    history = plot_kwargs.pop("history", False)
+    ax.set(**plot_kwargs)
+    if history:
+        for i in flags_hist.columns:
+            if history == "all":
+                _plotFlags(
+                    ax,
+                    datser,
+                    flags_hist[i],
+                    na_mask,
+                    level,
+                    {"label": "test " + str(i)},
+                )
+            if history == "valid":
+                _plotFlags(
+                    ax,
+                    datser,
+                    flags_hist[i].combine(flags_vals, min),
+                    na_mask,
+                    level,
+                    {"label": "test " + str(i)},
+                )
+        ax.legend()
+    else:
+        _plotFlags(ax, datser, flags_vals, na_mask, level, {"color": "r"})
+
+
+def _plotFlags(ax, datser, flags, na_mask, level, scatter_kwargs):
+    is_flagged = flags.astype(float) >= level
+    is_flagged = is_flagged[~na_mask]
+    is_flagged = datser[is_flagged[is_flagged].index]
+    ax.scatter(is_flagged.index, is_flagged.values, **scatter_kwargs)
+
+
+def _insertBlockingNaNs(d, max_gap):
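+    # Reindex onto a regular `max_gap` grid; grid points that cannot be back-filled
+    # from an observation within `max_gap` mark larger gaps. NaNs are inserted at
+    # those positions, so the plotted line breaks instead of bridging the gap.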
+    i = d.index
+    gaps = d.reindex(
+        pd.date_range(i[0].floor(max_gap), i[-1].ceil(max_gap), freq=max_gap),
+        method="bfill",
+        tolerance=max_gap,
+    )
+    gaps = gaps[gaps.isna()]
+    return d.reindex(d.index.join(gaps.index, how="outer"))
diff --git a/saqc/lib/rolling.py b/saqc/lib/rolling.py
index 4af3e7cb4e4f5c5272c4ee07b0f6f27f039abaf3..535b08ff29dde07122f3b118a437a2de71b4f2ef 100644
--- a/saqc/lib/rolling.py
+++ b/saqc/lib/rolling.py
@@ -1,401 +1,388 @@
 #!/usr/bin/env python
 
-__author__ = "Bert Palm"
-__email__ = "bert.palm@ufz.de"
-__copyright__ = "Copyright 2020, Helmholtz-Zentrum für Umweltforschung GmbH - UFZ"
-
-# We need to implement the
-# - calculation/skipping of min_periods,
-# because `calculate_center_offset` does ignore those and we cannot rely on rolling(min_periods), as
-# pointed out in customRoller. Also we need to implement
-# - centering of windows for fixed windows,
-# for variable windows this is not allowed (similar to pandas).
-# The close-param, for variable windows is already implemented in `calculate_center_offset`,
-# and we dont allow it for fixed windows (similar to pandas). We also want to
-# - fix the strange ramp-up behavior,
-# which occur if the window is shifted in the data but yet is not fully inside the data. In this
-# case we want to spit out nan's instead of results calculated by less than window-size many values.
-# This is slightly different than the min_periods parameter, because this mainly should control Nan-behavior
-# for fixed windows, and minimum needed observations (also excluding Nans) in a offset window, but should not apply
-# if window-size many values couldn't be even possible due to technical reasons. This is mainly because one
-# cannot know (except one knows the exact (and fixed) frequency) the number(!) of observations that can occur in a
-# given offset window. That's why rolling should spit out Nan's as long as the window is not fully shifted in the data.
+from __future__ import annotations
 
+import warnings
 import numpy as np
-from typing import Union
-from pandas.api.types import is_integer, is_bool
-from pandas.api.indexers import BaseIndexer
-from pandas.core.dtypes.generic import ABCSeries, ABCDataFrame
-from pandas.core.window.indexers import calculate_variable_window_bounds
-from pandas.core.window.rolling import Rolling, Window
+import pandas as pd
 
+from pandas.api.types import is_bool
 
-def is_slice(k): return isinstance(k, slice)
+if pd.__version__ < "1.4":
+    import pandas.core.window.indexers as indexers
+else:
+    import pandas.core.indexers.objects as indexers
 
 
-class _CustomBaseIndexer(BaseIndexer):
-    is_datetimelike = None
-
-    def __init__(self, index_array, window_size, center=False, forward=False,
-                 expand=False, step=None, mask=None):
-        super().__init__()
-        self.index_array = index_array
-        self.window_size = window_size
-        self._center = center
+class CustomNumericalIndexer(indexers.FixedWindowIndexer):
+    def __init__(
+        self,
+        index_array: np.ndarray | None = None,
+        window_size: int = 0,
+        forward: bool = False,
+        **kwargs,
+    ):
+        super().__init__(index_array, window_size, **kwargs)
         self.forward = forward
-        self.expand = expand
-        self.step = step
-        self.skip = mask
-        self.validate()
-
-    def validate(self) -> None:
-        if self._center is None:
-            self._center = False
-        if not is_bool(self._center):
-            raise ValueError("center must be a boolean")
-        if not is_bool(self.forward):
-            raise ValueError("forward must be a boolean")
-        if not is_bool(self.expand):
-            raise ValueError("expand must be a boolean")
-
-        if is_integer(self.step) or self.step is None:
-            self.step = slice(None, None, self.step or None)
-        if not is_slice(self.step):
-            raise TypeError('step must be integer or slice.')
-        if self.step == slice(None):
-            self.step = None
-
-        if self.skip is not None:
-            if len(self.index_array) != len(self.skip):
-                raise ValueError('mask must have same length as data to roll over.')
-            self.skip = np.array(self.skip)
-            if self.skip.dtype != bool:
-                raise TypeError('mask must have boolean values only.')
-            self.skip = ~self.skip
-
-    def get_window_bounds(self, num_values=0, min_periods=None, center=None, closed=None):
-        if min_periods is None:
-            assert self.is_datetimelike is False
-            min_periods = 1
-
-        # if one call us directly, one may pass a center value we should consider.
-        # pandas instead (via customRoller) will always pass None and the correct
-        # center value is set in __init__. This is because pandas cannot center on
-        # dt_like windows and would fail before even call us.
-        if center is None:
-            center = self._center
-
-        start, end = self._get_bounds(num_values, min_periods, center, closed)
-
-        # ensure correct length
-        start, end = start[:num_values], end[:num_values]
-
-        start, end = self._apply_skipmask(start, end)
-        start, end = self._apply_steps(start, end, num_values)
-        start, end = self._prepare_min_periods_masking(start, end, num_values)
-        return start, end
 
-    def _prepare_min_periods_masking(self, start, end, num_values):
-        # correction for min_periods calculation
-        end[end > num_values] = num_values
+    def get_window_bounds(
+        self,
+        num_values: int = 0,
+        min_periods: int | None = None,
+        center: bool | None = None,
+        closed: str | None = None,
+    ) -> tuple[np.ndarray, np.ndarray]:
 
-        # this is the same as .rolling will do, so leave the work to them ;)
-        # additional they are able to count the nans in each window, we couldn't.
-        # end[end - start < self.min_periods] = 0
-        return start, end
+        if closed is None:
+            closed = "right"
 
-    def _get_center_window_sizes(self, center, winsz):
-        ws1 = ws2 = winsz
-        if center:
-            # centering of dtlike windows is just looking left and right
-            # with half amount of window-size
-            ws2, ws1 = divmod(winsz, 2)
-            ws1 += ws2
-            if self.forward:
-                ws1, ws2 = ws2, ws1
-        return ws1, ws2
-
-    def _apply_skipmask(self, start, end):
-        if self.skip is not None:
-            end[self.skip] = 0
-        return start, end
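+        # for a forward looking window the closed side is swapped here and the
+        # bounds computed by the fixed-window base class are mirrored around
+        # `num_values` below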
+        if self.forward:
+            if closed == "left":
+                closed = "right"
+            elif closed == "right":
+                closed = "left"
+
+        start, end = super().get_window_bounds(num_values, min_periods, center, closed)
+
+        if self.forward:
+            start, end = end, start
+            start = num_values - start[::-1]
+            end = num_values - end[::-1]
 
-    def _apply_steps(self, start, end, num_values):
-        if self.step is not None:
-            m = np.full(num_values, 1)
-            m[self.step] = 0
-            m = m.astype(bool)
-            end[m] = 0
         return start, end
 
-    def _get_bounds(self, num_values=0, min_periods=None, center=False, closed=None):
-        raise NotImplementedError
 
+class CustomDatetimeIndexer(indexers.VariableWindowIndexer):
+    def __init__(
+        self,
+        index_array: np.ndarray | None = None,
+        window_size: int = 0,
+        forward: bool = False,
+        **kwargs,
+    ):
+        super().__init__(index_array, window_size, **kwargs)
+        self.forward = forward
 
-class _FixedWindowDirectionIndexer(_CustomBaseIndexer):
-    # automatically added in super call to init
-    index_array: np.array
-    window_size: int
-    # set here
-    is_datetimelike = False
+    def get_window_bounds(
+        self,
+        num_values: int = 0,
+        min_periods: int | None = None,
+        center: bool | None = None,
+        closed: str | None = None,
+    ) -> tuple[np.ndarray, np.ndarray]:
 
-    def _get_bounds(self, num_values=0, min_periods=None, center=False, closed=None):
-        # closed is always ignored and handled as 'both' other cases not implemented
-        offset = 0
-        if center:
-            offset = (self.window_size - 1) // 2
-        num_values += offset
+        # set the default
+        if closed is None and self.forward:
+            closed = "left"
+        elif closed is None:
+            closed = "right"
 
         if self.forward:
-            start, end = self._fw(num_values, offset)
-        else:
-            start, end = self._bw(num_values, offset)
+            self.index_array = self.index_array[::-1]
+            # swap left / right because we inverted the array
+            if closed == "left":
+                closed = "right"
+            elif closed == "right":
+                closed = "left"
 
-        if center:
-            start, end = self._center_result(start, end, offset)
-            num_values -= offset
+        start, end = super().get_window_bounds(num_values, min_periods, center, closed)
 
-        if not self.expand:
-            start, end = self._remove_ramps(start, end, center)
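+        # the bounds were computed on the reversed index, so mirror them back
+        # onto the original (un-reversed) positions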
+        if self.forward:
+            start, end = end, start
+            start = num_values - start[::-1]
+            end = num_values - end[::-1]
 
         return start, end
 
-    def _center_result(self, start, end, offset):
-        # cut N values at the front that was inserted in _fw()
-        # or cut N values at the end if _bw()
-        if offset > 0:
-            if self.forward:
-                start = start[:-offset]
-                end = end[:-offset]
+
+class AttrWrapper(object):
+    """
+    This forwards an attribute like `rolling.closed` to `customRoller.closed`.
+    """
+
+    def __init__(self, name):
+        self.name = name
+
+    def __get__(self, instance, owner):
+        return getattr(instance._roller, self.name)
+
+    def __set__(self, instance, value):
+        setattr(instance._roller, self.name, value)
+
+
+class CustomRoller:
+    def __init__(
+        self,
+        obj: pd.DataFrame | pd.Series,
+        window: int | str | pd.Timedelta,
+        min_periods: int | None = None,  # aka minimum non-nan values
+        center: bool | None = False,
+        win_type: str | None = None,
+        on: str | None = None,
+        axis: int | str = 0,
+        closed: str | None = None,
+        forward: bool = False,
+        expand=None,
+    ):
+        """
+        A custom rolling implementation, using pandas as base.
+
+        Parameters
+        ----------
+        obj : pd.Series (or pd.DataFrame)
+            The object to roll over. DataFrame is currently still experimental.
+
+        window : int or offset
+            Size of the moving window. If an integer, this is the number of
+            observations used for calculating the statistic and each window will
+            be of fixed size. If it is an offset, this is the time period of each
+            window and each window will be variably sized, based on the
+            observations included in the time period. Offsets are only valid for
+            datetimelike indexes.
+
+        min_periods : int, default None
+            Minimum number of observations in window required to have a value (
+            otherwise result is NA). For a window that is specified by an offset,
+            min_periods will default to 1. Otherwise, min_periods will default to the
+            size of the window.
+
+        center : bool, default False
+            Set the labels at the center of the window. Also works for offset-based
+            windows (in contrast to pandas).
+
+        win_type : str, default None
+            Not implemented. Raises NotImplementedError if not None.
+
+        on : str, optional
+            For a DataFrame, a datetime-like column or MultiIndex level on which to
+            calculate the rolling window, rather than the DataFrame’s index. Provided
+            integer column is ignored and excluded from result since an integer index
+            is not used to calculate the rolling window.
+
+        axis : int or str, default 0
+
+        closed : str, default None
+            Make the interval closed on the `right`, `left`, `both` or `neither`
+            endpoints. For offset-based windows, with ``forward=False`` it defaults
+            to `right`, for ``forward=True`` it defaults to `left`.
+            For fixed windows, defaults to ‘both’ always.
+
+        forward : bool, default False
+            By default a window is 'looking' backwards (in time). If True the window
+            is looking forward in time.
+
+        expand : bool, default None
+            If True the window expands/shrinks up to its final window size while
+            being shifted into the data or out of it, respectively. For (normal)
+            backward-windows it only expands at the left border, for forward-windows
+            it shrinks on the right border and for centered windows both apply.
+            If None, it resolves to True when ``min_periods`` is given and to False
+            otherwise (in which case a warning is emitted).
+
+            Also bear in mind that even if this is True, as many as `min_periods`
+            values are necessary to get a valid value; see there for more info.
+
+        Returns
+        -------
+        CustomRoller
+            An object exposing the familiar ``Rolling`` interface
+            (``sum``, ``mean``, ``apply``, ...).
+
+
+        Notes
+        -----
+        If for some reason the numeric start and end indices of the windows are needed, one can call
+        `start, end = customRoller(obj, window).window_indexer.get_window_bounds(num_values, min_periods)`,
+        which returns two np.arrays holding the start and end indices. Pass at least
+        the parameters shown in the call above.
+
+        See Also
+        --------
+        pandas.Series.rolling
+        pandas.DataFrame.rolling
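+
+        Examples
+        --------
+        A minimal sketch; the series and window are made up for illustration:
+
+        >>> s = pd.Series(
+        ...     [1, 1, 1, 1, 1],
+        ...     index=pd.date_range("2020-01-01", periods=5, freq="1min"),
+        ... )
+        >>> forward_sum = customRoller(s, window="2min", forward=True).sum()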
+        """
+
+        if not is_bool(forward):
+            raise ValueError("forward must be a boolean")
+
+        # only relevant for datetime-like windows
+        if expand is None:
+            if min_periods is None:
+                expand = False
+                warnings.warn(
+                    "`expand` defaults to False if min_periods is None. The result "
+                    "will differ from the pandas rolling implementation. To fall back "
+                    "to pandas rolling, use `expand=True`. To silence this warning "
+                    "and keep this rolling, use `expand=False` or specify `min_periods`."
+                )
             else:
-                start = start[offset:]
-                end = end[offset:]
-        return start, end
+                expand = True
+
+        if not is_bool(expand):
+            raise TypeError(f"expand must be bool or None, not {type(expand).__name__}")
+
+        # ours
+        self._forward = forward
+        self._expand = expand
+
+        # dummy roller.
+        # 1. This lets pandas do all the checks.
+        # 2. After the call, all the attributes (public and private)
+        #    of `_roller` are accessible on self
+        verified = obj.rolling(
+            window=window,
+            min_periods=min_periods,
+            center=center,
+            win_type=win_type,
+            on=on,
+            axis=axis,
+            closed=closed,
+        )
+
+        self._dtlike_window = verified._win_freq_i8 is not None
+
+        # these roller attributes are fixed by us and are not looked up
+        # from self._roller in __getattr__, because they might not be
+        # correct when passing a custom indexer to rolling
+        self.window = verified.window
+        self._win_freq_i8 = verified._win_freq_i8
+        self._index_array = verified._index_array
+
+        if self._dtlike_window:
+            self.window_indexer = CustomDatetimeIndexer(
+                index_array=verified._index_array,
+                window_size=verified._win_freq_i8,
+                center=verified.center,
+                forward=self._forward,
+            )
+        else:
+            self.window_indexer = CustomNumericalIndexer(
+                window_size=verified.window,
+                forward=self._forward,
+            )
+
+        # create the real roller with a custom Indexer
+        # from the attributes of the old roller.
+        self._roller = obj.rolling(
+            window=self.window_indexer,
+            min_periods=verified.min_periods,  # roller.min_periods
+            win_type=verified._win_type,  # use private here to silence warning
+            on=verified.on,
+            center=verified.center,
+            closed=verified.closed,
+            axis=verified.axis,
+        )
+
+    def _call_roll_func(self, name, args, kwargs):
+        result = getattr(self._roller, name)(*args, **kwargs)
+        if self._dtlike_window and not self._expand:
+            result = self._remove_expanding_ramps(result)
+        return result
+
+    def _remove_expanding_ramps(self, result):
+        if len(result.index) == 0:
+            return result
+
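+        # mask all timestamps that lie closer than one window length (half a
+        # window length for centered windows) to the relevant border of the
+        # index; those windows never reached their full size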
+        index = self._index_array
+        window_size = self._win_freq_i8
+        decreasing = index[0] > index[-1]
+        mask = np.full_like(index, False, dtype=bool)
+
+        if self.center:
+            window_size //= 2
+
+        if self.center or self._forward:
+            if decreasing:
+                mask |= index < index[-1] + window_size
+            else:
+                mask |= index > index[-1] - window_size
 
-    def _remove_ramps(self, start, end, center):
-        fw, bw = self.forward, not self.forward
-        ramp_l, ramp_r = self._get_center_window_sizes(center, self.window_size - 1)
-        if center:
-            fw = bw = True
+        if self.center or not self._forward:
+            if decreasing:
+                mask |= index > index[0] - window_size
+            else:
+                mask |= index < index[0] + window_size
 
-        if bw and ramp_l > 0:
-            end[:ramp_l] = 0
-        if fw and ramp_r > 0:
-            end[-ramp_r:] = 0
+        if window_size > 0:
+            result[mask] = np.nan
 
-        return start, end
+        return result
 
-    def _bw(self, num_values=0, offset=0):
-        start = np.arange(-self.window_size, num_values + offset, dtype="int64") + 1
-        end = start + self.window_size
-        start[:self.window_size] = 0
-        return start, end
+    # =========================================================================
+    # public interface
+    #
+    # All attributes of the wrapped roller are accessible on self.
+    # Public attributes are listed below solely to provide
+    # autocompletion for them.
+    # Private attributes are `wrapped` via ``__getattr__``.
+    # =========================================================================
 
-    def _fw(self, num_values=0, offset=0):
-        start = np.arange(-offset, num_values, dtype="int64")
-        end = start + self.window_size
-        start[:offset] = 0
-        return start, end
+    def __getattr__(self, item):
+        return getattr(self._roller, item)
 
+    obj = AttrWrapper("obj")
+    closed = AttrWrapper("closed")
+    center = AttrWrapper("center")
+    axis = AttrWrapper("axis")
+    exclusions = AttrWrapper("exclusions")
+    is_datetimelike = AttrWrapper("is_datetimelike")
+    is_freq_type = AttrWrapper("is_freq_type")
+    min_periods = AttrWrapper("min_periods")
+    ndim = AttrWrapper("ndim")
+    on = AttrWrapper("on")
+    sparse = AttrWrapper("sparse")
+    win_freq = AttrWrapper("win_freq")
+    win_type = AttrWrapper("win_type")
+    method = AttrWrapper("method")
 
-class _VariableWindowDirectionIndexer(_CustomBaseIndexer):
-    # automatically added in super call to init
-    index_array: np.array
-    window_size: int
-    # set here
-    is_datetimelike = True
+    def sum(self, *args, **kwargs):
+        return self._call_roll_func("sum", args, kwargs)
 
-    def _get_bounds(self, num_values=0, min_periods=None, center=False, closed=None):
-        ws_bw, ws_fw = self._get_center_window_sizes(center, self.window_size)
-        if center:
-            c1 = c2 = closed
-            if closed == 'neither':
-                c1, c2 = 'right', 'left'
+    def count(self, *args, **kwargs):
+        return self._call_roll_func("count", args, kwargs)
 
-            start, _ = self._bw(num_values, ws_bw, c1)
-            _, end = self._fw(num_values, ws_fw, c2)
+    def mean(self, *args, **kwargs):
+        return self._call_roll_func("mean", args, kwargs)
 
-        elif self.forward:
-            start, end = self._fw(num_values, ws_fw, closed)
-        else:
-            start, end = self._bw(num_values, ws_bw, closed)
+    def median(self, *args, **kwargs):
+        return self._call_roll_func("median", args, kwargs)
 
-        if not self.expand:
-            start, end = self._remove_ramps(start, end, center)
+    def min(self, *args, **kwargs):
+        return self._call_roll_func("min", args, kwargs)
 
-        return start, end
+    def max(self, *args, **kwargs):
+        return self._call_roll_func("max", args, kwargs)
 
-    def _remove_ramps(self, start, end, center):
-        ws_bw, ws_fw = self._get_center_window_sizes(center, self.window_size)
-
-        if center or not self.forward:
-            # remove (up) ramp
-            # we dont want this: [1,1,1,1,1].rolling(window='2min').sum() -> [1,   2,   3, 3, 3]
-            # instead we want:   [1,1,1,1,1].rolling(window='2min').sum() -> [nan, nan, 3, 3, 3]
-            tresh = self.index_array[0] + ws_bw
-            mask = self.index_array < tresh
-            end[mask] = 0
-
-        if center or self.forward:
-            # remove (down) ramp
-            # we dont want this: [1,1,1,1,1].rolling(window='2min', forward=True).sum() -> [3, 3, 3,  2,  1  ]
-            # instead we want:   [1,1,1,1,1].rolling(window='2min', forward=True).sum() -> [3, 3, 3, nan, nan]
-            tresh = self.index_array[-1] - ws_fw
-            mask = self.index_array > tresh
-            end[mask] = 0
+    def skew(self, *args, **kwargs):
+        return self._call_roll_func("skew", args, kwargs)
 
-        return start, end
+    def kurt(self, *args, **kwargs):
+        return self._call_roll_func("kurt", args, kwargs)
 
-    def _bw(self, num_values, window_size, closed):
-        arr = self.index_array
-        start, end = calculate_variable_window_bounds(num_values, window_size, None, None, closed, arr)
-        return start, end
+    def var(self, *args, **kwargs):
+        return self._call_roll_func("var", args, kwargs)
 
-    def _fw(self, num_values, window_size, closed):
-        arr = self.index_array[::-1]
-        s, _ = calculate_variable_window_bounds(num_values, window_size, None, None, closed, arr)
-        start = np.arange(num_values)
-        end = num_values - s[::-1]
+    def std(self, *args, **kwargs):
+        return self._call_roll_func("std", args, kwargs)
 
-        if closed in ['left', 'neither']:
-            start += 1
-        return start, end
+    def sem(self, *args, **kwargs):
+        return self._call_roll_func("sem", args, kwargs)
 
+    def quantile(self, *args, **kwargs):
+        return self._call_roll_func("quantile", args, kwargs)
 
-def customRoller(obj, window, min_periods=None,  # aka minimum non-nan values
-                 center=False, win_type=None, on=None, axis=0, closed=None,
-                 forward=False, expand=True, step=None, mask=None) -> Union[Rolling, Window]:
-    """
-    A custom rolling implementation, using pandas as base.
-
-    Parameters
-    ----------
-    obj : pd.Series (or pd.DataFrame)
-        The object to roll over. DataFrame is currently still experimental.
-
-    window : int or offset
-        Size of the moving window. This is the number of observations used for calculating the statistic.
-        Each window will be a fixed size.
-        If its an offset then this will be the time period of each window. Each window will be a variable sized
-        based on the observations included in the time-period. This is only valid for datetimelike indexes.
-
-    min_periods : int, default None
-        Minimum number of observations in window required to have a value (otherwise result is NA).
-        For a window that is specified by an offset, min_periods will default to 1. Otherwise, min_periods
-        will default to the size of the window.
-
-    center : bool, default False
-        Set the labels at the center of the window. Also works for offset-based windows (in contrary to pandas).
-
-    win_type : str, default None
-        Not implemented. Raise NotImplementedError if not None.
-
-    on : str, optional
-        For a DataFrame, a datetime-like column or MultiIndex level on which to calculate the rolling window,
-        rather than the DataFrame’s index. Provided integer column is ignored and excluded from result since
-        an integer index is not used to calculate the rolling window.
-    
-    axis : int or str, default 0
-
-    closed : str, default None
-        Make the interval closed on the ‘right’, ‘left’, ‘both’ or ‘neither’ endpoints. For offset-based windows,
-        it defaults to ‘right’. For fixed windows, defaults to ‘both’. Remaining cases not implemented for fixed
-        windows.
-
-    forward : bool, default False
-        By default a window is 'looking' backwards (in time). If True the window is looking forward in time.
-
-    expand : bool, default True
-        If True the window expands/shrink up to its final window size while shifted in the data or shifted out
-        respectively.
-        For (normal) backward-windows it only expands at the left border, for forward-windows it shrinks on
-        the right border and for centered windows both apply.
-
-        Also bear in mind that even if this is True, an many as `min_periods` values are necessary to get a
-        valid value, see there for more info.
-
-
-    step : int, slice or None, default None
-        If given, only every n'th step a window is calculated starting from the very first. One can
-        give a slice if one want to start from eg. the second (`slice(2,None,n)`) or similar.
-
-    mask : boolean array-like
-        Only calculate the window if the mask is True, otherwise skip it.
-
-    Returns
-    -------
-    a Window or Rolling sub-classed for the particular operation
-
-
-    Notes
-    -----
-    If for some reason the start and end numeric indices of the window are needed, one can call
-    `start, end = customRoller(obj, window).window.get_window_bounds(num_values, min_periods)`,
-    which return two np.arrays, that are holding the start and end indices. Fill at least all
-    parameter which are shown in the example.
-
-    See Also
-    --------
-    pandas.Series.rolling
-    pandas.DataFrame.rolling
-    """
-    num_params = len(locals()) - 2  # do not count window and obj
-    if not isinstance(obj, (ABCSeries, ABCDataFrame)):
-        raise TypeError(f"invalid type: {type(obj)}")
-
-    # center is the only param from the pandas rolling implementation
-    # that we advance, namely we allow center=True on dt-indexed data
-    # that's why we take it as ours
-    theirs = dict(min_periods=min_periods, win_type=win_type, on=on, axis=axis, closed=closed)
-    ours = dict(center=center, forward=forward, expand=expand, step=step, mask=mask)
-    assert len(theirs) + len(ours) == num_params, "not all params covert (!)"
-
-    # use .rolling to do all the checks like if closed is one of [left, right, neither, both],
-    # closed not allowed for integer windows, index is monotonic (in- or decreasing), if freq-based
-    # windows can be transformed to nanoseconds (eg. fails for `1y` - it could have 364 or 365 days), etc.
-    # Also it converts window and the index to numpy-arrays (so we don't have to do it :D).
-    try:
-        x = obj.rolling(window, **theirs)
-    except Exception:
-        raise
-
-    indexer = _VariableWindowDirectionIndexer if x.is_freq_type else _FixedWindowDirectionIndexer
-    indexer = indexer(index_array=x._on.asi8, window_size=x.window, **ours)
-
-    # Centering is fully done in our own indexers. So we do not pass center to rolling(). Especially because
-    # we also allow centering on dt-based indexes. Also centering would fail in forward windows, because of
-    # pandas internal centering magic (append nans at the end of array, later cut values from beginning of the
-    # result).
-    # min_periods is also quite tricky. Especially if None is passed. For dt-based windows min_periods defaults to 1
-    # and is set during rolling setup (-> if r=obj.rolling() is called). For numeric windows instead, it keeps None
-    # during setup and defaults to indexer.window_size if a rolling-method is called (-> if r.sum()). Thats a bit
-    # odd and quite hard to find. So we are good if we pass the already calculated x.min_periods as this will just
-    # hold the correct initialised or not initialised value. (It gets even trickier if one evaluates which value is
-    # actually passed to the function that actually thrown them out; i leave that to the reader to find out. start
-    # @ pandas.core.window.rolling:_Window._apply)
-    # Lastly, it is necessary to pass min_periods at all (!) and do not set it to a fix value (1, 0, None,...). This
-    # is, because we cannot throw out values by ourself in the indexer, because min_periods also evaluates NA values
-    # in its count and we have no control over the actual values, just their indexes.
-    theirs.update(min_periods=x.min_periods)
-    roller = obj.rolling(indexer, center=None, **theirs)
-
-    # ----- count hack -------
-    # Unfortunately pandas calls count differently if a BaseIndexer
-    # instance is given. IMO, the intention behind this is to call
-    # count different for dt-like windows, but if a user pass a own
-    # indexer we also end up in this case /:
-    # The only possibility is to monkey-patch pandas...
-    def new_count():
-        self = roller
-        if not x.is_freq_type:
-            obj_new = obj.notna().astype(int)
-            if min_periods is None:
-                theirs.update(min_periods=0)
-            return obj_new.rolling(indexer, center=None, **theirs).sum()
-        return self._old_count()
+    def cov(self, *args, **kwargs):
+        return self._call_roll_func("cov", args, kwargs)
+
+    def corr(self, *args, **kwargs):
+        return self._call_roll_func("corr", args, kwargs)
+
+    def apply(self, *args, **kwargs):
+        return self._call_roll_func("apply", args, kwargs)
+
+    def aggregate(self, *args, **kwargs):
+        return self._call_roll_func("aggregate", args, kwargs)
+
+    agg = aggregate
+
+    def validate(self):  # dummy function to fit Rolling class
+        self._roller.validate()
 
-    roller._old_count = roller.count
-    roller.count = new_count
-    # ----- count hack -------
 
-    return roller
+customRoller = CustomRoller
diff --git a/saqc/lib/tools.py b/saqc/lib/tools.py
index 3cbe5ab766a7bdb0f58324307d36a9981d7f98a6..b5dc0037da21c180a714cf52a2ad216909cffe11 100644
--- a/saqc/lib/tools.py
+++ b/saqc/lib/tools.py
@@ -2,25 +2,26 @@
 # -*- coding: utf-8 -*-
 
 import re
-from typing import Sequence, Union, Any, Iterator
-
+import datetime
 import itertools
+import warnings
+from typing import Sequence, Union, Any, Iterator, Callable
+
 import numpy as np
 import numba as nb
 import pandas as pd
 from scipy import fft
-import logging
-import dios
 
+import dios
 import collections
 from scipy.cluster.hierarchy import linkage, fcluster
+
 from saqc.lib.types import T
 
 # keep this for external imports
+# TODO: fix the external imports
 from saqc.lib.rolling import customRoller
 
-logger = logging.getLogger("SaQC")
-
 
 def assertScalar(name, value, optional=False):
     if (not np.isscalar(value)) and (value is not None) and (optional is True):
@@ -29,7 +30,9 @@ def assertScalar(name, value, optional=False):
         raise ValueError(f"'{name}' needs to be a scalar")
 
 
-def toSequence(value: Union[T, Sequence[T]], default: Union[T, Sequence[T]] = None) -> Sequence[T]:
+def toSequence(
+    value: Union[T, Sequence[T]], default: Union[T, Sequence[T]] = None
+) -> Sequence[T]:
     if value is None:
         value = default
     if np.isscalar(value):
@@ -37,6 +40,13 @@ def toSequence(value: Union[T, Sequence[T]], default: Union[T, Sequence[T]] = No
     return value
 
 
+def toOffset(freq_string: str, raw: bool = False) -> datetime.timedelta:
+    offset = pd.tseries.frequencies.to_offset(freq_string)
+    if raw:
+        return offset
+    return offset.delta.to_pytimedelta()
+
+
 @nb.jit(nopython=True, cache=True)
 def findIndex(iterable, value, start):
     i = start
@@ -69,9 +79,9 @@ def slidingWindowIndices(dates, window_size, iter_delta=None):
     + There is no way to provide a step size, i.e. to not start the
       next rolling window at the very next row in the DataFrame/Series
     + The inconsistent bahaviour with numerical vs frequency based
-      window sizes. When winsz is an integer, all windows are equally
-      large (winsz=5 -> windows contain 5 elements), but variable in
-      size, when the winsz is a frequency string (winsz="2D" ->
+      window sizes. When window is an integer, all windows are equally
+      large (window=5 -> windows contain 5 elements), but variable in
+      size, when the window is a frequency string (window="2D" ->
       window grows from size 1 during the first iteration until it
       covers the given frequency). Especially the bahaviour with
       frequency strings is quite unfortunate when calling methods
@@ -116,57 +126,6 @@ def inferFrequency(data: pd.Series) -> pd.DateOffset:
     return pd.tseries.frequencies.to_offset(pd.infer_freq(data.index))
 
 
-def retrieveTrustworthyOriginal(
-    data: dios.DictOfSeries, field: str, flagger=None, level: Any = None
-) -> dios.DictOfSeries:
-    """Columns of data passed to the saqc runner may not be sampled to its original sampling rate - thus
-    differenciating between missng value - nans und fillvalue nans is impossible.
-
-    This function:
-    (1) if flagger is None:
-        (a) estimates the sampling rate of the input dataseries by dropping all nans and then returns the series at the
-            estimated samplng rate.
-
-    (2) if "flagger" is not None but "level" is None:
-        (a) all values are dropped, that are flagged worse then flagger.GOOD. (so unflagged values wont be dropped)
-        (b) estimates the sampling rate of the input dataseries by dropping all nans and then returns the series at the
-            estimated samplng rate.
-    (3) if "flagger" is not None and "level" is not None:
-        (a) all values are dropped, that are flagged worse then level. (so unflagged values wont be dropped)
-        (b) estimates the sampling rate of the input dataseries by dropping all nans and then returns the series at the
-            estimated samplng rate.
-
-    Note, that the passed dataseries should be harmonized to an equidistant
-        frequencie grid (maybe including blow up entries).
-
-    :param data:        DataFrame. The Data frame holding the data containing 'field'.
-    :param field:       String. Fieldname of the column in data, that you want to sample to original sampling rate.
-                        It has to have a harmonic
-    :param flagger:     None or a flagger object.
-    :param level:       Lower bound of flags that are excepted for data. Must be a flag the flagger can handle.
-
-    """
-    dataseries = data[field]
-
-    if flagger is not None:
-        mask = flagger.isFlagged(field, flag=level or flagger.GOOD, comparator="<=")
-        # drop all flags that are suspicious or worse
-        dataseries = dataseries[mask]
-
-    # drop the nan values that may result from any preceeding upsampling of the measurements:
-    dataseries = dataseries.dropna()
-
-    if dataseries.empty:
-        return dataseries, np.nan
-
-    # estimate original data sampling frequencie
-    # (the original series sampling rate may not match data-input sample rate):
-    seconds_rate = dataseries.index.to_series().diff().min().seconds
-    data_rate = pd.tseries.frequencies.to_offset(str(seconds_rate) + "s")
-
-    return dataseries.asfreq(data_rate), data_rate
-
-
 def offset2seconds(offset):
     """Function returns total seconds upon "offset like input
 
@@ -176,7 +135,7 @@ def offset2seconds(offset):
     return pd.Timedelta.total_seconds(pd.Timedelta(offset))
 
 
-def seasonalMask(dtindex, season_start, season_end, include_bounds):
+def periodicMask(dtindex, season_start, season_end, include_bounds):
     """
     This function generates date-periodic/seasonal masks from an index passed.
 
@@ -242,6 +201,7 @@ def seasonalMask(dtindex, season_start, season_end, include_bounds):
     When inclusive_selection="season", all above examples work the same way, only that you now
     determine wich values NOT TO mask (=wich values are to constitute the "seasons").
     """
+
     def _replaceBuilder(stamp):
         keys = ("second", "minute", "hour", "day", "month", "year")
         stamp_list = map(int, re.split(r"[-T:]", stamp)[::-1])
@@ -262,22 +222,27 @@ def seasonalMask(dtindex, season_start, season_end, include_bounds):
     end_replacer = _replaceBuilder(season_end)
 
     if pd.Timestamp(start_replacer(dtindex)) <= pd.Timestamp(end_replacer(dtindex)):
+
         def _selector(x, base_bool=include_bounds):
-            x[start_replacer(x.index):end_replacer(x.index)] = not base_bool
+            x[start_replacer(x.index) : end_replacer(x.index)] = not base_bool
             return x
+
     else:
+
         def _selector(x, base_bool=include_bounds):
-            x[:end_replacer(x.index)] = not base_bool
-            x[start_replacer(x.index):] = not base_bool
+            x[: end_replacer(x.index)] = not base_bool
+            x[start_replacer(x.index) :] = not base_bool
             return x
 
-    freq = '1' + 'mmmhhhdddMMMYYY'[len(season_start)]
+    freq = "1" + "mmmhhhdddMMMYYY"[len(season_start)]
     return mask.groupby(pd.Grouper(freq=freq)).transform(_selector)
 
 
 def assertDictOfSeries(df: Any, argname: str = "arg") -> None:
     if not isinstance(df, dios.DictOfSeries):
-        raise TypeError(f"{argname} must be of type dios.DictOfSeries, {type(df)} was given")
+        raise TypeError(
+            f"{argname} must be of type dios.DictOfSeries, {type(df)} was given"
+        )
 
 
 def assertSeries(srs: Any, argname: str = "arg") -> None:
@@ -351,16 +316,6 @@ def isQuoted(string):
     return bool(re.search(r"'.*'|\".*\"", string))
 
 
-def dropper(field, to_drop, flagger, default):
-    drop_mask = pd.Series(False, flagger.getFlags(field).index)
-    if to_drop is None:
-        to_drop = default
-    to_drop = toSequence(to_drop)
-    if len(to_drop) > 0:
-        drop_mask |= flagger.isFlagged(field, flag=to_drop)
-    return drop_mask
-
-
 def mutateIndex(index, old_name, new_name):
     pos = index.get_loc(old_name)
     index = index.drop(index[pos])
@@ -368,8 +323,16 @@ def mutateIndex(index, old_name, new_name):
     return index
 
 
-def estimateFrequency(index, delta_precision=-1, max_rate="10s", min_rate="1D", optimize=True,
-                      min_energy=0.2, max_freqs=10, bins=None):
+def estimateFrequency(
+    index,
+    delta_precision=-1,
+    max_rate="10s",
+    min_rate="1D",
+    optimize=True,
+    min_energy=0.2,
+    max_freqs=10,
+    bins=None,
+):
 
     """
     Function to estimate the sampling rate of an index.
@@ -424,23 +387,27 @@ def estimateFrequency(index, delta_precision=-1, max_rate="10s", min_rate="1D",
     """
     index_n = index.to_numpy(float)
     if index.empty:
-        return 'empty', []
+        return "empty", []
 
-    index_n = (index_n - index_n[0])*10**(-9 + delta_precision)
-    delta = np.zeros(int(index_n[-1])+1)
+    index_n = (index_n - index_n[0]) * 10 ** (-9 + delta_precision)
+    delta = np.zeros(int(index_n[-1]) + 1)
     delta[index_n.astype(int)] = 1
     if optimize:
         delta_f = np.abs(fft.rfft(delta, fft.next_fast_len(len(delta))))
     else:
         delta_f = np.abs(fft.rfft(delta))
 
-    len_f = len(delta_f)*2
-    min_energy = delta_f[0]*min_energy
+    len_f = len(delta_f) * 2
+    min_energy = delta_f[0] * min_energy
     # calc/assign low/high freq cut offs (makes life easier):
-    min_rate_i = int(len_f/(pd.Timedelta(min_rate).total_seconds()*(10**delta_precision)))
+    min_rate_i = int(
+        len_f / (pd.Timedelta(min_rate).total_seconds() * (10 ** delta_precision))
+    )
     delta_f[:min_rate_i] = 0
-    max_rate_i = int(len_f/(pd.Timedelta(max_rate).total_seconds()*(10**delta_precision)))
-    hf_cutoff = min(max_rate_i, len_f//2)
+    max_rate_i = int(
+        len_f / (pd.Timedelta(max_rate).total_seconds() * (10 ** delta_precision))
+    )
+    hf_cutoff = min(max_rate_i, len_f // 2)
     delta_f[hf_cutoff:] = 0
     delta_f[delta_f < min_energy] = 0
 
@@ -448,54 +415,66 @@ def estimateFrequency(index, delta_precision=-1, max_rate="10s", min_rate="1D",
     freqs = []
     f_i = np.argmax(delta_f)
     while (f_i > 0) & (len(freqs) < max_freqs):
-        f = (len_f / f_i)/(60*10**(delta_precision))
+        f = (len_f / f_i) / (60 * 10 ** (delta_precision))
         freqs.append(f)
-        for i in range(1, hf_cutoff//f_i + 1):
-            delta_f[(i*f_i) - min_rate_i:(i*f_i) + min_rate_i] = 0
+        for i in range(1, hf_cutoff // f_i + 1):
+            delta_f[(i * f_i) - min_rate_i : (i * f_i) + min_rate_i] = 0
         f_i = np.argmax(delta_f)
 
     if len(freqs) == 0:
         return None, []
 
     if bins is None:
-        r = range(0, int(pd.Timedelta(min_rate).total_seconds()/60))
+        r = range(0, int(pd.Timedelta(min_rate).total_seconds() / 60))
         bins = [0, 0.1, 0.2, 0.3, 0.4] + [i + 0.5 for i in r]
 
     f_hist, bins = np.histogram(freqs, bins=bins)
     freqs = np.ceil(bins[:-1][f_hist >= 1])
-    gcd_freq = np.gcd.reduce((10*freqs).astype(int))/10
+    gcd_freq = np.gcd.reduce((10 * freqs).astype(int)) / 10
 
-    return str(int(gcd_freq)) + 'min', [str(int(i)) + 'min' for i in freqs]
+    return str(int(gcd_freq)) + "min", [str(int(i)) + "min" for i in freqs]
 
 
 def evalFreqStr(freq, check, index):
-    if check in ['check', 'auto']:
+    if check in ["check", "auto"]:
         f_passed = freq
         freq = index.inferred_freq
         freqs = [freq]
         if freq is None:
             freq, freqs = estimateFrequency(index)
         if freq is None:
-            logging.warning('Sampling rate could not be estimated.')
+            warnings.warn("Sampling rate could not be estimated.")
         if len(freqs) > 1:
-            logging.warning(f"Sampling rate seems to be not uniform!."
-                            f"Detected: {freqs}")
+            warnings.warn(
+                f"Sampling rate seems to be not uniform!." f"Detected: {freqs}"
+            )
 
-        if check == 'check':
+        if check == "check":
             f_passed_seconds = pd.Timedelta(f_passed).total_seconds()
             freq_seconds = pd.Timedelta(freq).total_seconds()
-            if (f_passed_seconds != freq_seconds):
-                logging.warning(f"Sampling rate estimate ({freq}) missmatches passed frequency ({f_passed}).")
-        elif check == 'auto':
+            if f_passed_seconds != freq_seconds:
+                warnings.warn(
+                    f"Sampling rate estimate ({freq}) missmatches passed frequency ({f_passed})."
+                )
+        elif check == "auto":
             if freq is None:
-                raise ValueError('Frequency estimation for non-empty series failed with no fall back frequency passed.')
+                raise ValueError(
+                    "Frequency estimation for non-empty series failed with no fall back frequency passed."
+                )
             f_passed = freq
     else:
         f_passed = freq
     return f_passed
 
 
-def detectDeviants(data, metric, norm_spread, norm_frac, linkage_method='single', population='variables'):
+def detectDeviants(
+    data,
+    metric,
+    norm_spread,
+    norm_frac,
+    linkage_method="single",
+    population="variables",
+):
     """
     Helper function for carrying out the repeatedly upcoming task
     of detecting deviating variables within a group of variables.
@@ -503,7 +482,7 @@ def detectDeviants(data, metric, norm_spread, norm_frac, linkage_method='single'
     "Normality" is determined in terms of a maximum spreading distance, that members of a normal group must not exceed
     in respect to a certain metric and linkage method.
 
-    In addition, only a group is considered "normal" if it contains more then `norm_frac` percent of the
+    In addition, only a group is considered "normal" if it contains more than `frac` percent of the
     variables in "fields".
 
     Note, that the function also can be used to detect anormal regimes in a variable by assigning the different regimes
@@ -544,17 +523,19 @@ def detectDeviants(data, metric, norm_spread, norm_frac, linkage_method='single'
 
     condensed = np.abs(dist_mat[tuple(zip(*combs))])
     Z = linkage(condensed, method=linkage_method)
-    cluster = fcluster(Z, norm_spread, criterion='distance')
-    if population == 'variables':
+    cluster = fcluster(Z, norm_spread, criterion="distance")
+    if population == "variables":
         counts = collections.Counter(cluster)
         pop_num = var_num
-    elif population == 'samples':
-        counts = {cluster[j]: 0 for j in range(0,var_num)}
+    elif population == "samples":
+        counts = {cluster[j]: 0 for j in range(0, var_num)}
         for c in range(var_num):
             counts[cluster[c]] += data.iloc[:, c].dropna().shape[0]
         pop_num = np.sum(list(counts.values()))
     else:
-        raise ValueError("Not a valid normality criteria keyword passed. Pass either 'variables' or 'population'.")
+        raise ValueError(
+            "Not a valid normality criteria keyword passed. Pass either 'variables' or 'population'."
+        )
     norm_cluster = -1
 
     for item in counts.items():
@@ -568,3 +549,77 @@ def detectDeviants(data, metric, norm_spread, norm_frac, linkage_method='single'
         return [i for i, x in enumerate(cluster) if x != norm_cluster]
 
 
+def getFreqDelta(index):
+    """
+    Check if the passed index is regularly sampled.
+
+    If so, the corresponding timedelta value is returned, otherwise ``None``.
+
+    (``None`` is also returned for a ``pd.RangeIndex``.)
+
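+    A rough usage sketch (index values made up for illustration):
+
+    >>> idx = pd.date_range("2020-01-01", periods=4, freq="10min")
+    >>> step = getFreqDelta(idx)  # the 10 minute sampling step
+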
+    """
+    delta = getattr(index, "freq", None)
+    if delta is None and not index.empty:
+        i = pd.date_range(index[0], index[-1], len(index))
+        if i.equals(index):
+            return i[1] - i[0]
+    return delta
+
+
+def getApply(in_obj, apply_obj, attr_access="__name__", attr_or="apply"):
+    """
+    For the recurring task of applying a built-in (accelerated) method of a
+    rolling/resampling-like object (`in_obj`): if a built-in matching `apply_obj`
+    exists it is used, otherwise `apply_obj` is passed to the object's apply-like method.
+
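+    For example (illustrative only), ``getApply(series.rolling(3), np.median)``
+    resolves to the accelerated ``rolling.median()``, while a plain lambda falls
+    back to ``rolling.apply(...)``.
+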
+    """
+    try:
+        out = getattr(in_obj, getattr(apply_obj, attr_access))()
+    except AttributeError:
+        out = getattr(in_obj, attr_or)(apply_obj)
+
+    return out
+
+
+def statPass(
+    datcol: pd.Series,
+    stat: Callable[[np.array, pd.Series], float],
+    winsz: pd.Timedelta,
+    thresh: float,
+    comparator: Callable[[float, float], bool],
+    sub_winsz: pd.Timedelta = None,
+    sub_thresh: float = None,
+    min_periods: int = None,
+) -> pd.Series:
+    """
+    Check if `datcol` contains chunks of length `winsz` that exceed `thresh` with
+    regard to `stat` and `comparator`, i.e. check whether
+    ``comparator(stat(chunk), thresh)`` holds.
+
+    If so, subsequently check if all (maybe overlapping) sub-chunks of such a chunk,
+    of length `sub_winsz`, satisfy ``comparator(stat(sub_chunk), sub_thresh)``.
+
+    Returns a boolean series with the same index as the input series.
+    """
+    stat_parent = datcol.rolling(winsz, min_periods=min_periods)
+    stat_parent = getApply(stat_parent, stat)
+    exceeds = comparator(stat_parent, thresh)
+    if sub_winsz:
+        stat_sub = datcol.rolling(sub_winsz)
+        stat_sub = getApply(stat_sub, stat)
+        min_stat = stat_sub.rolling(winsz - sub_winsz, closed="both").min()
+        exceeding_sub = comparator(min_stat, sub_thresh)
+        exceeds = exceeding_sub & exceeds
+
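+    # translate the exceeding timestamps into boolean flags, additionally
+    # flagging the `winsz` period leading up to each group of exceedances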
+    to_set = pd.Series(False, index=exceeds.index)
+    for exceed, group in exceeds.groupby(by=exceeds.values):
+        if exceed:
+            # dt-slices include both bounds, so we subtract 1ns
+            start = group.index[0] - (winsz - pd.Timedelta("1ns"))
+            end = group.index[-1]
+            to_set[start:end] = True
+
+    return to_set
diff --git a/saqc/lib/ts_operators.py b/saqc/lib/ts_operators.py
index 30ce15899d4d5c21999f1e686f2ec2bf83598bbb..8bac0949edcce3c40aa3ab0bb5076dcd1d45f722 100644
--- a/saqc/lib/ts_operators.py
+++ b/saqc/lib/ts_operators.py
@@ -4,44 +4,42 @@
 """
 The module gathers all kinds of timeseries tranformations.
 """
-import logging
-
 import re
+import warnings
+from typing import Union
+import sys
 
 import pandas as pd
 import numpy as np
 import numba as nb
-
 from sklearn.neighbors import NearestNeighbors
-from scipy.stats import iqr
+from scipy.stats import iqr, median_abs_deviation
+from scipy.signal import filtfilt, butter
 import numpy.polynomial.polynomial as poly
 
 
-logger = logging.getLogger("SaQC")
-
-
 def identity(ts):
     # identity function
     return ts
 
 
 def count(ts):
-    # count is a dummy to trigger according built in count method of
-    # resamplers when passed to aggregate2freq. For consistency reasons, it works accordingly when
+    # count is a dummy to trigger according built in count method of resamplers when
+    # passed to aggregate2freq. For consistency reasons, it works accordingly when
     # applied directly:
     return ts.count()
 
 
 def first(ts):
-    # first is a dummy to trigger according built in count method of
-    # resamplers when passed to aggregate2freq. For consistency reasons, it works accordingly when
+    # first is a dummy to trigger according built in count method of resamplers when
+    # passed to aggregate2freq. For consistency reasons, it works accordingly when
     # applied directly:
     return ts.first()
 
 
 def last(ts):
-    # last is a dummy to trigger according built in count method of
-    # resamplers when passed to aggregate2freq. For consistency reasons, it works accordingly when
+    # last is a dummy to trigger according built in count method of resamplers when
+    # passed to aggregate2freq. For consistency reasons, it works accordingly when
     # applied directly:
     return ts.last()
 
@@ -51,7 +49,7 @@ def zeroLog(ts):
     # in internal processing, you only have to check for nan values if you need to
     # remove "invalidish" values from the data.
     log_ts = np.log(ts)
-    log_ts[log_ts == -np.inf] = np.nan
+    log_ts[log_ts == -np.inf] = sys.float_info.min
     return log_ts
 
 
@@ -62,7 +60,10 @@ def derivative(ts, unit="1min"):
 
 def deltaT(ts, unit="1min"):
     # calculates series of time gaps in ts
-    return ts.index.to_series().diff().dt.total_seconds() / pd.Timedelta(unit).total_seconds()
+    return (
+        ts.index.to_series().diff().dt.total_seconds()
+        / pd.Timedelta(unit).total_seconds()
+    )
 
 
 def difference(ts):
@@ -104,39 +105,37 @@ def standardizeByMean(ts):
 
 
 def standardizeByMedian(ts):
+    # standardization with median (MAD)
+    # NO SCALING
+    return (ts - np.median(ts)) / median_abs_deviation(ts, nan_policy="omit")
+
+
+def standardizeByIQR(ts):
     # standardization with median and interquartile range
     return (ts - np.median(ts)) / iqr(ts, nan_policy="omit")
 
 
-def kNN(in_arr, n_neighbors, algorithm="ball_tree"):
+def kNN(in_arr, n_neighbors, algorithm="ball_tree", metric="minkowski", p=2):
     # k-nearest-neighbor search
-    nbrs = NearestNeighbors(n_neighbors=n_neighbors, algorithm=algorithm).fit(in_arr.reshape(in_arr.shape[0], -1))
-    return nbrs.kneighbors()
-
 
-def kNNMaxGap(in_arr, n_neighbors=10, algorithm="ball_tree"):
-    # searches for the "n_neighbors" nearest neighbors of every value in "in_arr"
-    # and then returns the distance to the neighbor with the "maximum" Gap to its
-    # predecessor in the neighbor hierarchy
-    in_arr = np.asarray(in_arr)
-    dist, *_ = kNN(in_arr, n_neighbors, algorithm=algorithm)
-    sample_size = dist.shape[0]
-    to_gap = np.append(np.array([[0] * sample_size]).T, dist, axis=1)
-    max_gap_ind = np.diff(to_gap, axis=1).argmax(axis=1)
-    return dist[range(0, sample_size), max_gap_ind]
+    nbrs = NearestNeighbors(
+        n_neighbors=n_neighbors, algorithm=algorithm, metric=metric, p=p
+    ).fit(in_arr.reshape(in_arr.shape[0], -1))
+    return nbrs.kneighbors()
 
 
-def kNNSum(in_arr, n_neighbors=10, algorithm="ball_tree"):
-    # searches for the "n_neighbors" nearest neighbors of every value in "in_arr"
-    # and assigns that value the summed up distances to this neighbors
-    in_arr = np.asarray(in_arr)
-    dist, *_ = kNN(in_arr, n_neighbors, algorithm=algorithm)
-    return dist.sum(axis=1)
+def maxGap(in_arr):
+    """
+    Search for the maximum gap in an array of sorted distances (a scoring function
+    for kNN distance matrices).
+    """
+    return max(in_arr[0], max(np.diff(in_arr)))
 
 
 @nb.njit
 def _maxConsecutiveNan(arr, max_consec):
-    # checks if arr (boolean array) has not more then "max_consec" consecutive True values
+    # checks if arr (boolean array) has no more than "max_consec" consecutive True
+    # values
     current = 0
     idx = 0
     while idx < arr.size:
@@ -151,8 +150,9 @@ def _maxConsecutiveNan(arr, max_consec):
 
 
 def validationTrafo(data, max_nan_total, max_nan_consec):
-    # data has to be boolean. False=Valid Value, True=invalid Value
-    # function returns True-array of input array size for invalid input arrays False array for valid ones
+    # data has to be boolean: False = valid value, True = invalid value. The function
+    # returns a True-array of input-array size for invalid input arrays, and a
+    # False-array for valid ones.
     data = data.copy()
     if (max_nan_total is np.inf) & (max_nan_consec is np.inf):
         return data
@@ -174,23 +174,32 @@ def validationTrafo(data, max_nan_total, max_nan_consec):
 
 
 def stdQC(data, max_nan_total=np.inf, max_nan_consec=np.inf):
-    return np.nanstd(data[~validationTrafo(data.isna(), max_nan_total, max_nan_consec)], ddof=1)
+    return np.nanstd(
+        data[~validationTrafo(data.isna(), max_nan_total, max_nan_consec)], ddof=1
+    )
 
 
 def varQC(data, max_nan_total=np.inf, max_nan_consec=np.inf):
-    return np.nanvar(data[~validationTrafo(data.isna(), max_nan_total, max_nan_consec)], ddof=1)
+    return np.nanvar(
+        data[~validationTrafo(data.isna(), max_nan_total, max_nan_consec)], ddof=1
+    )
 
 
 def meanQC(data, max_nan_total=np.inf, max_nan_consec=np.inf):
-    return np.nanmean(data[~validationTrafo(data.isna(), max_nan_total, max_nan_consec)])
+    return np.nanmean(
+        data[~validationTrafo(data.isna(), max_nan_total, max_nan_consec)]
+    )
 
 
-def interpolateNANs(data, method, order=2, inter_limit=2, downgrade_interpolation=False, return_chunk_bounds=False):
+def interpolateNANs(
+    data, method, order=2, inter_limit=2, downgrade_interpolation=False
+):
     """
-    The function interpolates nan-values (and nan-grids) in timeseries data. It can be passed all the method keywords
-    from the pd.Series.interpolate method and will than apply this very methods. Note, that the inter_limit keyword
-    really restricts the interpolation to chunks, not containing more than "inter_limit" nan entries
-    (thereby not being identical to the "limit" keyword of pd.Series.interpolate).
+    The function interpolates nan-values (and nan-grids) in timeseries data. It can
+    be passed all the method keywords from the pd.Series.interpolate method and will
+    then apply that very method. Note that the limit keyword really restricts the
+    interpolation to chunks not containing more than "limit" nan entries (thereby
+    not being identical to the "limit" keyword of pd.Series.interpolate).
 
     :param data:                    pd.Series or np.array. The data series to be interpolated
     :param method:                  String. Method keyword designating interpolation method to use.
@@ -200,43 +209,35 @@ def interpolateNANs(data, method, order=2, inter_limit=2, downgrade_interpolatio
                                     replaced by interpolation.
                                     Its default value suits an interpolation that only will apply to points of an
                                     inserted frequency grid. (regularization by interpolation)
-                                    Gaps wider than "inter_limit" will NOT be interpolated at all.
+                                    Gaps wider than "limit" will NOT be interpolated at all.
     :param downgrade_interpolation:  Boolean. Default False. If True:
                                     If a data chunk not contains enough values for interpolation of the order "order",
                                     the highest order possible will be selected for that chunks interpolation.
-    :param return_chunk_bounds:     Boolean. Default False. If True:
-                                    Additionally to the interpolated data, the start and ending points of data chunks
-                                    not containing no series consisting of more then "inter_limit" nan values,
-                                    are calculated and returned.
-                                    (This option fits requirements of the "interpolateNANs" functions use in the
-                                    context of saqc harmonization mainly.)
 
     :return:
     """
     inter_limit = int(inter_limit)
     data = pd.Series(data).copy()
-    gap_mask = (data.rolling(inter_limit, min_periods=0).apply(lambda x: np.sum(np.isnan(x)), raw=True)) != inter_limit
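+    # True wherever the last `inter_limit` values are not all NaN, i.e. positions
+    # that do not terminate a NaN-run of length `inter_limit`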
+    gap_mask = data.isna().rolling(inter_limit, min_periods=0).sum() != inter_limit
 
     if inter_limit == 2:
         gap_mask = gap_mask & gap_mask.shift(-1, fill_value=True)
     else:
         gap_mask = (
-            gap_mask.replace(True, np.nan).fillna(method="bfill", limit=inter_limit).replace(np.nan, True).astype(bool)
+            gap_mask.replace(True, np.nan)
+            .fillna(method="bfill", limit=inter_limit)
+            .replace(np.nan, True)
+            .astype(bool)
         )
 
-    if return_chunk_bounds:
-        # start end ending points of interpolation chunks have to be memorized to block their flagging:
-        chunk_switches = gap_mask.astype(int).diff()
-        chunk_starts = chunk_switches[chunk_switches == -1].index
-        chunk_ends = chunk_switches[(chunk_switches.shift(-1) == 1)].index
-        chunk_bounds = chunk_starts.join(chunk_ends, how="outer", sort=True)
-
     pre_index = data.index
     data = data[gap_mask]
 
     if method in ["linear", "time"]:
 
-        data.interpolate(method=method, inplace=True, limit=inter_limit - 1, limit_area="inside")
+        data.interpolate(
+            method=method, inplace=True, limit=inter_limit - 1, limit_area="inside"
+        )
 
     else:
         dat_name = data.name
@@ -248,9 +249,9 @@ def interpolateNANs(data, method, order=2, inter_limit=2, downgrade_interpolatio
                 try:
                     return x.interpolate(method=wrap_method, order=int(wrap_order))
                 except (NotImplementedError, ValueError):
-                    logger.warning(
-                        f"Interpolation with method {method} is not supported at order {wrap_order}. "
-                        f"and will be performed at order {wrap_order-1}"
+                    warnings.warn(
+                        f"Interpolation with method {method} is not supported at order "
+                        f"{wrap_order}. and will be performed at order {wrap_order-1}"
                     )
                     return _interpolWrapper(x, int(wrap_order - 1), wrap_method)
             elif x.size < 3:
@@ -262,37 +263,47 @@ def interpolateNANs(data, method, order=2, inter_limit=2, downgrade_interpolatio
                     return x
 
         data = data.groupby(data.columns[0]).transform(_interpolWrapper)
-        # squeezing the 1-dimensional frame resulting from groupby for consistency reasons
+        # squeezing the 1-dimensional frame resulting from groupby for consistency
+        # reasons
         data = data.squeeze(axis=1)
         data.name = dat_name
     data = data.reindex(pre_index)
-    if return_chunk_bounds:
-        return data, chunk_bounds
-    else: return data
+
+    return data
 
 
 def aggregate2Freq(
-    data, method, freq, agg_func, fill_value=np.nan, max_invalid_total=np.inf, max_invalid_consec=np.inf
+    data: pd.Series,
+    method,
+    freq,
+    agg_func,
+    fill_value=np.nan,
+    max_invalid_total=None,
+    max_invalid_consec=None,
 ):
-    # The function aggregates values to an equidistant frequency grid with agg_func.
-    # Timestamps that have no values projected on them, get "fill_value" assigned. Also,
-    # "fill_value" serves as replacement for "invalid" intervals
-
+    """
+    The function aggregates values to an equidistant frequency grid with agg_func.
+    Timestamps that get no values projected onto them are filled with fill_value,
+    which also serves as the replacement value for "invalid" intervals.
+    """
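+    # maps the aggregation method to (resampling base offset in seconds, label side,
+    # closed side) - unpacked below as `base, label, closed`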
     methods = {
-        "nagg": lambda seconds_total: (seconds_total/2, "left", "left"),
+        "nagg": lambda seconds_total: (seconds_total / 2, "left", "left"),
         "bagg": lambda _: (0, "left", "left"),
         "fagg": lambda _: (0, "right", "right"),
     }
 
-    # filter data for invalid patterns (since filtering is expensive we pre-check if it is demanded)
-    if (max_invalid_total is not np.inf) | (max_invalid_consec is not np.inf):
-        if pd.isnull(fill_value):
+    # filter data for invalid patterns (since filtering is expensive we pre-check if
+    # it is demanded)
+    if max_invalid_total is not None or max_invalid_consec is not None:
+        if pd.isna(fill_value):
             temp_mask = data.isna()
         else:
             temp_mask = data == fill_value
 
         temp_mask = temp_mask.groupby(pd.Grouper(freq=freq)).transform(
-            validationTrafo, max_nan_total=max_invalid_total, max_nan_consec=max_invalid_consec
+            validationTrafo,
+            max_nan_total=max_invalid_total,
+            max_nan_consec=max_invalid_consec,
         )
         data[temp_mask] = fill_value
 
@@ -300,27 +311,32 @@ def aggregate2Freq(
     base, label, closed = methods[method](seconds_total)
 
     # In the following, we check for empty intervals outside resample.apply, because:
-    # - resample AND groupBy do insert value zero for empty intervals if resampling with any kind of "sum" application -
-    #   we want "fill_value" to be inserted
-    # - we are aggregating data and flags with this function and empty intervals usually would get assigned flagger.BAD
-    #   flag (where resample inserts np.nan or 0)
+    # - resample AND groupBy do insert value zero for empty intervals if resampling
+    #   with any kind of "sum" application - we want "fill_value" to be inserted
+    # - we are aggregating data and flags with this function and empty intervals
+    #   usually would get assigned BAD flag (where resample inserts np.nan or 0)
 
-    data_resampler = data.resample(f"{seconds_total:.0f}s", base=base, closed=closed, label=label)
+    data_resampler = data.resample(
+        f"{seconds_total:.0f}s", base=base, closed=closed, label=label
+    )
 
     empty_intervals = data_resampler.count() == 0
-    # great performance gain can be achieved, when avoiding .apply and using pd.resampler
-    # methods instead. (this covers all the basic func aggregations, such as median, mean, sum, count, ...)
+    # great performance gain can be achieved when avoiding .apply and using
+    # pd.resampler methods instead. (this covers all the basic func aggregations,
+    # such as median, mean, sum, count, ...)
     try:
         check_name = re.sub("^nan", "", agg_func.__name__)
-        # a nasty special case: if function "count" was passed, we not want empty intervals to be replaced by nan:
-        if check_name == 'count':
+        # a nasty special case: if function "count" was passed, we do not want empty
+        # intervals to be replaced by nan:
+        if check_name == "count":
             empty_intervals[:] = False
         data = getattr(data_resampler, check_name)()
     except AttributeError:
         data = data_resampler.apply(agg_func)
 
-    # since loffset keyword of pandas.resample "discharges" after one use of the resampler (pandas logic) - we correct the
-    # resampled labels offset manually, if necessary.
+    # since loffset keyword of pandas.resample "discharges" after one use of the
+    # resampler (pandas logic), we correct the resampled labels offset manually,
+    # if necessary.
     if method == "nagg":
         data.index = data.index.shift(freq=pd.Timedelta(freq) / 2)
         empty_intervals.index = empty_intervals.index.shift(freq=pd.Timedelta(freq) / 2)
@@ -329,22 +345,64 @@ def aggregate2Freq(
     return data
 
 
-def shift2Freq(data, method, freq, fill_value=np.nan):
-    # shift timestamps backwards/forwards in order to allign them with an equidistant
-    # frequencie grid.
+def shift2Freq(
+    data: Union[pd.Series, pd.DataFrame], method: str, freq: str, fill_value
+):
+    """
+    Shift timestamps backwards/forwards in order to align them with an equidistant
+    frequency grid. Resulting NaNs are replaced with fill_value.
+    """
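+    # maps the shift method to (reindex fill direction, matching tolerance),
+    # unpacked below as `direction, tolerance`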
 
     methods = {
         "fshift": lambda freq: ("ffill", pd.Timedelta(freq)),
         "bshift": lambda freq: ("bfill", pd.Timedelta(freq)),
-        "nshift": lambda freq: ("nearest", pd.Timedelta(freq)/2),
+        "nshift": lambda freq: ("nearest", pd.Timedelta(freq) / 2),
     }
     direction, tolerance = methods[method](freq)
     target_ind = pd.date_range(
-        start=data.index[0].floor(freq), end=data.index[-1].ceil(freq),
+        start=data.index[0].floor(freq),
+        end=data.index[-1].ceil(freq),
         freq=freq,
-        name=data.index.name
+        name=data.index.name,
+    )
+    return data.reindex(
+        target_ind, method=direction, tolerance=tolerance, fill_value=fill_value
     )
-    return data.reindex(target_ind, method=direction, tolerance=tolerance, fill_value=fill_value)
+
+
+def butterFilter(
+    x, cutoff, nyq=0.5, filter_order=2, fill_method="linear", filter_type="low"
+):
+    """
+    Applies a Butterworth filter.
+    `x` is expected to be regularly sampled.
+
+    Parameters
+    ----------
+    x: pd.Series
+        input timeseries
+
+    cutoff: float
+        The cutoff-frequency, expressed in multiples of the sampling rate.
+
+    nyq: float
+        The Nyquist frequency, expressed in multiples of the sampling rate.
+
+    fill_method: Literal[‘nearest’, ‘zero’, ‘slinear’, ‘quadratic’, ‘cubic’, ‘spline’, ‘barycentric’, ‘polynomial’]
+        Fill method to be applied on the data before filtering (the Butterworth filter
+        cannot handle ``np.nan``). See the documentation of the pandas.Series.interpolate
+        method for details on the methods associated with the different keywords.
+
+
+    Returns
+    -------
+    y: pd.Series
+        The filtered timeseries. Indices that held NaN in the input hold NaN in the
+        output as well.
+    """
+    na_mask = x.isna()
+    x = x.interpolate(fill_method).interpolate("ffill").interpolate("bfill")
+    b, a = butter(N=filter_order, Wn=cutoff / nyq, btype=filter_type)
+    y = pd.Series(filtfilt(b, a, x), x.index, name=x.name)
+    y[na_mask] = np.nan
+    return y
 
 
 @nb.njit
@@ -419,8 +477,9 @@ def polyRollerNoMissing(in_slice, val_range, center_index, poly_deg):
 
 
 def polyRollerIrregular(in_slice, center_index_ser, poly_deg):
-    # a function to roll with, for polynomial fitting of data not having an equidistant frequency grid.
-    # (expects to get passed pandas timeseries), so raw parameter of rolling.apply should be set to False.
+    # a function to roll with, for polynomial fitting of data not having an
+    # equidistant frequency grid. (expects to get passed pandas timeseries),
+    # so raw parameter of rolling.apply should be set to False.
     x_data = ((in_slice.index - in_slice.index[0]).total_seconds()) / 60
     fitted = poly.polyfit(x_data, in_slice.values, poly_deg)
     center_pos = int(len(in_slice) - center_index_ser[in_slice.index[-1]])
@@ -432,9 +491,21 @@ def expModelFunc(x, a=0, b=0, c=0):
     return a + b * (np.exp(c * x) - 1)
 
 
+def expDriftModel(x, c, origin, target):
+    c = abs(c)
+    b = (target - origin) / (np.exp(c) - 1)
+    return expModelFunc(x, origin, b, c)
+
+
+def linearDriftModel(x, origin, target):
+    return origin + x * target
+
+
 def linearInterpolation(data, inter_limit=2):
     return interpolateNANs(data, "time", inter_limit=inter_limit)
 
 
 def polynomialInterpolation(data, inter_limit=2, inter_order=2):
-    return interpolateNANs(data, "polynomial", inter_limit=inter_limit, order=inter_order)
+    return interpolateNANs(
+        data, "polynomial", inter_limit=inter_limit, order=inter_order
+    )
diff --git a/saqc/lib/types.py b/saqc/lib/types.py
index facebe59987a4d3a74352b7146a0ab2320f8a73f..d4e76adf73526eadaf4ec09bfbfaa517f0eba4e2 100644
--- a/saqc/lib/types.py
+++ b/saqc/lib/types.py
@@ -1,13 +1,43 @@
 #! /usr/bin/env python
 # -*- coding: utf-8 -*-
+__all__ = [
+    "T",
+    "ArrayLike",
+    "PandasLike",
+    "DiosLikeT",
+    "FreqString",
+    "IntegerWindow",
+    "Timestampstr",
+    "CurveFitter",
+    "ExternalFlag",
+    "PositiveFloat",
+    "PositiveInt",
+]
 
-from typing import TypeVar, Union
-
+from typing import TypeVar, Union, NewType, List, Tuple
+from typing_extensions import Protocol, Literal
 import numpy as np
 import pandas as pd
-import dios
+from dios import DictOfSeries
 
 T = TypeVar("T")
 ArrayLike = TypeVar("ArrayLike", np.ndarray, pd.Series, pd.DataFrame)
-PandasLike = TypeVar("PandasLike", pd.Series, pd.DataFrame, dios.DictOfSeries)
-DiosLikeT = Union[dios.DictOfSeries, pd.DataFrame]
+PandasLike = Union[pd.Series, pd.DataFrame, DictOfSeries]
+DiosLikeT = Union[DictOfSeries, pd.DataFrame]
+
+ExternalFlag = Union[str, float, int]
+
+# we only support fixed length offsets
+FreqString = Literal["D", "H", "T", "min", "S", "L", "ms", "U", "us", "N"]
+
+# we define a bunch of type aliases, mostly needed to generate appropriate fuzzy
+# data through hypothesis; they are exported via `__all__` above, so they must be
+# defined here
+ColumnName = NewType("ColumnName", str)
+IntegerWindow = NewType("IntegerWindow", int)
+Timestampstr = TypeVar("Timestampstr", bound=str)
+PositiveFloat = NewType("PositiveFloat", float)
+PositiveInt = NewType("PositiveInt", int)
+
+# needed for deeper type hinting magic
+class CurveFitter(Protocol):
+    def __call__(self, data: np.ndarray, *params: float) -> np.ndarray:
+        ...
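+
+
+# for illustration only (not part of the saqc API): any plain function with a
+# matching signature structurally conforms to the CurveFitter protocol, e.g.
+#
+#   def expCurve(data: np.ndarray, a: float, b: float) -> np.ndarray:
+#       return a * np.exp(b * data)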
diff --git a/setup.py b/setup.py
index 0048952077db64fb09076749b32a99b8894e10e4..22722f0d74c48da012ed6946a1f59179ab7cbb0b 100644
--- a/setup.py
+++ b/setup.py
@@ -1,20 +1,19 @@
 from setuptools import setup, find_packages
-import saqc
 
 with open("README.md", "r") as fh:
     long_description = fh.read()
 
 setup(
     name="saqc",
-    version=saqc.__version__,
+    version="1.4",
     author="Bert Palm, David Schaefer, Peter Luenenschloss, Lennard Schmidt",
     author_email="david.schaefer@ufz.de",
     description="Data quality checking and processing tool/framework",
     long_description=long_description,
     long_description_content_type="text/markdown",
     url="https://git.ufz.de/rdm-software/saqc",
-    packages=find_packages(),
-    python_requires='>=3.7',
+    packages=find_packages(exclude=("tests",)),
+    python_requires=">=3.7, <3.10",
     install_requires=[
         "numpy",
         "pandas",
@@ -24,10 +23,14 @@ setup(
         "matplotlib",
         "click",
         "pyarrow",
-        "python-intervals",
-        "astor",
-        "dios"
+        "typing_extensions",
+        "outlier-utils",
+        "dtw",
+        "PyWavelets",
+        "mlxtend",
     ],
-    license="GPLv3",
-    entry_points={"console_scripts": ["saqc=saqc.__main__:main"],},
+    license_files=("LICENSE.md",),
+    entry_points={
+        "console_scripts": ["saqc=saqc.__main__:main"],
+    },
 )
diff --git a/sphinx-doc/FlagFunctions.rst b/sphinx-doc/FlagFunctions.rst
deleted file mode 100644
index 584d0dc5f48d0434a02d86ecbf884e47188d7236..0000000000000000000000000000000000000000
--- a/sphinx-doc/FlagFunctions.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-
-Functions
-=========
-
-.. automodapi:: saqc.funcs
-   :skip: register
-
diff --git a/sphinx-doc/Makefile b/sphinx-doc/Makefile
index efdfe91d931f29bf94f8b1f08c1b6f3d0661c8ab..992c9c8b91567974b9d6467826b5777cccd1e28f 100644
--- a/sphinx-doc/Makefile
+++ b/sphinx-doc/Makefile
@@ -18,8 +18,31 @@ help:
 clean:
 	rm -rf _build _static _api
 	rm -f *.automodsumm
+	rm -f func_modules/*.automodsumm
+	rm -f intro_modules/*.automodsumm
+	rm -rf ../docs
 	mkdir _static
 
+# trigger (saqc) customized documentation pipeline
+doc:
+	# generate fake modules to be documented by sphinx
+	python make_doc_module.py -p "saqc/funcs" -t "docs/intro_modules" -sr ".." -mo "intro_doc"
+	python make_doc_module.py -p "saqc/funcs" -t "docs/func_modules" -sr ".." -mo "registered_doc"
+	# make rest files from fake modules
+	python make_doc_rst.py -p "docs/intro_modules" -t "sphinx-doc/intro_modules" -sr ".."
+	python make_doc_rst.py -p "docs/func_modules" -t "sphinx-doc/func_modules" -sr ".."
+	# make rest folders from markdown folders
+	python make_md_to_rst.py -p "sphinx-doc/getting_started_md" -sr ".."
+	python make_md_to_rst.py -p "sphinx-doc/how_to_doc_md" -sr ".."
+	# make the html build
+	@$(SPHINXBUILD) -M html "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+	# format docstring module domain strings to correct addresses
+	python make_html_headings_proppa.py -b "sphinx-doc/_build/html/_api" -p "docs/func_modules" -sr ".."
+	python make_html_headings_proppa.py -b "sphinx-doc/_build/html/_api" -p "docs/intro_modules" -sr ".."
+	# clear fake modules/intermediate rest files
+	rm -r getting_started_md_m2r
+	rm -r how_to_doc_md_m2r
+
 # Catch-all target: route all unknown targets to Sphinx using the new
 # "make mode" option.  $(O) is meant as a shortcut for $(SPHINXOPTS).
 %: Makefile
diff --git a/sphinx-doc/conf.py b/sphinx-doc/conf.py
index 77bdd67bc2a821b18f35e06557be2ea665b6a3c2..e2792427e056fd82816a1ab08312e23a9d0bd111 100644
--- a/sphinx-doc/conf.py
+++ b/sphinx-doc/conf.py
@@ -12,17 +12,20 @@
 #
 import os
 import sys
-sys.path.insert(0, os.path.abspath('..'))
+
+sys.path.insert(0, os.path.abspath(".."))
 
 
 # -- Project information -----------------------------------------------------
 
-project = 'SaQC'
-copyright = '2020, Bert Palm, David Schäfer, Peter Lünenschloß, Lennart Schmidt, Juliane Geller'
-author = 'Bert Palm, David Schäfer, Peter Lünenschloß, Lennart Schmidt, Juliane Geller'
+project = "SaQC"
+copyright = (
+    "2020, Bert Palm, David Schäfer, Peter Lünenschloß, Lennart Schmidt, Juliane Geller"
+)
+author = "Bert Palm, David Schäfer, Peter Lünenschloß, Lennart Schmidt, Juliane Geller"
 
 # The full version, including alpha/beta/rc tags
-release = 'develop'
+release = "develop"
 
 
 # -- General configuration ---------------------------------------------------
@@ -41,25 +44,21 @@ extensions = [
     # "sphinx.ext.mathjax",
     # "sphinx.ext.ifconfig",
     "sphinx.ext.autosectionlabel",
-
     # link source code
     "sphinx.ext.viewcode",
-
     # add suupport for NumPy style docstrings
     "sphinx.ext.napoleon",
-
     # Doc a whole module
     # see https://sphinx-automodapi.readthedocs.io/en/latest/
-    'sphinx_automodapi.automodapi',
+    "sphinx_automodapi.automodapi",
     # 'sphinx_automodapi.smart_resolver',
     # see https://sphinxcontrib-fulltoc.readthedocs.io/en/latest/
-    'sphinxcontrib.fulltoc',
-
+    "sphinxcontrib.fulltoc",
     # Markdown sources support
     # https://recommonmark.readthedocs.io/en/latest/
-    'recommonmark',
+    "recommonmark",
     # https://github.com/ryanfox/sphinx-markdown-tables
-    'sphinx_markdown_tables',
+    "sphinx_markdown_tables",
 ]
 
 
@@ -72,21 +71,23 @@ automodsumm_inherited_members = True
 automodsumm_writereprocessed = True
 
 automodapi_inheritance_diagram = False
-automodapi_toctreedirnm = '_api'
+automodapi_toctreedirnm = "_api"
 autosectionlabel_prefix_document = True
 
+autodoc_typehints = "none"
+
 
 # -- Other options -----------------------------------------------------------
 
 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # This pattern also affects html_static_path and html_extra_path.
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
 
-source_suffix = ['.rst', '.md']
+source_suffix = [".rst", ".md"]
 
 
 # -- Options for HTML output -------------------------------------------------
@@ -108,4 +109,4 @@ html_theme = "nature"
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
\ No newline at end of file
+html_static_path = ["_static"]
diff --git a/sphinx-doc/cook_books_md/OutlierDetection.md b/sphinx-doc/cook_books_md/OutlierDetection.md
new file mode 100644
index 0000000000000000000000000000000000000000..e521eb948cd4f9eab38dfe59498d8e89308e655a
--- /dev/null
+++ b/sphinx-doc/cook_books_md/OutlierDetection.md
@@ -0,0 +1,151 @@
+# Basic Outlier Detection Workflows
+
+## Data 
+
+The [data set](https://git.ufz.de/rdm-software/saqc/-/blob/cookBux/sphinx-doc/ressources/data/incidentsLKG.csv) can be 
+downloaded from the saqc git repository.
+
+The data represents daily incidents of SARS-CoV-2 infections, as reported by the 
+[RKI](https://www.rki.de/DE/Home/homepage_node.html) in 2020. 
+
+![](../ressources/images/cbooks_incidents1.png)
+
+## Outlier
+
+In June, an extreme spike can be observed. This spike relates to an incident of so-called "superspreading" in a local
+[meat factory](https://www.heise.de/tp/features/Superspreader-bei-Toennies-identifiziert-4852400.html).
+  
+For the sake of modelling the Corona disease, it can be advantageous to filter the data for such extreme events, since
+they may not be consistent with the underlying distributional assumptions and thus interfere with the parameter learning 
+process of the modelling.
+
+To introduce some basic `SaQC` workflows, we will concentrate on classic variance-based outlier detection 
+approaches.
+
+## Preparation
+Initially, we want to import the relevant packages. 
+
+```python
+import saqc
+import pandas
+import numpy as np
+from scipy.signal import filtfilt, butter
+import matplotlib.pyplot as plt
+``` 
+
+We load the data via the pandas [csv file parser](https://pandas.pydata.org/docs/reference/api/pandas.read_csv.html). 
+This gives us a [data frame](https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.html) object 
+that we can directly feed into SaQC in order to generate an SaQC object.
+
+```python
+import pandas as pd
+# the `data_path` variable should point to where you have the incidents data set stored;
+# we assume the first column of the csv file holds the timestamps
+i_data = pd.read_csv(data_path, index_col=0)
+i_data.index = pd.DatetimeIndex(i_data.index)
+i_saqc = saqc.SaQC(data=i_data)
+``` 
+
+## Modelling
+
+First, we want to model our data in order to obtain a stationary, residue-like variable with zero mean.
+In SaQC, the results of data processing functions by default override the processed data column. 
+So, if we want to transform our input data and reuse the original data later on, we need to duplicate 
+it first, with the :py:func:`saqc.tools.copy <docs.func_modules.tools.copy>` method:
+
+```python
+i_saqc = i_saqc.tools.copy(field='incidents', new_field='incidents_model')
+```
+
+The copy method has two parameters - the `field` parameter controls the name of the variable to
+copy, while the `new_field` parameter holds the column name of the duplicate. 
+
+The easiest thing to do would be to apply some rolling mean
+model via the :py:func:`saqc.rolling.roll <docs.func_modules.rolling.roll>` method.
+
+```python
+i_saqc = i_saqc.rolling.roll(field='incidents_model', func=np.mean, window='13D')
+```
+
+We chose the rolling window to have a size of 13 days.
+You can pass arbitrary functions to the rolling method. For example, you could go for the 
+`median` instead. This would be done via:
+
+```python
+i_saqc = i_saqc.tools.copy(field='incidents', new_field='incidents_median')
+i_saqc = i_saqc.rolling.roll(field='incidents_median', func=np.median, window='13D')
+```
+
+Another common approach is to fit polynomials of a certain degree to the data. This could, of course, also be applied 
+via a function passed to the rolling method - but since this can easily get computationally expensive for larger data sets, 
+SaQC offers a built-in polynomial fit function 
+:py:func:`saqc.curvefit.fitPolynomial <docs.func_modules.curvefit.fitPolynomial>`:
+
+```python
+i_saqc = i_saqc.tools.copy(field='incidents', new_field='incidents_polynomial')
+i_saqc = i_saqc.curvefit.fitPolynomial(field='incidents_polynomial', order=2,
+                                       winsz='13D')
+```
+
+If you want to apply a completely arbitrary function to your data without rolling - for example
+a smoothing filter from the [scipy.signal](https://docs.scipy.org/doc/scipy/reference/signal.html) 
+module - you would simply have to wrap the desired function into a function of a single
+array-like variable. To wrap the scipy Butterworth filter into a forward-backward application,
+you would need to define a function:
+
+```python
+def butterFilter(x, filter_order, nyq, cutoff, filter_type="low"):
+    b, a = butter(N=filter_order, Wn=cutoff / nyq, btype=filter_type)
+    return filtfilt(b, a, x)
+```
+
+Then you can wrap it up in a lambda function and pass it to the 
+:py:func:`saqc.transformation.transform <docs.func_modules.transformation.transform>` 
+method's `func` argument.
+
+```python
+func=lambda x: butterFilter(x, cutoff=0.1, nyq=0.5, filter_order=2)
+i_saqc = i_saqc.tools.copy(field='incidents', new_field='incidents_lowPass')
+i_saqc = i_saqc.transformation.transform(field='incidents_lowPass',func=func)
+```
+
+You can check out the modelling results. Therefore, we evaluate the queued manipulations on the saqc object and return the 
+results.
+
+```python
+i_saqc = i_saqc.evaluate()
+result_data, _ = i_saqc.getResult()
+result_data.plot()
+```
+
+![](../ressources/images/cbooks_incidents2.png)
+
+## Residues calculation
+
+We want to evaluate the residues of the model, in order to score the "outlierishness" of every point. 
+First, we retrieve the residues via the :py:func:`saqc.genericProcess <docs.func_modules.genericProcess>` method.
+The method generates a new variable, resulting from the processing of other variables. It automatically
+creates a field with the name it gets passed - so we do not have to create the new variable beforehand. The function we apply 
+is just the computation of the variables' difference for every timestep.
+
+```python
+i_saqc = i_saqc.genericProcess('incidents_residues', func=lambda incidents, incidents_model:incidents - incidents_model)
+```
+
+Next, we score the residues simply by computing their [Z-scores](https://en.wikipedia.org/wiki/Standard_score).
+
+```python
+i_saqc = i_saqc.rolling.roll(field='incidents_residues', target='residues_mean',
+                             window='27D',
+                             func=np.mean)
+i_saqc = i_saqc.rolling.roll(field='incidents_residues', target='residues_std',
+                             window='27D',
+                             func=np.std)
+i_saqc = i_saqc.genericProcess(field='incidents_scores',
+                               func=lambda This, residues_mean, residues_std:
+                                   (This - residues_mean) / residues_std)
+```
+
+
+
+
+
diff --git a/sphinx-doc/flagger.rst b/sphinx-doc/flags.rst
similarity index 59%
rename from sphinx-doc/flagger.rst
rename to sphinx-doc/flags.rst
index d8536aa3e39c53d92aa11b9057b92ceef84b7535..28e8d605c0bd27917148c3c30f7a8815d6a4671d 100644
--- a/sphinx-doc/flagger.rst
+++ b/sphinx-doc/flags.rst
@@ -1,8 +1,8 @@
 
-Flagger
+Flags
 =======
 
-.. automodapi:: saqc.flagger
+.. automodapi:: saqc.core.flags
    :include-all-objects:
    :no-heading:
 
diff --git a/sphinx-doc/func_modules/breaks.rst b/sphinx-doc/func_modules/breaks.rst
new file mode 100644
index 0000000000000000000000000000000000000000..309876aa1e9b54b24d9fb95d5ba2b61a8731c64b
--- /dev/null
+++ b/sphinx-doc/func_modules/breaks.rst
@@ -0,0 +1,6 @@
+
+breaks
+======
+
+.. automodapi:: docs.func_modules.breaks
+	:no-heading:
diff --git a/sphinx-doc/func_modules/changepoints.rst b/sphinx-doc/func_modules/changepoints.rst
new file mode 100644
index 0000000000000000000000000000000000000000..0c1baba6ff9c5633e02192684f323d49461d1ac1
--- /dev/null
+++ b/sphinx-doc/func_modules/changepoints.rst
@@ -0,0 +1,6 @@
+
+changepoints
+============
+
+.. automodapi:: docs.func_modules.changepoints
+	:no-heading:
diff --git a/sphinx-doc/func_modules/constants.rst b/sphinx-doc/func_modules/constants.rst
new file mode 100644
index 0000000000000000000000000000000000000000..02d8041d170702f7d7589222351e9a47a3997bc7
--- /dev/null
+++ b/sphinx-doc/func_modules/constants.rst
@@ -0,0 +1,6 @@
+
+constants
+=========
+
+.. automodapi:: docs.func_modules.constants
+	:no-heading:
diff --git a/sphinx-doc/func_modules/curvefit.rst b/sphinx-doc/func_modules/curvefit.rst
new file mode 100644
index 0000000000000000000000000000000000000000..3f77cf70eff8b8d2ebf7c82338c7f4fe31946636
--- /dev/null
+++ b/sphinx-doc/func_modules/curvefit.rst
@@ -0,0 +1,6 @@
+
+curvefit
+========
+
+.. automodapi:: docs.func_modules.curvefit
+	:no-heading:
diff --git a/sphinx-doc/func_modules/drift.rst b/sphinx-doc/func_modules/drift.rst
new file mode 100644
index 0000000000000000000000000000000000000000..eaab918ea2eef26e858a75cd09c923d4c2311d2f
--- /dev/null
+++ b/sphinx-doc/func_modules/drift.rst
@@ -0,0 +1,6 @@
+
+drift
+=====
+
+.. automodapi:: docs.func_modules.drift
+	:no-heading:
diff --git a/sphinx-doc/func_modules/flagtools.rst b/sphinx-doc/func_modules/flagtools.rst
new file mode 100644
index 0000000000000000000000000000000000000000..e249cad636b8f020da8e0674a10f42577a5ff038
--- /dev/null
+++ b/sphinx-doc/func_modules/flagtools.rst
@@ -0,0 +1,6 @@
+
+flagtools
+=========
+
+.. automodapi:: docs.func_modules.flagtools
+	:no-heading:
diff --git a/sphinx-doc/func_modules/generic.rst b/sphinx-doc/func_modules/generic.rst
new file mode 100644
index 0000000000000000000000000000000000000000..7774afe1e77b7690f55e34ee7bae55dbdf1c4802
--- /dev/null
+++ b/sphinx-doc/func_modules/generic.rst
@@ -0,0 +1,6 @@
+
+generic
+=======
+
+.. automodapi:: docs.func_modules.generic
+	:no-heading:
diff --git a/sphinx-doc/func_modules/interpolation.rst b/sphinx-doc/func_modules/interpolation.rst
new file mode 100644
index 0000000000000000000000000000000000000000..3d59ea4d5397bad85e30e60b799ae8d7f3b9c01e
--- /dev/null
+++ b/sphinx-doc/func_modules/interpolation.rst
@@ -0,0 +1,6 @@
+
+interpolation
+=============
+
+.. automodapi:: docs.func_modules.interpolation
+	:no-heading:
diff --git a/sphinx-doc/func_modules/outliers.rst b/sphinx-doc/func_modules/outliers.rst
new file mode 100644
index 0000000000000000000000000000000000000000..a4a888a57af550a3aaf7d5012c632a8b4da6bebe
--- /dev/null
+++ b/sphinx-doc/func_modules/outliers.rst
@@ -0,0 +1,6 @@
+
+outliers
+========
+
+.. automodapi:: docs.func_modules.outliers
+	:no-heading:
diff --git a/sphinx-doc/func_modules/pattern.rst b/sphinx-doc/func_modules/pattern.rst
new file mode 100644
index 0000000000000000000000000000000000000000..c8d13fb5565d6bacb844684e66471d81141dc8a3
--- /dev/null
+++ b/sphinx-doc/func_modules/pattern.rst
@@ -0,0 +1,6 @@
+
+pattern
+=======
+
+.. automodapi:: docs.func_modules.pattern
+	:no-heading:
diff --git a/sphinx-doc/func_modules/resampling.rst b/sphinx-doc/func_modules/resampling.rst
new file mode 100644
index 0000000000000000000000000000000000000000..b348b2849c2e60a9dea0bd39a88fd78311e7ff0f
--- /dev/null
+++ b/sphinx-doc/func_modules/resampling.rst
@@ -0,0 +1,6 @@
+
+resampling
+==========
+
+.. automodapi:: docs.func_modules.resampling
+	:no-heading:
diff --git a/sphinx-doc/func_modules/residues.rst b/sphinx-doc/func_modules/residues.rst
new file mode 100644
index 0000000000000000000000000000000000000000..5e7133cf67c6cdbd5c51c0f933a32b0d3c355cb1
--- /dev/null
+++ b/sphinx-doc/func_modules/residues.rst
@@ -0,0 +1,6 @@
+
+residues
+========
+
+.. automodapi:: docs.func_modules.residues
+	:no-heading:
diff --git a/sphinx-doc/func_modules/rolling.rst b/sphinx-doc/func_modules/rolling.rst
new file mode 100644
index 0000000000000000000000000000000000000000..17425a73909505ecf5f6e1c84d684b1eeab546c6
--- /dev/null
+++ b/sphinx-doc/func_modules/rolling.rst
@@ -0,0 +1,6 @@
+
+rolling
+=======
+
+.. automodapi:: docs.func_modules.rolling
+	:no-heading:
diff --git a/sphinx-doc/func_modules/scores.rst b/sphinx-doc/func_modules/scores.rst
new file mode 100644
index 0000000000000000000000000000000000000000..7be28b825177fd78ad20e9799637731a13bbaada
--- /dev/null
+++ b/sphinx-doc/func_modules/scores.rst
@@ -0,0 +1,6 @@
+
+scores
+======
+
+.. automodapi:: docs.func_modules.scores
+	:no-heading:
diff --git a/sphinx-doc/func_modules/tools.rst b/sphinx-doc/func_modules/tools.rst
new file mode 100644
index 0000000000000000000000000000000000000000..79c6408a4a8d878fd92b1169620cbba93d647c09
--- /dev/null
+++ b/sphinx-doc/func_modules/tools.rst
@@ -0,0 +1,6 @@
+
+tools
+=====
+
+.. automodapi:: docs.func_modules.tools
+	:no-heading:
diff --git a/sphinx-doc/func_modules/transformation.rst b/sphinx-doc/func_modules/transformation.rst
new file mode 100644
index 0000000000000000000000000000000000000000..cec1d2ea5654414175581f498505ea04ee6fec49
--- /dev/null
+++ b/sphinx-doc/func_modules/transformation.rst
@@ -0,0 +1,6 @@
+
+transformation
+==============
+
+.. automodapi:: docs.func_modules.transformation
+	:no-heading:
diff --git a/docs/ConfigurationFiles.md b/sphinx-doc/getting_started_md/ConfigurationFiles.md
similarity index 100%
rename from docs/ConfigurationFiles.md
rename to sphinx-doc/getting_started_md/ConfigurationFiles.md
diff --git a/docs/Customizations.md b/sphinx-doc/getting_started_md/Customizations.md
similarity index 78%
rename from docs/Customizations.md
rename to sphinx-doc/getting_started_md/Customizations.md
index af438fcf6dc622f0a3aacfa679fc8ab7712de83c..b9d9ff359c7ff40e508b67935c536924dfd07aa5 100644
--- a/docs/Customizations.md
+++ b/sphinx-doc/getting_started_md/Customizations.md
@@ -1,6 +1,6 @@
 # Customizations
 SaQC comes with a continuously growing number of pre-implemented
-[quality check and processing routines](docs/FunctionIndex.md) and 
+[quality check and processing routines](sphinx-doc/getting_started_md/FunctionIndex.md) and 
 flagging schemes. 
 For any sufficiently large use case however it is very likely that the 
 functions provided won't fulfill all your needs and requirements.
@@ -17,20 +17,24 @@ welcome to file a feature request issue on the project's
 [gitlab repository](https://git.ufz.de/rdm-software/saqc). However, if 
 you are more the "no-way-I-get-this-done-by-myself" type of person,
 SaQC provides two ways to integrate custom routines into the system:
-1. The [extension language](docs/GenericFunctions.md)
+1. The [extension language](sphinx-doc/getting_started_md/GenericFunctions.md)
 2. An [interface](#interface) to the evaluation machinery
 
 ### Interface
 In order to make a function usable within the evaluation framework of SaQC the following interface is needed:
 
 ```python
+import pandas
+import dios
+import saqc
+
 def yourTestFunction(
    data: pandas.DataFrame,
    field: str,
-   flagger: saqc.flagger.BaseFlagger,
-   *args: Any,
-   **kwargs: Any
-   ) -> (dios.DictOfSeries, saqc.flagger.BaseFlagger)
+   flags: saqc.Flags,
+   *args,
+   **kwargs
+   ) -> (dios.DictOfSeries, saqc.Flags)
 ```
 
 #### Argument Descriptions
@@ -39,21 +43,21 @@ def yourTestFunction(
 |-----------|--------------------------------------------------------------------------------------------------|
 | `data`    | The actual dataset.                                                                               |
 | `field`   | The field/column within `data`, that function is processing.                              |
-| `flagger` | An instance of a flagger, responsible for the translation of test results into quality attributes. |
+| `flags`   | An instance of Flags, responsible for the translation of test results into quality attributes. |
 | `args`    | Any other arguments needed to parameterize the function.                                          |
 | `kwargs`  | Any other keyword arguments needed to parameterize the function.                                  |
 
 ### Integrate into SaQC
-In order make your function available to the system it needs to be registered. We provide the decorator 
-[`register`](saqc/functions/register.py) in the module `saqc.functions.register` to integrate your 
+In order to make your function available to the system it needs to be registered. We provide a decorator 
+[`register`](saqc/functions/register.py) with SaQC, to integrate your 
 test functions into SaQC. Here is a complete dummy example:
 
 ```python
-from saqc.functions.register import register
+from saqc import register
 
-@register
-def yourTestFunction(data, field, flagger, *args, **kwargs):
-    return data, flagger
+@register()
+def yourTestFunction(data, field, flags, *args, **kwargs):
+    return data, flags
 ```
 
 ### Example
diff --git a/docs/FlaggingSchemes.md b/sphinx-doc/getting_started_md/FlaggingSchemes.md
similarity index 100%
rename from docs/FlaggingSchemes.md
rename to sphinx-doc/getting_started_md/FlaggingSchemes.md
diff --git a/docs/GenericFunctions.md b/sphinx-doc/getting_started_md/GenericFunctions.md
similarity index 98%
rename from docs/GenericFunctions.md
rename to sphinx-doc/getting_started_md/GenericFunctions.md
index 9f91d4fd53385f2b7f7f795ad3b83c5ee21085bc..c2fa6654488728f72b43eef3188ff07f5d9baab8 100644
--- a/docs/GenericFunctions.md
+++ b/sphinx-doc/getting_started_md/GenericFunctions.md
@@ -16,7 +16,7 @@ challenging to translate them into generic source code.
 
 ### Specification
 Generic flagging functions are used in the same manner as their
-[non-generic counterparts](docs/FunctionIndex.md). The basic 
+[non-generic counterparts](sphinx-doc/getting_started_md/FunctionIndex.md). The basic 
 signature looks like that:
 ```sh
 flagGeneric(func=<expression>, flag=<flagging_constant>)
diff --git a/docs/GettingStarted.md b/sphinx-doc/getting_started_md/GettingStarted.md
similarity index 88%
rename from docs/GettingStarted.md
rename to sphinx-doc/getting_started_md/GettingStarted.md
index c034992821df5a9fa9f310748d801ccfeeb3c7f3..e1474bfe042c37160a6f5499ab369e197e8f8277 100644
--- a/docs/GettingStarted.md
+++ b/sphinx-doc/getting_started_md/GettingStarted.md
@@ -129,7 +129,7 @@ and paste the following lines into it:
 	
 	varname;test;plot
 	SM2;flagRange(min=10, max=60);False
-	SM2;spikes_flagMad(window="30d", z=3.5);True
+	SM2;flagMad(window="30d", z=3.5);True
 
 These lines illustrate how different quality control tests can be specified for
 different variables by following the pattern:
@@ -188,10 +188,10 @@ So, what do we see here?
 
 * The plot shows the data as well as the quality flags that were set by the
   tests for the variable `SM2`, as defined in the config-file
-* Following our definition in the config-file, first the `range`-test that flags
+* Following our definition in the config-file, first the `flagRange`-test that flags
   all values outside the range [10,60] was executed and after that,
-  the `spikes_simpleMad`-test to identify spikes in the data
-* In the config, we set the plotting option to `True` for `spikes_simpleMad`,
+  the `flagMad`-test to identify spikes in the data
+* In the config, we set the plotting option to `True` for `flagMad`,
   only. Thus, the plot aggregates all preceeding tests (here: `range`) to black
   points and highlights the flags of the selected test as red points.
 
@@ -226,7 +226,7 @@ range-test:
 	
 	varname;test;plot
 	SM2;flagRange(min=-20, max=60);False
-	SM2;spikes_flagMad(window="30d", z=3.5);True
+	SM2;flagMad(window="30d", z=3.5);True
 Rerunning SaQC as above produces the following plot:
 
 ![Changing the config](../ressources/images/example_plot_2.png "Changing the config")
@@ -245,16 +245,23 @@ something like this:
 	varname;test;plot
 	SM1;flagRange(min=10, max=60);False
 	SM2;flagRange(min=10, max=60);False
-	SM1;spikes_flagMad(window="15d", z=3.5);True
-	SM2;spikes_flagMad(window="30d", z=3.5);True
+	SM1;flagMad(window="15d", z=3.5);True
+	SM2;flagMad(window="30d", z=3.5);True
 
 which gives you separate plots for each line where the plotting option is set to
 `True` as well as one summary "data plot" that depicts the joint flags from all
 tests:
 
-SM1         |  SM2
-:-------------------------:|:-------------------------:
-![](../ressources/images/example_plot_31.png)  |  ![](../ressources/images/example_plot_32.png)
+|SM1         |  SM2 |
+|:-------------------------:|:-------------------------:|
+| ![](../ressources/images/example_plot_31.png)  | ![](../ressources/images/example_plot_32.png)|
 
 ![](../ressources/images/example_plot_33.png)
 
@@ -262,15 +269,15 @@ SM1         |  SM2
 
 SaQC includes functionality to harmonize the timestamps of one or more data
 series. Also, you can write your own tests using a python-based
-[extension language](docs/GenericFunctions.md). This would look like this:
+[extension language](sphinx-doc/getting_started_md/GenericFunctions.md). This would look like this:
 
 	varname;test;plot
-	SM2;harm_shift2Grid(freq="15Min");False
-	SM2;flagGeneric(func=(SM2 < 30));True
+	SM2;shiftToFreq(freq="15Min");False
+	SM2;generic(func=(SM2 < 30));True
 
 The above executes an internal framework that harmonizes the timestamps of SM2
 to a 15min-grid (see data below). Further information about this routine can be
-found in the [function definition](docs/FunctionIndex.md).
+found in the :ref:`Flagging Functions Overview <flaggingFunctions>`.
 
 	Date,SM1,SM1_flags,SM2,SM2_flags
 	2016-04-01 00:00:00,,,29.3157,OK
@@ -281,6 +288,8 @@ found in the [function definition](docs/FunctionIndex.md).
 
 Also, all values where SM2 is below 30 are flagged via the custom function (see
 plot below). You can learn more about the syntax of these custom functions
-[here](docs/GenericFunctions.md).
+[here](sphinx-doc/getting_started_md/GenericFunctions.md).
 
 ![Example custom function](../ressources/images/example_plot_4.png "Example custom function")
diff --git a/docs/ParameterDescriptions.md b/sphinx-doc/getting_started_md/ParameterDescriptions.md
similarity index 98%
rename from docs/ParameterDescriptions.md
rename to sphinx-doc/getting_started_md/ParameterDescriptions.md
index 8fcfa0511100177240701bb9338174bf4dfde27a..0581f55c189f0c1b80c22296a188104d9b6a6716 100644
--- a/docs/ParameterDescriptions.md
+++ b/sphinx-doc/getting_started_md/ParameterDescriptions.md
@@ -32,4 +32,3 @@ and might range from numerical values to string constants.
 | Alias    | Description  |
 | ----     | ----         |
 | `NAN`    | Not a number |
-| `NODATA` | Missing data |
diff --git a/sphinx-doc/how_to_doc_md/HowToDoc.md b/sphinx-doc/how_to_doc_md/HowToDoc.md
new file mode 100644
index 0000000000000000000000000000000000000000..61a902aa080cac8016d2e3e44d5f205d105ac534
--- /dev/null
+++ b/sphinx-doc/how_to_doc_md/HowToDoc.md
@@ -0,0 +1,138 @@
+# Documentation Guide
+
+We document our code via docstrings in numpy-style. 
+Features, install and usage instructions, and other more text-intensive content 
+are written in separate documents. 
+The documents and the docstrings are then collected and rendered using [sphinx](https://www.sphinx-doc.org/). 
+
+ 
+## Documentation Strings
+
+
+- Write docstrings for all public modules, functions, classes, and methods. 
+    Docstrings are not necessary for non-public methods, 
+    but you should have a comment that describes what the method does. 
+    This comment should appear after the def line. 
+    [[PEP8](https://www.python.org/dev/peps/pep-0008/#documentation-strings)]
+
+- Note that most importantly, the `"""` that ends a multiline docstring should be on a line by itself [[PEP8](https://www.python.org/dev/peps/pep-0008/#documentation-strings)] :
+    ```python
+    """Return a foobang
+
+    Optional plotz says to frobnicate the bizbaz first.
+    """
+    ```
+    
+- For one-liner docstrings, please keep the closing `"""` on the same line. 
+  [[PEP8](https://www.python.org/dev/peps/pep-0008/#documentation-strings)]
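+
+    For instance, a minimal sketch (the function name is just an illustration):
+
+    ```python
+    def frobnicate(bizbaz):
+        """Return the frobnicated bizbaz."""
+        return bizbaz
+    ```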
+
+### Pandas Style
+
+We use [Pandas-style](https://pandas.pydata.org/pandas-docs/stable/development/contributing_docstring.html) docstrings:
+
+
+
+## Flagger, data, field, etc.
+
+use this:
+```py
+def foo(data, field, flagger):
+    """
+    data : dios.DictOfSeries
+	A saqc-data object.
+
+    field : str
+	A field denoting a column in data.
+
+    flagger : saqc.flagger.BaseFlagger
+	A saqc-flagger object.
+    """
+```
+
+
+### IDE helper
+
+In PyCharm one can activate auto-generation of numpy doc style like so:
+1. `File->Settings...`
+2. `Tools->Python Integrated Tools`
+3. `Docstrings->Docstring format`
+4. Choose `NumPy`
+
+
+### Docstring formatting pitfalls
+
+* Latex is included via :math:\`latex_code\` (see the snippet after this list)
+  
+  * note the backticks surrounding the actual code
+  * Latex commands need to be signified with a **double** backslash! (``\\mu`` instead of ``\mu``)
+
+* Nested lists need to be all of the same kind (either numbered or marked - otherwise the result is salad) 
+* List items covering several lines in the docstring have to be all aligned - (so, not only the subsequent ones, but ALL, including the first one - otherwise the result is salad)
+* The start of a list has to be separated from the preceding docstring code by *one blank line* - (otherwise the list items just get chained in one line and the result is salad)
+* Most formatting signifiers are not allowed to start or end with a space. (so no :math: \`1+1 \`, \` var2\`, \`\` a=1 \`\`, ...)
+* Do not include lines *only* containing two or more `-` signs, unless they form the underline of a section heading (otherwise the resulting html representation could be messed up)
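+
+A minimal sketch of how such a formula could look inside a docstring (the surrounding 
+function is purely hypothetical):
+
+```python
+def scale(x, mu=0):
+    """
+    Shift `x` by the offset :math:`\\mu` (written with a double backslash in the source).
+    """
+    return x - mu
+```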
+
+## Adding Markdown content to the Documentation
+
+- If you generate cookbooks and/or tutorials in markdown and want them to be integrated into the sphinx doc - there are some obstacles to take care of
+
+- You will have to gather all markdown files in subfolders of "sphinx-doc". 
+
+- To include a folder named 'foo_md' of markdown files in the documentation, you will have to add the following line to the Makefile:
+
+```python
+python make_md_to_rst.py -p "sphinx-doc/foo_md"
+```
+
+- The markdown files must be placed directly in those subfolders - they can't be gathered in nested subfolders. 
+
+- You cannot link to sections in other markdown files that contain the `-` character.
+
+- The section structure/ordering must be consistent in the ReST sense (otherwise the sections won't appear)
+
+- You can link to resources - like pictures - and include them in the markdown, if the pictures are in a (possibly different) folder in `sphinx-doc` and the paths to these resources are given as relative paths!
+
+- You can include a markdown file in a rest document by appending '_m2r' to the folder name. So, to include the markdown file 'foo_md/bar.md' in a toctree, for example, you would do something like:
+```python
+.. toctree::
+   :hidden:
+   :maxdepth: 1
+
+   foo_md_m2r/bar
+```
+
+- If you want to hyperlink/include other sources from the sphinx documentation that are rest files, you will not be able to include them in a way that makes them appear in your markdown rendering. However, there is the slightly hacky possibility to just include the respective rest directives. This will mess up your markdown code - meaning that you will have those rest snippets flying around - but when the markdown file gets converted to a rest file and built into the sphinx html build, the linked sources will be integrated properly. The syntax is as follows:
+
+- to include the link to the rest source `functions.rst` in the folder `foo`, under the name `bar`, you would need to insert: 
+```python
+:doc:`bar <foo/functions>`
+```
+
+- to link to a section with name `foo` in a rest source named `bumm.rst`, under the name `bar`, you would just insert: 
+```python
+:ref:`bar <relative/path/from/sphinx/root/bumm:foo>`
+``` 
+
+- in that manner you might be able to smuggle most rest directives through into the resulting html build. This is especially useful if you want to link to the docstrings of certain (domain-specific) objects. Let's say you want to link to the *function* `saqc.funcs.flagRange` under the name `Ranger` - you just include:
+
+```python
+:py:func:`Ranger <saqc.funcs.flagRange>`
+```
+
+where the `:func:` part determines the role the object is documented as. See [this page](https://www.sphinx-doc.org/en/master/#ref-role) for an overview of the available roles.
+
+## Referring to documented Functions
+
+- Since the documentation generates its own module structure to document the functions, linking to the documented functions is a bit hacky:
+
+- Functions: to link to any function in the 'saqc.funcs' module, you will have to link to the rest file it is documented in. All functions from the funcs module can be linked by replacing the 'saqc.funcs' part of the module path with 'docs.func_modules':
+
+- For example, 'saqc.funcs.outliers.flagRange' is linked via:
+```python
+:py:func:`docs.func_modules.outliers.flagRange`
+```
+
+To hide the temporary module structure and/or make the intended module structure transparent, use named links, like so:
+```python
+:py:func:`saqc.outliers.flagRange <docs.func_modules.outliers.flagRange>`
+```
diff --git a/sphinx-doc/index.rst b/sphinx-doc/index.rst
index c8d40fbd90e200486bdc243c67445ccd0e690015..b5875e5e3ff1e2d5bbccf5afa0de8fdca830b0e1 100644
--- a/sphinx-doc/index.rst
+++ b/sphinx-doc/index.rst
@@ -1,27 +1,51 @@
-.. SaQC documentation master file, created by
-   sphinx-quickstart on Mon Aug 17 12:11:29 2020.
-   You can adapt this file completely to your liking, but it should at least
-   contain the root `toctree` directive.
-
-Welcome to SaQC's documentation!
-================================
+==================
+SaQC documentation
+==================
 
 Saqc is a great tool to clean data from rubbish.
 
 .. toctree::
    :hidden:
+   :maxdepth: 1
 
    Repository <https://git.ufz.de/rdm-software/saqc>
+   Documentation Guide <how_to_doc_m2r/HowToDoc>
+
+
+Getting Started
+===============
 
 .. toctree::
    :maxdepth: 2
 
-   flagger
+   getting_started_md_m2r/GettingStarted
+
+
+Flagging Functions
+==================
+
+Flagging Functions
 
 .. toctree::
    :maxdepth: 2
+   :glob:
+   :titlesonly:
+
+   intro_modules/*
+
+
+Func Modules
+============
+
+Function modules
+
+.. toctree::
+   :maxdepth: 3
+   :glob:
+   :titlesonly:
+
+   func_modules/*
 
-   FlagFunctions
 
 Indices and tables
 ==================
diff --git a/sphinx-doc/intro_modules/AdvancedFlagging.rst b/sphinx-doc/intro_modules/AdvancedFlagging.rst
new file mode 100644
index 0000000000000000000000000000000000000000..3fc3e5f13968f542df5a449c0bac962014a95888
--- /dev/null
+++ b/sphinx-doc/intro_modules/AdvancedFlagging.rst
@@ -0,0 +1,6 @@
+
+AdvancedFlagging
+================
+
+.. automodapi:: docs.intro_modules.AdvancedFlagging
+	:no-heading:
diff --git a/sphinx-doc/intro_modules/BasicFlagging.rst b/sphinx-doc/intro_modules/BasicFlagging.rst
new file mode 100644
index 0000000000000000000000000000000000000000..265498330e2f3881778dcf8d167adefaeb58eb81
--- /dev/null
+++ b/sphinx-doc/intro_modules/BasicFlagging.rst
@@ -0,0 +1,6 @@
+
+BasicFlagging
+=============
+
+.. automodapi:: docs.intro_modules.BasicFlagging
+	:no-heading:
diff --git a/sphinx-doc/make_doc_module.py b/sphinx-doc/make_doc_module.py
new file mode 100644
index 0000000000000000000000000000000000000000..f85db54991fba0a370d01ddaa2a366d990d7e0c8
--- /dev/null
+++ b/sphinx-doc/make_doc_module.py
@@ -0,0 +1,222 @@
+import ast
+import os
+import click
+import pkgutil
+import shutil
+import re
+from collections import OrderedDict
+import pickle
+
+new_line_re = "(\r\n|[\r\n])"
+
+doc_mod_structure = {
+    "BasicFlagging": ["outliers.flagRange", "breaks.flagMissing"],
+    "BasicFlagging_dcstring": "",
+    "AdvancedFlagging": ["pattern.flagPatternByDTW", "outliers.flagOffset"],
+    "AdvancedFlagging_dcstring": "",
+}
+
+
+def rm_section(dcstring, section, _return_section=False):
+    """
+    Detects a section in a docstring and removes it (default) or returns it (_return_section=True).
+    """
+    section_re = f"{new_line_re}(?P<s_name>[^\n\r]{{2,}}){new_line_re}(?P<s_dash>-{{2,}}){new_line_re}"
+    triggers = re.finditer(section_re, dcstring)
+    matches = [
+        (trigger.groupdict()["s_name"], trigger.span())
+        for trigger in triggers
+        if len(trigger.groupdict()["s_name"]) == len(trigger.groupdict()["s_dash"])
+    ] + [(None, (len(dcstring), None))]
+    sections = [m[0] for m in matches]
+    starts = ends = 0
+    if section in sections:
+        i = sections.index(section)
+        starts = matches[i][1][0]
+        ends = matches[i + 1][1][0]
+
+    if _return_section:
+        return dcstring[starts:ends]
+    else:
+        return dcstring[:starts] + dcstring[ends:]
+
+
+def rm_parameter(dcstring, parameter):
+    """
+    remove a parameters documentation from a function docstring
+    """
+    paramatches = _get_paramatches(dcstring)
+    start = end = 0
+    for p in paramatches:
+        if parameter == p.groupdict()["paraname"]:
+            start = re.search(p[0], dcstring).span()[0]
+            try:
+                end = dcstring.find(next(paramatches)[0])
+            except StopIteration:
+                end = len(re.sub(new_line_re + "$", "", dcstring))
+
+    return dcstring[0:start] + dcstring[end:]
+
+
+def get_parameter(dcstr):
+    """
+    returns the list of parameters and their defaults, as documented in a docstring's Parameters section
+    """
+    paramatches = _get_paramatches(dcstr)
+    return [
+        (p.groupdict()["paraname"], p.groupdict()["paradefaults"]) for p in paramatches
+    ]
+
+
+def _get_paramatches(dcstr):
+    parastr = rm_section(dcstr, "Parameters", _return_section=True)
+    match_re = f"{new_line_re}(?P<paraname>[\S]+) : [^\n\r]*(default (?P<paradefaults>[^\n\r]*))?"
+    return re.finditer(match_re, parastr)
+
+
+def parse_func_dcstrings(m_paths):
+    func_dict = {}
+    for m in m_paths:
+        with open(m) as f:
+            lines = f.readlines()
+        module_ast = ast.parse("".join(lines))
+        funcs = [node for node in module_ast.body if isinstance(node, ast.FunctionDef)]
+        for func in funcs:
+            dcstr = ast.get_docstring(func)
+            if func.name[0] == "_" or (dcstr is None):
+                continue
+            dcstr = rm_section(dcstr, "Returns")
+            dcstr = rm_parameter(dcstr, "data")
+            dcstr = rm_parameter(dcstr, "flags")
+            parameters = get_parameter(dcstr)
+            parameters = [f"{p[0]}={p[1]}" if p[1] else p[0] for p in parameters]
+            signature = f"def {func.name}({', '.join(parameters)}):"
+            # get @register module registration if present
+            reg_module = None
+            r = [d for d in func.decorator_list if d.func.id == "register"]
+            if r:
+                rm = [kw.value.s for kw in r[0].keywords if kw.arg == "module"]
+                if rm:
+                    reg_module = rm[0]
+
+            func_dict[f"{os.path.splitext(os.path.basename(m))[0]}.{func.name}"] = (
+                signature,
+                dcstr,
+                reg_module,
+            )
+
+    return func_dict
+
+
+def parse_module_dcstrings(m_paths):
+    mod_dict = {}
+    for m in m_paths:
+        with open(m) as f:
+            lines = f.readlines()
+
+        mod_docstr = ast.get_docstring(ast.parse("".join(lines)))
+        mod_dict[f"{os.path.splitext(os.path.basename(m))[0]}"] = mod_docstr or ""
+    return mod_dict
+
+
+def make_doc_module(targetpath, func_dict, doc_mod_structure):
+    for doc_mod in [
+        d for d in doc_mod_structure.keys() if not re.search("_dcstring$", d)
+    ]:
+        with open(os.path.join(targetpath, f"{doc_mod}.py"), "w+") as f:
+            mod_string = ['"""\n' + doc_mod_structure[doc_mod + "_dcstring"] + '\n"""']
+            mod_funcs = doc_mod_structure[doc_mod]
+            for func in mod_funcs:
+                mod_string.append(func_dict[func][0])
+                mod_string.append('    """')
+                # indent the docstring:
+                indented_doc_string = "\n".join(
+                    [f"    {l}" for l in func_dict[func][1].splitlines()]
+                )
+                mod_string.append(indented_doc_string)
+                mod_string.append('    """')
+                mod_string.append("    pass")
+                mod_string.append("")
+                mod_string.append("")
+            f.write("\n".join(mod_string))
+
+    with open(os.path.join(targetpath, "module_dict.pkl"), "wb+") as file:
+        pickle.dump(doc_mod_structure, file)
+
+    return 0
+
+
+@click.command()
+@click.option(
+    "-p",
+    "--pckpath",
+    type=str,
+    required=True,
+    default="saqc/funcs",
+    help="Relative path to the package to be documented (relative to sphinx root).",
+)
+@click.option(
+    "-t",
+    "--targetpath",
+    type=str,
+    required=True,
+    default="docs/intro_modules",
+    help="Output folder path (relative to sphinx root). Will be overridden if already existent.",
+)
+@click.option(
+    "-sr",
+    "--sphinxroot",
+    type=str,
+    required=True,
+    default="..",
+    help="Relative path to the sphinx root.",
+)
+@click.option(
+    "-mo",
+    "--mode",
+    type=str,
+    required=True,
+    default="intro_doc",
+    help="either 'intro_doc' or 'module_doc'.",
+)
+def main(pckpath, targetpath, sphinxroot, mode):
+    root_path = os.path.abspath(sphinxroot)
+    pkg_path = os.path.join(root_path, pckpath)
+    targetpath = os.path.join(root_path, targetpath)
+    modules = []
+    # collect modules
+    for _, modname, _ in pkgutil.walk_packages(path=[pkg_path], onerror=lambda x: None):
+        modules.append(modname)
+
+    # clear target dir
+    if os.path.isdir(targetpath):
+        shutil.rmtree(targetpath)
+    os.makedirs(targetpath, exist_ok=True)
+
+    # parse all the functions
+    module_paths = [os.path.join(pkg_path, f"{m}.py") for m in modules]
+    mod_dict = parse_module_dcstrings(module_paths)
+    func_dict = parse_func_dcstrings(module_paths)
+    if mode == "intro_doc":
+        make_doc_module(targetpath, func_dict, doc_mod_structure)
+    if mode == "registered_doc":
+        doc_struct = {}
+        for dm in func_dict.keys():
+            module = func_dict[dm][2]
+            if module:
+                if module in doc_struct.keys():
+                    doc_struct[module].append(dm)
+                else:
+                    doc_struct[module] = [dm]
+                    doc_struct[module + "_dcstring"] = mod_dict[module]
+        make_doc_module(targetpath, func_dict, doc_struct)
+    if mode == "module_doc":
+        doc_struct = {m: [] for m in modules}
+        for dm in func_dict.keys():
+            module = re.search("([^ .]*)\.[^ ]*$", dm).group(1)
+            doc_struct[module].append(dm)
+        make_doc_module(targetpath, func_dict, doc_struct)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/sphinx-doc/make_doc_rst.py b/sphinx-doc/make_doc_rst.py
new file mode 100644
index 0000000000000000000000000000000000000000..b5f3cfb25fb5808db1aabf359de89b29f65c726c
--- /dev/null
+++ b/sphinx-doc/make_doc_rst.py
@@ -0,0 +1,83 @@
+import os
+import click
+import pkgutil
+import ast
+import shutil
+
+
+def parse_imports(path):
+    modules = []
+    file = open(path)
+    lines = file.readlines()
+    for node in ast.iter_child_nodes(ast.parse("".join(lines))):
+        if isinstance(node, ast.ImportFrom) | isinstance(node, ast.Import):
+            modules += [x.name for x in node.names] + [
+                x.asname for x in node.names if x.asname is not None
+            ]
+    file.close()
+    return modules
+
+
+@click.command()
+@click.option(
+    "-p",
+    "--pckpath",
+    type=str,
+    required=True,
+    default="saqc/funcs",
+    help="Relative path to the package to be documented (relative to sphinx root).",
+)
+@click.option(
+    "-t",
+    "--targetpath",
+    type=str,
+    required=True,
+    default="sphinx-doc/internal_doc_rst",
+    help="Output folder path (relative to sphinx root). Will be overridden if already existent.",
+)
+@click.option(
+    "-sr",
+    "--sphinxroot",
+    type=str,
+    required=True,
+    default="..",
+    help="Relative path to the sphinx root.",
+)
+def main(pckpath, targetpath, sphinxroot):
+    root_path = os.path.abspath(sphinxroot)
+    targetpath = os.path.join(root_path, targetpath)
+    pkg_path = os.path.join(root_path, pckpath)
+    modules = []
+    for _, modname, _ in pkgutil.walk_packages(path=[pkg_path], onerror=lambda x: None):
+        modules.append(modname)
+
+    emptyline = [""]
+
+    # clear target directory:
+    if os.path.isdir(targetpath):
+        shutil.rmtree(targetpath)
+    os.mkdir(targetpath)
+
+    for module in modules:
+        imports = parse_imports(os.path.join(pkg_path, f"{module}.py"))
+        skiplist = [f"\t:skip: {k}" for k in imports]
+        section = [module] + ["=" * len(module)]
+        automodapi_directive = [
+            ".. automodapi:: " + pckpath.replace("/", ".") + "." + module
+        ]
+        no_heading = [f"\t:no-heading:"]
+        to_write = (
+            emptyline
+            + section
+            + emptyline
+            + automodapi_directive
+            + skiplist
+            + no_heading
+        )
+        to_write = "".join([f"{k}\r\n" for k in to_write])
+        with open(os.path.join(targetpath, f"{module}.rst"), "w+") as f:
+            f.write(to_write)
+
+
+if __name__ == "__main__":
+    main()
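
parse_imports collects everything a module imports so that the generated .rst can list matching ":skip:" entries for the automodapi directive. A minimal sketch of the same AST-based collection, run on a made-up in-memory source string:

    import ast

    src = "import os\nfrom collections import OrderedDict as OD\n"
    names = []
    for node in ast.iter_child_nodes(ast.parse(src)):
        if isinstance(node, (ast.Import, ast.ImportFrom)):
            # collect the imported names and any aliases they are bound to
            names += [a.name for a in node.names]
            names += [a.asname for a in node.names if a.asname is not None]
    print(names)  # ['os', 'OrderedDict', 'OD']
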
diff --git a/sphinx-doc/make_html_headings_proppa.py b/sphinx-doc/make_html_headings_proppa.py
new file mode 100644
index 0000000000000000000000000000000000000000..42627f24438fc08cc769b494e884c475f217ce34
--- /dev/null
+++ b/sphinx-doc/make_html_headings_proppa.py
@@ -0,0 +1,66 @@
+import os
+import click
+import re
+import pickle
+
+
+@click.command()
+@click.option(
+    "-b",
+    "--buildpath",
+    type=str,
+    required=True,
+    default="sphinx-doc/_build/html/_api",
+    help="Relative path to the html api files to be manipulated (relative to sphinx root).",
+)
+@click.option(
+    "-sr",
+    "--sphinxroot",
+    type=str,
+    required=True,
+    default="..",
+    help="Relative path to the sphinx root.",
+)
+@click.option(
+    "-p",
+    "--pckpath",
+    type=str,
+    required=True,
+    default="docs/doc_modules/func_modules",
+    help="Relative path to the documented package (relative to sphinx root).",
+)
+def main(buildpath, sphinxroot, pckpath):
+    root_path = os.path.abspath(sphinxroot)
+    buildpath = os.path.join(root_path, buildpath)
+    pckpath = os.path.join(root_path, pckpath)
+    files = os.listdir(buildpath)
+    # gather all files from the doc module
+    files = [f for f in files if re.search(r"^docs\.", f)]
+    with open(os.path.join(pckpath, "module_dict.pkl"), "rb") as file_:
+        doc_mod_structure = pickle.load(file_)
+
+    for key in doc_mod_structure.keys():
+        # search for all function files assigned to the module
+        mod_f = [f for f in files if re.search(rf"(^|[.]){key}\.[^.]*\.html", f)]
+        for file_ in mod_f:
+            parts = file_.split(".")
+            func = parts[-2]
+            module_domain = ".".join(parts[:-2])
+
+            with open(os.path.join(buildpath, file_), "r") as wf:
+                code = wf.read()
+
+            old_domain_str = f'<code class="sig-prename descclassname">{module_domain}'
+            new_domain = [
+                f.split(".")[0]
+                for f in doc_mod_structure[key]
+                if f.split(".")[1] == func
+            ][0]
+            new_domain_str = f'<code class="sig-prename descclassname">{new_domain}'
+            code = code.replace(old_domain_str, new_domain_str)
+            with open(os.path.join(buildpath, file_), "w+") as wf:
+                wf.write(code)
+
+
+if __name__ == "__main__":
+    main()
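
make_html_headings_proppa.py rewrites the signature headings of the generated HTML by swapping the fully qualified module prefix for the shorter documentation domain recorded in module_dict.pkl. A minimal sketch of that replacement on a made-up HTML snippet (both domain strings are hypothetical):

    # hypothetical prefix as rendered by sphinx and hypothetical target domain
    module_domain = "docs.func_modules.outliers"
    new_domain = "outliers"
    html = '<code class="sig-prename descclassname">docs.func_modules.outliers.</code>'
    fixed = html.replace(
        f'<code class="sig-prename descclassname">{module_domain}',
        f'<code class="sig-prename descclassname">{new_domain}',
    )
    print(fixed)  # <code class="sig-prename descclassname">outliers.</code>
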
diff --git a/sphinx-doc/make_md_to_rst.py b/sphinx-doc/make_md_to_rst.py
new file mode 100644
index 0000000000000000000000000000000000000000..a02eecdcb7db4cf2663c0ca698186b74077466a9
--- /dev/null
+++ b/sphinx-doc/make_md_to_rst.py
@@ -0,0 +1,181 @@
+"""
+The script generates a folder of rest files from a folder of markdown files.
+Markdown Hyperlinks between the files in the folder get converted to rest links so that they function properly in a
+sphinx generated html build obtained from the resulting rest folder.
+"""
+
+import os
+import click
+import shutil
+from m2r import parse_from_file
+import re
+
+new_line_re = "(\r\n|[\r\n])"
+
+
+def rebaseAbsRoot(path, target, root):
+    """
+    If path and target intersect at root, return the relative path from path to target.
+    Functionality is limited:
+    path and target must be path strings pointing at FILES,
+    root is only allowed to appear once in every path,
+    and you can't root to os.sep (no folder separators allowed in the root string).
+    """
+
+    p = path.find(root)
+    t = target.find(root)
+    if (p == -1) or (t == -1) or (".." in path):
+        return target
+
+    path = path[path.find(root) :].split(os.sep)
+    target = target[target.find(root) :].split(os.sep)
+    # remove common path chunks:
+    while path[0] == target[0]:
+        del path[0]
+        del target[0]
+
+    up_steps = (len(path) - 1) * f"..{os.sep}"
+    down_steps = os.sep.join(target)
+    new_path = os.path.join(up_steps, down_steps)
+    return new_path
+
+
+def fixTables(f_rst):
+    body_re = f"((.+){new_line_re})*{new_line_re}((.+){new_line_re})*"
+    tables = list(re.finditer(rf"\.\. list-table::{new_line_re}" + body_re, f_rst))
+    for t in tables:
+        tab = t[0]
+
+        def pic_repl(match):
+            leading = match.groupdict()["list_level"]
+            pic_dir = match.groupdict()["pic_directive"]
+            pic_pad = re.match("^[ ]*", pic_dir).span()[1]
+            pic_dir = re.sub(f'{" " * pic_pad}', " " * len(leading), pic_dir)
+            pic_dir = leading + pic_dir[len(leading) :]
+            end_space = re.search(f"{new_line_re}[ ]*$", match[0])
+            if end_space:
+                pic_dir = re.sub(f"{new_line_re}[ ]*$", end_space[0], pic_dir)
+            return pic_dir
+
+        messy_re = (
+            f"(?P<list_level>.*){new_line_re}(?P<pic_directive>[ ]*.. image::[^*-]*)"
+        )
+        # use a while loop because the messed-up picture patterns overlap
+        tab, repnum = re.subn(messy_re, pic_repl, tab, 1)
+        while repnum:
+            tab, repnum = re.subn(messy_re, pic_repl, tab, 1)
+
+        bullets = tab.split("   *")[1:]
+        items = [bullet.split("     -") for bullet in bullets]
+        last_items = items[-1]
+        item_num = len(items[0])
+        last_item_num = len(last_items)
+        if item_num > last_item_num:
+            has_content = (
+                len([content for content in last_items if re.search(r"[^\s-]", content)])
+                > 0
+            )
+            if has_content:
+                # append empty cells
+                tab += "     - \n" * (item_num - last_item_num)
+            else:
+                # delete the last row (using replace to avoid false meta-character interpretation)
+                tab = tab.replace(bullets[-1][0], "")
+
+        bullet_num = len(
+            list(re.finditer(f"   \*(?P<items>([ ]+-.*{new_line_re})*)", tab))
+        )
+        if bullet_num == 1:
+            # fix empty body table error:
+            tab = re.sub(":header-rows: [0-9]", ":header-rows: 0", tab)
+
+        if tab != t[0]:
+            f_rst = f_rst.replace(t[0], tab)
+
+    return f_rst
+
+
+def fixLinks(f_rst, f, targetpath):
+    md_links = list(
+        re.finditer(
+            "(?P<numbered>\. )?`(?P<link_name>[^<`]*) <(?P<md_link>\S*.md)?(#)?(?P<section>[^>]*)?>`_?",
+            f_rst,
+        )
+    )
+    for link in md_links:
+        # change directory:
+        link_path = link.groupdict()["md_link"]
+        if not link_path:
+            link_path = f
+        # change the directory to point at the temporary reST dir (if the link isn't relative):
+        if os.path.dirname(link_path) != "":
+            link_path = os.path.join(
+                os.path.dirname(link_path) + "_m2r", os.path.basename(link_path)
+            )
+        # rebase the link to a relative link if it isn't one already
+        link_path = rebaseAbsRoot(os.path.join(targetpath, f), link_path, "sphinx-doc")
+        # remove extension name (rst syntax)
+        link_path = re.sub(r"\.md$", "", link_path)
+        if link.groupdict()["section"]:
+            # while document links have to be relative, section links have to be absolute from the sphinx doc dir;
+            # also, the markdown representation of spaces as dashes has to be undone
+            abs_path = os.path.basename(os.path.abspath(""))
+            abs_path = targetpath[targetpath.find(abs_path) + len(abs_path) + 1 :]
+            link_path = os.path.join(abs_path, os.path.basename(link_path))
+            role = ":ref:"
+            section = ":" + link.groupdict()["section"].replace("-", " ")
+            # one more regex spell for the sake of numbered section linking:
+            if link.groupdict()["numbered"]:
+                section = re.sub("(:[0-9]+)", r"\g<1>.", section)
+        else:
+            role = ":doc:"
+            section = ""
+
+        f_rst = re.sub(
+            f'`(?P<link_name>{link.groupdict()["link_name"]}) '
+            f'<({link.groupdict()["md_link"]})?(#[^>]*)?>`(_)?',
+            r"{}`\g<link_name> <{}{}>`".format(role, link_path, section),
+            f_rst,
+        )
+    return f_rst
+
+
+@click.command()
+@click.option(
+    "-p",
+    "--mdpath",
+    type=str,
+    required=True,
+    default="sphinx-doc/getting_started_md",
+    help="Relative path to the folder containing the .md files to be converted (relative to sphinx root).",
+)
+@click.option(
+    "-sr",
+    "--sphinxroot",
+    type=str,
+    required=True,
+    default="..",
+    help="Relative path to the sphinx root.",
+)
+def main(mdpath, sphinxroot):
+    root_path = os.path.abspath(sphinxroot)
+    mdpath = os.path.join(root_path, mdpath)
+    targetpath = mdpath + "_m2r"
+
+    # clear target directory:
+    if os.path.isdir(targetpath):
+        shutil.rmtree(targetpath)
+    os.mkdir(targetpath)
+
+    mdfiles = [f for f in os.listdir(mdpath) if os.path.splitext(f)[1] == ".md"]
+    for f in mdfiles:
+        f_rst = parse_from_file(os.path.join(mdpath, f))
+        # regex magic: replace links that would otherwise be invalid in reST:
+        f_rst = fixLinks(f_rst, f, targetpath)
+        f_rst = fixTables(f_rst)
+        with open(os.path.join(targetpath, f.replace(".md", ".rst")), "w+") as file_:
+            file_.write(f_rst)
+
+
+if __name__ == "__main__":
+    main()
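
rebaseAbsRoot implements a restricted form of relative-path computation between two files that share the "sphinx-doc" root. A minimal sketch of the same idea using os.path.relpath (the file names are hypothetical):

    import os

    source = os.path.join("sphinx-doc", "getting_started_md_m2r", "GettingStarted.rst")
    target = os.path.join("sphinx-doc", "how_to_doc_md_m2r", "HowToDoc.rst")
    # path of target, expressed relative to the directory containing source
    relative = os.path.relpath(target, start=os.path.dirname(source))
    print(relative)  # ../how_to_doc_md_m2r/HowToDoc.rst (with os.sep on other platforms)
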
diff --git a/sphinx-doc/requirements_sphinx.txt b/sphinx-doc/requirements_sphinx.txt
index 511d1671395df732730a0d0807e394fea54e0569..eb84550313ea402f974e97ab1c7cb19b4f0bef06 100644
--- a/sphinx-doc/requirements_sphinx.txt
+++ b/sphinx-doc/requirements_sphinx.txt
@@ -3,3 +3,4 @@ sphinx
 sphinx-automodapi
 sphinxcontrib-fulltoc
 sphinx-markdown-tables
+m2r
diff --git a/sphinx-doc/ressources/data/config.csv b/sphinx-doc/ressources/data/config.csv
new file mode 100644
index 0000000000000000000000000000000000000000..6f31389afc600155fb18cd9710738d3fc809d065
--- /dev/null
+++ b/sphinx-doc/ressources/data/config.csv
@@ -0,0 +1,6 @@
+varname    ; test                                ; plot
+#----------;-------------------------------------;------
+SM2        ; shift(freq="15Min")                 ; False
+SM2        ; flagMissing()                       ; False
+'SM(1|2)+' ; flagRange(min=10, max=60)           ; False
+SM2        ; flagMAD(window="30d", z=3.5) ; True
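
The configuration above is a semicolon-separated table with a decorative comment row. A quick, hedged way to inspect its layout with pandas (this is only an illustration of the file format, not how saqc itself reads the file):

    import pandas as pd

    config = pd.read_csv(
        "sphinx-doc/ressources/data/config.csv",
        sep=";",
        comment="#",          # skips the '#----' separator row
        skipinitialspace=True,
    )
    config.columns = [c.strip() for c in config.columns]
    print(config)
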
diff --git a/sphinx-doc/ressources/data/config_ci.csv b/sphinx-doc/ressources/data/config_ci.csv
new file mode 100644
index 0000000000000000000000000000000000000000..05e22c807bd26768c2de338c35970720b60fdb3c
--- /dev/null
+++ b/sphinx-doc/ressources/data/config_ci.csv
@@ -0,0 +1,7 @@
+varname;test;plot
+SM2;shift(freq="15Min");False
+'.*';flagRange(min=10, max=60);False
+SM2;flagMissing();False
+SM2;flagRange(min=10, max=60);False
+SM2;flagMAD(window="30d", z=3.5);False
+Dummy;flag(func=(isflagged(SM1) | isflagged(SM2)))
diff --git a/sphinx-doc/ressources/data/data.csv b/sphinx-doc/ressources/data/data.csv
new file mode 100644
index 0000000000000000000000000000000000000000..bb853b10bcffe98dbfc8d11348bcfe3fdf47eb9a
--- /dev/null
+++ b/sphinx-doc/ressources/data/data.csv
@@ -0,0 +1,14694 @@
+Date,Battery,SM1,SM2
+2016-04-01 00:05:48,3573,32.685,29.3157
+2016-04-01 00:20:42,3572,32.7428,29.3157
+2016-04-01 00:35:37,3572,32.6186,29.3679
+2016-04-01 00:50:32,3572,32.736999999999995,29.3679
+2016-04-01 01:05:26,3572,32.736999999999995,29.3131
+2016-04-01 01:20:21,3571,32.6186,29.3157
+2016-04-01 01:35:16,3571,32.736999999999995,29.3157
+2016-04-01 01:50:11,3571,32.736999999999995,29.4727
+2016-04-01 02:05:05,3571,32.6186,29.5252
+2016-04-01 02:20:00,3571,32.911,29.5252
+2016-04-01 02:34:55,3570,32.85,29.5226
+2016-04-01 02:49:49,3570,32.911,29.4727
+2016-04-01 03:04:44,3570,32.9081,29.5252
+2016-04-01 03:19:39,3569,32.8993,29.5252
+2016-04-01 03:34:33,3569,32.9023,29.4176
+2016-04-01 03:49:28,3568,32.7861,29.5252
+2016-04-01 04:04:23,3568,32.9023,29.5252
+2016-04-01 04:19:17,3568,33.013000000000005,29.4727
+2016-04-01 04:34:12,3568,32.9023,29.5252
+2016-04-01 04:49:07,3567,32.8993,29.5252
+2016-04-01 05:04:01,3567,32.9023,29.5252
+2016-04-01 05:18:56,3566,32.9023,29.4727
+2016-04-01 05:33:50,3567,32.7861,29.5252
+2016-04-01 05:48:45,3566,32.8993,29.5252
+2016-04-01 06:03:39,3566,32.8993,29.4727
+2016-04-01 06:18:34,3566,32.8935,29.5252
+2016-04-01 06:33:28,3565,32.7774,29.4727
+2016-04-01 06:48:22,3565,32.8935,29.4727
+2016-04-01 07:03:17,3565,32.8935,29.5252
+2016-04-01 07:18:11,3565,32.8935,29.4727
+2016-04-01 07:33:06,3565,32.8935,29.5252
+2016-04-01 07:48:00,3565,32.8935,29.5252
+2016-04-01 08:02:55,3565,32.8935,29.47
+2016-04-01 08:17:49,3566,32.8935,29.5252
+2016-04-01 08:32:43,3567,32.8964,29.5252
+2016-04-01 08:47:38,3568,32.8935,29.5252
+2016-04-01 09:02:32,3570,32.8354,29.5252
+2016-04-01 09:17:27,3572,32.8935,29.4727
+2016-04-01 09:32:22,3575,32.8935,29.5252
+2016-04-01 09:47:17,3578,32.8354,29.4203
+2016-04-01 10:02:12,3581,32.8935,29.4176
+2016-04-01 10:17:07,3583,32.8354,29.47
+2016-04-01 10:32:02,3587,32.8964,29.4176
+2016-04-01 10:46:57,3591,32.8325,29.4203
+2016-04-01 11:01:52,3594,32.7774,29.5252
+2016-04-01 11:16:47,3596,32.7803,29.5252
+2016-04-01 11:31:42,3598,32.8935,29.4727
+2016-04-01 11:46:37,3598,32.7745,29.5252
+2016-04-01 12:01:32,3598,32.8935,29.5252
+2016-04-01 12:16:27,3598,32.8354,29.5252
+2016-04-01 12:31:22,3599,32.9023,29.4727
+2016-04-01 12:46:17,3599,32.7861,29.4727
+2016-04-01 13:01:12,3599,32.7861,29.47
+2016-04-01 13:16:08,3599,32.7949,29.4203
+2016-04-01 13:31:03,3599,32.9081,29.4176
+2016-04-01 13:45:58,3599,32.7891,29.4203
+2016-04-01 14:00:53,3599,32.8036,29.47
+2016-04-01 14:15:48,3599,32.7428,29.4203
+2016-04-01 14:30:43,3599,32.685,29.5252
+2016-04-01 14:45:38,3599,32.6937,29.4727
+2016-04-01 15:00:34,3599,32.636,29.4203
+2016-04-01 15:15:29,3599,32.636,29.4203
+2016-04-01 15:30:24,3599,32.636,29.4203
+2016-04-01 15:45:19,3599,32.636,29.47
+2016-04-01 16:00:15,3599,32.6389,29.4176
+2016-04-01 16:15:10,3599,32.7515,29.4203
+2016-04-01 16:30:05,3599,32.6937,29.4203
+2016-04-01 16:45:01,3599,32.636,29.4727
+2016-04-01 16:59:56,3599,32.7631,29.4727
+2016-04-01 17:14:51,3598,32.6447,29.4727
+2016-04-01 17:29:46,3598,32.6476,29.4727
+2016-04-01 17:44:41,3598,32.7024,29.47
+2016-04-01 17:59:36,3598,32.7024,29.4727
+2016-04-01 18:14:31,3597,32.5958,29.4203
+2016-04-01 18:29:26,3596,32.6534,29.4176
+2016-04-01 18:44:21,3595,32.6534,29.4727
+2016-04-01 18:59:16,3593,32.7689,29.5252
+2016-04-01 19:14:11,3591,32.7111,29.47
+2016-04-01 19:29:06,3589,32.7689,29.4727
+2016-04-01 19:44:01,3587,32.6563,29.47
+2016-04-01 19:58:55,3585,32.7111,29.4727
+2016-04-01 20:13:50,3583,32.6534,29.4727
+2016-04-01 20:28:45,3581,32.714,29.4727
+2016-04-01 20:43:39,3578,32.7689,29.5252
+2016-04-01 20:58:34,3577,32.7718,29.4203
+2016-04-01 21:13:28,3574,32.8269,29.4805
+2016-04-01 21:28:23,3573,32.7631,29.5252
+2016-04-01 21:43:17,3572,32.7053,29.5252
+2016-04-01 21:58:12,3570,32.7631,29.533
+2016-04-01 22:13:06,3569,32.6447,29.5304
+2016-04-01 22:28:00,3567,32.7631,29.4805
+2016-04-01 22:42:55,3566,32.7053,29.5304
+2016-04-01 22:57:50,3566,32.6937,29.533
+2016-04-01 23:12:44,3566,32.7631,29.533
+2016-04-01 23:27:38,3564,32.7631,29.533
+2016-04-01 23:42:33,3564,32.6937,29.4805
+2016-04-01 23:57:27,3563,32.6966,29.533
+2016-04-02 00:12:22,3562,32.6937,29.533
+2016-04-02 00:27:16,3561,32.636,29.533
+2016-04-02 00:42:11,3561,32.6937,29.4805
+2016-04-02 00:57:05,3560,32.6389,29.5304
+2016-04-02 01:11:59,3559,32.7515,29.5357
+2016-04-02 01:26:54,3559,32.8094,29.533
+2016-04-02 01:41:49,3559,32.6966,29.533
+2016-04-02 01:56:43,3559,32.7515,29.4805
+2016-04-02 02:11:38,3559,32.7515,29.533
+2016-04-02 02:26:32,3558,32.636,29.533
+2016-04-02 02:41:26,3558,32.7515,29.5857
+2016-04-02 02:56:21,3558,32.6389,29.4281
+2016-04-02 03:11:15,3557,32.6879,29.533
+2016-04-02 03:26:10,3557,32.6908,29.533
+2016-04-02 03:41:04,3557,32.685,29.5304
+2016-04-02 03:55:58,3556,32.6908,29.5357
+2016-04-02 04:10:53,3556,32.6215,29.4255
+2016-04-02 04:25:47,3556,32.6215,29.5357
+2016-04-02 04:40:42,3556,32.736999999999995,29.5357
+2016-04-02 04:55:36,3555,32.6763,29.5409
+2016-04-02 05:10:30,3556,32.6186,29.5435
+2016-04-02 05:25:32,3556,32.7341,29.5409
+2016-04-02 05:40:26,3556,32.7283,29.5909
+2016-04-02 05:55:21,3556,32.7254,29.5409
+2016-04-02 06:10:15,3557,32.7254,29.3758
+2016-04-02 06:25:10,3556,32.6705,29.4359
+2016-04-02 06:40:04,3557,32.6676,29.4281
+2016-04-02 06:54:59,3557,32.6013,29.3862
+2016-04-02 07:09:53,3556,32.6618,29.3836
+2016-04-02 07:24:48,3556,32.7196,29.3287
+2016-04-02 07:39:42,3557,32.6618,29.3235
+2016-04-02 07:54:37,3558,32.7196,29.3313
+2016-04-02 08:09:31,3559,32.7166,29.3732
+2016-04-02 08:24:26,3562,32.7196,29.3313
+2016-04-02 08:39:20,3565,32.7196,29.3287
+2016-04-02 08:54:15,3566,32.6618,29.381
+2016-04-02 09:09:10,3570,32.7196,29.3758
+2016-04-02 09:24:04,3572,32.7196,29.3313
+2016-04-02 09:38:59,3575,32.6042,29.3235
+2016-04-02 09:53:54,3577,32.7196,29.3235
+2016-04-02 10:08:49,3579,32.6071,29.3235
+2016-04-02 10:23:43,3582,32.4892,29.3209
+2016-04-02 10:38:38,3585,32.4892,29.3235
+2016-04-02 10:53:33,3588,32.4319,29.3235
+2016-04-02 11:08:28,3591,32.4892,29.3235
+2016-04-02 11:23:24,3593,32.4864,29.3235
+2016-04-02 11:38:19,3595,32.4892,29.3758
+2016-04-02 11:53:14,3597,32.4892,29.3235
+2016-04-02 12:08:09,3598,32.4892,29.3209
+2016-04-02 12:23:04,3598,32.4892,29.3235
+2016-04-02 12:38:00,3599,32.4864,29.3209
+2016-04-02 12:52:55,3599,32.4319,29.3209
+2016-04-02 13:07:50,3599,32.4892,29.3235
+2016-04-02 13:22:46,3599,32.4348,29.3209
+2016-04-02 13:37:41,3599,32.4377,29.3235
+2016-04-02 13:52:37,3599,32.5036,29.3209
+2016-04-02 14:07:32,3599,32.5036,29.3209
+2016-04-02 14:22:28,3599,32.5065,29.3758
+2016-04-02 14:37:23,3599,32.5123,29.3235
+2016-04-02 14:52:19,3599,32.455,29.3209
+2016-04-02 15:07:14,3599,32.521,29.3235
+2016-04-02 15:22:10,3599,32.4636,29.3235
+2016-04-02 15:37:06,3599,32.521,29.3209
+2016-04-02 15:52:01,3599,32.5784,29.3732
+2016-04-02 16:06:56,3599,32.521,29.3209
+2016-04-02 16:21:52,3599,32.4665,29.3235
+2016-04-02 16:36:47,3599,32.415,29.3732
+2016-04-02 16:51:43,3599,32.5296,29.3209
+2016-04-02 17:06:38,3599,32.4809,29.3235
+2016-04-02 17:21:33,3599,32.4809,29.3235
+2016-04-02 17:36:29,3599,32.4809,29.2714
+2016-04-02 17:51:24,3599,32.5412,29.3732
+2016-04-02 18:06:20,3599,32.5469,29.2688
+2016-04-02 18:21:15,3599,32.544000000000004,29.3732
+2016-04-02 18:36:10,3599,32.4867,29.3209
+2016-04-02 18:51:06,3599,32.6015,29.3235
+2016-04-02 19:06:01,3599,32.5469,29.3235
+2016-04-02 19:20:56,3599,32.5498,29.3235
+2016-04-02 19:35:51,3599,32.544000000000004,29.2714
+2016-04-02 19:50:46,3598,32.4895,29.3209
+2016-04-02 20:05:42,3598,32.4867,29.3235
+2016-04-02 20:20:37,3598,32.544000000000004,29.3758
+2016-04-02 20:35:32,3596,32.4867,29.381
+2016-04-02 20:50:27,3595,32.5469,29.3235
+2016-04-02 21:05:22,3594,32.6015,29.3235
+2016-04-02 21:20:24,3593,32.5469,29.3235
+2016-04-02 21:35:18,3592,32.5469,29.3235
+2016-04-02 21:50:13,3591,32.544000000000004,29.3313
+2016-04-02 22:05:08,3590,32.5469,29.3836
+2016-04-02 22:20:03,3588,32.544000000000004,29.3836
+2016-04-02 22:34:58,3588,32.544000000000004,29.3235
+2016-04-02 22:49:52,3588,32.5469,29.3313
+2016-04-02 23:04:47,3588,32.544000000000004,29.3836
+2016-04-02 23:19:42,3588,32.4895,29.3313
+2016-04-02 23:34:36,3587,32.6044,29.3836
+2016-04-02 23:49:31,3587,32.6015,29.3313
+2016-04-03 01:04:13,3587,32.4895,29.3287
+2016-04-03 01:19:07,3587,32.544000000000004,29.3313
+2016-04-03 01:34:02,3587,32.544000000000004,29.3313
+2016-04-03 01:48:57,3587,32.4867,29.3836
+2016-04-03 02:03:51,3586,32.5469,29.3391
+2016-04-03 02:18:46,3587,32.544000000000004,29.3313
+2016-04-03 02:33:41,3586,32.4294,29.3391
+2016-04-03 02:48:35,3586,32.4895,29.3365
+2016-04-03 03:03:30,3587,32.5469,29.3391
+2016-04-03 03:18:25,3587,32.544000000000004,29.287
+2016-04-03 03:33:20,3587,32.4867,29.3365
+2016-04-03 03:48:15,3586,32.6044,29.3888
+2016-04-03 04:03:09,3586,32.5469,29.183000000000003
+2016-04-03 04:18:04,3586,32.4867,29.2349
+2016-04-03 04:32:59,3585,32.6044,29.183000000000003
+2016-04-03 04:47:53,3585,32.5469,29.1311
+2016-04-03 05:02:48,3584,32.5498,29.183000000000003
+2016-04-03 05:17:43,3584,32.544000000000004,29.183000000000003
+2016-04-03 05:32:37,3584,32.6044,29.2349
+2016-04-03 05:47:32,3583,32.544000000000004,29.2349
+2016-04-03 06:02:27,3583,32.6044,29.183000000000003
+2016-04-03 06:17:21,3582,32.5469,29.183000000000003
+2016-04-03 06:32:16,3582,32.5469,29.183000000000003
+2016-04-03 06:47:10,3581,32.544000000000004,29.1882
+2016-04-03 07:02:04,3581,32.4895,29.2349
+2016-04-03 07:17:07,3581,32.544000000000004,29.1311
+2016-04-03 07:32:02,3582,32.544000000000004,29.1804
+2016-04-03 07:46:56,3583,32.5469,29.2401
+2016-04-03 08:01:51,3584,32.544000000000004,29.2401
+2016-04-03 08:16:46,3585,32.544000000000004,29.1882
+2016-04-03 08:31:41,3587,32.5469,29.1311
+2016-04-03 08:46:36,3588,32.544000000000004,29.1882
+2016-04-03 09:01:38,3591,32.4895,29.2401
+2016-04-03 09:16:33,3594,32.5469,29.1882
+2016-04-03 09:31:28,3597,32.544000000000004,29.1363
+2016-04-03 09:46:23,3598,32.4895,29.1882
+2016-04-03 10:01:18,3599,32.544000000000004,29.1882
+2016-04-03 10:16:13,3599,32.544000000000004,29.1882
+2016-04-03 10:31:08,3599,32.4867,29.1882
+2016-04-03 10:46:04,3599,32.544000000000004,29.1363
+2016-04-03 11:00:59,3599,32.544000000000004,29.1882
+2016-04-03 11:15:55,3599,32.4294,29.1363
+2016-04-03 11:30:50,3599,32.5469,29.1363
+2016-04-03 11:45:45,3599,32.4867,29.1882
+2016-04-03 12:00:41,3599,32.544000000000004,29.1882
+2016-04-03 12:15:36,3599,32.438,29.1907
+2016-04-03 12:30:32,3599,32.438,29.1882
+2016-04-03 12:45:28,3599,32.4953,29.1363
+2016-04-03 13:00:23,3599,32.504,29.1882
+2016-04-03 13:15:19,3599,32.5643,29.1882
+2016-04-03 13:30:15,3599,32.5729,29.1882
+2016-04-03 13:45:10,3599,32.5729,29.1882
+2016-04-03 14:00:06,3599,32.5126,29.1882
+2016-04-03 14:15:02,3599,32.6276,29.1363
+2016-04-03 14:29:58,3599,32.57,29.1882
+2016-04-03 14:44:54,3599,32.5155,29.1882
+2016-04-03 14:59:49,3599,32.5787,29.2401
+2016-04-03 15:14:45,3599,32.5241,29.1882
+2016-04-03 15:29:41,3599,32.645,29.1363
+2016-04-03 15:44:37,3599,32.5386,29.1363
+2016-04-03 15:59:32,3599,32.4812,29.1882
+2016-04-03 16:14:28,3599,32.5961,29.1882
+2016-04-03 16:29:24,3599,32.4812,29.1856
+2016-04-03 16:44:19,3599,32.5961,29.1882
+2016-04-03 16:59:15,3599,32.5961,29.1363
+2016-04-03 17:14:11,3599,32.6537,29.1882
+2016-04-03 17:29:06,3599,32.3754,29.1882
+2016-04-03 17:44:02,3599,32.3754,29.2401
+2016-04-03 17:58:58,3599,32.3841,29.1882
+2016-04-03 18:13:53,3599,32.3812,29.1882
+2016-04-03 18:28:49,3599,32.3956,29.2401
+2016-04-03 18:43:44,3599,32.3927,29.1882
+2016-04-03 18:58:40,3599,32.4499,29.1363
+2016-04-03 19:13:35,3599,32.3927,29.1363
+2016-04-03 19:28:30,3599,32.4499,29.1882
+2016-04-03 19:43:26,3599,32.3927,29.1363
+2016-04-03 19:58:21,3599,32.3927,29.1882
+2016-04-03 20:13:16,3599,32.4013,29.1882
+2016-04-03 20:28:11,3599,32.3927,29.1882
+2016-04-03 20:43:07,3599,32.4585,29.1882
+2016-04-03 20:58:02,3599,32.4585,29.1882
+2016-04-03 21:12:57,3599,32.4013,29.1882
+2016-04-03 21:27:53,3599,32.4013,29.1882
+2016-04-03 21:42:48,3599,32.4013,29.1882
+2016-04-03 21:57:43,3599,32.4013,29.1882
+2016-04-03 22:12:38,3599,32.4013,29.1882
+2016-04-03 22:27:34,3599,32.4585,29.2401
+2016-04-03 22:42:29,3599,32.3984,29.2479
+2016-04-03 22:57:24,3599,32.3984,29.144
+2016-04-03 23:12:19,3599,32.3984,29.1959
+2016-04-03 23:27:14,3599,32.4013,29.144
+2016-04-03 23:42:09,3599,32.4042,29.1985
+2016-04-03 23:57:04,3599,32.5158,29.1959
+2016-04-04 00:11:59,3599,32.4013,29.1959
+2016-04-04 00:26:54,3599,32.4585,29.2037
+2016-04-04 00:41:49,3599,32.4013,29.1959
+2016-04-04 00:56:44,3599,32.4585,29.2011
+2016-04-04 01:11:39,3599,32.4013,29.2037
+2016-04-04 01:26:34,3599,32.4013,29.2037
+2016-04-04 01:41:29,3599,32.4013,29.2037
+2016-04-04 01:56:24,3599,32.3442,29.2037
+2016-04-04 02:11:19,3599,32.5158,29.2037
+2016-04-04 02:26:14,3599,32.4013,29.2557
+2016-04-04 02:41:09,3599,32.4013,29.2037
+2016-04-04 02:56:04,3599,32.4585,29.2037
+2016-04-04 03:10:58,3598,32.4013,29.2115
+2016-04-04 03:25:53,3598,32.4585,29.2115
+2016-04-04 03:40:48,3598,32.4585,29.2115
+2016-04-04 03:55:43,3598,32.4013,29.2141
+2016-04-04 04:10:38,3598,32.3442,29.2115
+2016-04-04 04:25:33,3598,32.4614,29.2115
+2016-04-04 04:40:28,3598,32.4013,29.2115
+2016-04-04 04:55:22,3598,32.4585,29.2115
+2016-04-04 05:10:17,3598,32.4585,29.2635
+2016-04-04 05:25:12,3598,32.4585,29.2115
+2016-04-04 05:40:07,3598,32.5158,29.2115
+2016-04-04 05:55:02,3598,32.4013,29.2115
+2016-04-04 06:09:57,3598,32.3442,29.2635
+2016-04-04 06:24:52,3598,32.4585,29.2141
+2016-04-04 06:39:47,3598,32.4585,29.2635
+2016-04-04 06:54:42,3598,32.4013,29.2115
+2016-04-04 07:09:37,3598,32.4585,29.2115
+2016-04-04 07:24:32,3598,32.4013,29.2635
+2016-04-04 07:39:27,3598,32.4013,29.2115
+2016-04-04 07:54:22,3598,32.4585,29.2115
+2016-04-04 08:09:17,3599,32.4585,29.2115
+2016-04-04 08:24:12,3599,32.3442,29.2115
+2016-04-04 08:39:07,3599,32.4013,29.2635
+2016-04-04 08:54:02,3599,32.4585,29.2661
+2016-04-04 09:08:57,3599,32.4013,29.2115
+2016-04-04 09:23:52,3599,32.4013,29.2635
+2016-04-04 09:38:47,3599,32.4556,29.2115
+2016-04-04 09:53:43,3598,32.4013,29.1596
+2016-04-04 10:08:38,3599,32.3442,29.2115
+2016-04-04 10:23:33,3599,32.3984,29.2115
+2016-04-04 10:38:29,3599,32.4013,29.2141
+2016-04-04 10:53:24,3599,32.4042,29.2115
+2016-04-04 11:08:20,3599,32.4556,29.2635
+2016-04-04 11:23:15,3599,32.4585,29.2115
+2016-04-04 11:38:11,3599,32.4071,29.2635
+2016-04-04 11:53:07,3599,32.4672,29.1596
+2016-04-04 12:08:03,3599,32.4099,29.2115
+2016-04-04 12:22:58,3599,32.4758,29.2115
+2016-04-04 12:37:54,3599,32.4758,29.2167
+2016-04-04 12:52:50,3599,32.4157,29.2167
+2016-04-04 13:07:46,3599,32.4186,29.2193
+2016-04-04 13:22:42,3599,32.3614,29.2115
+2016-04-04 13:37:38,3599,32.3102,29.2167
+2016-04-04 13:52:34,3599,32.4243,29.2167
+2016-04-04 14:07:30,3599,32.4243,29.1647
+2016-04-04 14:22:26,3599,32.4243,29.2167
+2016-04-04 14:37:29,3599,32.433,29.2167
+2016-04-04 14:52:25,3599,32.4902,29.2167
+2016-04-04 15:07:21,3599,32.3188,29.2193
+2016-04-04 15:22:17,3599,32.4416,29.2167
+2016-04-04 15:37:13,3599,32.4445,29.1647
+2016-04-04 15:52:09,3599,32.4416,29.2167
+2016-04-04 16:07:05,3599,32.4387,29.1673
+2016-04-04 16:22:01,3599,32.3274,29.227
+2016-04-04 16:36:57,3599,32.279,29.2167
+2016-04-04 16:51:53,3599,32.2762,29.1725
+2016-04-04 17:06:49,3599,32.2193,29.2244
+2016-04-04 17:21:45,3599,32.336,29.2244
+2016-04-04 17:36:41,3599,32.279,29.2244
+2016-04-04 17:51:37,3599,32.2848,29.1725
+2016-04-04 18:06:33,3599,32.3417,29.227
+2016-04-04 18:21:29,3599,32.3417,29.1751
+2016-04-04 18:36:25,3599,32.2934,29.2843
+2016-04-04 18:51:21,3599,32.2962,29.2244
+2016-04-04 19:06:17,3599,32.2962,29.2322
+2016-04-04 19:21:12,3599,32.2962,29.2348
+2016-04-04 19:36:08,3599,32.2934,29.2322
+2016-04-04 19:51:04,3599,32.2962,29.2322
+2016-04-04 20:05:59,3599,32.2962,29.2348
+2016-04-04 20:20:55,3599,32.2962,29.2322
+2016-04-04 20:35:51,3599,32.2962,29.1803
+2016-04-04 20:50:46,3599,32.2962,29.2348
+2016-04-04 21:05:42,3599,32.3532,29.2348
+2016-04-04 21:20:37,3599,32.4675,29.2322
+2016-04-04 21:35:33,3599,32.3048,29.2322
+2016-04-04 21:50:28,3599,32.3503,29.2322
+2016-04-04 22:05:24,3599,32.3503,29.2322
+2016-04-04 22:20:19,3599,32.3532,29.2348
+2016-04-04 22:35:15,3599,32.2365,29.2348
+2016-04-04 22:50:10,3599,32.2394,29.2322
+2016-04-04 23:05:06,3599,32.2962,29.2322
+2016-04-04 23:20:01,3599,32.2962,29.2348
+2016-04-04 23:34:57,3599,32.2934,29.2322
+2016-04-04 23:49:52,3599,32.2962,29.2348
+2016-04-05 00:04:47,3599,32.3503,29.2348
+2016-04-05 00:19:43,3599,32.2962,29.2322
+2016-04-05 00:34:38,3598,32.2962,29.2348
+2016-04-05 00:49:37,3598,32.2962,29.2348
+2016-04-05 01:04:32,3598,32.3532,29.2348
+2016-04-05 01:19:28,3598,32.2394,29.2322
+2016-04-05 01:34:23,3598,32.2962,29.2348
+2016-04-05 01:49:18,3597,32.2962,29.2426
+2016-04-05 02:04:14,3597,32.2365,29.24
+2016-04-05 02:19:09,3596,32.2962,29.2426
+2016-04-05 02:34:04,3596,32.2394,29.2921
+2016-04-05 02:48:59,3595,32.3532,29.2426
+2016-04-05 03:03:55,3594,32.2394,29.2426
+2016-04-05 03:18:50,3594,32.2962,29.2478
+2016-04-05 03:33:45,3594,32.2876,29.24
+2016-04-05 03:48:40,3594,32.3446,29.24
+2016-04-05 04:03:35,3593,32.3446,29.2504
+2016-04-05 04:18:30,3593,32.2876,29.2504
+2016-04-05 04:33:25,3593,32.2876,29.2504
+2016-04-05 04:48:20,3593,32.2876,29.2999
+2016-04-05 05:03:15,3593,32.3446,29.2504
+2016-04-05 05:18:10,3592,32.3446,29.2504
+2016-04-05 05:33:05,3592,32.336,29.2504
+2016-04-05 05:48:00,3592,32.336,29.2504
+2016-04-05 06:02:55,3592,32.279,29.2478
+2016-04-05 06:17:50,3592,32.2762,29.3103
+2016-04-05 06:32:45,3592,32.336,29.3025
+2016-04-05 06:47:40,3592,32.2222,29.2478
+2016-04-05 07:02:35,3592,32.279,29.1465
+2016-04-05 07:17:30,3591,32.279,29.3025
+2016-04-05 07:32:25,3591,32.336,29.1025
+2016-04-05 07:47:20,3590,32.2762,29.1025
+2016-04-05 08:02:15,3590,32.336,29.1465
+2016-04-05 08:17:10,3590,32.4502,29.1025
+2016-04-05 08:32:05,3590,32.3331,29.1543
+2016-04-05 08:47:00,3592,32.279,29.1025
+2016-04-05 09:01:55,3592,32.336,29.0999
+2016-04-05 09:16:50,3594,32.336,29.1025
+2016-04-05 09:31:45,3595,32.2193,29.0999
+2016-04-05 09:46:40,3596,32.279,29.1543
+2016-04-05 10:01:35,3598,32.279,29.1025
+2016-04-05 10:16:31,3598,32.279,29.1025
+2016-04-05 10:31:26,3598,32.336,29.1543
+2016-04-05 10:46:21,3598,32.3331,29.1543
+2016-04-05 11:01:17,3598,32.279,29.0508
+2016-04-05 11:16:12,3599,32.3902,29.0999
+2016-04-05 11:31:08,3599,32.336,29.0999
+2016-04-05 11:46:03,3599,32.336,29.0508
+2016-04-05 12:00:59,3599,32.336,29.0999
+2016-04-05 12:15:54,3599,32.336,29.1025
+2016-04-05 12:30:50,3599,32.336,29.0999
+2016-04-05 12:45:46,3599,32.3331,29.1025
+2016-04-05 13:00:42,3599,32.336,29.0999
+2016-04-05 13:15:37,3599,32.3446,29.0999
+2016-04-05 13:30:33,3599,32.4017,29.0999
+2016-04-05 13:45:29,3599,32.3446,29.0999
+2016-04-05 14:00:24,3599,32.3503,29.0482
+2016-04-05 14:15:20,3599,32.3532,29.0999
+2016-04-05 14:30:16,3599,32.2934,29.0999
+2016-04-05 14:45:12,3599,32.3532,29.0999
+2016-04-05 15:00:08,3599,32.126,29.0999
+2016-04-05 15:15:03,3599,32.2962,29.0973
+2016-04-05 15:29:59,3599,32.248000000000005,29.0999
+2016-04-05 15:44:55,3599,32.1345,29.1025
+2016-04-05 15:59:51,3599,32.1345,29.0999
+2016-04-05 16:14:47,3599,32.1912,29.0508
+2016-04-05 16:29:43,3599,32.1431,29.0999
+2016-04-05 16:44:38,3599,32.1431,29.1543
+2016-04-05 16:59:34,3599,32.1402,29.0999
+2016-04-05 17:14:30,3599,32.1431,29.0999
+2016-04-05 17:29:26,3599,32.0865,29.1025
+2016-04-05 17:44:21,3599,32.1998,29.0999
+2016-04-05 17:59:17,3599,32.1431,29.1025
+2016-04-05 18:14:13,3599,32.1969,29.1025
+2016-04-05 18:29:08,3599,32.1488,29.0999
+2016-04-05 18:44:04,3599,32.1488,29.0999
+2016-04-05 18:59:00,3599,32.1488,29.0999
+2016-04-05 19:13:55,3599,32.2055,29.1025
+2016-04-05 19:28:51,3599,32.2623,29.0482
+2016-04-05 19:43:46,3599,32.2623,29.0999
+2016-04-05 19:58:42,3599,32.1488,29.1025
+2016-04-05 20:13:38,3599,32.1516,29.1025
+2016-04-05 20:28:33,3599,32.1488,29.1076
+2016-04-05 20:43:28,3599,32.1488,29.1076
+2016-04-05 20:58:24,3599,32.3192,29.1076
+2016-04-05 21:13:19,3599,32.1488,29.2114
+2016-04-05 21:28:14,3598,32.3192,29.1076
+2016-04-05 21:43:10,3598,32.4333,29.1076
+2016-04-05 21:58:05,3598,32.3791,29.1595
+2016-04-05 22:13:00,3596,32.3733,29.1076
+2016-04-05 22:27:55,3595,32.1516,29.1595
+2016-04-05 22:42:51,3593,32.2623,29.1076
+2016-04-05 22:57:46,3592,32.1488,29.1076
+2016-04-05 23:12:41,3591,32.1488,29.1595
+2016-04-05 23:27:36,3590,32.1488,29.1102
+2016-04-05 23:42:31,3589,32.3762,29.1076
+2016-04-05 23:57:26,3588,32.2623,29.1076
+2016-04-06 00:12:21,3588,32.1998,29.1076
+2016-04-06 00:27:16,3586,32.1431,29.118
+2016-04-06 00:42:11,3586,32.1402,29.1154
+2016-04-06 00:57:06,3585,32.0865,29.1154
+2016-04-06 01:12:01,3585,32.2565,29.1154
+2016-04-06 01:26:57,3585,32.1431,29.1672
+2016-04-06 01:41:52,3583,32.1431,29.1154
+2016-04-06 01:56:47,3583,32.0865,29.0714
+2016-04-06 02:11:42,3583,32.1998,29.118
+2016-04-06 02:26:37,3582,32.1431,29.118
+2016-04-06 02:41:32,3581,32.2508,29.1154
+2016-04-06 02:56:26,3581,32.1374,29.1154
+2016-04-06 03:11:21,3581,32.1345,29.1672
+2016-04-06 03:26:17,3581,32.1345,29.1672
+2016-04-06 03:41:12,3581,32.1345,29.1232
+2016-04-06 03:56:07,3580,32.126,29.1232
+2016-04-06 04:11:02,3579,32.078,29.2269
+2016-04-06 04:25:56,3579,32.126,29.1232
+2016-04-06 04:40:51,3578,32.0694,29.175
+2016-04-06 04:55:46,3577,32.126,29.1257
+2016-04-06 05:10:41,3577,32.0723,29.175
+2016-04-06 05:25:36,3575,32.1288,29.0714
+2016-04-06 05:40:30,3575,32.126,29.2269
+2016-04-06 05:55:25,3574,32.126,29.1776
+2016-04-06 06:10:20,3573,32.0609,29.1232
+2016-04-06 06:25:15,3572,32.2308,29.1776
+2016-04-06 06:40:10,3571,32.1174,29.2295
+2016-04-06 06:55:04,3571,32.0609,29.175
+2016-04-06 07:09:59,3570,32.1174,29.1257
+2016-04-06 07:24:54,3571,32.1088,29.2269
+2016-04-06 07:39:48,3571,32.0523,29.1232
+2016-04-06 07:54:43,3572,32.0523,29.2269
+2016-04-06 08:09:38,3573,32.2222,29.2269
+2016-04-06 08:24:33,3574,32.1117,29.1232
+2016-04-06 08:39:27,3576,32.106,29.074
+2016-04-06 08:54:22,3579,32.1088,29.1232
+2016-04-06 09:09:17,3581,32.2193,29.1257
+2016-04-06 09:24:12,3584,32.2193,29.1232
+2016-04-06 09:39:07,3585,32.1655,29.1232
+2016-04-06 09:54:02,3586,32.1003,29.1232
+2016-04-06 10:08:57,3588,32.1655,29.1232
+2016-04-06 10:23:52,3590,32.2222,29.1232
+2016-04-06 10:38:48,3592,32.106,29.1232
+2016-04-06 10:53:43,3594,32.1003,29.1232
+2016-04-06 11:08:38,3595,32.1003,29.1232
+2016-04-06 11:23:33,3597,32.1088,29.1232
+2016-04-06 11:38:28,3598,32.0523,29.1232
+2016-04-06 11:53:23,3598,32.1003,29.1232
+2016-04-06 12:08:18,3598,32.1598,29.1232
+2016-04-06 12:23:14,3598,32.1088,29.1232
+2016-04-06 12:38:09,3599,32.1655,29.1232
+2016-04-06 12:53:04,3599,32.1655,29.1232
+2016-04-06 13:07:59,3599,32.2222,29.1232
+2016-04-06 13:22:54,3599,32.2193,29.1232
+2016-04-06 13:37:50,3599,32.1088,29.175
+2016-04-06 13:52:45,3599,32.1626,29.1232
+2016-04-06 14:07:40,3599,32.106,29.1232
+2016-04-06 14:22:36,3599,32.2222,29.1232
+2016-04-06 14:37:31,3599,32.1088,29.1232
+2016-04-06 14:52:26,3599,32.2308,29.2269
+2016-04-06 15:07:22,3599,32.1174,29.1232
+2016-04-06 15:22:17,3599,32.2308,29.1776
+2016-04-06 15:37:12,3599,32.1174,29.1232
+2016-04-06 15:52:07,3599,32.2308,29.1232
+2016-04-06 16:07:03,3599,32.1174,29.1232
+2016-04-06 16:21:58,3599,32.2394,29.1232
+2016-04-06 16:36:53,3599,32.126,29.1257
+2016-04-06 16:51:49,3599,32.126,29.1232
+2016-04-06 17:06:44,3599,32.1826,29.1232
+2016-04-06 17:21:40,3599,32.0694,29.1232
+2016-04-06 17:36:35,3599,32.2394,29.1232
+2016-04-06 17:51:30,3599,32.1826,29.1232
+2016-04-06 18:06:26,3599,32.126,29.1257
+2016-04-06 18:21:21,3599,32.1826,29.1232
+2016-04-06 18:36:16,3599,32.1826,29.1232
+2016-04-06 18:51:12,3599,32.1826,29.1257
+2016-04-06 19:06:07,3599,32.126,29.1232
+2016-04-06 19:21:02,3599,32.2394,29.1232
+2016-04-06 19:35:57,3598,32.126,29.1232
+2016-04-06 19:50:53,3598,32.1826,29.1232
+2016-04-06 20:05:48,3597,32.126,29.1232
+2016-04-06 20:20:43,3596,32.0694,29.1232
+2016-04-06 20:35:38,3595,32.1231,29.1206
+2016-04-06 20:50:33,3594,32.1826,29.1232
+2016-04-06 21:05:28,3592,32.1826,29.1232
+2016-04-06 21:20:23,3591,32.126,29.1232
+2016-04-06 21:35:18,3590,32.2962,29.1232
+2016-04-06 21:50:13,3590,32.1826,29.1232
+2016-04-06 22:05:15,3589,32.126,29.0224
+2016-04-06 22:20:10,3589,32.126,29.0198
+2016-04-06 22:35:05,3588,32.126,28.9682
+2016-04-06 22:50:00,3589,32.2962,29.1232
+2016-04-06 23:04:55,3589,32.2394,28.9682
+2016-04-06 23:19:50,3589,32.1203,29.0198
+2016-04-06 23:34:45,3588,32.1174,29.0198
+2016-04-06 23:49:40,3588,32.174,28.9682
+2016-04-07 00:04:36,3588,32.174,28.9682
+2016-04-07 00:19:31,3587,32.0637,29.0198
+2016-04-07 00:34:26,3587,32.0609,28.9682
+2016-04-07 00:49:21,3587,32.1174,29.0198
+2016-04-07 01:04:15,3586,32.2308,28.9168
+2016-04-07 01:19:10,3585,32.2222,28.9682
+2016-04-07 01:34:05,3585,32.1655,29.0275
+2016-04-07 01:49:00,3584,32.2222,29.0275
+2016-04-07 02:03:55,3584,32.106,28.9759
+2016-04-07 02:18:50,3584,32.0523,28.9168
+2016-04-07 02:33:45,3582,32.1088,29.0275
+2016-04-07 02:48:40,3581,32.0495,29.0275
+2016-04-07 03:03:34,3581,32.2762,29.0198
+2016-04-07 03:18:29,3580,32.1088,28.9759
+2016-04-07 03:33:24,3579,32.1655,29.0275
+2016-04-07 03:48:19,3578,32.0523,28.9785
+2016-04-07 04:03:13,3577,32.2222,28.9759
+2016-04-07 04:18:08,3576,32.0523,29.0275
+2016-04-07 04:33:03,3575,32.106,29.0275
+2016-04-07 04:47:58,3574,32.0467,29.0198
+2016-04-07 05:02:52,3573,32.0438,29.0275
+2016-04-07 05:17:47,3571,32.0438,28.9759
+2016-04-07 05:32:42,3571,32.1031,28.9785
+2016-04-07 05:47:36,3569,32.1031,29.0275
+2016-04-07 06:02:31,3567,33.7913,29.0301
+2016-04-07 06:17:25,3566,32.1541,29.0327
+2016-04-07 06:32:20,3565,32.0467,29.0224
+2016-04-07 06:47:14,3563,32.0467,28.9759
+2016-04-07 07:02:09,3563,32.0946,28.9759
+2016-04-07 07:17:04,3561,32.0918,28.9785
+2016-04-07 07:31:58,3561,32.0946,29.0275
+2016-04-07 07:46:53,3560,32.0946,29.0275
+2016-04-07 08:01:47,3559,32.0946,28.9759
+2016-04-07 08:16:42,3561,32.0832,29.0301
+2016-04-07 08:31:37,3561,32.1398,28.9759
+2016-04-07 08:46:31,3564,32.0861,29.0275
+2016-04-07 09:01:26,3568,32.0267,29.0275
+2016-04-07 09:16:21,3572,32.0832,28.9245
+2016-04-07 09:31:16,3575,32.0832,29.0275
+2016-04-07 09:46:11,3577,32.0861,28.9759
+2016-04-07 10:01:06,3581,32.0832,29.0275
+2016-04-07 10:16:01,3582,32.0832,29.0275
+2016-04-07 10:30:56,3585,32.1398,29.0275
+2016-04-07 10:45:51,3586,32.0861,28.9682
+2016-04-07 11:00:46,3587,32.1398,28.9759
+2016-04-07 11:15:41,3588,32.0832,29.0275
+2016-04-07 11:30:37,3588,32.1398,28.9682
+2016-04-07 11:45:32,3588,32.1965,28.9682
+2016-04-07 12:00:27,3588,32.1426,28.9682
+2016-04-07 12:15:23,3588,32.0267,28.9708
+2016-04-07 12:30:18,3590,32.1398,28.9193
+2016-04-07 12:45:13,3592,32.1341,28.9682
+2016-04-07 13:00:08,3593,32.0832,28.9759
+2016-04-07 13:15:04,3594,32.0861,28.9682
+2016-04-07 13:29:59,3595,32.0861,28.9245
+2016-04-07 13:44:54,3595,32.0832,29.0198
+2016-04-07 13:59:50,3596,32.1965,28.9682
+2016-04-07 14:14:45,3597,32.1398,29.0198
+2016-04-07 14:29:40,3598,32.0832,29.0198
+2016-04-07 14:44:35,3598,32.0861,28.9193
+2016-04-07 14:59:31,3598,32.1398,28.9168
+2016-04-07 15:14:26,3598,32.0832,28.9682
+2016-04-07 15:29:21,3598,32.0832,28.9168
+2016-04-07 15:44:16,3598,32.0353,29.0198
+2016-04-07 15:59:12,3598,32.1512,29.0198
+2016-04-07 16:14:07,3598,32.205,28.9168
+2016-04-07 16:29:02,3598,32.0946,28.9682
+2016-04-07 16:43:57,3598,32.1512,28.9168
+2016-04-07 16:59:00,3598,32.1003,28.9682
+2016-04-07 17:13:55,3598,32.041,28.9682
+2016-04-07 17:28:51,3598,32.1003,28.9682
+2016-04-07 17:43:46,3597,32.1031,28.9168
+2016-04-07 17:58:41,3596,32.0467,28.9682
+2016-04-07 18:13:36,3596,32.1031,28.9682
+2016-04-07 18:28:31,3595,32.1598,28.9682
+2016-04-07 18:43:26,3594,32.2108,28.9168
+2016-04-07 18:58:22,3593,32.0975,28.9682
+2016-04-07 19:13:16,3592,32.2136,28.9168
+2016-04-07 19:28:11,3589,32.1598,28.9682
+2016-04-07 19:43:06,3588,32.1031,28.9682
+2016-04-07 19:58:01,3586,32.1031,28.9708
+2016-04-07 20:12:56,3585,32.1031,28.9682
+2016-04-07 20:27:58,3582,32.0438,29.0172
+2016-04-07 20:42:53,3580,32.0467,28.9682
+2016-04-07 20:57:48,3578,32.1031,28.9708
+2016-04-07 21:12:42,3576,32.0918,28.9682
+2016-04-07 21:27:37,3574,32.0918,28.9193
+2016-04-07 21:42:32,3572,32.0353,28.9682
+2016-04-07 21:57:26,3570,32.0918,29.0249
+2016-04-07 22:12:21,3568,32.0832,28.9168
+2016-04-07 22:27:16,3566,32.0861,29.0198
+2016-04-07 22:42:10,3565,32.0861,28.9193
+2016-04-07 22:57:05,3564,32.0832,29.0198
+2016-04-07 23:12:00,3563,32.0832,29.0198
+2016-04-07 23:26:54,3561,32.1426,29.0198
+2016-04-07 23:41:49,3559,32.0861,29.0198
+2016-04-07 23:56:43,3559,32.0861,29.0198
+2016-04-08 00:11:38,3557,32.0861,28.9682
+2016-04-08 00:26:32,3557,32.1993,29.0198
+2016-04-08 00:41:26,3556,32.0775,28.9682
+2016-04-08 00:56:21,3555,32.0775,29.0198
+2016-04-08 01:11:15,3553,32.0775,29.0198
+2016-04-08 01:26:09,3552,32.0775,29.0224
+2016-04-08 01:41:04,3552,32.1936,29.0224
+2016-04-08 01:55:58,3550,32.0239,28.9708
+2016-04-08 02:10:53,3550,32.0775,29.0198
+2016-04-08 02:25:47,3549,32.1341,29.0198
+2016-04-08 02:40:41,3548,32.069,29.0224
+2016-04-08 02:55:35,3548,32.0718,28.9708
+2016-04-08 03:10:29,3547,32.069,28.9682
+2016-04-08 03:25:24,3547,32.0604,29.0224
+2016-04-08 03:40:18,3546,32.1765,29.0224
+2016-04-08 03:55:12,3545,32.0604,28.9708
+2016-04-08 04:10:07,3545,32.117,29.0224
+2016-04-08 04:25:01,3545,32.117,28.9682
+2016-04-08 04:39:56,3545,32.0604,28.868
+2016-04-08 04:54:50,3545,32.117,28.8167
+2016-04-08 05:09:44,3545,32.0604,28.8654
+2016-04-08 05:24:38,3545,32.2218,28.8654
+2016-04-08 05:39:33,3545,32.0547,28.9168
+2016-04-08 05:54:27,3544,32.1679,28.8142
+2016-04-08 06:09:21,3544,32.1113,28.868
+2016-04-08 06:24:16,3543,32.1679,28.8654
+2016-04-08 06:39:10,3542,32.0519,28.868
+2016-04-08 06:54:04,3542,32.0547,28.868
+2016-04-08 07:08:59,3542,32.1565,28.8654
+2016-04-08 07:23:53,3542,32.0999,28.8654
+2016-04-08 07:38:47,3543,32.097,28.8654
+2016-04-08 07:53:42,3545,32.1565,28.8167
+2016-04-08 08:08:36,3546,32.0434,28.8654
+2016-04-08 08:23:31,3550,31.9785,28.8654
+2016-04-08 08:38:26,3554,32.2046,28.8654
+2016-04-08 08:53:20,3557,32.0913,28.868
+2016-04-08 09:08:15,3563,32.0348,28.8142
+2016-04-08 09:23:10,3567,31.9699,28.8167
+2016-04-08 09:38:05,3571,32.0263,28.8142
+2016-04-08 09:53:00,3574,32.0263,28.8142
+2016-04-08 10:07:55,3577,31.9728,28.8142
+2016-04-08 10:22:50,3581,32.0263,28.8142
+2016-04-08 10:37:45,3585,32.0828,28.8142
+2016-04-08 10:52:40,3588,32.0263,28.8142
+2016-04-08 11:07:35,3591,32.0263,28.8142
+2016-04-08 11:22:30,3593,32.0263,28.8167
+2016-04-08 11:37:26,3594,31.9785,28.8142
+2016-04-08 11:52:21,3596,32.0348,28.8065
+2016-04-08 12:07:16,3597,32.0348,28.8091
+2016-04-08 12:22:12,3598,32.0434,28.7554
+2016-04-08 12:37:07,3598,31.987,28.7554
+2016-04-08 12:52:02,3598,31.9279,28.8065
+2016-04-08 13:06:58,3599,31.8745,28.8065
+2016-04-08 13:21:53,3599,31.9955,28.8065
+2016-04-08 13:36:49,3599,31.9392,28.7554
+2016-04-08 13:51:44,3599,31.8269,28.8065
+2016-04-08 14:06:39,3599,31.9392,28.7554
+2016-04-08 14:21:34,3599,31.9477,28.8577
+2016-04-08 14:36:30,3599,31.8915,28.8065
+2016-04-08 14:51:25,3599,31.9449,28.8091
+2016-04-08 15:06:20,3599,31.9477,28.8091
+2016-04-08 15:21:16,3599,32.004,28.8065
+2016-04-08 15:36:11,3599,32.0012,28.8065
+2016-04-08 15:51:07,3599,31.9562,28.8577
+2016-04-08 16:06:02,3599,32.0154,28.7477
+2016-04-08 16:20:57,3599,31.9,28.7988
+2016-04-08 16:35:52,3599,31.9619,28.8014
+2016-04-08 16:50:48,3599,31.9647,28.8065
+2016-04-08 17:05:43,3599,32.0211,28.7988
+2016-04-08 17:20:38,3599,32.0211,28.7988
+2016-04-08 17:35:33,3599,32.0182,28.7988
+2016-04-08 17:50:28,3599,31.9619,28.7963
+2016-04-08 18:05:23,3598,31.9647,28.7988
+2016-04-08 18:20:18,3598,31.9085,28.8065
+2016-04-08 18:35:14,3598,32.0267,28.8065
+2016-04-08 18:50:09,3598,31.9704,28.7477
+2016-04-08 19:05:04,3595,32.0296,28.8577
+2016-04-08 19:19:58,3594,32.0296,28.8065
+2016-04-08 19:34:53,3592,31.9704,28.7988
+2016-04-08 19:49:48,3589,32.0267,28.8065
+2016-04-08 20:04:43,3588,32.0296,28.8065
+2016-04-08 20:19:38,3586,31.8608,28.7988
+2016-04-08 20:34:32,3584,32.0861,28.8065
+2016-04-08 20:49:27,3581,32.0296,28.8577
+2016-04-08 21:04:22,3579,31.917,28.7988
+2016-04-08 21:19:17,3577,31.9619,28.8577
+2016-04-08 21:34:11,3574,32.0211,28.8065
+2016-04-08 21:49:06,3574,31.9647,28.8577
+2016-04-08 22:04:00,3571,31.9085,28.7988
+2016-04-08 22:18:54,3570,31.9085,28.8603
+2016-04-08 22:33:56,3568,31.9085,28.8501
+2016-04-08 22:48:51,3566,31.9056,28.8501
+2016-04-08 23:03:45,3565,31.9647,28.8065
+2016-04-08 23:18:39,3564,32.0211,28.8603
+2016-04-08 23:33:34,3563,31.9647,28.8065
+2016-04-08 23:48:28,3561,31.9647,28.8577
+2016-04-09 00:03:23,3559,31.8467,28.8501
+2016-04-09 00:18:17,3559,31.9,28.8577
+2016-04-09 00:33:12,3558,31.9028,28.8577
+2016-04-09 00:48:06,3557,31.8467,28.8577
+2016-04-09 01:03:00,3556,31.9562,28.8629
+2016-04-09 01:17:55,3555,31.8354,28.8501
+2016-04-09 01:32:49,3554,31.8354,28.8603
+2016-04-09 01:47:44,3553,31.8382,28.8526
+2016-04-09 02:02:38,3552,32.004,28.8577
+2016-04-09 02:17:32,3552,32.0069,28.8603
+2016-04-09 02:32:34,3550,31.8915,28.8603
+2016-04-09 02:47:28,3550,31.9477,28.8629
+2016-04-09 03:02:23,3549,32.0633,28.8065
+2016-04-09 03:17:17,3549,31.9392,28.8577
+2016-04-09 03:32:11,3549,31.883000000000006,28.8603
+2016-04-09 03:47:05,3549,31.883000000000006,28.8603
+2016-04-09 04:02:00,3548,31.883000000000006,28.8603
+2016-04-09 04:16:54,3547,31.9955,28.8603
+2016-04-09 04:31:48,3546,31.883000000000006,28.8577
+2016-04-09 04:46:43,3545,31.8858,28.8065
+2016-04-09 05:01:37,3545,31.883000000000006,28.8577
+2016-04-09 05:16:31,3545,31.9955,28.8603
+2016-04-09 05:31:26,3545,31.9307,28.8577
+2016-04-09 05:46:20,3544,31.9307,28.8577
+2016-04-09 06:01:14,3544,31.9307,28.8091
+2016-04-09 06:16:08,3544,31.9222,28.8065
+2016-04-09 06:31:03,3544,31.9222,28.8091
+2016-04-09 06:45:57,3545,31.866,28.8577
+2016-04-09 07:00:51,3545,31.866,28.8577
+2016-04-09 07:15:46,3545,31.9785,28.8065
+2016-04-09 07:30:40,3546,31.9699,28.8091
+2016-04-09 07:45:34,3549,31.9699,28.8577
+2016-04-09 08:00:36,3550,31.9137,28.8014
+2016-04-09 08:15:31,3555,31.8072,28.8065
+2016-04-09 08:30:26,3559,31.8015,28.8091
+2016-04-09 08:45:20,3563,31.8575,28.8065
+2016-04-09 09:00:15,3567,31.8043,28.7988
+2016-04-09 09:15:10,3571,31.8015,28.7988
+2016-04-09 09:30:05,3575,31.8575,28.8065
+2016-04-09 09:45:00,3579,31.9137,28.7988
+2016-04-09 09:59:55,3582,31.9137,28.7988
+2016-04-09 10:14:50,3586,31.9137,28.8501
+2016-04-09 10:29:45,3588,31.8015,28.7988
+2016-04-09 10:44:40,3592,31.9137,28.7988
+2016-04-09 10:59:35,3595,31.9699,28.7988
+2016-04-09 11:14:31,3596,31.8015,28.7988
+2016-04-09 11:29:26,3597,31.8015,28.7963
+2016-04-09 11:44:21,3598,31.9699,28.7988
+2016-04-09 11:59:17,3598,31.8632,28.7988
+2016-04-09 12:14:12,3598,31.9222,28.7988
+2016-04-09 12:29:07,3599,31.81,28.7988
+2016-04-09 12:44:03,3599,31.9307,28.7988
+2016-04-09 12:58:58,3599,31.8184,28.7988
+2016-04-09 13:13:54,3599,31.987,28.8501
+2016-04-09 13:28:49,3599,31.883000000000006,28.7988
+2016-04-09 13:43:44,3599,31.9364,28.7988
+2016-04-09 13:58:40,3599,31.9955,28.7988
+2016-04-09 14:13:35,3599,31.9392,28.74
+2016-04-09 14:28:31,3599,31.9955,28.8501
+2016-04-09 14:43:26,3599,32.0012,28.7988
+2016-04-09 14:58:22,3599,31.8915,28.7988
+2016-04-09 15:13:18,3599,32.9202,28.7477
+2016-04-09 15:28:13,3599,31.959,28.7988
+2016-04-09 15:43:09,3599,31.959,28.7912
+2016-04-09 15:58:04,3599,31.959,28.7912
+2016-04-09 16:13:00,3599,31.9534,28.7988
+2016-04-09 16:27:55,3599,31.9647,28.7988
+2016-04-09 16:42:51,3599,31.7963,28.7988
+2016-04-09 16:57:46,3599,31.8524,28.7988
+2016-04-09 17:12:41,3599,31.8495,28.7937
+2016-04-09 17:27:37,3599,31.7963,28.7937
+2016-04-09 17:42:32,3599,31.858,28.7426
+2016-04-09 17:57:27,3599,31.858,28.7912
+2016-04-09 18:12:22,3598,31.7461,28.7912
+2016-04-09 18:27:18,3598,31.8608,28.7937
+2016-04-09 18:42:13,3598,31.858,28.7937
+2016-04-09 18:57:08,3598,31.802,28.7988
+2016-04-09 19:12:03,3596,31.9704,28.7988
+2016-04-09 19:26:58,3595,31.858,28.7988
+2016-04-09 19:41:53,3593,31.8048,28.7988
+2016-04-09 19:56:48,3591,31.802,28.8014
+2016-04-09 20:11:43,3588,31.6374,28.7988
+2016-04-09 20:26:37,3587,31.7461,28.7988
+2016-04-09 20:41:32,3586,31.7461,28.7988
+2016-04-09 20:56:27,3585,31.7489,28.8501
+2016-04-09 21:11:22,3582,31.8608,28.6482
+2016-04-09 21:26:17,3581,31.8048,28.8501
+2016-04-09 21:41:12,3581,31.7461,28.6967
+2016-04-09 21:56:07,3579,31.858,28.6916
+2016-04-09 22:11:02,3578,31.8608,28.6457
+2016-04-09 22:25:56,3577,31.802,28.6967
+2016-04-09 22:40:51,3576,31.802,28.6457
+2016-04-09 22:55:46,3574,31.7376,28.6457
+2016-04-09 23:10:41,3574,31.7404,28.6482
+2016-04-09 23:25:35,3573,31.7404,28.6992
+2016-04-09 23:40:30,3572,31.7963,28.6992
+2016-04-09 23:55:25,3571,31.7935,28.6967
+2016-04-10 00:10:19,3571,31.8524,28.6967
+2016-04-10 00:25:14,3571,31.9647,28.6457
+2016-04-10 00:40:08,3570,31.8524,28.7043
+2016-04-10 00:55:03,3570,31.7963,28.6967
+2016-04-10 01:10:00,3570,31.7404,28.6457
+2016-04-10 01:24:55,3569,31.7404,28.6457
+2016-04-10 01:39:49,3569,31.7963,28.6482
+2016-04-10 01:54:44,3568,31.7963,28.6457
+2016-04-10 02:09:38,3568,31.7907,28.6967
+2016-04-10 02:24:33,3567,31.7879,28.6457
+2016-04-10 02:39:27,3567,31.8439,28.6533
+2016-04-10 02:54:21,3566,31.732,28.6457
+2016-04-10 03:09:16,3566,31.6177,28.6533
+2016-04-10 03:24:10,3565,31.6734,28.6482
+2016-04-10 03:39:05,3565,31.7794,28.7068
+2016-04-10 03:53:59,3564,31.7794,28.6559
+2016-04-10 04:08:54,3564,31.7235,28.6967
+2016-04-10 04:23:48,3563,31.7794,28.7068
+2016-04-10 04:38:43,3562,31.7235,28.7094
+2016-04-10 04:53:37,3560,31.7822,28.6967
+2016-04-10 05:08:31,3559,31.8354,28.6533
+2016-04-10 05:23:26,3559,31.7235,28.7017
+2016-04-10 05:38:20,3558,31.7794,28.7043
+2016-04-10 05:53:14,3557,31.7263,28.6533
+2016-04-10 06:08:09,3556,31.7794,28.7043
+2016-04-10 06:23:03,3556,31.7179,28.7043
+2016-04-10 06:37:57,3555,31.7179,28.6533
+2016-04-10 06:52:51,3554,31.7151,28.6533
+2016-04-10 07:07:46,3554,31.7179,28.7043
+2016-04-10 07:22:40,3555,31.7179,28.7043
+2016-04-10 07:37:34,3556,31.7709,28.6533
+2016-04-10 07:52:29,3558,31.7151,28.6533
+2016-04-10 08:07:23,3560,31.7179,28.6533
+2016-04-10 08:22:17,3565,31.7709,28.6533
+2016-04-10 08:37:12,3568,31.8269,28.6533
+2016-04-10 08:52:06,3572,31.7179,28.6533
+2016-04-10 09:07:01,3575,31.8269,28.6533
+2016-04-10 09:21:56,3580,31.7597,28.6025
+2016-04-10 09:36:51,3583,31.7625,28.6533
+2016-04-10 09:51:46,3587,31.8269,28.6457
+2016-04-10 10:06:41,3589,31.7066,28.6533
+2016-04-10 10:21:35,3593,31.8184,28.6533
+2016-04-10 10:36:31,3595,31.8269,28.6967
+2016-04-10 10:51:26,3598,31.8297,28.6533
+2016-04-10 11:06:21,3598,31.9392,28.6457
+2016-04-10 11:21:16,3598,31.8269,28.6457
+2016-04-10 11:36:11,3599,31.883000000000006,28.6533
+2016-04-10 11:51:14,3599,31.7709,28.6533
+2016-04-10 12:06:10,3599,31.7709,28.6457
+2016-04-10 12:21:05,3599,31.8269,28.6457
+2016-04-10 12:36:00,3599,31.8354,28.6457
+2016-04-10 12:50:56,3599,31.7794,28.6432
+2016-04-10 13:05:51,3599,31.9449,28.6457
+2016-04-10 13:20:47,3599,31.7794,28.6533
+2016-04-10 13:35:42,3599,31.7794,28.6025
+2016-04-10 13:50:37,3599,31.8972,28.6533
+2016-04-10 14:05:33,3599,31.9,28.6457
+2016-04-10 14:20:28,3599,31.7879,28.6457
+2016-04-10 14:35:24,3599,31.7935,28.6457
+2016-04-10 14:50:19,3599,31.8495,28.6533
+2016-04-10 15:05:14,3599,31.8524,28.6457
+2016-04-10 15:20:10,3599,31.8524,28.6457
+2016-04-10 15:35:05,3599,31.7963,28.6457
+2016-04-10 15:50:01,3599,31.9085,28.6457
+2016-04-10 16:04:56,3599,31.9141,28.6533
+2016-04-10 16:19:51,3599,31.858,28.6457
+2016-04-10 16:34:47,3599,31.858,28.6457
+2016-04-10 16:49:42,3599,31.858,28.6457
+2016-04-10 17:04:37,3599,31.9704,28.5949
+2016-04-10 17:19:32,3598,31.802,28.5949
+2016-04-10 17:34:27,3599,31.802,28.6559
+2016-04-10 17:49:23,3599,31.802,28.6457
+2016-04-10 18:04:18,3598,31.9817,28.6457
+2016-04-10 18:19:13,3598,31.8133,28.6457
+2016-04-10 18:34:07,3598,31.9255,28.7043
+2016-04-10 18:49:02,3597,31.9789,28.6457
+2016-04-10 19:03:57,3595,31.8693,28.6457
+2016-04-10 19:18:52,3594,31.8133,28.6457
+2016-04-10 19:33:47,3592,31.8105,28.6457
+2016-04-10 19:48:42,3590,31.8693,28.7043
+2016-04-10 20:03:36,3588,31.8133,28.6457
+2016-04-10 20:18:31,3586,31.8693,28.6457
+2016-04-10 20:33:26,3585,31.8693,28.6533
+2016-04-10 20:48:20,3582,31.8665,28.7043
+2016-04-10 21:03:15,3581,31.8693,28.7068
+2016-04-10 21:18:10,3578,31.7573,28.6533
+2016-04-10 21:33:04,3576,31.8133,28.6533
+2016-04-10 21:47:59,3574,31.7573,28.6533
+2016-04-10 22:02:53,3573,31.9255,28.7043
+2016-04-10 22:17:48,3571,31.802,28.7554
+2016-04-10 22:32:42,3570,31.7461,28.7043
+2016-04-10 22:47:36,3568,31.8048,28.7043
+2016-04-10 23:02:31,3566,31.8048,28.6559
+2016-04-10 23:17:25,3565,31.8048,28.6559
+2016-04-10 23:32:19,3564,31.7489,28.6559
+2016-04-10 23:47:14,3563,31.8048,28.7043
+2016-04-11 00:02:08,3562,31.8608,28.7554
+2016-04-11 00:17:02,3560,31.8608,28.6533
+2016-04-11 00:31:57,3559,31.6931,28.6533
+2016-04-11 00:46:51,3558,31.8495,28.6533
+2016-04-11 01:01:52,3557,31.9676,28.7068
+2016-04-11 01:16:47,3556,31.7404,28.7043
+2016-04-11 01:31:41,3556,31.6846,28.7043
+2016-04-11 01:46:36,3554,31.7935,28.7145
+2016-04-11 02:01:30,3553,31.6818,28.7145
+2016-04-11 02:16:24,3552,31.6846,28.6533
+2016-04-11 02:31:19,3551,31.7963,28.7119
+2016-04-11 02:46:13,3551,31.7376,28.7043
+2016-04-11 03:01:07,3550,31.7879,28.7145
+2016-04-11 03:16:01,3549,31.7851,28.7119
+2016-04-11 03:30:55,3549,31.7348,28.7145
+2016-04-11 03:45:49,3547,31.7879,28.7119
+2016-04-11 04:00:43,3546,31.7235,28.7119
+2016-04-11 04:15:38,3545,31.7822,28.661
+2016-04-11 04:30:32,3545,31.7235,28.6635
+2016-04-11 04:45:26,3545,31.7794,28.661
+2016-04-11 05:00:21,3544,31.6706,28.7119
+2016-04-11 05:15:15,3543,31.7794,28.7145
+2016-04-11 05:30:09,3543,31.8915,28.7119
+2016-04-11 05:45:03,3542,31.6121,28.7119
+2016-04-11 05:59:58,3542,31.7738,28.7119
+2016-04-11 06:14:52,3541,31.7151,28.7656
+2016-04-11 06:29:46,3541,31.7738,28.661
+2016-04-11 06:44:40,3541,31.7738,28.7119
+2016-04-11 06:59:35,3541,31.8269,28.7068
+2016-04-11 07:14:29,3541,31.7179,28.661
+2016-04-11 07:29:23,3542,31.7151,28.7145
+2016-04-11 07:44:17,3544,31.7095,28.7068
+2016-04-11 07:59:12,3545,31.5397,28.7145
+2016-04-11 08:14:06,3546,31.5953,28.7119
+2016-04-11 08:29:01,3549,31.7095,28.6635
+2016-04-11 08:43:55,3550,31.5313,28.7043
+2016-04-11 08:58:50,3552,31.701,28.7119
+2016-04-11 09:13:44,3554,31.5897,28.6533
+2016-04-11 09:28:39,3556,31.6982,28.7068
+2016-04-11 09:43:33,3558,31.5841,28.7119
+2016-04-11 09:58:28,3559,31.5313,28.7043
+2016-04-11 10:13:23,3562,31.4676,28.661
+2016-04-11 10:28:17,3564,31.5229,28.7043
+2016-04-11 10:43:12,3566,31.5229,28.6533
+2016-04-11 10:58:07,3567,31.5229,28.7068
+2016-04-11 11:13:01,3570,31.5784,28.6533
+2016-04-11 11:27:56,3571,31.5229,28.6559
+2016-04-11 11:42:51,3573,31.5812,28.6533
+2016-04-11 11:57:46,3574,31.4676,28.6533
+2016-04-11 12:12:41,3577,31.5784,28.6533
+2016-04-11 12:27:36,3578,31.5229,28.6533
+2016-04-11 12:42:30,3581,31.5229,28.7043
+2016-04-11 12:57:33,3582,31.6425,28.6533
+2016-04-11 13:12:28,3583,31.5869,28.6533
+2016-04-11 13:27:23,3585,31.5313,28.6533
+2016-04-11 13:42:17,3586,31.5869,28.6533
+2016-04-11 13:57:12,3587,31.5869,28.7043
+2016-04-11 14:12:07,3588,31.5841,28.6533
+2016-04-11 14:27:02,3588,31.5869,28.7043
+2016-04-11 14:41:56,3590,31.6481,28.6533
+2016-04-11 14:56:51,3591,31.6509,28.6457
+2016-04-11 15:11:46,3592,31.5953,28.6533
+2016-04-11 15:26:41,3592,31.5953,28.6533
+2016-04-11 15:41:36,3593,31.5953,28.6457
+2016-04-11 15:56:32,3594,31.5953,28.6482
+2016-04-11 16:11:27,3594,31.7151,28.6432
+2016-04-11 16:26:22,3595,31.6037,28.6967
+2016-04-11 16:41:17,3595,31.5481,28.6457
+2016-04-11 16:56:12,3595,31.6621,28.6457
+2016-04-11 17:11:14,3595,31.5481,28.6457
+2016-04-11 17:26:10,3595,31.5481,28.6457
+2016-04-11 17:41:05,3594,31.5481,28.6967
+2016-04-11 17:56:00,3593,31.6037,28.6482
+2016-04-11 18:10:55,3593,31.6593,28.6457
+2016-04-11 18:25:50,3593,31.6593,28.6457
+2016-04-11 18:40:45,3592,31.6121,28.6967
+2016-04-11 18:55:40,3592,31.7235,28.5923
+2016-04-11 19:10:35,3591,31.6678,28.5949
+2016-04-11 19:25:30,3590,31.5565,28.6457
+2016-04-11 19:40:25,3588,31.5565,28.6992
+2016-04-11 19:55:20,3588,31.6093,28.6457
+2016-04-11 20:10:15,3588,31.6678,28.6482
+2016-04-11 20:25:10,3588,31.5537,28.6457
+2016-04-11 20:40:05,3587,31.7235,28.6432
+2016-04-11 20:55:00,3587,31.6121,28.6457
+2016-04-11 21:09:55,3586,31.6678,28.5949
+2016-04-11 21:24:50,3586,31.6678,28.6457
+2016-04-11 21:39:45,3585,31.6678,28.6457
+2016-04-11 21:54:40,3584,31.6121,28.6457
+2016-04-11 22:09:34,3584,31.6121,28.6457
+2016-04-11 22:24:29,3583,31.5565,28.6967
+2016-04-11 22:39:24,3583,31.5011,28.6457
+2016-04-11 22:54:19,3583,31.6093,28.6482
+2016-04-11 23:09:14,3582,31.5565,28.6482
+2016-04-11 23:24:09,3581,31.6121,28.6967
+2016-04-11 23:39:03,3581,31.6121,28.6457
+2016-04-11 23:53:58,3581,31.6121,28.6992
+2016-04-12 00:08:53,3581,31.5565,28.6967
+2016-04-12 00:23:48,3579,31.6121,28.6457
+2016-04-12 00:38:43,3581,31.5565,28.6457
+2016-04-12 00:53:38,3579,31.6121,28.6457
+2016-04-12 01:08:33,3579,31.5565,28.6482
+2016-04-12 01:23:28,3579,31.5565,28.6967
+2016-04-12 01:38:23,3579,31.6121,28.6457
+2016-04-12 01:53:18,3578,31.6121,28.6457
+2016-04-12 02:08:13,3578,31.5565,28.6457
+2016-04-12 02:23:08,3578,31.6121,28.6457
+2016-04-12 02:38:03,3578,31.6121,28.6967
+2016-04-12 02:52:58,3578,31.6121,28.6457
+2016-04-12 03:07:53,3578,31.5565,28.6482
+2016-04-12 03:22:48,3578,31.6706,28.6457
+2016-04-12 03:37:50,3577,31.5565,28.6967
+2016-04-12 03:52:45,3577,31.7235,28.6457
+2016-04-12 04:07:40,3577,31.6121,28.6967
+2016-04-12 04:22:35,3577,31.5565,28.6533
+2016-04-12 04:37:30,3577,31.7235,28.6457
+2016-04-12 04:52:25,3577,31.6121,28.6967
+2016-04-12 05:07:20,3576,31.5565,28.6457
+2016-04-12 05:22:15,3576,31.6678,28.6457
+2016-04-12 05:37:09,3576,31.6706,28.6457
+2016-04-12 05:52:04,3576,31.6678,28.6457
+2016-04-12 06:06:59,3576,31.7794,28.6533
+2016-04-12 06:21:54,3575,31.5565,28.6457
+2016-04-12 06:36:49,3575,31.5565,28.7043
+2016-04-12 06:51:44,3575,31.6121,28.7043
+2016-04-12 07:06:39,3575,31.6121,28.6457
+2016-04-12 07:21:34,3576,31.6093,28.6967
+2016-04-12 07:36:29,3576,31.7794,28.5949
+2016-04-12 07:51:23,3577,31.6678,28.7043
+2016-04-12 08:06:18,3577,31.5565,28.6967
+2016-04-12 08:21:13,3578,31.5565,28.6457
+2016-04-12 08:36:08,3580,31.6678,28.6457
+2016-04-12 08:51:03,3582,31.6121,28.7043
+2016-04-12 09:05:58,3585,31.5011,28.6508
+2016-04-12 09:20:54,3588,31.6678,28.6941
+2016-04-12 09:35:49,3591,31.7235,28.5949
+2016-04-12 09:50:44,3595,31.7235,28.5999
+2016-04-12 10:05:40,3598,31.6678,28.6457
+2016-04-12 10:20:35,3598,31.6121,28.6457
+2016-04-12 10:35:31,3598,31.6649,28.6533
+2016-04-12 10:50:26,3599,31.6121,28.6508
+2016-04-12 11:05:22,3599,31.6177,28.6533
+2016-04-12 11:20:18,3599,31.6762,28.6533
+2016-04-12 11:35:13,3599,31.732,28.6533
+2016-04-12 11:50:09,3599,31.7404,28.6025
+2016-04-12 12:05:05,3599,31.7376,28.6508
+2016-04-12 12:20:01,3599,31.6261,28.6559
+2016-04-12 12:34:57,3599,31.6261,28.6025
+2016-04-12 12:49:52,3599,31.6818,28.6457
+2016-04-12 13:04:48,3599,31.7461,28.6457
+2016-04-12 13:19:44,3599,31.6345,28.6533
+2016-04-12 13:34:39,3599,31.7461,28.6457
+2016-04-12 13:49:35,3599,31.6345,28.6025
+2016-04-12 14:04:31,3599,31.7461,28.6533
+2016-04-12 14:19:27,3599,31.7545,28.6508
+2016-04-12 14:34:23,3599,31.643,28.6025
+2016-04-12 14:49:19,3599,31.763,28.6508
+2016-04-12 15:04:14,3599,31.6514,28.6533
+2016-04-12 15:19:10,3599,31.7602,28.6025
+2016-04-12 15:34:06,3599,31.763,28.5999
+2016-04-12 15:49:02,3599,31.7686,28.5517
+2016-04-12 16:03:58,3599,31.6598,28.6025
+2016-04-12 16:18:54,3599,31.5487,28.6508
+2016-04-12 16:33:50,3599,31.657,28.6533
+2016-04-12 16:48:45,3599,31.8359,28.6533
+2016-04-12 17:03:41,3599,31.6655,28.6025
+2016-04-12 17:18:37,3599,31.7799,28.5999
+2016-04-12 17:33:32,3599,31.6767,28.6025
+2016-04-12 17:48:28,3599,31.7855,28.6533
+2016-04-12 18:03:24,3599,31.7884,28.6508
+2016-04-12 18:18:19,3599,31.7325,28.6533
+2016-04-12 18:33:15,3599,31.7325,28.6533
+2016-04-12 18:48:10,3599,31.7325,28.6533
+2016-04-12 19:03:05,3599,31.7884,28.6533
+2016-04-12 19:18:01,3599,31.7325,28.6533
+2016-04-12 19:32:56,3599,31.7968,28.661
+2016-04-12 19:47:51,3599,31.7325,28.661
+2016-04-12 20:02:46,3598,31.7409,28.661
+2016-04-12 20:17:41,3598,31.6851,28.661
+2016-04-12 20:32:36,3598,31.7409,28.661
+2016-04-12 20:47:31,3597,31.7968,28.661
+2016-04-12 21:02:26,3595,31.6851,28.661
+2016-04-12 21:17:21,3595,31.7409,28.6101
+2016-04-12 21:32:16,3594,31.7409,28.661
+2016-04-12 21:47:11,3592,31.7409,28.7119
+2016-04-12 22:02:06,3592,31.6851,28.661
+2016-04-12 22:17:01,3592,31.7968,28.7119
+2016-04-12 22:31:56,3590,31.7968,28.661
+2016-04-12 22:46:51,3589,31.7968,28.661
+2016-04-12 23:01:45,3588,31.7968,28.661
+2016-04-12 23:16:40,3588,31.7968,28.661
+2016-04-12 23:31:35,3588,31.6851,28.661
+2016-04-12 23:46:30,3587,31.7409,28.7119
+2016-04-13 01:01:03,3582,31.7884,28.7119
+2016-04-13 01:15:58,3581,31.6767,28.661
+2016-04-13 01:30:53,3580,31.6767,28.6686
+2016-04-13 01:45:47,3579,31.7884,28.6686
+2016-04-13 02:00:42,3578,31.7325,28.6686
+2016-04-13 02:15:37,3577,31.7297,28.6686
+2016-04-13 02:30:32,3577,31.7353,28.6711
+2016-04-13 02:45:26,3576,31.7325,28.6686
+2016-04-13 03:00:21,3576,31.6767,28.6686
+2016-04-13 03:15:16,3576,31.6767,28.6686
+2016-04-13 03:30:10,3576,31.7884,28.6711
+2016-04-13 03:45:05,3575,31.6126,28.7196
+2016-04-13 04:00:00,3575,31.7799,28.6686
+2016-04-13 04:14:54,3574,31.7799,28.6711
+2016-04-13 04:29:49,3574,31.7827,28.6686
+2016-04-13 04:44:43,3574,31.6683,28.7196
+2016-04-13 04:59:38,3574,31.6683,28.6686
+2016-04-13 05:14:33,3574,31.7799,28.6686
+2016-04-13 05:29:27,3574,31.7714,28.6686
+2016-04-13 05:44:22,3573,31.6598,28.7196
+2016-04-13 05:59:16,3572,31.7156,28.6686
+2016-04-13 06:14:11,3572,31.6042,28.6686
+2016-04-13 06:29:06,3572,31.657,28.6686
+2016-04-13 06:44:00,3572,31.7714,28.6711
+2016-04-13 06:58:55,3572,31.7686,28.7221
+2016-04-13 07:13:50,3573,31.7128,28.7196
+2016-04-13 07:28:44,3574,31.6598,28.6686
+2016-04-13 07:43:39,3574,31.6598,28.6686
+2016-04-13 07:58:34,3576,31.6598,28.7196
+2016-04-13 08:13:29,3578,31.6598,28.6686
+2016-04-13 08:28:24,3581,31.657,28.7196
+2016-04-13 08:43:18,3583,31.6542,28.6686
+2016-04-13 08:58:13,3585,31.7099,28.6686
+2016-04-13 09:13:08,3587,31.763,28.6686
+2016-04-13 09:28:03,3589,31.763,28.6686
+2016-04-13 09:42:58,3592,31.763,28.6686
+2016-04-13 09:57:54,3594,31.6514,28.6686
+2016-04-13 10:12:49,3597,31.6514,28.6686
+2016-04-13 10:27:44,3598,31.763,28.6152
+2016-04-13 10:42:40,3598,31.7156,28.6686
+2016-04-13 10:57:35,3599,31.8189,28.6686
+2016-04-13 11:12:31,3599,31.657,28.6686
+2016-04-13 11:27:26,3599,31.8274,28.6686
+2016-04-13 11:42:22,3599,31.7714,28.6686
+2016-04-13 11:57:17,3599,31.7156,28.6686
+2016-04-13 12:12:13,3599,31.6014,28.6686
+2016-04-13 12:27:08,3599,31.7686,28.6686
+2016-04-13 12:42:04,3599,31.7686,28.6737
+2016-04-13 12:56:59,3599,31.657,28.6737
+2016-04-13 13:11:54,3599,31.7799,28.6711
+2016-04-13 13:26:49,3599,31.7799,28.6152
+2016-04-13 13:41:45,3599,31.8359,28.6737
+2016-04-13 13:56:40,3599,31.7799,28.6686
+2016-04-13 14:11:36,3599,31.7884,28.6686
+2016-04-13 14:26:31,3599,31.7325,28.6737
+2016-04-13 14:41:26,3599,31.7884,28.6228
+2016-04-13 14:56:22,3599,31.7325,28.6686
+2016-04-13 15:11:17,3599,31.7884,28.6177
+2016-04-13 15:26:12,3599,31.7884,28.6177
+2016-04-13 15:41:07,3599,31.8444,28.6737
+2016-04-13 15:56:03,3599,31.7968,28.6177
+2016-04-13 16:10:58,3599,31.7968,28.6737
+2016-04-13 16:25:53,3599,31.6851,28.6686
+2016-04-13 16:40:48,3599,31.7968,28.6737
+2016-04-13 16:55:43,3599,31.7466,28.6737
+2016-04-13 17:10:39,3599,31.8025,28.6737
+2016-04-13 17:25:34,3599,31.8053,28.6737
+2016-04-13 17:40:29,3599,31.7997,28.6686
+2016-04-13 17:55:24,3599,31.8053,28.6737
+2016-04-13 18:10:19,3599,31.6936,28.6737
+2016-04-13 18:25:15,3599,31.7466,28.6762
+2016-04-13 18:40:10,3599,31.8613,28.6737
+2016-04-13 18:55:05,3598,31.6936,28.6762
+2016-04-13 19:10:00,3598,31.8585,28.6737
+2016-04-13 19:24:55,3598,31.8053,28.6762
+2016-04-13 19:39:50,3598,31.8053,28.6228
+2016-04-13 19:54:45,3597,31.8053,28.6737
+2016-04-13 20:09:40,3596,31.8053,28.6762
+2016-04-13 20:24:35,3595,31.7494,28.6762
+2016-04-13 20:39:30,3594,31.7494,28.6788
+2016-04-13 20:54:32,3593,31.8053,28.6762
+2016-04-13 21:09:27,3593,31.8053,28.6737
+2016-04-13 21:24:22,3592,31.7494,28.6762
+2016-04-13 21:39:17,3591,31.8613,28.6762
+2016-04-13 21:54:12,3590,31.6936,28.6839
+2016-04-13 22:09:06,3589,31.6936,28.6839
+2016-04-13 22:24:01,3588,31.9146,28.6737
+2016-04-13 22:38:56,3588,31.8053,28.6813
+2016-04-13 22:53:51,3588,31.6936,28.6813
+2016-04-13 23:08:46,3588,31.6936,28.6813
+2016-04-13 23:23:41,3588,31.8053,28.6813
+2016-04-13 23:38:35,3588,31.8613,28.6762
+2016-04-13 23:53:30,3588,31.9146,28.6813
+2016-04-14 00:08:25,3587,31.7494,28.7323
+2016-04-14 00:23:20,3587,31.8025,28.7323
+2016-04-14 00:38:15,3586,31.8025,28.6813
+2016-04-14 00:53:09,3586,31.8053,28.6304
+2016-04-14 01:08:04,3585,31.8053,28.6813
+2016-04-14 01:22:59,3585,31.6936,28.6839
+2016-04-14 01:37:53,3585,31.8053,28.6813
+2016-04-14 01:52:48,3584,31.7437,28.6813
+2016-04-14 02:07:43,3583,31.6294,28.6813
+2016-04-14 02:22:37,3582,31.7968,28.6813
+2016-04-14 02:37:32,3581,31.7968,28.6839
+2016-04-14 02:52:26,3580,31.8528,28.6839
+2016-04-14 03:07:21,3579,31.7968,28.6813
+2016-04-14 03:22:15,3578,31.7968,28.6839
+2016-04-14 03:37:10,3577,31.7968,28.7323
+2016-04-14 03:52:05,3576,31.7884,28.6813
+2016-04-14 04:06:59,3575,31.8444,28.6813
+2016-04-14 04:21:54,3574,31.6767,28.6839
+2016-04-14 04:36:48,3574,31.8444,28.6839
+2016-04-14 04:51:43,3574,31.8444,28.6839
+2016-04-14 05:06:37,3574,31.7325,28.7374
+2016-04-14 05:21:32,3574,31.621,28.6355
+2016-04-14 05:36:26,3574,31.6767,28.6864
+2016-04-14 05:51:21,3574,31.7325,28.6813
+2016-04-14 06:06:15,3574,31.6767,28.6839
+2016-04-14 06:21:10,3574,31.6767,28.6839
+2016-04-14 06:36:05,3574,31.7884,28.6813
+2016-04-14 06:50:59,3574,31.7884,28.6813
+2016-04-14 07:05:54,3574,31.6767,28.6813
+2016-04-14 07:20:48,3575,31.7325,28.6839
+2016-04-14 07:35:43,3575,31.6683,28.6839
+2016-04-14 07:50:37,3576,31.7799,28.6839
+2016-04-14 08:05:32,3576,31.7799,28.7349
+2016-04-14 08:20:26,3576,31.7799,28.6813
+2016-04-14 08:35:21,3576,31.7799,28.6839
+2016-04-14 08:50:16,3577,31.7799,28.7349
+2016-04-14 09:05:10,3577,31.7799,28.6839
+2016-04-14 09:20:05,3577,31.8359,28.7349
+2016-04-14 09:34:59,3578,31.6683,28.6813
+2016-04-14 09:49:54,3579,31.6598,28.6813
+2016-04-14 10:04:49,3579,31.6126,28.6839
+2016-04-14 10:19:43,3580,31.6683,28.6813
+2016-04-14 10:34:38,3581,31.8359,28.6839
+2016-04-14 10:49:33,3581,31.7714,28.7323
+2016-04-14 11:04:27,3581,31.7799,28.6915
+2016-04-14 11:19:22,3581,31.6042,28.6839
+2016-04-14 11:34:17,3581,31.7799,28.6813
+2016-04-14 11:49:12,3582,31.7714,28.6813
+2016-04-14 12:04:06,3582,31.6683,28.6839
+2016-04-14 12:19:01,3583,31.7686,28.6813
+2016-04-14 12:33:56,3585,31.7799,28.6813
+2016-04-14 12:48:50,3586,31.7714,28.7323
+2016-04-14 13:03:52,3586,31.7799,28.6813
+2016-04-14 13:18:47,3588,31.724,28.6813
+2016-04-14 13:33:42,3589,31.7799,28.6839
+2016-04-14 13:48:37,3592,31.7799,28.6839
+2016-04-14 14:03:32,3593,31.724,28.6329
+2016-04-14 14:18:27,3594,31.7799,28.6813
+2016-04-14 14:33:29,3595,31.6683,28.6813
+2016-04-14 14:48:24,3595,31.5571,28.6839
+2016-04-14 15:03:20,3596,31.724,28.6813
+2016-04-14 15:18:15,3597,31.724,28.6839
+2016-04-14 15:33:10,3597,31.7297,28.6813
+2016-04-14 15:48:05,3598,31.8444,28.6813
+2016-04-14 16:03:00,3598,31.6767,28.6839
+2016-04-14 16:17:55,3598,31.6739,28.6813
+2016-04-14 16:32:50,3598,31.7297,28.6813
+2016-04-14 16:47:46,3598,31.7884,28.6813
+2016-04-14 17:02:41,3598,31.7325,28.689
+2016-04-14 17:17:36,3598,31.8415,28.6813
+2016-04-14 17:32:31,3598,31.7353,28.5289
+2016-04-14 17:47:26,3598,31.7884,28.5314
+2016-04-14 18:02:22,3598,31.7325,28.6304
+2016-04-14 18:17:16,3598,31.7297,28.5365
+2016-04-14 18:32:11,3598,31.6767,28.6813
+2016-04-14 18:47:06,3598,31.7968,28.5289
+2016-04-14 19:02:01,3597,31.7968,28.5289
+2016-04-14 19:16:56,3595,31.6851,28.5289
+2016-04-14 19:31:51,3595,31.6294,28.6813
+2016-04-14 19:46:46,3593,31.7968,28.5796
+2016-04-14 20:01:41,3591,31.7968,28.5314
+2016-04-14 20:16:36,3589,31.7381,28.5314
+2016-04-14 20:31:31,3588,31.7409,28.5365
+2016-04-14 20:46:26,3586,31.7968,28.5289
+2016-04-14 21:01:20,3584,31.7968,28.539
+2016-04-14 21:16:15,3582,31.7884,28.5897
+2016-04-14 21:31:10,3580,31.7968,28.539
+2016-04-14 21:46:05,3578,31.8557,28.5897
+2016-04-14 22:00:59,3577,31.7884,28.539
+2016-04-14 22:15:54,3575,31.8444,28.5365
+2016-04-14 22:30:49,3574,31.7884,28.5289
+2016-04-14 22:45:43,3573,31.7884,28.539
+2016-04-14 23:00:38,3572,31.7884,28.5365
+2016-04-14 23:15:32,3571,31.7325,28.5897
+2016-04-14 23:30:27,3570,31.6767,28.638
+2016-04-14 23:45:22,3569,31.621,28.5872
+2016-04-15 00:00:16,3568,31.8444,28.5365
+2016-04-15 00:15:11,3567,31.6767,28.5365
+2016-04-15 00:30:05,3566,31.8444,28.4883
+2016-04-15 00:45:00,3566,31.7799,28.539
+2016-04-15 00:59:54,3566,31.6711,28.5872
+2016-04-15 01:14:49,3565,31.7799,28.5897
+2016-04-15 01:29:43,3565,31.724,28.539
+2016-04-15 01:44:38,3564,31.6126,28.5365
+2016-04-15 01:59:32,3564,31.7714,28.5897
+2016-04-15 02:14:27,3564,31.7714,28.5365
+2016-04-15 02:29:21,3564,31.7714,28.5897
+2016-04-15 02:44:16,3564,31.6598,28.539
+2016-04-15 02:59:10,3564,31.6598,28.539
+2016-04-15 03:14:04,3563,31.6598,28.539
+2016-04-15 03:28:59,3563,31.6598,28.5897
+2016-04-15 03:43:53,3564,31.7714,28.638
+2016-04-15 03:58:48,3564,31.6598,28.5365
+2016-04-15 04:13:42,3564,31.7184,28.539
+2016-04-15 04:28:37,3564,31.5958,28.5872
+2016-04-15 04:43:31,3564,31.7156,28.539
+2016-04-15 04:58:26,3565,31.7099,28.5872
+2016-04-15 05:13:20,3566,31.8217,28.5365
+2016-04-15 05:28:15,3566,31.7658,28.5365
+2016-04-15 05:43:09,3566,31.6514,28.539
+2016-04-15 05:58:03,3566,31.7099,28.5872
+2016-04-15 06:13:06,3566,31.7658,28.539
+2016-04-15 06:28:00,3567,31.7099,28.5289
+2016-04-15 06:42:55,3567,31.7071,28.5872
+2016-04-15 06:57:49,3568,31.5958,28.5365
+2016-04-15 07:12:44,3569,31.8217,28.5897
+2016-04-15 07:27:39,3570,31.7658,28.5365
+2016-04-15 07:42:33,3571,31.7658,28.5365
+2016-04-15 07:57:28,3572,31.7658,28.5365
+2016-04-15 08:12:22,3573,31.6542,28.5365
+2016-04-15 08:27:17,3574,31.763,28.5872
+2016-04-15 08:42:11,3576,31.7545,28.5365
+2016-04-15 08:57:06,3577,31.8133,28.539
+2016-04-15 09:12:01,3577,31.7545,28.539
+2016-04-15 09:26:55,3577,31.7573,28.5897
+2016-04-15 09:41:50,3577,31.6458,28.5365
+2016-04-15 09:56:44,3578,31.7573,28.539
+2016-04-15 10:11:39,3578,31.7573,28.539
+2016-04-15 10:26:33,3578,31.6987,28.539
+2016-04-15 10:41:28,3578,31.7128,28.5365
+2016-04-15 10:56:23,3578,31.6542,28.539
+2016-04-15 11:11:17,3578,31.6458,28.6329
+2016-04-15 11:26:12,3579,31.7573,28.5897
+2016-04-15 11:41:07,3580,31.6458,28.5897
+2016-04-15 11:56:01,3581,31.7099,28.5314
+2016-04-15 12:10:56,3581,31.763,28.5289
+2016-04-15 12:25:51,3581,31.9255,28.74
+2016-04-15 12:40:46,3582,31.934,28.6915
+2016-04-15 12:55:41,3583,31.8778,28.6813
+2016-04-15 13:10:35,3584,31.7573,28.6813
+2016-04-15 13:25:30,3584,31.8217,28.6915
+2016-04-15 13:40:25,3586,31.9903,28.7349
+2016-04-15 13:55:20,3587,31.875,28.6813
+2016-04-15 14:10:15,3588,31.8722,28.6839
+2016-04-15 14:25:11,3591,31.934,28.6813
+2016-04-15 14:40:06,3592,31.8778,28.6813
+2016-04-15 14:55:01,3594,31.8217,28.6304
+2016-04-15 15:09:56,3595,31.8778,28.6813
+2016-04-15 15:24:51,3596,31.9874,28.6813
+2016-04-15 15:39:46,3597,31.8217,28.6813
+2016-04-15 15:54:41,3597,31.8806,28.6813
+2016-04-15 16:09:36,3598,31.8246,28.6813
+2016-04-15 16:24:32,3597,31.8835,28.6813
+2016-04-15 16:39:27,3597,31.8835,28.6839
+2016-04-15 16:54:22,3597,31.8835,28.6813
+2016-04-15 17:09:16,3597,31.8835,28.6813
+2016-04-15 17:24:12,3597,31.8274,28.7323
+2016-04-15 17:39:06,3598,31.9482,28.6839
+2016-04-15 17:54:02,3597,32.0045,28.6329
+2016-04-15 18:08:57,3596,32.0045,28.6813
+2016-04-15 18:23:51,3596,31.8387,28.6839
+2016-04-15 18:38:46,3595,31.8359,28.6813
+2016-04-15 18:53:41,3595,31.8359,28.6839
+2016-04-15 19:08:36,3593,31.833,28.6839
+2016-04-15 19:23:31,3592,31.8359,28.6813
+2016-04-15 19:38:26,3591,32.0045,28.6813
+2016-04-15 19:53:21,3589,32.013,28.6813
+2016-04-15 20:08:16,3588,31.8444,28.6813
+2016-04-15 20:23:11,3587,31.9482,28.7323
+2016-04-15 20:38:06,3585,31.9482,28.6813
+2016-04-15 20:53:01,3583,31.8359,28.6813
+2016-04-15 21:07:55,3581,31.9482,28.7374
+2016-04-15 21:22:50,3580,31.8359,28.6813
+2016-04-15 21:37:45,3578,31.8359,28.6839
+2016-04-15 21:52:39,3577,31.8359,28.6839
+2016-04-15 22:07:41,3574,31.892,28.6813
+2016-04-15 22:22:36,3574,31.892,28.6839
+2016-04-15 22:37:31,3573,31.8359,28.6839
+2016-04-15 22:52:26,3572,32.0045,28.6839
+2016-04-15 23:07:21,3571,31.892,28.7349
+2016-04-15 23:22:15,3571,31.8948,28.6813
+2016-04-15 23:37:10,3570,32.0045,28.7323
+2016-04-15 23:52:05,3569,31.8359,28.6813
+2016-04-16 00:07:00,3568,31.8274,28.7349
+2016-04-16 00:21:54,3568,32.0523,28.6915
+2016-04-16 00:36:49,3567,31.9397,28.6864
+2016-04-16 00:51:44,3566,31.7156,28.638
+2016-04-16 01:06:38,3566,31.8835,28.6813
+2016-04-16 01:21:33,3566,31.7686,28.689
+2016-04-16 01:36:28,3566,31.7714,28.7349
+2016-04-16 01:51:22,3566,31.8274,28.7349
+2016-04-16 02:06:17,3566,31.8806,28.6813
+2016-04-16 02:21:12,3566,31.7714,28.6839
+2016-04-16 02:36:06,3566,31.8835,28.6864
+2016-04-16 02:51:01,3567,31.8778,28.6915
+2016-04-16 03:05:55,3567,31.9903,28.689
+2016-04-16 03:20:50,3566,31.8217,28.689
+2016-04-16 03:35:45,3567,31.9903,28.7425
+2016-04-16 03:50:39,3567,31.8778,28.74
+2016-04-16 04:05:34,3567,31.875,28.6915
+2016-04-16 04:20:29,3568,31.8778,28.6839
+2016-04-16 04:35:24,3569,31.934,28.7349
+2016-04-16 04:50:18,3569,31.875,28.6813
+2016-04-16 05:05:13,3570,31.8778,28.6915
+2016-04-16 05:20:08,3571,31.934,28.6839
+2016-04-16 05:35:03,3571,31.875,28.7323
+2016-04-16 05:49:58,3571,31.8778,28.74
+2016-04-16 06:04:53,3571,31.9874,28.6915
+2016-04-16 06:19:47,3571,31.8189,28.7425
+2016-04-16 06:34:42,3572,31.8217,28.6839
+2016-04-16 06:49:37,3572,31.7658,28.6839
+2016-04-16 07:04:32,3573,31.875,28.6813
+2016-04-16 07:19:26,3574,31.8217,28.6813
+2016-04-16 07:34:21,3574,31.8189,28.6915
+2016-04-16 07:49:16,3576,31.8693,28.689
+2016-04-16 08:04:11,3577,31.8778,28.6864
+2016-04-16 08:19:05,3578,31.9255,28.6813
+2016-04-16 08:34:00,3579,31.8665,28.6915
+2016-04-16 08:48:55,3581,31.8133,28.6304
+2016-04-16 09:03:50,3582,31.8665,28.6839
+2016-04-16 09:18:45,3585,31.8133,28.7323
+2016-04-16 09:33:39,3585,31.8105,28.6839
+2016-04-16 09:48:34,3587,31.9817,28.6915
+2016-04-16 10:03:29,3588,31.8778,28.689
+2016-04-16 10:18:24,3588,31.8693,28.6813
+2016-04-16 10:33:19,3590,31.875,28.6813
+2016-04-16 10:48:14,3591,31.9903,28.6915
+2016-04-16 11:03:09,3592,31.875,28.6813
+2016-04-16 11:18:04,3592,31.875,28.6915
+2016-04-16 11:33:06,3592,31.8189,28.6813
+2016-04-16 11:48:02,3592,31.875,28.6813
+2016-04-16 12:02:57,3592,32.1003,28.6839
+2016-04-16 12:17:52,3592,32.1003,28.6813
+2016-04-16 12:32:47,3593,32.1598,28.6813
+2016-04-16 12:47:42,3593,32.0467,28.6839
+2016-04-16 13:02:37,3593,32.0438,28.6839
+2016-04-16 13:17:32,3593,32.1031,28.6915
+2016-04-16 13:32:27,3593,32.0495,28.6839
+2016-04-16 13:47:22,3593,32.1088,28.6813
+2016-04-16 14:02:16,3592,32.1626,28.6839
+2016-04-16 14:17:11,3592,31.9959,28.7323
+2016-04-16 14:32:06,3592,32.1088,28.7349
+2016-04-16 14:47:01,3591,32.0523,28.6839
+2016-04-16 15:01:56,3590,32.0495,28.6304
+2016-04-16 15:16:51,3591,32.0523,28.7323
+2016-04-16 15:31:45,3591,32.0523,28.6839
+2016-04-16 15:46:40,3592,31.9959,28.6813
+2016-04-16 16:01:35,3593,31.9931,28.6813
+2016-04-16 16:16:30,3594,31.9931,28.6864
+2016-04-16 16:31:25,3595,32.1655,28.6813
+2016-04-16 16:46:20,3597,32.0609,28.6813
+2016-04-16 17:01:16,3597,32.1088,28.6839
+2016-04-16 17:16:11,3598,32.0609,28.6915
+2016-04-16 17:31:06,3598,32.0045,28.689
+2016-04-16 17:46:01,3598,32.1174,28.6813
+2016-04-16 18:00:56,3597,32.0045,28.6813
+2016-04-16 18:15:51,3596,32.126,28.6813
+2016-04-16 18:30:46,3596,32.0694,28.6839
+2016-04-16 18:45:41,3595,32.0694,28.689
+2016-04-16 19:00:36,3595,32.1826,28.6839
+2016-04-16 19:15:32,3594,32.126,28.6813
+2016-04-16 19:30:27,3593,32.0694,28.6839
+2016-04-16 19:45:22,3593,32.0694,28.6839
+2016-04-16 20:00:17,3592,32.0694,28.6813
+2016-04-16 20:15:12,3591,32.126,28.6813
+2016-04-16 20:30:06,3590,32.013,28.6915
+2016-04-16 20:45:01,3588,32.0666,28.6864
+2016-04-16 20:59:57,3588,32.013,28.6915
+2016-04-16 21:14:51,3587,32.0694,28.689
+2016-04-16 21:29:46,3586,32.0694,28.6839
+2016-04-16 21:44:41,3584,32.0694,28.7349
+2016-04-16 21:59:36,3584,32.0694,28.6839
+2016-04-16 22:14:31,3582,32.0101,28.6839
+2016-04-16 22:29:26,3581,32.0694,28.6839
+2016-04-16 22:44:21,3581,32.0694,28.689
+2016-04-16 22:59:16,3581,32.0694,28.74
+2016-04-16 23:14:11,3580,32.013,28.6915
+2016-04-16 23:29:05,3579,32.0694,28.689
+2016-04-16 23:44:00,3578,32.0101,28.6915
+2016-04-16 23:58:55,3578,32.0694,28.689
+2016-04-17 00:13:50,3578,31.9567,28.6915
+2016-04-17 00:28:45,3578,32.013,28.6915
+2016-04-17 00:43:39,3578,32.126,28.74
+2016-04-17 00:58:34,3577,32.126,28.6915
+2016-04-17 01:13:32,3577,32.013,28.7425
+2016-04-17 01:28:27,3577,32.0694,28.8423
+2016-04-17 01:43:22,3577,32.126,28.8449
+2016-04-17 01:58:16,3576,32.0694,28.8962
+2016-04-17 02:13:11,3575,32.0045,28.8962
+2016-04-17 02:28:06,3574,32.0694,28.8423
+2016-04-17 02:43:01,3574,32.0609,28.8449
+2016-04-17 02:57:55,3574,32.0045,28.8962
+2016-04-17 03:12:50,3573,32.0609,28.74
+2016-04-17 03:27:45,3572,32.0045,28.689
+2016-04-17 03:42:39,3572,32.1174,28.8423
+2016-04-17 03:57:34,3572,32.0045,28.8423
+2016-04-17 04:12:28,3571,32.0609,28.8423
+2016-04-17 04:27:23,3571,32.0045,28.8423
+2016-04-17 04:42:17,3571,32.0552,28.8423
+2016-04-17 04:57:12,3570,32.0609,28.6915
+2016-04-17 05:12:06,3570,32.1655,28.8423
+2016-04-17 05:27:01,3569,31.9931,28.7425
+2016-04-17 05:41:56,3568,32.0523,28.8962
+2016-04-17 05:56:50,3568,31.9959,28.7425
+2016-04-17 06:11:44,3567,31.8274,28.6915
+2016-04-17 06:26:39,3566,31.9959,28.8423
+2016-04-17 06:41:33,3566,31.9959,28.6915
+2016-04-17 06:56:28,3565,31.9959,28.6915
+2016-04-17 07:11:22,3565,32.1088,28.6915
+2016-04-17 07:26:17,3565,31.9931,28.7425
+2016-04-17 07:41:11,3565,31.9959,28.7425
+2016-04-17 07:56:06,3565,32.0523,28.7425
+2016-04-17 08:11:01,3566,31.8835,28.689
+2016-04-17 08:25:55,3566,31.9903,28.6915
+2016-04-17 08:40:50,3566,32.1598,28.689
+2016-04-17 08:55:45,3566,31.9931,28.689
+2016-04-17 09:10:39,3567,31.9903,28.74
+2016-04-17 09:25:34,3568,32.1031,28.6915
+2016-04-17 09:40:29,3568,32.0467,28.6915
+2016-04-17 09:55:24,3569,32.0467,28.6915
+2016-04-17 10:10:19,3570,32.0467,28.7425
+2016-04-17 10:25:13,3572,32.0467,28.6915
+2016-04-17 10:40:08,3573,32.1031,28.6915
+2016-04-17 10:55:03,3575,31.9874,28.6915
+2016-04-17 11:09:58,3577,31.9874,28.689
+2016-04-17 11:24:53,3579,32.0467,28.689
+2016-04-17 11:39:48,3581,31.9874,28.7425
+2016-04-17 11:54:43,3583,31.8189,28.6915
+2016-04-17 12:09:38,3585,31.875,28.689
+2016-04-17 12:24:33,3586,31.9311,28.6915
+2016-04-17 12:39:28,3587,31.8217,28.6915
+2016-04-17 12:54:23,3588,31.8778,28.689
+2016-04-17 13:09:18,3590,31.8189,28.6915
+2016-04-17 13:24:13,3591,31.763,28.6915
+2016-04-17 13:39:08,3592,31.8806,28.689
+2016-04-17 13:54:03,3593,31.8274,28.6915
+2016-04-17 14:08:58,3593,31.8246,28.689
+2016-04-17 14:23:53,3594,31.8274,28.6915
+2016-04-17 14:38:48,3594,31.9397,28.7425
+2016-04-17 14:53:43,3595,31.8835,28.6915
+2016-04-17 15:08:39,3595,31.9397,28.6915
+2016-04-17 15:23:34,3595,31.8835,28.689
+2016-04-17 15:38:29,3595,31.892,28.74
+2016-04-17 15:53:24,3594,31.9482,28.6915
+2016-04-17 16:08:19,3593,32.0045,28.689
+2016-04-17 16:23:14,3593,31.892,28.6915
+2016-04-17 16:38:09,3593,31.7799,28.6915
+2016-04-17 16:53:04,3592,31.8976,28.689
+2016-04-17 17:08:00,3592,31.7884,28.689
+2016-04-17 17:22:55,3592,31.9005,28.6915
+2016-04-17 17:37:50,3591,31.9567,28.74
+2016-04-17 17:52:45,3589,31.9005,28.689
+2016-04-17 18:07:40,3588,31.8415,28.689
+2016-04-17 18:22:35,3587,31.9005,28.7425
+2016-04-17 18:37:30,3586,32.013,28.7425
+2016-04-17 18:52:32,3585,31.9005,28.689
+2016-04-17 19:07:27,3584,31.9538,28.6915
+2016-04-17 19:22:22,3582,31.8976,28.7425
+2016-04-17 19:37:17,3581,31.9005,28.6915
+2016-04-17 19:52:12,3579,32.013,28.6915
+2016-04-17 20:07:06,3577,31.9005,28.689
+2016-04-17 20:22:01,3576,31.9567,28.6915
+2016-04-17 20:36:56,3574,31.8444,28.6915
+2016-04-17 20:51:51,3572,31.9567,28.689
+2016-04-17 21:06:45,3571,31.8444,28.7425
+2016-04-17 21:21:40,3568,31.9005,28.7425
+2016-04-17 21:36:35,3566,31.9005,28.7425
+2016-04-17 21:51:29,3565,31.9595,28.6915
+2016-04-17 22:06:24,3563,31.7799,28.7425
+2016-04-17 22:21:18,3561,31.8359,28.6915
+2016-04-17 22:36:13,3559,31.8359,28.74
+2016-04-17 22:51:08,3558,31.8976,28.7425
+2016-04-17 23:06:02,3557,31.6598,28.6915
+2016-04-17 23:20:57,3556,31.7184,28.74
+2016-04-17 23:35:51,3555,31.8835,28.7425
+2016-04-17 23:50:46,3554,31.6598,28.74
+2016-04-18 00:05:40,3553,31.6626,28.7425
+2016-04-18 00:20:35,3552,31.8274,28.6915
+2016-04-18 00:35:29,3551,31.8274,28.74
+2016-04-18 00:50:24,3550,31.8274,28.7425
+2016-04-18 01:05:18,3550,31.7686,28.7425
+2016-04-18 01:20:13,3549,31.7714,28.7425
+2016-04-18 01:35:08,3548,31.8217,28.689
+2016-04-18 01:50:02,3547,31.8217,28.7425
+2016-04-18 02:04:57,3546,31.8189,28.7425
+2016-04-18 02:19:51,3545,31.7658,28.6915
+2016-04-18 02:34:46,3545,31.7658,28.6915
+2016-04-18 02:49:40,3545,31.7658,28.6915
+2016-04-18 03:04:34,3544,31.8133,28.6915
+2016-04-18 03:19:29,3543,31.8133,28.7425
+2016-04-18 03:34:23,3542,31.7573,28.7425
+2016-04-18 03:49:18,3542,31.8105,28.74
+2016-04-18 04:04:12,3541,31.8693,28.689
+2016-04-18 04:19:06,3541,31.8048,28.6915
+2016-04-18 04:34:01,3540,31.8048,28.7425
+2016-04-18 04:48:55,3539,31.8048,28.6915
+2016-04-18 05:03:49,3538,31.7489,28.7425
+2016-04-18 05:18:43,3537,31.802,28.7425
+2016-04-18 05:33:38,3537,31.7489,28.74
+2016-04-18 05:48:32,3537,31.8608,28.7425
+2016-04-18 06:03:27,3537,31.8048,28.7425
+2016-04-18 06:18:21,3537,31.7963,28.74
+2016-04-18 06:33:15,3537,31.8524,28.7425
+2016-04-18 06:48:10,3537,31.7963,28.7425
+2016-04-18 07:03:04,3538,31.8524,28.7425
+2016-04-18 07:17:58,3540,31.8524,28.6915
+2016-04-18 07:32:53,3542,31.7963,28.7425
+2016-04-18 07:47:48,3545,31.7963,28.7425
+2016-04-18 08:02:42,3547,31.732,28.7425
+2016-04-18 08:17:37,3551,31.7907,28.6915
+2016-04-18 08:32:31,3554,31.732,28.7425
+2016-04-18 08:47:26,3557,31.9,28.74
+2016-04-18 09:02:21,3560,31.7348,28.7425
+2016-04-18 09:17:16,3565,31.7794,28.689
+2016-04-18 09:32:11,3569,31.732,28.689
+2016-04-18 09:47:05,3574,31.8354,28.6915
+2016-04-18 10:02:01,3577,31.7851,28.6915
+2016-04-18 10:16:56,3581,31.7907,28.689
+2016-04-18 10:31:51,3585,31.8326,28.6915
+2016-04-18 10:46:46,3587,31.7794,28.689
+2016-04-18 11:01:41,3588,31.8354,28.689
+2016-04-18 11:16:37,3591,31.8354,28.7425
+2016-04-18 11:31:32,3591,31.7879,28.689
+2016-04-18 11:46:27,3592,31.9,28.6813
+2016-04-18 12:01:23,3592,31.9562,28.6813
+2016-04-18 12:16:18,3592,31.7879,28.6915
+2016-04-18 12:31:13,3593,31.7935,28.6813
+2016-04-18 12:46:08,3594,31.7376,28.6813
+2016-04-18 13:01:03,3595,31.9056,28.689
+2016-04-18 13:15:58,3596,31.9085,28.6813
+2016-04-18 13:30:53,3596,31.8524,28.6329
+2016-04-18 13:45:56,3597,31.8495,28.6839
+2016-04-18 14:00:51,3598,31.8524,28.6813
+2016-04-18 14:15:47,3598,31.858,28.6813
+2016-04-18 14:30:42,3598,31.7461,28.6813
+2016-04-18 14:45:37,3598,31.6374,28.6813
+2016-04-18 15:00:33,3598,31.6345,28.6839
+2016-04-18 15:15:28,3598,31.7461,28.6813
+2016-04-18 15:30:23,3598,31.7545,28.6813
+2016-04-18 15:45:18,3598,31.643,28.6813
+2016-04-18 16:00:13,3598,31.8105,28.6329
+2016-04-18 16:15:09,3598,31.6486,28.6813
+2016-04-18 16:30:04,3598,31.7071,28.6813
+2016-04-18 16:44:59,3598,31.7099,28.6813
+2016-04-18 16:59:54,3597,31.5958,28.6839
+2016-04-18 17:14:49,3597,31.6514,28.6813
+2016-04-18 17:29:44,3596,31.6542,28.6839
+2016-04-18 17:44:39,3596,31.763,28.7323
+2016-04-18 17:59:34,3595,31.763,28.7323
+2016-04-18 18:14:29,3594,31.8217,28.6813
+2016-04-18 18:29:24,3593,31.7156,28.6839
+2016-04-18 18:44:19,3592,31.7156,28.6813
+2016-04-18 18:59:14,3591,31.7686,28.6839
+2016-04-18 19:14:09,3589,31.7714,28.6813
+2016-04-18 19:29:04,3588,31.6014,28.6813
+2016-04-18 19:43:59,3586,31.7714,28.6839
+2016-04-18 19:58:53,3585,31.8274,28.6839
+2016-04-18 20:13:48,3583,31.6598,28.6864
+2016-04-18 20:28:43,3581,31.7686,28.6839
+2016-04-18 20:43:38,3580,31.7686,28.7349
+2016-04-18 20:58:32,3578,31.6598,28.6813
+2016-04-18 21:13:27,3577,31.8274,28.6813
+2016-04-18 21:28:22,3577,31.6598,28.6813
+2016-04-18 21:43:17,3576,31.7714,28.6839
+2016-04-18 21:58:11,3575,31.7156,28.6813
+2016-04-18 22:13:06,3574,31.7602,28.6864
+2016-04-18 22:28:01,3574,31.6542,28.6839
+2016-04-18 22:42:56,3574,31.7658,28.6864
+2016-04-18 22:57:50,3573,31.7099,28.7349
+2016-04-18 23:12:45,3573,31.6514,28.6813
+2016-04-18 23:27:40,3573,31.5958,28.7323
+2016-04-18 23:42:34,3572,31.763,28.6839
+2016-04-18 23:57:37,3572,31.5986,28.6813
+2016-04-19 00:12:31,3572,31.8217,28.6813
+2016-04-19 00:27:26,3572,31.6542,28.7323
+2016-04-19 00:42:20,3572,31.6542,28.6839
+2016-04-19 00:57:15,3572,31.7658,28.6864
+2016-04-19 01:12:10,3572,31.6542,28.6864
+2016-04-19 01:27:04,3572,31.7071,28.6839
+2016-04-19 01:41:59,3573,31.5958,28.6813
+2016-04-19 01:56:54,3573,31.6542,28.7349
+2016-04-19 02:11:48,3572,31.7099,28.6839
+2016-04-19 02:26:43,3572,31.7071,28.6839
+2016-04-19 02:41:38,3573,31.6458,28.6839
+2016-04-19 02:56:33,3573,31.6987,28.6813
+2016-04-19 03:11:28,3573,31.6458,28.7323
+2016-04-19 03:26:22,3572,31.643,28.6839
+2016-04-19 03:41:17,3572,31.5902,28.6839
+2016-04-19 03:56:11,3572,31.643,28.7323
+2016-04-19 04:11:06,3572,31.5874,28.6813
+2016-04-19 04:26:01,3571,31.7015,28.6839
+2016-04-19 04:40:56,3571,31.643,28.6864
+2016-04-19 04:55:50,3571,31.5902,28.6813
+2016-04-19 05:10:45,3571,31.6458,28.6813
+2016-04-19 05:25:40,3571,31.6987,28.7349
+2016-04-19 05:40:35,3570,31.7461,28.6839
+2016-04-19 05:55:29,3570,31.6931,28.7349
+2016-04-19 06:10:24,3570,31.6374,28.6813
+2016-04-19 06:25:19,3571,31.5818,28.7349
+2016-04-19 06:40:14,3570,31.7461,28.7349
+2016-04-19 06:55:09,3570,31.7461,28.7323
+2016-04-19 07:10:03,3570,31.7489,28.7349
+2016-04-19 07:24:58,3570,31.6345,28.6864
+2016-04-19 07:39:53,3570,31.6345,28.7349
+2016-04-19 07:54:48,3570,31.6374,28.7374
+2016-04-19 08:09:43,3570,31.6374,28.6813
+2016-04-19 08:24:37,3571,31.6374,28.6813
+2016-04-19 08:39:32,3571,31.5789,28.7323
+2016-04-19 08:54:27,3572,31.6345,28.6839
+2016-04-19 09:09:22,3573,31.7489,28.6813
+2016-04-19 09:24:17,3574,31.6931,28.6813
+2016-04-19 09:39:12,3577,31.6374,28.6839
+2016-04-19 09:54:07,3579,31.6931,28.6813
+2016-04-19 10:09:02,3581,31.6345,28.6813
+2016-04-19 10:23:57,3585,31.7461,28.6813
+2016-04-19 10:39:00,3587,31.6345,28.6813
+2016-04-19 10:53:55,3588,31.7461,28.6813
+2016-04-19 11:08:50,3589,31.7461,28.6813
+2016-04-19 11:23:53,3589,31.6345,28.6839
+2016-04-19 11:38:48,3590,31.7489,28.6813
+2016-04-19 11:53:43,3591,31.6903,28.6839
+2016-04-19 12:08:38,3592,31.6903,28.6813
+2016-04-19 12:23:34,3592,31.6903,28.6839
+2016-04-19 12:38:29,3593,31.7461,28.6813
+2016-04-19 12:53:24,3592,31.643,28.6813
+2016-04-19 13:08:19,3593,31.6987,28.6839
+2016-04-19 13:23:14,3593,31.7573,28.6813
+2016-04-19 13:38:09,3593,31.7545,28.6839
+2016-04-19 13:53:04,3593,31.5874,28.6839
+2016-04-19 14:08:00,3594,31.7015,28.6813
+2016-04-19 14:22:55,3594,31.763,28.6813
+2016-04-19 14:37:50,3594,31.7658,28.6304
+2016-04-19 14:52:46,3594,31.7658,28.6839
+2016-04-19 15:07:41,3595,31.763,28.6813
+2016-04-19 15:22:36,3595,31.7071,28.6839
+2016-04-19 15:37:31,3595,31.7658,28.6839
+2016-04-19 15:52:27,3595,31.6486,28.6839
+2016-04-19 16:07:22,3595,31.763,28.6329
+2016-04-19 16:22:17,3595,31.7071,28.6839
+2016-04-19 16:37:13,3595,31.7071,28.6864
+2016-04-19 16:52:08,3595,31.7658,28.6839
+2016-04-19 17:07:04,3595,31.8189,28.6813
+2016-04-19 17:21:59,3595,31.7714,28.6839
+2016-04-19 17:36:54,3595,31.6514,28.6813
+2016-04-19 17:51:49,3594,31.7686,28.6813
+2016-04-19 18:06:44,3593,31.7156,28.6839
+2016-04-19 18:21:39,3593,31.657,28.6839
+2016-04-19 18:36:42,3592,31.7714,28.6864
+2016-04-19 18:51:37,3590,31.7156,28.6813
+2016-04-19 19:06:32,3588,31.8274,28.6813
+2016-04-19 19:21:26,3588,31.6598,28.6813
+2016-04-19 19:36:21,3586,31.657,28.6864
+2016-04-19 19:51:16,3585,31.7714,28.6839
+2016-04-19 20:06:11,3583,31.7714,28.6813
+2016-04-19 20:21:06,3581,31.7156,28.6839
+2016-04-19 20:36:01,3580,31.7714,28.7349
+2016-04-19 20:50:56,3578,31.7714,28.6813
+2016-04-19 21:05:51,3577,31.8246,28.7349
+2016-04-19 21:20:46,3575,31.7714,28.6839
+2016-04-19 21:35:41,3574,31.8274,28.6813
+2016-04-19 21:50:35,3573,31.6598,28.6839
+2016-04-19 22:05:30,3571,31.7099,28.7349
+2016-04-19 22:20:25,3570,31.7658,28.6839
+2016-04-19 22:35:20,3568,31.6542,28.6813
+2016-04-19 22:50:15,3567,31.7658,28.6839
+2016-04-19 23:05:09,3566,31.7071,28.7374
+2016-04-19 23:20:04,3565,31.6542,28.7349
+2016-04-19 23:34:59,3564,31.8217,28.6839
+2016-04-19 23:49:54,3563,31.7099,28.6839
+2016-04-20 00:04:49,3561,31.7658,28.7349
+2016-04-20 00:19:44,3559,31.6542,28.6839
+2016-04-20 00:34:39,3559,31.5431,28.7349
+2016-04-20 00:49:33,3557,31.6542,28.7323
+2016-04-20 01:04:28,3557,31.7658,28.7349
+2016-04-20 01:19:23,3556,31.8133,28.7323
+2016-04-20 01:34:18,3554,31.6458,28.7349
+2016-04-20 01:49:12,3552,31.4765,28.6813
+2016-04-20 02:04:07,3551,31.5818,28.7349
+2016-04-20 02:19:01,3550,31.6374,28.7374
+2016-04-20 02:33:56,3549,31.5789,28.7323
+2016-04-20 02:48:50,3548,31.6374,28.7349
+2016-04-20 03:03:44,3547,31.6374,28.7374
+2016-04-20 03:18:39,3546,31.5263,28.7349
+2016-04-20 03:33:33,3545,31.5789,28.7374
+2016-04-20 03:48:28,3545,31.5818,28.6839
+2016-04-20 04:03:22,3545,31.8048,28.7349
+2016-04-20 04:18:17,3544,31.6317,28.7374
+2016-04-20 04:33:11,3544,31.5733,28.7349
+2016-04-20 04:48:05,3544,31.6289,28.7374
+2016-04-20 05:03:00,3544,31.6289,28.7349
+2016-04-20 05:17:54,3544,31.5733,28.7349
+2016-04-20 05:32:49,3544,31.6289,28.6813
+2016-04-20 05:47:43,3544,31.6846,28.6839
+2016-04-20 06:02:38,3544,31.7963,28.6839
+2016-04-20 06:17:32,3545,31.5733,28.7323
+2016-04-20 06:32:27,3545,31.6762,28.7349
+2016-04-20 06:47:21,3545,31.6205,28.7374
+2016-04-20 07:02:16,3545,31.6233,28.6864
+2016-04-20 07:17:10,3546,31.679,28.6839
+2016-04-20 07:32:05,3548,31.6233,28.7374
+2016-04-20 07:46:59,3550,31.6121,28.7349
+2016-04-20 08:01:54,3552,31.5565,28.7323
+2016-04-20 08:16:48,3556,31.6121,28.6864
+2016-04-20 08:31:43,3559,31.5011,28.6839
+2016-04-20 08:46:38,3563,31.6121,28.6839
+2016-04-20 09:01:32,3566,31.4983,28.6839
+2016-04-20 09:16:27,3568,31.7235,28.6839
+2016-04-20 09:31:22,3571,31.6121,28.6839
+2016-04-20 09:46:17,3573,31.7263,28.6813
+2016-04-20 10:01:11,3574,31.6678,28.6839
+2016-04-20 10:16:06,3577,31.6121,28.7323
+2016-04-20 10:31:01,3579,31.7235,28.6813
+2016-04-20 10:45:56,3582,31.6121,28.6864
+2016-04-20 11:00:51,3584,31.5565,28.7349
+2016-04-20 11:15:46,3586,31.6678,28.6813
+2016-04-20 11:30:42,3587,31.7207,28.6864
+2016-04-20 11:45:37,3588,31.7235,28.6813
+2016-04-20 12:00:32,3589,31.6734,28.6329
+2016-04-20 12:15:27,3590,31.732,28.6839
+2016-04-20 12:30:22,3592,31.732,28.6839
+2016-04-20 12:45:17,3593,31.6734,28.7323
+2016-04-20 13:00:12,3593,31.7376,28.7323
+2016-04-20 13:15:08,3594,31.6818,28.7349
+2016-04-20 13:30:03,3595,31.6289,28.6839
+2016-04-20 13:44:58,3595,31.7376,28.6839
+2016-04-20 13:59:54,3596,31.7376,28.6813
+2016-04-20 14:14:49,3597,31.6818,28.6839
+2016-04-20 14:29:44,3597,31.7461,28.6813
+2016-04-20 14:44:39,3596,31.7461,28.6839
+2016-04-20 14:59:34,3596,31.7461,28.6813
+2016-04-20 15:14:29,3596,31.6903,28.6813
+2016-04-20 15:29:25,3596,31.7461,28.5314
+2016-04-20 15:44:20,3596,31.6458,28.5314
+2016-04-20 15:59:15,3597,31.7015,28.5314
+2016-04-20 16:14:10,3596,31.6987,28.5796
+2016-04-20 16:29:05,3596,31.763,28.6813
+2016-04-20 16:44:01,3596,31.7071,28.5289
+2016-04-20 16:58:56,3596,31.6514,28.5314
+2016-04-20 17:13:51,3595,31.7099,28.5289
+2016-04-20 17:28:46,3595,31.763,28.5238
+2016-04-20 17:43:41,3595,31.6514,28.5289
+2016-04-20 17:58:37,3594,31.763,28.5289
+2016-04-20 18:13:32,3593,31.7099,28.5339
+2016-04-20 18:28:27,3593,31.7658,28.5314
+2016-04-20 18:43:22,3592,31.7658,28.5314
+2016-04-20 18:58:17,3590,31.5986,28.5796
+2016-04-20 19:13:12,3588,31.7071,28.5289
+2016-04-20 19:28:07,3587,31.6514,28.5339
+2016-04-20 19:43:02,3586,31.5487,28.5314
+2016-04-20 19:57:57,3585,31.5487,28.5821
+2016-04-20 20:12:52,3583,31.607,28.5314
+2016-04-20 20:27:47,3581,31.5487,28.5339
+2016-04-20 20:42:42,3578,31.4932,28.5796
+2016-04-20 20:57:37,3577,31.5403,28.5339
+2016-04-20 21:12:32,3575,31.4848,28.5821
+2016-04-20 21:27:27,3573,31.4351,28.6329
+2016-04-20 21:42:21,3572,31.4848,28.5314
+2016-04-20 21:57:16,3570,31.5403,28.5796
+2016-04-20 22:12:10,3568,31.4323,28.5796
+2016-04-20 22:27:05,3566,31.5431,28.5796
+2016-04-20 22:42:00,3565,31.5986,28.5821
+2016-04-20 22:56:54,3564,31.4323,28.5314
+2016-04-20 23:11:48,3562,31.5986,28.5796
+2016-04-20 23:26:43,3560,31.6542,28.5314
+2016-04-20 23:41:38,3559,31.4323,28.5821
+2016-04-20 23:56:32,3558,31.4323,28.5821
+2016-04-21 00:11:27,3557,31.4295,28.5314
+2016-04-21 00:26:21,3556,31.4295,28.5339
+2016-04-21 00:41:16,3555,31.5375,28.5339
+2016-04-21 00:56:10,3553,31.5347,28.6355
+2016-04-21 01:11:05,3552,31.4765,28.5821
+2016-04-21 01:25:59,3551,31.5347,28.5847
+2016-04-21 01:40:54,3550,31.4681,28.6304
+2016-04-21 01:55:48,3549,31.5263,28.5821
+2016-04-21 02:10:43,3548,31.5235,28.5847
+2016-04-21 02:25:38,3547,31.4156,28.5821
+2016-04-21 02:40:32,3546,31.5789,28.5796
+2016-04-21 02:55:27,3545,31.4156,28.6329
+2016-04-21 03:10:21,3545,31.4156,28.5821
+2016-04-21 03:25:15,3544,31.4156,28.6355
+2016-04-21 03:40:10,3543,31.5263,28.5821
+2016-04-21 03:55:04,3543,31.5818,28.5314
+2016-04-21 04:09:58,3542,31.5179,28.5821
+2016-04-21 04:24:52,3541,31.4625,28.5339
+2016-04-21 04:39:46,3539,31.4072,28.5847
+2016-04-21 04:54:41,3539,31.4625,28.6355
+2016-04-21 05:09:35,3537,31.5179,28.5821
+2016-04-21 05:24:29,3537,31.5179,28.6329
+2016-04-21 05:39:23,3537,31.5179,28.5314
+2016-04-21 05:54:17,3536,31.5179,28.5796
+2016-04-21 06:09:11,3535,31.5095,28.5821
+2016-04-21 06:24:05,3534,31.5123,28.5821
+2016-04-21 06:39:07,3535,31.5123,28.6329
+2016-04-21 06:54:01,3536,31.4569,28.6355
+2016-04-21 07:08:55,3536,31.5039,28.6355
+2016-04-21 07:23:49,3537,31.5011,28.5821
+2016-04-21 07:38:43,3539,31.3905,28.5339
+2016-04-21 07:53:37,3543,31.4457,28.5821
+2016-04-21 08:08:32,3545,31.5011,28.5796
+2016-04-21 08:23:26,3549,31.5011,28.5796
+2016-04-21 08:38:28,3553,31.3354,28.5289
+2016-04-21 08:53:22,3557,31.3905,28.5847
+2016-04-21 09:08:17,3562,31.3905,28.5314
+2016-04-21 09:23:12,3566,31.5011,28.5289
+2016-04-21 09:38:07,3572,31.5565,28.5289
+2016-04-21 09:53:02,3577,31.5565,28.5289
+2016-04-21 10:07:57,3581,31.5565,28.5796
+2016-04-21 10:22:52,3585,31.5011,28.5314
+2016-04-21 10:37:48,3588,31.5565,28.5289
+2016-04-21 10:52:43,3592,31.5565,28.5314
+2016-04-21 11:07:38,3594,31.5011,28.4782
+2016-04-21 11:22:34,3595,31.6121,28.5314
+2016-04-21 11:37:29,3596,31.5565,28.5289
+2016-04-21 11:52:25,3598,31.5565,28.5289
+2016-04-21 12:07:20,3598,31.5011,28.5289
+2016-04-21 12:22:16,3598,31.6177,28.4807
+2016-04-21 12:37:11,3598,31.5067,28.4706
+2016-04-21 12:52:07,3598,31.5649,28.5289
+2016-04-21 13:07:02,3598,31.5705,28.5289
+2016-04-21 13:21:57,3598,31.5179,28.5289
+2016-04-21 13:36:53,3599,31.5151,28.5314
+2016-04-21 13:51:48,3599,31.5705,28.5213
+2016-04-21 14:06:44,3599,31.4625,28.4706
+2016-04-21 14:21:40,3599,31.5789,28.5213
+2016-04-21 14:36:35,3599,31.5235,28.5213
+2016-04-21 14:51:31,3599,31.5235,28.5314
+2016-04-21 15:06:27,3599,31.5789,28.5314
+2016-04-21 15:21:22,3599,31.5319,28.5213
+2016-04-21 15:36:18,3599,31.5347,28.5213
+2016-04-21 15:51:13,3599,31.5403,28.5213
+2016-04-21 16:06:09,3599,31.5958,28.5213
+2016-04-21 16:21:04,3599,31.4295,28.5213
+2016-04-21 16:36:00,3599,31.6014,28.5745
+2016-04-21 16:50:55,3599,31.6014,28.5213
+2016-04-21 17:05:51,3599,31.6598,28.5213
+2016-04-21 17:20:46,3599,31.5487,28.5238
+2016-04-21 17:35:42,3599,31.6042,28.5238
+2016-04-21 17:50:37,3599,31.6598,28.5238
+2016-04-21 18:05:32,3599,31.5459,28.5213
+2016-04-21 18:20:28,3599,31.5459,28.5213
+2016-04-21 18:35:23,3599,31.6042,28.5213
+2016-04-21 18:50:18,3598,31.6126,28.5213
+2016-04-21 19:05:14,3598,31.5571,28.572
+2016-04-21 19:20:09,3598,31.6126,28.5213
+2016-04-21 19:35:04,3597,31.6683,28.5238
+2016-04-21 19:49:59,3595,31.4463,28.5745
+2016-04-21 20:04:54,3593,31.6683,28.5238
+2016-04-21 20:19:49,3591,31.4546,28.5238
+2016-04-21 20:34:44,3588,31.391,28.572
+2016-04-21 20:49:39,3587,31.4435,28.5238
+2016-04-21 21:04:34,3585,31.4463,28.5796
+2016-04-21 21:19:29,3583,31.51,28.5238
+2016-04-21 21:34:24,3581,31.3359,28.5745
+2016-04-21 21:49:19,3579,31.391,28.5821
+2016-04-21 22:04:14,3577,31.4463,28.5314
+2016-04-21 22:19:09,3576,31.391,28.5314
+2016-04-21 22:34:04,3574,31.3883,28.6329
+2016-04-21 22:48:59,3573,31.391,28.6329
+2016-04-21 23:03:53,3572,31.4932,28.5289
+2016-04-21 23:18:48,3571,31.4463,28.5314
+2016-04-21 23:33:43,3569,31.3827,28.5821
+2016-04-21 23:48:37,3568,31.5487,28.5289
+2016-04-22 00:03:32,3566,31.496,28.5339
+2016-04-22 00:18:27,3566,31.4379,28.5847
+2016-04-22 00:33:21,3564,31.3799,28.5821
+2016-04-22 00:48:16,3563,31.3827,28.5314
+2016-04-22 01:03:11,3562,31.3827,28.5821
+2016-04-22 01:18:05,3561,31.2726,28.5339
+2016-04-22 01:33:00,3559,31.4379,28.5796
+2016-04-22 01:47:54,3559,31.5487,28.5821
+2016-04-22 02:02:49,3558,31.4323,28.5821
+2016-04-22 02:17:43,3556,31.5431,28.6355
+2016-04-22 02:32:37,3556,31.4848,28.5796
+2016-04-22 02:47:32,3555,31.5431,28.6329
+2016-04-22 03:02:26,3554,31.482,28.5847
+2016-04-22 03:17:21,3552,31.4323,28.5289
+2016-04-22 03:32:15,3552,31.4295,28.5847
+2016-04-22 03:47:10,3551,31.4323,28.6304
+2016-04-22 04:02:04,3550,31.3771,28.6355
+2016-04-22 04:16:59,3549,31.267,28.5847
+2016-04-22 04:31:53,3549,31.3688,28.6304
+2016-04-22 04:46:48,3548,31.3137,28.5847
+2016-04-22 05:01:42,3546,31.3688,28.5796
+2016-04-22 05:16:37,3545,31.3109,28.5821
+2016-04-22 05:31:31,3545,31.4156,28.5821
+2016-04-22 05:46:26,3545,31.3576,28.5339
+2016-04-22 06:01:20,3545,31.4128,28.5796
+2016-04-22 06:16:14,3545,31.3576,28.6355
+2016-04-22 06:31:09,3544,31.3053,28.6329
+2016-04-22 06:46:03,3544,31.4128,28.5821
+2016-04-22 07:00:58,3545,31.2504,28.6304
+2016-04-22 07:15:52,3545,31.3026,28.4807
+2016-04-22 07:30:47,3547,31.3521,28.5821
+2016-04-22 07:45:41,3549,31.4072,28.5796
+2016-04-22 08:00:36,3551,31.297,28.5796
+2016-04-22 08:15:30,3553,31.1872,28.6329
+2016-04-22 08:30:25,3556,31.297,28.4327
+2016-04-22 08:45:19,3559,31.3521,28.4302
+2016-04-22 09:00:14,3563,31.4072,28.4302
+2016-04-22 09:15:09,3566,31.4072,28.3797
+2016-04-22 09:30:04,3570,31.242,28.3772
+2016-04-22 09:44:59,3574,31.5179,28.4277
+2016-04-22 09:59:54,3577,31.3521,28.3823
+2016-04-22 10:14:49,3581,31.3521,28.3772
+2016-04-22 10:29:44,3584,31.4044,28.3797
+2016-04-22 10:44:40,3586,31.4044,28.3797
+2016-04-22 10:59:35,3588,31.3493,28.3772
+2016-04-22 11:14:30,3591,31.4044,28.3797
+2016-04-22 11:29:25,3592,31.297,28.3772
+2016-04-22 11:44:21,3593,31.4597,28.3772
+2016-04-22 11:59:16,3593,31.4597,28.3772
+2016-04-22 12:14:11,3594,31.4597,28.3772
+2016-04-22 12:29:07,3594,31.4044,28.4302
+2016-04-22 12:44:02,3595,31.4681,28.3797
+2016-04-22 12:58:57,3595,31.3576,28.3797
+2016-04-22 13:13:53,3596,31.3576,28.4277
+2016-04-22 13:28:48,3597,31.4709,28.3772
+2016-04-22 13:43:43,3597,31.5235,28.3772
+2016-04-22 13:58:38,3598,31.4681,28.3797
+2016-04-22 14:13:34,3598,31.4765,28.3269
+2016-04-22 14:28:29,3598,31.4239,28.3772
+2016-04-22 14:43:24,3598,31.366,28.3797
+2016-04-22 14:58:20,3598,31.4876,28.3772
+2016-04-22 15:13:15,3598,31.4876,28.3772
+2016-04-22 15:28:10,3598,31.4267,28.3823
+2016-04-22 15:43:06,3598,31.4295,28.3797
+2016-04-22 15:58:01,3598,31.5431,28.3772
+2016-04-22 16:12:56,3598,31.4323,28.3772
+2016-04-22 16:27:51,3598,31.5431,28.3772
+2016-04-22 16:42:47,3598,31.4932,28.3772
+2016-04-22 16:57:42,3598,31.4904,28.3772
+2016-04-22 17:12:37,3598,31.4379,28.3772
+2016-04-22 17:27:32,3597,31.4932,28.3772
+2016-04-22 17:42:27,3596,31.4932,28.3772
+2016-04-22 17:57:22,3595,31.3827,28.4302
+2016-04-22 18:12:18,3595,31.4351,28.3797
+2016-04-22 18:27:13,3595,31.5487,28.3823
+2016-04-22 18:42:08,3594,31.4932,28.3772
+2016-04-22 18:57:03,3593,31.4463,28.4302
+2016-04-22 19:11:57,3591,31.4463,28.3797
+2016-04-22 19:26:52,3590,31.4463,28.3797
+2016-04-22 19:41:47,3588,31.5571,28.4782
+2016-04-22 19:56:42,3587,31.5571,28.3797
+2016-04-22 20:11:37,3586,31.5016,28.3294
+2016-04-22 20:26:32,3585,31.5571,28.3772
+2016-04-22 20:41:27,3583,31.391,28.4302
+2016-04-22 20:56:22,3582,31.4463,28.3797
+2016-04-22 21:11:16,3580,31.6126,28.3772
+2016-04-22 21:26:11,3578,31.5016,28.3797
+2016-04-22 21:41:06,3577,31.391,28.4302
+2016-04-22 21:56:01,3576,31.4379,28.4782
+2016-04-22 22:10:55,3574,31.3359,28.3823
+2016-04-22 22:25:50,3574,31.4379,28.4302
+2016-04-22 22:40:45,3572,31.4379,28.3797
+2016-04-22 22:55:40,3571,31.3276,28.3797
+2016-04-22 23:10:34,3570,31.4379,28.4782
+2016-04-22 23:25:29,3569,31.4379,28.4782
+2016-04-22 23:40:24,3567,31.5487,28.4782
+2016-04-22 23:55:18,3566,31.4932,28.4327
+2016-04-23 01:09:52,3559,31.4379,28.4782
+2016-04-23 01:24:46,3558,31.2177,28.3797
+2016-04-23 01:39:41,3557,31.3743,28.4782
+2016-04-23 01:54:35,3556,31.322,28.4807
+2016-04-23 02:09:30,3555,31.0999,28.3772
+2016-04-23 02:24:25,3553,31.3771,28.4782
+2016-04-23 02:39:19,3552,31.2642,28.3797
+2016-04-23 02:54:14,3552,31.2121,28.3823
+2016-04-23 03:09:08,3551,31.1573,28.4782
+2016-04-23 03:24:02,3550,31.267,28.3823
+2016-04-23 03:38:56,3550,31.2121,28.4277
+2016-04-23 03:53:51,3550,31.3771,28.4833
+2016-04-23 04:08:45,3550,31.3688,28.4782
+2016-04-23 04:23:40,3549,31.3109,28.3797
+2016-04-23 04:38:34,3549,31.3137,28.4909
+2016-04-23 04:53:29,3549,31.0916,28.4277
+2016-04-23 05:08:23,3548,31.0861,28.3823
+2016-04-23 05:23:18,3548,31.149,28.4277
+2016-04-23 05:38:12,3547,31.1955,28.4327
+2016-04-23 05:53:07,3547,31.1955,28.4302
+2016-04-23 06:08:01,3546,31.3026,28.4327
+2016-04-23 06:22:56,3546,31.1407,28.4302
+2016-04-23 06:37:50,3546,31.3026,28.4302
+2016-04-23 06:52:45,3545,31.1927,28.3848
+2016-04-23 07:07:39,3546,31.1955,28.4302
+2016-04-23 07:22:34,3547,31.3053,28.4302
+2016-04-23 07:37:28,3548,31.1955,28.4807
+2016-04-23 07:52:23,3549,31.1872,28.3797
+2016-04-23 08:07:17,3550,31.2998,28.4807
+2016-04-23 08:22:12,3551,31.297,28.3823
+2016-04-23 08:37:06,3553,31.2476,28.3772
+2016-04-23 08:52:01,3554,31.1872,28.4782
+2016-04-23 09:06:55,3556,31.0778,28.3797
+2016-04-23 09:21:57,3557,31.1872,28.4302
+2016-04-23 09:36:52,3559,31.1872,28.3797
+2016-04-23 09:51:47,3562,31.242,28.4277
+2016-04-23 10:06:41,3564,31.297,28.4833
+2016-04-23 10:21:36,3566,31.297,28.4302
+2016-04-23 10:36:31,3567,31.297,28.4807
+2016-04-23 10:51:26,3568,31.297,28.4302
+2016-04-23 11:06:21,3569,31.297,28.3772
+2016-04-23 11:21:23,3571,31.1872,28.4807
+2016-04-23 11:36:18,3571,31.297,28.4302
+2016-04-23 11:51:13,3571,31.2942,28.3772
+2016-04-23 12:06:08,3572,31.297,28.3823
+2016-04-23 12:21:02,3573,31.297,28.3797
+2016-04-23 12:35:57,3573,31.2942,28.3797
+2016-04-23 12:50:52,3574,31.297,28.3823
+2016-04-23 13:05:47,3574,31.3521,28.3772
+2016-04-23 13:20:42,3575,31.3493,28.3823
+2016-04-23 13:35:37,3575,31.297,28.4807
+2016-04-23 13:50:32,3577,31.1872,28.3797
+2016-04-23 14:05:26,3577,31.297,28.4277
+2016-04-23 14:20:21,3578,31.1872,28.4302
+2016-04-23 14:35:16,3579,31.242,28.3797
+2016-04-23 14:50:11,3578,31.2942,28.3772
+2016-04-23 15:05:06,3579,31.297,28.4302
+2016-04-23 15:20:01,3578,31.242,28.4277
+2016-04-23 15:34:56,3578,31.297,28.3797
+2016-04-23 15:49:51,3578,31.3026,28.3772
+2016-04-23 16:04:46,3578,31.242,28.4302
+2016-04-23 16:19:41,3578,31.1927,28.3797
+2016-04-23 16:34:35,3577,31.3026,28.4807
+2016-04-23 16:49:30,3577,31.3053,28.3772
+2016-04-23 17:04:25,3577,31.3053,28.3772
+2016-04-23 17:19:20,3576,31.3026,28.4277
+2016-04-23 17:34:14,3575,31.2476,28.4277
+2016-04-23 17:49:09,3575,31.2504,28.4327
+2016-04-23 18:04:04,3574,31.3053,28.3797
+2016-04-23 18:18:59,3574,31.3576,28.4302
+2016-04-23 18:33:54,3573,31.3026,28.4302
+2016-04-23 18:48:48,3572,31.3604,28.4302
+2016-04-23 19:03:43,3570,31.3026,28.3797
+2016-04-23 19:18:38,3568,31.3053,28.4302
+2016-04-23 19:33:32,3566,31.2476,28.4782
+2016-04-23 19:48:27,3565,31.3026,28.4277
+2016-04-23 20:03:22,3563,31.1927,28.4302
+2016-04-23 20:18:16,3561,31.2504,28.3823
+2016-04-23 20:33:11,3559,31.3026,28.4277
+2016-04-23 20:48:05,3557,31.1324,28.4807
+2016-04-23 21:03:00,3555,31.2942,28.4302
+2016-04-23 21:17:54,3553,31.242,28.3797
+2016-04-23 21:32:48,3552,31.1324,28.2816
+2016-04-23 21:47:42,3550,31.1324,28.2791
+2016-04-23 22:02:37,3549,31.1324,28.2766
+2016-04-23 22:17:31,3548,31.1872,28.2791
+2016-04-23 22:32:25,3546,31.297,28.2791
+2016-04-23 22:47:19,3545,31.0778,28.2791
+2016-04-23 23:02:13,3545,31.1872,28.2791
+2016-04-23 23:17:08,3543,31.1872,28.2791
+2016-04-23 23:32:02,3542,31.1324,28.2791
+2016-04-23 23:46:56,3540,31.1844,28.3319
+2016-04-24 00:01:51,3539,31.1789,28.3294
+2016-04-24 00:16:45,3537,31.1817,28.2816
+2016-04-24 00:31:39,3537,31.1789,28.2791
+2016-04-24 00:46:33,3536,31.2914,28.3269
+2016-04-24 01:01:30,3536,31.1159,28.3294
+2016-04-24 01:16:25,3535,31.2282,28.3294
+2016-04-24 01:31:19,3534,31.2282,28.2816
+2016-04-24 01:46:13,3533,31.1706,28.2791
+2016-04-24 02:01:08,3533,31.1734,28.3319
+2016-04-24 02:16:02,3532,31.1706,28.3319
+2016-04-24 02:30:56,3532,31.2803,28.2791
+2016-04-24 02:45:50,3531,31.1706,28.2791
+2016-04-24 03:00:44,3531,31.2748,28.3294
+2016-04-24 03:15:38,3531,31.2199,28.3269
+2016-04-24 03:30:32,3530,31.272,28.2816
+2016-04-24 03:45:27,3530,31.1651,28.2791
+2016-04-24 04:00:21,3529,31.327,28.3319
+2016-04-24 04:15:15,3528,31.2748,28.2791
+2016-04-24 04:30:09,3527,31.1651,28.3319
+2016-04-24 04:45:03,3526,31.272,28.2716
+2016-04-24 04:59:57,3526,31.154,28.3319
+2016-04-24 05:14:51,3524,31.2088,28.2791
+2016-04-24 05:29:46,3524,31.2088,28.2791
+2016-04-24 05:44:40,3523,31.154,28.2716
+2016-04-24 05:59:34,3523,31.3187,28.2741
+2016-04-24 06:14:28,3523,31.2581,28.3244
+2016-04-24 06:29:23,3523,31.2005,28.2816
+2016-04-24 06:44:17,3525,31.1457,28.3218
+2016-04-24 06:59:11,3526,31.1374,28.2791
+2016-04-24 07:14:05,3528,31.1922,28.2716
+2016-04-24 07:29:00,3530,31.0827,28.2716
+2016-04-24 07:43:54,3531,31.247,28.2816
+2016-04-24 07:58:48,3534,31.0827,28.3244
+2016-04-24 08:13:43,3535,31.1374,28.3244
+2016-04-24 08:28:37,3537,31.1374,28.2741
+2016-04-24 08:43:31,3538,31.247,28.3244
+2016-04-24 08:58:26,3539,31.247,28.3244
+2016-04-24 09:13:20,3541,31.302,28.3218
+2016-04-24 09:28:14,3544,31.1374,28.3218
+2016-04-24 09:43:09,3546,31.1374,28.2691
+2016-04-24 09:58:04,3549,31.1374,28.2691
+2016-04-24 10:12:58,3551,30.9737,28.3143
+2016-04-24 10:27:53,3553,31.0309,28.3269
+2016-04-24 10:42:47,3556,31.1374,28.2641
+2016-04-24 10:57:42,3557,31.302,28.2716
+2016-04-24 11:12:37,3559,31.1374,28.2641
+2016-04-24 11:27:31,3559,31.247,28.2139
+2016-04-24 11:42:26,3560,31.2443,28.2716
+2016-04-24 11:57:21,3561,31.1402,28.2641
+2016-04-24 12:12:15,3562,31.1374,28.2641
+2016-04-24 12:27:10,3564,31.1922,28.2666
+2016-04-24 12:42:05,3565,31.2498,28.2641
+2016-04-24 12:56:59,3566,31.302,28.3143
+2016-04-24 13:11:54,3567,31.0827,28.2641
+2016-04-24 13:26:49,3570,31.1374,28.2641
+2016-04-24 13:41:44,3572,31.1374,28.2666
+2016-04-24 13:56:39,3574,31.247,28.2641
+2016-04-24 14:11:34,3577,31.1374,28.2641
+2016-04-24 14:26:29,3577,30.9737,28.2139
+2016-04-24 14:41:24,3577,31.0282,28.2666
+2016-04-24 14:56:19,3577,31.0827,28.2666
+2016-04-24 15:11:13,3577,31.0282,28.2641
+2016-04-24 15:26:08,3575,31.0282,28.2164
+2016-04-24 15:41:03,3573,31.0365,28.2666
+2016-04-24 15:55:58,3571,31.091,28.2666
+2016-04-24 16:10:53,3569,31.0365,28.2139
+2016-04-24 16:25:47,3568,31.0365,28.2164
+2016-04-24 16:40:49,3566,31.091,28.2641
+2016-04-24 16:55:44,3566,31.0447,28.2641
+2016-04-24 17:10:39,3566,31.0965,28.2666
+2016-04-24 17:25:33,3565,31.0447,28.2641
+2016-04-24 17:40:28,3565,31.0447,28.2641
+2016-04-24 17:55:23,3564,30.8816,28.2641
+2016-04-24 18:10:18,3564,31.042,28.2641
+2016-04-24 18:25:13,3562,31.0447,28.2666
+2016-04-24 18:40:07,3560,31.0447,28.2139
+2016-04-24 18:55:02,3559,31.0993,28.2641
+2016-04-24 19:09:57,3557,30.993,28.2641
+2016-04-24 19:24:52,3555,30.8788,28.3168
+2016-04-24 19:39:46,3553,30.8816,28.2641
+2016-04-24 19:54:41,3551,31.0447,28.3168
+2016-04-24 20:09:35,3549,30.8816,28.3143
+2016-04-24 20:24:30,3547,30.982,28.3143
+2016-04-24 20:39:24,3545,31.0365,28.2641
+2016-04-24 20:54:19,3543,30.982,28.2565
+2016-04-24 21:09:13,3541,31.0365,28.2641
+2016-04-24 21:24:07,3539,30.982,28.2666
+2016-04-24 21:39:01,3537,30.982,28.2666
+2016-04-24 21:53:56,3536,30.9276,28.3068
+2016-04-24 22:08:50,3535,30.9194,28.2641
+2016-04-24 22:23:44,3532,31.0309,28.2641
+2016-04-24 22:38:38,3531,30.9765,28.3093
+2016-04-24 22:53:33,3530,31.0827,28.2641
+2016-04-24 23:08:27,3529,31.0282,28.2666
+2016-04-24 23:23:21,3528,31.0309,28.3143
+2016-04-24 23:38:16,3527,31.0282,28.3168
+2016-04-24 23:53:10,3526,30.9194,28.3168
+2016-04-25 00:08:04,3524,31.0827,28.3093
+2016-04-25 00:22:58,3523,31.0282,28.3068
+2016-04-25 00:37:52,3523,30.9737,28.3143
+2016-04-25 00:52:46,3522,31.1374,28.3093
+2016-04-25 01:07:41,3522,31.1374,28.259
+2016-04-25 01:22:35,3521,31.0827,28.2089
+2016-04-25 01:37:29,3521,30.9221,28.3068
+2016-04-25 01:52:24,3521,30.9682,28.2565
+2016-04-25 02:07:18,3520,31.0227,28.259
+2016-04-25 02:22:12,3520,31.0772,28.2565
+2016-04-25 02:37:06,3519,31.1319,28.259
+2016-04-25 02:52:00,3518,31.069,28.2565
+2016-04-25 03:06:54,3518,31.0144,28.259
+2016-04-25 03:21:48,3518,31.0144,28.3093
+2016-04-25 03:36:43,3517,30.96,28.3093
+2016-04-25 03:51:37,3517,31.0144,28.259
+2016-04-25 04:06:31,3517,31.0062,28.3093
+2016-04-25 04:21:25,3517,31.0607,28.3068
+2016-04-25 04:36:19,3516,31.0062,28.2565
+2016-04-25 04:51:13,3516,31.0062,28.3068
+2016-04-25 05:06:07,3516,31.0607,28.259
+2016-04-25 05:21:01,3516,31.1153,28.259
+2016-04-25 05:35:55,3516,31.0034,28.259
+2016-04-25 05:50:49,3516,31.0062,28.3068
+2016-04-25 06:05:43,3516,31.0062,28.3093
+2016-04-25 06:20:37,3516,31.0062,28.259
+2016-04-25 06:35:31,3517,30.9518,28.2565
+2016-04-25 06:50:26,3518,31.0062,28.3093
+2016-04-25 07:05:20,3520,30.9518,28.3093
+2016-04-25 07:20:14,3521,30.9979,28.2565
+2016-04-25 07:35:08,3523,30.9518,28.2565
+2016-04-25 07:50:02,3527,31.0524,28.259
+2016-04-25 08:04:57,3529,30.9979,28.3068
+2016-04-25 08:19:51,3531,30.8865,28.3068
+2016-04-25 08:34:45,3535,30.9435,28.2565
+2016-04-25 08:49:39,3538,31.0524,28.3068
+2016-04-25 09:04:34,3543,30.881,28.3068
+2016-04-25 09:19:28,3546,30.9952,28.2565
+2016-04-25 09:34:23,3550,31.0442,28.3068
+2016-04-25 09:49:17,3553,30.8865,28.2064
+2016-04-25 10:04:12,3556,30.8268,28.259
+2016-04-25 10:19:06,3558,30.9353,28.2565
+2016-04-25 10:34:01,3560,30.8892,28.2465
+2016-04-25 10:48:55,3563,31.0524,28.249
+2016-04-25 11:03:50,3564,30.9952,28.249
+2016-04-25 11:18:45,3565,30.8892,28.2992
+2016-04-25 11:33:39,3566,31.0524,28.249
+2016-04-25 11:48:34,3566,30.9979,28.2992
+2016-04-25 12:03:29,3566,30.9952,28.2039
+2016-04-25 12:18:23,3567,30.9435,28.249
+2016-04-25 12:33:18,3567,30.9435,28.2014
+2016-04-25 12:48:13,3569,30.9979,28.249
+2016-04-25 13:03:07,3570,31.1125,28.1488
+2016-04-25 13:18:02,3571,31.0034,28.249
+2016-04-25 13:32:57,3572,31.1153,28.249
+2016-04-25 13:47:52,3574,31.0034,28.249
+2016-04-25 14:02:46,3576,30.949,28.2415
+2016-04-25 14:17:41,3577,31.0607,28.2415
+2016-04-25 14:32:36,3578,31.0034,28.2515
+2016-04-25 14:47:31,3578,30.8947,28.3495
+2016-04-25 15:02:26,3578,30.949,28.2415
+2016-04-25 15:17:21,3578,31.1153,28.2415
+2016-04-25 15:32:16,3578,31.0062,28.1414
+2016-04-25 15:47:11,3577,31.0062,28.249
+2016-04-25 16:02:06,3578,31.0579,28.244
+2016-04-25 16:17:01,3577,31.0034,28.2415
+2016-04-25 16:31:56,3577,31.1236,28.2917
+2016-04-25 16:46:51,3577,31.069,28.2415
+2016-04-25 17:01:45,3576,31.0662,28.2415
+2016-04-25 17:16:40,3576,31.0144,28.1939
+2016-04-25 17:31:35,3575,31.0662,28.2415
+2016-04-25 17:46:30,3574,30.9057,28.2415
+2016-04-25 18:01:25,3574,30.9112,28.2415
+2016-04-25 18:16:20,3574,31.1291,28.2415
+2016-04-25 18:31:14,3573,31.0199,28.2415
+2016-04-25 18:46:09,3572,31.0772,28.2415
+2016-04-25 19:01:04,3571,31.0227,28.2415
+2016-04-25 19:15:58,3570,31.0745,28.244
+2016-04-25 19:30:53,3569,31.0772,28.244
+2016-04-25 19:45:48,3568,31.0227,28.2415
+2016-04-25 20:00:42,3566,31.0227,28.2917
+2016-04-25 20:15:37,3565,30.9139,28.2415
+2016-04-25 20:30:32,3564,31.0772,28.2415
+2016-04-25 20:45:26,3563,30.9112,28.2415
+2016-04-25 21:00:28,3561,30.9139,28.2415
+2016-04-25 21:15:23,3559,30.9112,28.2917
+2016-04-25 21:30:17,3559,31.1319,28.244
+2016-04-25 21:45:12,3558,31.0199,28.2415
+2016-04-25 22:00:06,3557,31.0745,28.244
+2016-04-25 22:15:01,3556,31.0745,28.2415
+2016-04-25 22:29:55,3555,31.0227,28.244
+2016-04-25 22:44:50,3554,30.9139,28.2892
+2016-04-25 22:59:44,3552,30.9084,28.2967
+2016-04-25 23:14:38,3551,30.9139,28.244
+2016-04-25 23:29:33,3550,31.0772,28.2415
+2016-04-25 23:44:28,3549,31.0117,28.2917
+2016-04-25 23:59:22,3548,30.9029,28.2415
+2016-04-26 00:14:17,3548,31.0662,28.2942
+2016-04-26 00:29:11,3546,30.9057,28.234
+2016-04-26 00:44:06,3545,30.9572,28.2917
+2016-04-26 00:59:00,3545,31.0144,28.2917
+2016-04-26 01:13:55,3545,30.9029,28.244
+2016-04-26 01:28:49,3545,31.0144,28.2365
+2016-04-26 01:43:43,3545,31.0144,28.2917
+2016-04-26 01:58:38,3544,31.0144,28.2365
+2016-04-26 02:13:32,3544,30.9057,28.2365
+2016-04-26 02:28:27,3543,31.0144,28.2917
+2016-04-26 02:43:21,3543,30.8432,28.2415
+2016-04-26 02:58:15,3542,30.9518,28.2365
+2016-04-26 03:13:10,3542,31.0607,28.2867
+2016-04-26 03:28:04,3542,31.0607,28.2917
+2016-04-26 03:42:59,3541,30.8947,28.234
+2016-04-26 03:57:53,3540,31.0607,28.2415
+2016-04-26 04:12:48,3540,31.0062,28.1864
+2016-04-26 04:27:42,3539,30.949,28.2867
+2016-04-26 04:42:37,3537,31.0062,28.1864
+2016-04-26 04:57:31,3537,31.0607,28.2415
+2016-04-26 05:12:26,3536,31.0034,28.2415
+2016-04-26 05:27:20,3536,31.0607,28.2365
+2016-04-26 05:42:14,3536,30.9518,28.2917
+2016-04-26 05:57:08,3535,30.949,28.2867
+2016-04-26 06:12:03,3534,31.0607,28.2365
+2016-04-26 06:26:57,3534,30.949,28.2365
+2016-04-26 06:41:51,3533,31.0062,28.2917
+2016-04-26 06:56:45,3533,31.0062,28.2465
+2016-04-26 07:11:40,3533,31.0034,28.2365
+2016-04-26 07:26:34,3533,31.1153,28.2917
+2016-04-26 07:41:28,3533,30.8974,28.2365
+2016-04-26 07:56:23,3535,30.9518,28.2365
+2016-04-26 08:11:17,3536,31.0034,28.2365
+2016-04-26 08:26:12,3537,30.8947,28.1864
+2016-04-26 08:41:06,3539,30.9518,28.1864
+2016-04-26 08:56:01,3542,30.8974,28.2867
+2016-04-26 09:10:55,3544,31.0034,28.2365
+2016-04-26 09:25:50,3546,30.8865,28.2365
+2016-04-26 09:40:44,3549,30.9435,28.2365
+2016-04-26 09:55:39,3551,30.9435,28.2867
+2016-04-26 10:10:34,3553,30.9435,28.234
+2016-04-26 10:25:29,3555,30.9979,28.2365
+2016-04-26 10:40:24,3557,30.8323,28.2315
+2016-04-26 10:55:18,3559,30.8865,28.2365
+2016-04-26 11:10:13,3559,30.8323,28.2817
+2016-04-26 11:25:08,3561,30.8892,28.1864
+2016-04-26 11:40:03,3563,31.0034,28.234
+2016-04-26 11:54:58,3564,30.835,28.1364
+2016-04-26 12:09:53,3566,30.835,28.2365
+2016-04-26 12:24:47,3566,31.0062,28.2365
+2016-04-26 12:39:42,3567,31.0579,28.234
+2016-04-26 12:54:37,3569,30.9518,28.1864
+2016-04-26 13:09:31,3570,31.0034,28.1364
+2016-04-26 13:24:26,3571,31.0034,28.2842
+2016-04-26 13:39:21,3572,31.0034,28.2365
+2016-04-26 13:54:16,3572,31.0579,28.2365
+2016-04-26 14:09:11,3572,31.0579,28.1364
+2016-04-26 14:24:06,3571,31.0579,28.1864
+2016-04-26 14:39:01,3571,31.0034,28.234
+2016-04-26 14:53:55,3570,30.949,28.2365
+2016-04-26 15:08:50,3570,31.0607,28.2365
+2016-04-26 15:23:45,3568,31.0034,28.2365
+2016-04-26 15:38:39,3567,31.0034,28.234
+2016-04-26 15:53:34,3566,31.0062,28.2365
+2016-04-26 16:08:29,3565,31.1153,28.1864
+2016-04-26 16:23:24,3564,30.949,28.1864
+2016-04-26 16:38:19,3562,30.9518,28.2867
+2016-04-26 16:53:13,3560,30.8432,28.2867
+2016-04-26 17:08:08,3559,31.0607,28.1839
+2016-04-26 17:23:14,3556,31.0062,28.2867
+2016-04-26 17:38:46,3552,30.8405,28.2365
+2016-04-26 17:53:40,3551,31.0034,28.2365
+2016-04-26 18:08:35,3549,30.8432,28.2867
+2016-04-26 18:23:29,3549,31.0607,28.2365
+2016-04-26 18:38:24,3547,31.0034,28.2365
+2016-04-26 18:53:18,3546,30.9518,28.234
+2016-04-26 19:08:13,3545,31.0607,28.2867
+2016-04-26 19:23:07,3545,31.1153,28.1364
+2016-04-26 19:38:02,3545,31.1153,28.2365
+2016-04-26 19:52:56,3544,31.1673,28.2365
+2016-04-26 20:07:51,3543,31.1153,28.234
+2016-04-26 20:22:45,3542,31.1673,28.2365
+2016-04-26 20:37:40,3541,31.0607,28.1364
+2016-04-26 20:52:34,3539,31.2221,28.2867
+2016-04-26 21:07:28,3538,31.17,28.2867
+2016-04-26 21:22:23,3537,31.2249,28.2842
+2016-04-26 21:37:17,3537,31.0579,28.2365
+2016-04-26 21:52:12,3536,31.1153,28.2867
+2016-04-26 22:07:06,3536,31.1153,28.2892
+2016-04-26 22:22:00,3536,31.1153,28.2365
+2016-04-26 22:36:55,3536,31.1153,28.2365
+2016-04-26 22:51:49,3536,31.1125,28.2365
+2016-04-26 23:06:44,3537,31.0607,28.2842
+2016-04-26 23:21:38,3537,31.0607,28.234
+2016-04-26 23:36:32,3537,31.2249,28.2365
+2016-04-26 23:51:27,3537,31.107,28.2365
+2016-04-27 00:06:21,3537,31.0524,28.2867
+2016-04-27 00:21:15,3537,31.2166,28.2842
+2016-04-27 00:36:10,3537,31.1617,28.1864
+2016-04-27 00:51:04,3537,31.2166,28.2365
+2016-04-27 01:05:58,3537,31.1617,28.1839
+2016-04-27 01:20:52,3537,31.0524,28.2365
+2016-04-27 01:35:47,3537,31.1617,28.234
+2016-04-27 01:50:41,3537,30.9897,28.1864
+2016-04-27 02:05:35,3536,31.107,28.2365
+2016-04-27 02:20:29,3536,31.1015,28.2365
+2016-04-27 02:35:23,3537,31.0987,28.2315
+2016-04-27 02:50:17,3536,31.0987,28.2867
+2016-04-27 03:05:22,3536,31.1534,28.2365
+2016-04-27 03:20:55,3536,31.1534,28.2867
+2016-04-27 03:35:49,3536,31.0987,28.2867
+2016-04-27 03:50:43,3537,31.1015,28.1864
+2016-04-27 04:05:38,3537,31.0442,28.234
+2016-04-27 04:20:32,3537,31.0469,28.2867
+2016-04-27 04:35:26,3537,31.0877,28.2365
+2016-04-27 04:50:21,3537,31.0386,28.2365
+2016-04-27 05:05:15,3538,31.0905,28.2365
+2016-04-27 05:20:10,3538,31.1999,28.2867
+2016-04-27 05:35:04,3538,31.0932,28.2365
+2016-04-27 05:49:59,3538,31.0932,28.1864
+2016-04-27 06:04:53,3538,31.0331,28.2842
+2016-04-27 06:19:47,3538,31.0905,28.234
+2016-04-27 06:34:41,3539,31.0386,28.2365
+2016-04-27 06:49:36,3539,31.0905,28.234
+2016-04-27 07:04:30,3540,31.0359,28.1864
+2016-04-27 07:19:25,3540,31.0359,28.234
+2016-04-27 07:34:19,3541,31.0905,28.234
+2016-04-27 07:49:14,3542,30.9814,28.2867
+2016-04-27 08:04:08,3543,31.1999,28.2365
+2016-04-27 08:19:10,3544,31.0359,28.2365
+2016-04-27 08:34:04,3545,31.0331,28.2365
+2016-04-27 08:48:59,3546,31.0905,28.1864
+2016-04-27 09:03:53,3549,31.0331,28.2365
+2016-04-27 09:18:48,3550,31.0331,28.2842
+2016-04-27 09:33:43,3551,31.0331,28.234
+2016-04-27 09:48:37,3552,31.2027,28.1864
+2016-04-27 10:03:31,3552,31.0877,28.2315
+2016-04-27 10:18:26,3553,31.0932,28.2265
+2016-04-27 10:33:21,3553,31.1424,28.1864
+2016-04-27 10:48:15,3554,31.1999,28.1889
+2016-04-27 11:03:10,3554,31.0386,28.229
+2016-04-27 11:18:05,3554,31.0877,28.1864
+2016-04-27 11:32:59,3555,31.0331,28.2365
+2016-04-27 11:48:01,3554,31.0877,28.1789
+2016-04-27 12:02:56,3554,31.1999,28.1364
+2016-04-27 12:17:50,3555,30.9814,28.2791
+2016-04-27 12:32:45,3555,31.0386,28.1789
+2016-04-27 12:47:40,3556,31.0359,28.1864
+2016-04-27 13:02:35,3556,31.0877,28.1764
+2016-04-27 13:17:29,3557,31.0905,28.1789
+2016-04-27 13:32:24,3557,30.9842,28.229
+2016-04-27 13:47:18,3557,31.0359,28.229
+2016-04-27 14:02:13,3558,31.0359,28.2265
+2016-04-27 14:17:08,3557,31.0877,28.2265
+2016-04-27 14:32:03,3558,31.0905,28.2791
+2016-04-27 14:46:57,3558,31.0386,28.2791
+2016-04-27 15:01:52,3558,31.0331,28.2791
+2016-04-27 15:16:47,3558,31.0331,28.2791
+2016-04-27 15:31:42,3558,31.0877,28.1789
+2016-04-27 15:46:36,3557,31.1424,28.1789
+2016-04-27 16:01:31,3557,31.0877,28.2265
+2016-04-27 16:16:26,3557,31.0331,28.2265
+2016-04-27 16:31:20,3558,30.9814,28.2239
+2016-04-27 16:46:15,3558,31.0414,28.1789
+2016-04-27 17:01:10,3557,31.0331,28.229
+2016-04-27 17:16:04,3556,31.0469,28.2791
+2016-04-27 17:30:59,3555,31.2082,28.1764
+2016-04-27 17:45:54,3554,31.0469,28.2265
+2016-04-27 18:00:48,3553,30.9353,28.2265
+2016-04-27 18:15:43,3552,30.9869,28.1764
+2016-04-27 18:30:38,3552,30.9869,28.1764
+2016-04-27 18:45:32,3551,31.0414,28.229
+2016-04-27 19:00:27,3551,30.9353,28.229
+2016-04-27 19:15:22,3550,31.1015,28.1789
+2016-04-27 19:30:16,3549,30.9924,28.229
+2016-04-27 19:45:10,3547,31.1015,28.229
+2016-04-27 20:00:05,3545,31.0987,28.1764
+2016-04-27 20:14:59,3544,31.0469,28.229
+2016-04-27 20:29:53,3543,31.3182,28.2265
+2016-04-27 20:44:48,3541,31.2631,28.229
+2016-04-27 20:59:42,3539,31.2082,28.2265
+2016-04-27 21:14:37,3537,31.2659,28.1764
+2016-04-27 21:29:31,3537,31.3182,28.229
+2016-04-27 21:44:25,3536,31.211,28.229
+2016-04-27 21:59:20,3535,31.3098,28.1264
+2016-04-27 22:14:14,3534,31.1999,28.4302
+2016-04-27 22:29:08,3533,31.1999,28.4732
+2016-04-27 22:44:03,3532,31.3098,28.3722
+2016-04-27 22:58:57,3532,31.2576,28.4302
+2016-04-27 23:13:52,3531,31.1999,28.3798
+2016-04-27 23:28:46,3530,31.2548,28.4302
+2016-04-27 23:43:40,3530,31.3126,28.3722
+2016-04-27 23:58:35,3529,31.2548,28.3294
+2016-04-28 00:13:29,3528,31.1999,28.3219
+2016-04-28 00:28:23,3528,31.2548,28.3722
+2016-04-28 00:43:17,3528,31.2548,28.4302
+2016-04-28 00:58:12,3527,31.1452,28.3194
+2016-04-28 01:13:06,3528,31.1999,28.3219
+2016-04-28 01:28:00,3527,31.2027,28.3722
+2016-04-28 01:42:54,3527,31.2027,28.3798
+2016-04-28 01:57:49,3527,31.2548,28.3219
+2016-04-28 02:12:43,3526,31.2576,28.4227
+2016-04-28 02:27:37,3526,31.2548,28.3294
+2016-04-28 02:42:31,3526,31.3126,28.4227
+2016-04-28 02:57:25,3525,31.1999,28.3798
+2016-04-28 03:12:19,3524,31.2576,28.4227
+2016-04-28 03:27:14,3524,31.2493,28.3219
+2016-04-28 03:42:08,3523,31.2493,28.4227
+2016-04-28 03:57:02,3523,31.1916,28.3219
+2016-04-28 04:11:57,3523,31.2493,28.4227
+2016-04-28 04:26:51,3522,31.2493,28.3722
+2016-04-28 04:41:45,3522,31.2493,28.3219
+2016-04-28 04:56:40,3522,31.3043,28.2716
+2016-04-28 05:11:34,3522,31.2959,28.3722
+2016-04-28 05:26:28,3521,31.1313,28.4227
+2016-04-28 05:41:22,3521,31.2959,28.3722
+2016-04-28 05:56:17,3522,31.5169,28.4201
+2016-04-28 06:11:11,3522,31.241,28.3722
+2016-04-28 06:26:05,3523,31.2959,28.4227
+2016-04-28 06:41:00,3524,31.2959,28.3219
+2016-04-28 06:55:54,3525,31.1778,28.3219
+2016-04-28 07:10:48,3527,31.175,28.3747
+2016-04-28 07:25:43,3529,31.2299,28.3722
+2016-04-28 07:40:37,3530,31.395,28.4227
+2016-04-28 07:55:31,3531,31.3399,28.3219
+2016-04-28 08:10:26,3534,31.2848,28.4201
+2016-04-28 08:25:20,3536,31.175,28.3219
+2016-04-28 08:40:14,3538,31.3399,28.3722
+2016-04-28 08:55:09,3542,31.3399,28.3143
+2016-04-28 09:10:03,3545,31.2848,28.3697
+2016-04-28 09:24:58,3548,31.2848,28.3219
+2016-04-28 09:39:53,3550,31.2848,28.3219
+2016-04-28 09:54:47,3554,31.395,28.3722
+2016-04-28 10:09:42,3556,31.2848,28.3194
+2016-04-28 10:24:37,3558,31.395,28.3622
+2016-04-28 10:39:32,3559,31.2299,28.3622
+2016-04-28 10:54:26,3560,31.395,28.4656
+2016-04-28 11:09:21,3561,31.2848,28.3143
+2016-04-28 11:24:16,3561,31.2848,28.3118
+2016-04-28 11:39:10,3563,31.2848,28.2641
+2016-04-28 11:54:05,3563,31.2382,28.3143
+2016-04-28 12:09:00,3563,31.2932,28.3143
+2016-04-28 12:23:55,3563,31.4062,28.3118
+2016-04-28 12:38:49,3564,31.4587,28.3143
+2016-04-28 12:53:44,3564,31.2382,28.4126
+2016-04-28 13:08:39,3566,31.241,28.2641
+2016-04-28 13:23:34,3569,31.2932,28.3143
+2016-04-28 13:38:29,3571,31.1916,28.3143
+2016-04-28 13:53:24,3573,31.4698,28.3622
+2016-04-28 14:08:19,3573,31.1916,28.4151
+2016-04-28 14:23:14,3573,31.1916,28.3647
+2016-04-28 14:38:09,3573,31.1916,28.3143
+2016-04-28 14:53:04,3573,31.2027,28.2641
+2016-04-28 15:07:59,3573,31.3098,28.3647
+2016-04-28 15:23:01,3573,31.3621,28.3622
+2016-04-28 15:37:56,3573,31.3649,28.2616
+2016-04-28 15:52:50,3573,31.3621,28.3143
+2016-04-28 16:07:45,3571,31.4173,28.4126
+2016-04-28 16:22:40,3569,31.4201,28.3647
+2016-04-28 16:37:35,3568,31.4201,28.3143
+2016-04-28 16:52:30,3566,31.3621,28.3118
+2016-04-28 17:07:24,3566,31.307,28.2641
+2016-04-28 17:22:19,3565,31.2521,28.3143
+2016-04-28 17:37:14,3565,31.4754,28.3143
+2016-04-28 17:52:09,3564,31.4201,28.3143
+2016-04-28 18:07:03,3563,31.3621,28.3143
+2016-04-28 18:21:58,3562,31.3733,28.3647
+2016-04-28 18:36:53,3561,31.3182,28.3143
+2016-04-28 18:51:48,3560,31.4285,28.3143
+2016-04-28 19:06:42,3559,31.3733,28.2641
+2016-04-28 19:21:36,3558,31.4285,28.3118
+2016-04-28 19:36:38,3557,31.3761,28.3143
+2016-04-28 19:51:33,3556,31.3705,28.2616
+2016-04-28 20:06:27,3555,31.2631,28.4151
+2016-04-28 20:21:22,3552,31.4257,28.3143
+2016-04-28 20:36:16,3551,31.5392,28.3622
+2016-04-28 20:51:11,3549,31.2659,28.4126
+2016-04-28 21:06:05,3547,31.3182,28.4126
+2016-04-28 21:21:00,3545,31.3182,28.3143
+2016-04-28 21:35:54,3544,31.3182,28.3143
+2016-04-28 21:50:49,3542,31.4313,28.3143
+2016-04-28 22:05:43,3540,31.3209,28.3143
+2016-04-28 22:20:38,3539,31.3209,28.3143
+2016-04-28 22:35:32,3537,31.3209,28.4151
+2016-04-28 22:50:33,3536,31.4201,28.4151
+2016-04-28 23:05:28,3535,31.3761,28.4656
+2016-04-28 23:20:22,3534,31.4201,28.3647
+2016-04-28 23:35:16,3531,31.4782,28.3143
+2016-04-28 23:50:11,3531,31.3098,28.3647
+2016-04-29 00:05:05,3530,31.4782,28.3647
+2016-04-29 00:20:00,3530,31.4201,28.3647
+2016-04-29 00:34:54,3529,31.3677,28.4151
+2016-04-29 00:49:48,3528,31.4173,28.4656
+2016-04-29 01:04:42,3527,31.4173,28.3143
+2016-04-29 01:19:36,3526,31.3649,28.3143
+2016-04-29 01:34:31,3525,31.3621,28.4151
+2016-04-29 01:49:25,3524,31.3649,28.3143
+2016-04-29 02:04:19,3523,31.4726,28.4656
+2016-04-29 02:19:14,3523,31.4782,28.3672
+2016-04-29 02:34:08,3523,31.4229,28.5162
+2016-04-29 02:49:03,3523,31.4229,28.5137
+2016-04-29 03:03:57,3522,31.4201,28.4656
+2016-04-29 03:18:51,3522,31.4201,28.5162
+2016-04-29 03:33:46,3522,31.4118,28.4151
+2016-04-29 03:48:40,3521,31.4145,28.4656
+2016-04-29 04:03:34,3521,31.4698,28.4656
+2016-04-29 04:18:29,3521,31.3594,28.5162
+2016-04-29 04:33:23,3521,31.3566,28.3143
+2016-04-29 04:48:17,3521,31.4698,28.4656
+2016-04-29 05:03:11,3521,31.3043,28.4656
+2016-04-29 05:18:06,3521,31.4062,28.3143
+2016-04-29 05:33:00,3520,31.351,28.4151
+2016-04-29 05:48:01,3520,31.5169,28.3143
+2016-04-29 06:02:56,3520,31.351,28.3647
+2016-04-29 06:17:50,3521,31.4062,28.4656
+2016-04-29 06:32:44,3521,31.395,28.3118
+2016-04-29 06:47:38,3522,31.395,28.4151
+2016-04-29 07:02:40,3524,31.395,28.4656
+2016-04-29 07:17:34,3526,31.3978,28.4656
+2016-04-29 07:32:28,3529,31.395,28.3143
+2016-04-29 07:47:23,3531,31.395,28.3143
+2016-04-29 08:02:17,3535,31.4503,28.3143
+2016-04-29 08:17:12,3539,31.395,28.3647
+2016-04-29 08:32:06,3544,31.2848,28.2616
+2016-04-29 08:47:01,3547,31.395,28.3143
+2016-04-29 09:01:56,3552,31.2848,28.4151
+2016-04-29 09:16:51,3556,31.2848,28.3647
+2016-04-29 09:31:46,3559,31.2848,28.2641
+2016-04-29 09:46:40,3563,31.2848,28.2616
+2016-04-29 10:01:35,3566,31.4503,28.3143
+2016-04-29 10:16:30,3571,31.3399,28.3118
+2016-04-29 10:31:25,3574,31.2848,28.2641
+2016-04-29 10:46:20,3575,31.395,28.3647
+2016-04-29 11:01:15,3577,31.2848,28.2641
+2016-04-29 11:16:10,3577,31.395,28.4151
+2016-04-29 11:31:05,3577,31.4034,28.2641
+2016-04-29 11:46:00,3577,31.4587,28.2641
+2016-04-29 12:00:55,3577,31.2932,28.2616
+2016-04-29 12:15:57,3577,31.1833,28.3143
+2016-04-29 12:30:52,3577,31.4034,28.2616
+2016-04-29 12:45:47,3577,31.2465,28.3143
+2016-04-29 13:00:42,3577,31.2465,28.3143
+2016-04-29 13:15:38,3578,31.1916,28.3143
+2016-04-29 13:30:33,3578,31.1916,28.3143
+2016-04-29 13:45:28,3578,31.2465,28.3118
+2016-04-29 14:00:23,3579,31.2521,28.3118
+2016-04-29 14:15:18,3580,31.1424,28.2641
+2016-04-29 14:30:13,3581,31.1972,28.3118
+2016-04-29 14:45:09,3582,31.1972,28.3143
+2016-04-29 15:00:04,3583,31.1972,28.4126
+2016-04-29 15:14:59,3585,31.2027,28.3118
+2016-04-29 15:29:55,3586,31.3098,28.2616
+2016-04-29 15:44:50,3587,31.1999,28.2641
+2016-04-29 15:59:45,3587,31.2521,28.2616
+2016-04-29 16:14:40,3587,31.2521,28.2641
+2016-04-29 16:29:36,3588,31.2548,28.3647
+2016-04-29 16:44:31,3587,31.3154,28.3118
+2016-04-29 16:59:26,3587,31.2631,28.2641
+2016-04-29 17:14:21,3587,31.3209,28.2641
+2016-04-29 17:29:17,3587,31.2055,28.3118
+2016-04-29 17:44:12,3587,31.3237,28.3143
+2016-04-29 17:59:07,3587,31.3265,28.3118
+2016-04-29 18:14:02,3586,31.2687,28.3118
+2016-04-29 18:28:57,3586,31.3816,28.2641
+2016-04-29 18:43:53,3585,31.277,28.3118
+2016-04-29 18:58:48,3585,31.3348,28.2616
+2016-04-29 19:13:43,3584,31.2798,28.3143
+2016-04-29 19:28:38,3582,31.277,28.3118
+2016-04-29 19:43:40,3581,31.3348,28.3118
+2016-04-29 19:58:35,3580,31.2221,28.3143
+2016-04-29 20:13:30,3578,31.2798,28.2641
+2016-04-29 20:28:25,3576,31.4424,28.3622
+2016-04-29 20:43:20,3574,31.2798,28.3143
+2016-04-29 20:58:15,3573,31.2798,28.3143
+2016-04-29 21:13:10,3572,31.3348,28.3143
+2016-04-29 21:28:05,3570,31.3348,28.3143
+2016-04-29 21:43:00,3569,31.3321,28.4126
+2016-04-29 21:57:55,3567,31.2249,28.3143
+2016-04-29 22:12:50,3566,31.2221,28.2641
+2016-04-29 22:27:59,3566,31.277,28.3143
+2016-04-29 22:42:54,3564,31.2221,28.3143
+2016-04-29 22:57:49,3564,31.1153,28.3118
+2016-04-29 23:12:44,3564,31.39,28.3143
+2016-04-29 23:27:39,3563,31.277,28.3647
+2016-04-29 23:42:34,3562,31.277,28.3622
+2016-04-29 23:57:28,3561,31.2249,28.3118
+2016-04-30 00:12:23,3560,31.277,28.3647
+2016-04-30 00:27:18,3559,31.277,28.3143
+2016-04-30 00:42:13,3559,31.2221,28.3118
+2016-04-30 00:57:08,3559,31.277,28.3118
+2016-04-30 01:12:03,3558,31.2249,28.3647
+2016-04-30 01:26:57,3557,31.3321,28.3647
+2016-04-30 01:41:52,3557,31.17,28.3118
+2016-04-30 01:56:47,3556,31.2798,28.4126
+2016-04-30 02:11:42,3556,31.17,28.3143
+2016-04-30 02:26:36,3556,31.277,28.3118
+2016-04-30 02:41:31,3555,31.2249,28.3143
+2016-04-30 02:56:26,3554,31.1125,28.3118
+2016-04-30 03:11:20,3554,31.2221,28.3143
+2016-04-30 03:26:15,3553,31.277,28.3647
+2016-04-30 03:41:10,3552,31.1153,28.3143
+2016-04-30 03:56:05,3552,31.277,28.3118
+2016-04-30 04:11:00,3551,31.2249,28.3168
+2016-04-30 04:25:55,3550,31.2798,28.4126
+2016-04-30 04:40:49,3550,31.2221,28.3143
+2016-04-30 04:55:44,3550,31.3348,28.3143
+2016-04-30 05:10:39,3550,31.2221,28.3647
+2016-04-30 05:25:34,3550,31.2249,28.2641
+2016-04-30 05:40:28,3550,31.1125,28.3143
+2016-04-30 05:55:23,3550,31.1673,28.3622
+2016-04-30 06:10:18,3551,31.2249,28.3143
+2016-04-30 06:25:12,3551,31.1153,28.3143
+2016-04-30 06:40:07,3552,31.2798,28.4151
+2016-04-30 06:55:02,3553,31.2249,28.3143
+2016-04-30 07:09:56,3553,31.3348,28.3622
+2016-04-30 07:24:51,3555,31.2798,28.2641
+2016-04-30 07:39:46,3556,31.2221,28.4151
+2016-04-30 07:54:40,3557,31.2221,28.3143
+2016-04-30 08:09:35,3558,31.2221,28.3143
+2016-04-30 08:24:30,3559,31.2249,28.3143
+2016-04-30 08:39:25,3562,31.3321,28.3647
+2016-04-30 08:54:20,3564,31.1153,28.3143
+2016-04-30 09:09:15,3567,31.3348,28.2616
+2016-04-30 09:24:10,3570,31.3321,28.3118
+2016-04-30 09:39:05,3573,31.277,28.3143
+2016-04-30 09:54:00,3576,31.3321,28.3118
+2016-04-30 10:08:55,3579,31.277,28.2641
+2016-04-30 10:23:50,3583,31.3321,28.2616
+2016-04-30 10:38:53,3586,31.277,28.2616
+2016-04-30 10:53:48,3588,31.277,28.3118
+2016-04-30 11:08:44,3592,31.2221,28.3118
+2016-04-30 11:23:39,3594,31.2249,28.2616
+2016-04-30 11:38:35,3595,31.277,28.2641
+2016-04-30 11:53:30,3596,31.4424,28.3118
+2016-04-30 12:08:26,3597,31.2853,28.3143
+2016-04-30 12:23:21,3598,31.3404,28.3118
+2016-04-30 12:38:17,3598,31.2881,28.2616
+2016-04-30 12:53:12,3598,31.3487,28.3143
+2016-04-30 13:08:08,3598,31.346,28.3143
+2016-04-30 13:23:03,3599,31.346,28.2641
+2016-04-30 13:37:59,3599,31.3571,28.3118
+2016-04-30 13:52:54,3599,31.3571,28.2641
+2016-04-30 14:07:50,3599,31.3543,28.3622
+2016-04-30 14:22:46,3599,31.302,28.3143
+2016-04-30 14:37:41,3599,31.3571,28.3143
+2016-04-30 14:52:36,3599,31.3103,28.3118
+2016-04-30 15:07:32,3598,31.3076,28.2616
+2016-04-30 15:22:27,3598,31.3076,28.3118
+2016-04-30 15:37:23,3598,31.3103,28.3143
+2016-04-30 15:52:18,3598,31.3159,28.3622
+2016-04-30 16:07:13,3597,31.371,28.3143
+2016-04-30 16:22:09,3595,31.371,28.3118
+2016-04-30 16:37:04,3595,31.3849,28.2616
+2016-04-30 16:51:59,3594,31.3821,28.2616
+2016-04-30 17:06:55,3593,31.272,28.3143
+2016-04-30 17:21:50,3593,31.4374,28.3219
+2016-04-30 17:36:45,3592,31.3821,28.3219
+2016-04-30 17:51:41,3592,31.4374,28.3118
+2016-04-30 18:06:36,3591,31.3821,28.2691
+2016-04-30 18:21:31,3591,31.327,28.3219
+2016-04-30 18:36:27,3589,31.327,28.3697
+2016-04-30 18:51:22,3588,31.3326,28.3194
+2016-04-30 19:06:17,3588,31.3905,28.3194
+2016-04-30 19:21:12,3586,31.5011,28.2716
+2016-04-30 19:36:07,3585,31.3905,28.3118
+2016-04-30 19:51:02,3583,31.3877,28.3219
+2016-04-30 20:05:57,3581,31.2803,28.3697
+2016-04-30 20:20:52,3579,31.3905,28.2691
+2016-04-30 20:35:47,3577,31.3905,28.4227
+2016-04-30 20:50:42,3576,31.5011,28.3219
+2016-04-30 21:05:37,3574,31.3354,28.3219
+2016-04-30 21:20:32,3573,31.3905,28.4227
+2016-04-30 21:35:27,3571,31.3354,28.3798
+2016-04-30 21:50:22,3570,31.2803,28.3269
+2016-04-30 22:05:17,3567,31.3354,28.3294
+2016-04-30 22:20:11,3567,31.3877,28.3294
+2016-04-30 22:35:06,3566,31.3905,28.4302
+2016-04-30 22:50:01,3565,31.2803,28.3294
+2016-04-30 23:04:56,3564,31.3877,28.3798
+2016-04-30 23:19:51,3562,31.3354,28.4302
+2016-04-30 23:34:45,3562,31.3905,28.3773
+2016-04-30 23:49:40,3561,31.2803,28.3294
+2016-05-01 00:04:35,3560,31.3354,28.3798
+2016-05-01 00:19:30,3559,31.2282,28.337
+2016-05-01 00:34:25,3559,31.2254,28.4302
+2016-05-01 00:49:20,3559,31.1706,28.337
+2016-05-01 01:04:17,3559,31.3354,28.3294
+2016-05-01 01:19:12,3559,31.3326,28.3294
+2016-05-01 01:34:06,3559,31.2226,28.4353
+2016-05-01 01:49:01,3558,31.3877,28.337
+2016-05-01 02:03:56,3557,31.3821,28.337
+2016-05-01 02:18:51,3557,31.327,28.3873
+2016-05-01 02:33:45,3556,31.2748,28.3823
+2016-05-01 02:48:40,3556,31.1595,28.4378
+2016-05-01 03:03:35,3556,31.3794,28.3848
+2016-05-01 03:18:29,3555,31.1076,28.3873
+2016-05-01 03:33:31,3554,31.272,28.3873
+2016-05-01 03:48:26,3553,31.2171,28.4353
+2016-05-01 04:03:21,3553,31.1651,28.337
+2016-05-01 04:18:15,3552,31.2776,28.2867
+2016-05-01 04:33:10,3552,31.272,28.337
+2016-05-01 04:48:04,3552,31.3821,28.337
+2016-05-01 05:02:59,3552,31.3821,28.3873
+2016-05-01 05:17:53,3551,31.272,28.3319
+2016-05-01 05:32:48,3551,31.3187,28.2892
+2016-05-01 05:47:42,3551,31.2637,28.337
+2016-05-01 06:02:37,3552,31.2637,28.337
+2016-05-01 06:17:32,3551,31.3187,28.3848
+2016-05-01 06:32:26,3551,31.154,28.4884
+2016-05-01 06:47:21,3551,31.3187,28.3848
+2016-05-01 07:02:15,3552,31.2637,28.2842
+2016-05-01 07:17:10,3552,31.3159,28.337
+2016-05-01 07:32:04,3553,31.2005,28.4378
+2016-05-01 07:46:59,3555,31.2554,28.3873
+2016-05-01 08:01:54,3556,31.2005,28.3873
+2016-05-01 08:16:49,3556,31.3654,28.3873
+2016-05-01 08:31:44,3558,31.2554,28.3344
+2016-05-01 08:46:39,3560,31.1457,28.337
+2016-05-01 09:01:34,3564,31.3103,28.337
+2016-05-01 09:16:29,3567,31.2526,28.3344
+2016-05-01 09:31:24,3571,31.3103,28.3344
+2016-05-01 09:46:19,3573,31.3627,28.337
+2016-05-01 10:01:14,3576,31.3654,28.3823
+2016-05-01 10:16:09,3578,31.3654,28.337
+2016-05-01 10:31:04,3581,31.3103,28.337
+2016-05-01 10:45:59,3583,31.1457,28.3344
+2016-05-01 11:00:55,3585,31.2637,28.3344
+2016-05-01 11:15:50,3587,31.3738,28.337
+2016-05-01 11:30:46,3588,31.3159,28.2842
+2016-05-01 11:45:41,3590,31.3738,28.337
+2016-05-01 12:00:36,3591,31.2609,28.3848
+2016-05-01 12:15:32,3592,31.272,28.3344
+2016-05-01 12:30:27,3593,31.1623,28.337
+2016-05-01 12:45:23,3594,31.2171,28.3873
+2016-05-01 13:00:18,3595,31.1623,28.2867
+2016-05-01 13:15:14,3597,31.1623,28.3344
+2016-05-01 13:30:09,3598,31.1623,28.337
+2016-05-01 13:45:05,3598,31.2748,28.2867
+2016-05-01 14:00:00,3598,31.1678,28.3344
+2016-05-01 14:14:56,3598,31.2226,28.3344
+2016-05-01 14:29:52,3598,31.2803,28.3344
+2016-05-01 14:44:47,3599,31.1678,28.3344
+2016-05-01 14:59:43,3599,31.2776,28.3344
+2016-05-01 15:14:39,3598,31.2859,28.337
+2016-05-01 15:29:34,3599,31.1789,28.3344
+2016-05-01 15:44:30,3599,31.2942,28.337
+2016-05-01 15:59:26,3599,31.2942,28.3344
+2016-05-01 16:14:21,3599,31.2393,28.2867
+2016-05-01 16:29:24,3599,31.242,28.3344
+2016-05-01 16:44:20,3599,31.1844,28.3344
+2016-05-01 16:59:15,3599,31.3026,28.3344
+2016-05-01 17:14:10,3599,31.1927,28.3344
+2016-05-01 17:29:06,3599,31.3053,28.234
+2016-05-01 17:44:01,3599,31.1927,28.337
+2016-05-01 17:58:57,3599,31.2504,28.234
+2016-05-01 18:13:52,3599,31.1955,28.1339
+2016-05-01 18:28:48,3598,31.3026,28.2365
+2016-05-01 18:43:44,3598,31.3053,28.1814
+2016-05-01 18:58:39,3598,31.2559,28.1364
+2016-05-01 19:13:35,3598,31.3137,28.234
+2016-05-01 19:28:30,3597,31.3109,28.2365
+2016-05-01 19:43:25,3595,31.2038,28.2365
+2016-05-01 19:58:21,3594,31.201,28.1364
+2016-05-01 20:13:16,3593,31.3164,28.1914
+2016-05-01 20:28:11,3591,31.3192,28.1364
+2016-05-01 20:43:06,3588,31.1573,28.1839
+2016-05-01 20:58:02,3587,31.2093,28.2365
+2016-05-01 21:12:57,3586,31.2615,28.1364
+2016-05-01 21:27:52,3585,31.2121,28.2867
+2016-05-01 21:42:48,3584,31.2642,28.1914
+2016-05-01 21:57:43,3582,31.2093,28.234
+2016-05-01 22:12:38,3581,31.3743,28.1914
+2016-05-01 22:27:33,3579,31.2121,28.2917
+2016-05-01 22:42:28,3578,31.1573,28.2415
+2016-05-01 22:57:23,3577,31.3192,28.2415
+2016-05-01 23:12:18,3576,31.1573,28.2415
+2016-05-01 23:27:13,3574,31.3192,28.1914
+2016-05-01 23:42:08,3574,31.3771,28.2415
+2016-05-01 23:57:03,3573,31.2093,28.2892
+2016-05-02 00:11:58,3572,31.366,28.2415
+2016-05-02 00:26:52,3571,31.3109,28.2415
+2016-05-02 00:41:47,3570,31.201,28.2515
+2016-05-02 00:56:42,3569,31.1463,28.2515
+2016-05-02 01:11:37,3568,31.3109,28.249
+2016-05-02 01:26:31,3567,31.0944,28.2415
+2016-05-02 01:41:26,3566,31.3137,28.249
+2016-05-02 01:56:21,3565,31.1463,28.1989
+2016-05-02 02:11:15,3564,31.0944,28.249
+2016-05-02 02:26:10,3564,31.1463,28.249
+2016-05-02 02:41:05,3563,31.3137,28.249
+2016-05-02 02:56:00,3561,31.3109,28.2565
+2016-05-02 03:10:55,3560,31.3026,28.2515
+2016-05-02 03:25:49,3559,31.1407,28.2992
+2016-05-02 03:40:44,3559,31.2504,28.2992
+2016-05-02 03:55:39,3559,31.138,28.2515
+2016-05-02 04:10:33,3558,31.3026,28.259
+2016-05-02 04:25:28,3557,31.3053,28.259
+2016-05-02 04:40:22,3556,31.0315,28.3068
+2016-05-02 04:55:17,3556,31.1955,28.2089
+2016-05-02 05:10:12,3554,31.0315,28.3093
+2016-05-02 05:25:06,3554,31.1955,28.3017
+2016-05-02 05:40:01,3552,31.2504,28.3068
+2016-05-02 05:54:55,3552,31.0833,28.2064
+2016-05-02 06:09:50,3552,31.0778,28.3068
+2016-05-02 06:24:44,3552,31.2504,28.2565
+2016-05-02 06:39:39,3552,31.1324,28.3093
+2016-05-02 06:54:34,3552,31.242,28.254
+2016-05-02 07:09:28,3552,31.0778,28.2565
+2016-05-02 07:24:23,3554,31.0778,28.2565
+2016-05-02 07:39:18,3555,31.1844,28.2565
+2016-05-02 07:54:12,3557,31.1872,28.2565
+2016-05-02 08:09:07,3559,31.1872,28.2565
+2016-05-02 08:24:02,3562,31.297,28.259
+2016-05-02 08:38:57,3566,31.1324,28.2565
+2016-05-02 08:53:52,3571,31.075,28.2565
+2016-05-02 09:08:55,3574,31.1844,28.2064
+2016-05-02 09:23:50,3578,31.2914,28.2565
+2016-05-02 09:38:45,3581,31.1872,28.2064
+2016-05-02 09:53:40,3585,31.3521,28.1563
+2016-05-02 10:08:36,3588,31.2942,28.2565
+2016-05-02 10:23:31,3591,31.1872,28.2565
+2016-05-02 10:38:27,3593,31.2942,28.2565
+2016-05-02 10:53:22,3595,31.2942,28.2565
+2016-05-02 11:08:18,3596,31.297,28.2565
+2016-05-02 11:23:13,3597,31.2393,28.2565
+2016-05-02 11:38:09,3598,31.2942,28.2565
+2016-05-02 11:53:05,3598,31.1955,28.2565
+2016-05-02 12:08:00,3598,31.3026,28.2565
+2016-05-02 12:22:56,3598,31.3026,28.2565
+2016-05-02 12:37:51,3598,31.2476,28.2565
+2016-05-02 12:52:47,3599,31.2476,28.2565
+2016-05-02 13:07:43,3599,31.3026,28.1563
+2016-05-02 13:22:38,3599,31.3109,28.2064
+2016-05-02 13:37:34,3599,31.3137,28.1563
+2016-05-02 13:52:30,3599,31.2038,28.2064
+2016-05-02 14:07:25,3599,31.2615,28.2064
+2016-05-02 14:22:20,3599,31.3192,28.2565
+2016-05-02 14:37:24,3599,31.2642,28.2565
+2016-05-02 14:52:19,3599,31.322,28.2565
+2016-05-02 15:07:15,3599,31.1546,28.2565
+2016-05-02 15:22:11,3599,31.1082,28.2064
+2016-05-02 15:37:07,3599,31.1054,28.2565
+2016-05-02 15:52:03,3599,31.1082,28.2064
+2016-05-02 16:06:58,3599,31.2177,28.2565
+2016-05-02 16:21:54,3599,31.1629,28.2064
+2016-05-02 16:36:50,3599,31.1137,28.2064
+2016-05-02 16:51:46,3599,31.1712,28.2565
+2016-05-02 17:06:41,3599,31.1712,28.2064
+2016-05-02 17:21:37,3599,31.1795,28.2064
+2016-05-02 17:36:32,3599,31.1767,28.2565
+2016-05-02 17:51:28,3599,31.1767,28.2565
+2016-05-02 18:06:24,3599,31.122,28.2064
+2016-05-02 18:21:19,3599,31.2343,28.259
+2016-05-02 18:36:15,3599,31.1795,28.1538
+2016-05-02 18:51:11,3599,31.2343,28.2565
+2016-05-02 19:06:06,3599,31.1878,28.2064
+2016-05-02 19:21:02,3599,31.1878,28.2064
+2016-05-02 19:35:57,3598,31.1878,28.2064
+2016-05-02 19:50:53,3598,31.1878,28.2114
+2016-05-02 20:05:48,3598,31.1878,28.2164
+2016-05-02 20:20:43,3597,31.1303,28.2565
+2016-05-02 20:35:39,3595,31.2426,28.2164
+2016-05-02 20:50:34,3594,31.1961,28.2565
+2016-05-02 21:05:29,3593,31.1385,28.2641
+2016-05-02 21:20:25,3592,31.2426,28.2641
+2016-05-02 21:35:20,3590,31.1933,28.2641
+2016-05-02 21:50:15,3588,31.2509,28.2641
+2016-05-02 22:05:10,3587,31.1413,28.2641
+2016-05-02 22:20:05,3586,31.1413,28.2164
+2016-05-02 22:35:00,3585,31.0839,28.2641
+2016-05-02 22:49:56,3585,31.1413,28.2139
+2016-05-02 23:04:51,3583,31.1961,28.2641
+2016-05-02 23:19:46,3582,31.1878,28.2641
+2016-05-02 23:34:41,3581,31.1878,28.2139
+2016-05-02 23:49:36,3580,31.2426,28.2666
+2016-05-03 01:04:11,3576,31.133000000000006,28.2641
+2016-05-03 01:19:06,3575,31.0238,28.2641
+2016-05-03 01:34:00,3574,31.1878,28.2641
+2016-05-03 01:48:55,3574,31.0811,28.2641
+2016-05-03 02:03:50,3574,31.1247,28.3143
+2016-05-03 02:18:45,3573,31.1795,28.2641
+2016-05-03 02:33:40,3573,31.2343,28.2641
+2016-05-03 02:48:35,3572,31.1795,28.2716
+2016-05-03 03:03:30,3572,31.1767,28.2716
+2016-05-03 03:18:24,3572,31.1247,28.2741
+2016-05-03 03:33:19,3571,31.2343,28.2214
+2016-05-03 03:48:14,3571,31.1795,28.1713
+2016-05-03 04:03:09,3571,31.2343,28.1713
+2016-05-03 04:18:04,3570,31.1247,28.2641
+2016-05-03 04:32:59,3569,31.1795,28.2691
+2016-05-03 04:47:54,3568,31.1795,28.2691
+2016-05-03 05:02:49,3568,31.1247,28.3244
+2016-05-03 05:17:43,3567,31.0701,28.3218
+2016-05-03 05:32:38,3566,31.1795,28.3244
+2016-05-03 05:47:33,3566,31.1795,28.2716
+2016-05-03 06:02:28,3566,31.0156,28.3244
+2016-05-03 06:17:23,3566,31.0156,28.2716
+2016-05-03 06:32:18,3566,31.226,28.2716
+2016-05-03 06:47:13,3566,31.0073,28.2741
+2016-05-03 07:02:07,3567,31.226,28.2289
+2016-05-03 07:17:03,3569,31.1164,28.2716
+2016-05-03 07:31:57,3571,31.1712,28.2766
+2016-05-03 07:46:52,3573,31.1164,28.2766
+2016-05-03 08:01:48,3575,31.1164,28.2264
+2016-05-03 08:16:43,3578,31.1712,28.2716
+2016-05-03 08:31:38,3581,31.226,28.2289
+2016-05-03 08:46:33,3585,31.1712,28.2264
+2016-05-03 09:01:29,3587,31.226,28.2289
+2016-05-03 09:16:24,3589,31.1164,28.2766
+2016-05-03 09:31:27,3592,31.1137,28.2791
+2016-05-03 09:46:22,3593,31.1712,28.2791
+2016-05-03 10:01:18,3595,31.0618,28.2289
+2016-05-03 10:16:13,3597,31.226,28.2289
+2016-05-03 10:31:09,3598,31.226,28.2766
+2016-05-03 10:46:04,3598,31.1247,28.2264
+2016-05-03 11:01:00,3598,31.1247,28.2791
+2016-05-03 11:15:55,3598,31.1247,28.2264
+2016-05-03 11:30:51,3598,31.1247,28.1763
+2016-05-03 11:46:01,3598,31.1795,28.2264
+2016-05-03 12:00:56,3598,31.1795,28.2766
+2016-05-03 12:15:52,3598,31.1247,28.2791
+2016-05-03 12:30:48,3599,31.133000000000006,28.2766
+2016-05-03 12:45:43,3598,31.1878,28.2264
+2016-05-03 13:00:46,3598,31.1878,28.2766
+2016-05-03 13:15:42,3598,31.133000000000006,28.2766
+2016-05-03 13:30:37,3598,31.2509,28.2791
+2016-05-03 13:45:32,3596,31.1933,28.2766
+2016-05-03 14:00:28,3595,31.1933,28.2791
+2016-05-03 14:15:23,3593,31.1413,28.2791
+2016-05-03 14:30:18,3592,31.2509,28.2791
+2016-05-03 14:45:13,3589,31.1413,28.2791
+2016-05-03 15:00:09,3588,31.1385,28.2766
+2016-05-03 15:15:04,3587,31.1933,28.2264
+2016-05-03 15:29:59,3585,31.1413,28.2766
+2016-05-03 15:44:54,3585,31.1413,28.2791
+2016-05-03 15:59:49,3583,31.0867,28.2791
+2016-05-03 16:14:44,3583,31.1961,28.2791
+2016-05-03 16:29:39,3582,31.2509,28.2791
+2016-05-03 16:44:34,3582,31.1961,28.2791
+2016-05-03 16:59:29,3581,31.1961,28.2791
+2016-05-03 17:14:24,3581,31.1413,28.2791
+2016-05-03 17:29:19,3581,31.0321,28.2816
+2016-05-03 17:44:14,3580,31.1933,28.2791
+2016-05-03 17:59:09,3579,31.0867,28.2791
+2016-05-03 18:14:04,3578,31.1385,28.2766
+2016-05-03 18:28:59,3578,31.1961,28.2766
+2016-05-03 18:43:54,3578,31.1413,28.2766
+2016-05-03 18:58:49,3578,31.1385,28.2791
+2016-05-03 19:13:44,3577,31.1385,28.2766
+2016-05-03 19:28:39,3577,31.0321,28.2766
+2016-05-03 19:43:34,3577,31.1933,28.2791
+2016-05-03 19:58:29,3576,31.0321,28.3294
+2016-05-03 20:13:24,3575,31.1413,28.2866
+2016-05-03 20:28:19,3574,31.2482,28.2791
+2016-05-03 20:43:14,3574,31.1961,28.3294
+2016-05-03 20:58:10,3574,31.1961,28.2766
+2016-05-03 21:13:05,3573,31.1413,28.2866
+2016-05-03 21:28:00,3573,31.0321,28.2866
+2016-05-03 21:42:55,3572,31.0867,28.2841
+2016-05-03 21:57:50,3571,31.1413,28.3369
+2016-05-03 22:12:45,3571,31.1961,28.3344
+2016-05-03 22:27:40,3571,31.1413,28.3369
+2016-05-03 22:42:35,3570,31.1961,28.2866
+2016-05-03 22:57:30,3570,31.1413,28.2841
+2016-05-03 23:12:25,3569,31.1961,28.2866
+2016-05-03 23:27:20,3568,31.133000000000006,28.2339
+2016-05-03 23:42:14,3567,31.0238,28.3369
+2016-05-03 23:57:09,3566,31.0238,28.3344
+2016-05-04 00:12:04,3565,31.0784,28.3344
+2016-05-04 00:26:59,3564,31.133000000000006,28.3369
+2016-05-04 00:41:53,3563,31.1905,28.2364
+2016-05-04 00:56:47,3561,31.1878,28.3369
+2016-05-04 01:11:50,3560,31.1878,28.3344
+2016-05-04 01:26:45,3559,31.0266,28.3369
+2016-05-04 01:41:39,3558,30.9151,28.2866
+2016-05-04 01:56:34,3557,30.9612,28.2866
+2016-05-04 02:11:29,3556,30.9068,28.2866
+2016-05-04 02:26:23,3555,31.0183,28.3369
+2016-05-04 02:41:18,3554,31.1795,28.2866
+2016-05-04 02:56:12,3553,31.1247,28.3369
+2016-05-04 03:11:06,3552,31.0183,28.2841
+2016-05-04 03:26:09,3551,31.1247,28.3344
+2016-05-04 03:41:03,3550,31.1247,28.2866
+2016-05-04 03:55:58,3549,31.0618,28.3369
+2016-05-04 04:10:52,3549,30.9612,28.3369
+2016-05-04 04:25:46,3548,31.0101,28.3369
+2016-05-04 04:40:41,3547,31.0073,28.2866
+2016-05-04 04:55:35,3546,31.0618,28.2866
+2016-05-04 05:10:30,3546,31.1629,28.2866
+2016-05-04 05:25:24,3545,30.9447,28.3369
+2016-05-04 05:40:18,3545,30.9991,28.2866
+2016-05-04 05:55:13,3545,31.0536,28.2866
+2016-05-04 06:10:07,3545,30.9991,28.2866
+2016-05-04 06:25:01,3546,31.1082,28.3369
+2016-05-04 06:39:56,3546,31.1082,28.3369
+2016-05-04 06:54:50,3547,30.9991,28.2841
+2016-05-04 07:09:45,3549,31.1656,28.3369
+2016-05-04 07:24:39,3550,30.9991,28.2866
+2016-05-04 07:39:34,3551,31.1082,28.3344
+2016-05-04 07:54:29,3553,31.0971,28.3369
+2016-05-04 08:09:23,3554,30.9991,28.2866
+2016-05-04 08:24:18,3556,31.1026,28.3369
+2016-05-04 08:39:13,3557,30.9936,28.2866
+2016-05-04 08:54:07,3559,30.9392,28.3369
+2016-05-04 09:09:02,3559,31.1573,28.2866
+2016-05-04 09:23:57,3560,30.9908,28.2866
+2016-05-04 09:38:52,3563,31.1026,28.3344
+2016-05-04 09:53:46,3564,30.9908,28.2841
+2016-05-04 10:08:41,3566,31.048,28.2866
+2016-05-04 10:23:36,3568,31.1573,28.3369
+2016-05-04 10:38:31,3570,30.9936,28.3344
+2016-05-04 10:53:26,3571,31.2093,28.2866
+2016-05-04 11:08:21,3573,31.1546,28.2866
+2016-05-04 11:23:16,3574,31.1026,28.2339
+2016-05-04 11:38:11,3575,31.0453,28.2866
+2016-05-04 11:53:06,3577,31.1546,28.2339
+2016-05-04 12:08:01,3577,31.0999,28.2841
+2016-05-04 12:22:56,3578,31.0999,28.3344
+2016-05-04 12:37:51,3580,31.1573,28.2866
+2016-05-04 12:52:47,3581,31.1573,28.2841
+2016-05-04 13:07:42,3582,31.1546,28.2866
+2016-05-04 13:22:37,3583,31.1629,28.2841
+2016-05-04 13:37:32,3585,31.1026,28.2866
+2016-05-04 13:52:28,3585,31.1629,28.2841
+2016-05-04 14:07:23,3586,31.1082,28.2866
+2016-05-04 14:22:18,3587,31.2177,28.2866
+2016-05-04 14:37:14,3588,31.1629,28.2364
+2016-05-04 14:52:09,3588,31.2149,28.2841
+2016-05-04 15:07:04,3588,31.1082,28.2339
+2016-05-04 15:22:07,3589,31.1601,28.2866
+2016-05-04 15:37:02,3589,31.1054,28.2364
+2016-05-04 15:51:57,3590,31.1082,28.3369
+2016-05-04 16:06:53,3591,31.1601,28.2841
+2016-05-04 16:21:48,3591,31.1054,28.2866
+2016-05-04 16:36:44,3591,31.1164,28.2841
+2016-05-04 16:51:39,3590,31.1712,28.2841
+2016-05-04 17:06:35,3590,31.0618,28.2841
+2016-05-04 17:21:30,3589,31.1712,28.2841
+2016-05-04 17:36:26,3588,31.1164,28.2866
+2016-05-04 17:51:21,3588,31.1795,28.2841
+2016-05-04 18:06:16,3588,31.1247,28.2841
+2016-05-04 18:21:11,3587,31.1795,28.2364
+2016-05-04 18:36:07,3586,31.1795,28.2841
+2016-05-04 18:51:02,3585,31.1795,28.2841
+2016-05-04 19:05:57,3584,31.1795,28.3369
+2016-05-04 19:20:52,3583,31.2343,28.2866
+2016-05-04 19:35:47,3582,31.1795,28.2339
+2016-05-04 19:50:42,3581,31.1795,28.2339
+2016-05-04 20:05:37,3579,31.0701,28.2841
+2016-05-04 20:20:33,3578,31.1795,28.2866
+2016-05-04 20:35:28,3577,31.2343,28.2866
+2016-05-04 20:50:23,3575,31.1795,28.2866
+2016-05-04 21:05:18,3574,31.1795,28.2841
+2016-05-04 21:20:20,3572,31.1795,28.3369
+2016-05-04 21:35:14,3571,31.1247,28.2866
+2016-05-04 21:50:09,3570,31.2343,28.3369
+2016-05-04 22:05:04,3568,31.1247,28.2866
+2016-05-04 22:19:59,3568,31.0729,28.3369
+2016-05-04 22:34:54,3566,31.1247,28.2866
+2016-05-04 22:49:49,3566,31.1247,28.3369
+2016-05-04 23:04:44,3565,31.0701,28.2841
+2016-05-04 23:19:39,3564,30.9612,28.3369
+2016-05-04 23:34:34,3562,31.1247,28.2841
+2016-05-04 23:49:29,3561,31.1164,28.2866
+2016-05-05 00:04:24,3560,31.0618,28.3369
+2016-05-05 00:19:18,3559,31.226,28.2866
+2016-05-05 00:34:13,3559,30.9529,28.2866
+2016-05-05 00:49:08,3559,31.226,28.2364
+2016-05-05 01:04:03,3558,30.9529,28.2364
+2016-05-05 01:18:58,3557,30.9529,28.3369
+2016-05-05 01:33:52,3557,31.1192,28.3369
+2016-05-05 01:48:47,3556,31.1082,28.2866
+2016-05-05 02:03:42,3556,31.2177,28.2866
+2016-05-05 02:18:37,3556,31.1629,28.3369
+2016-05-05 02:33:31,3555,31.1629,28.3369
+2016-05-05 02:48:26,3554,31.1082,28.1863
+2016-05-05 03:03:20,3554,30.8904,28.1338
+2016-05-05 03:18:15,3553,30.9963,28.1338
+2016-05-05 03:33:09,3553,31.2177,28.0889
+2016-05-05 03:48:11,3552,31.1109,28.1838
+2016-05-05 04:03:06,3552,30.9419,28.1338
+2016-05-05 04:18:00,3552,31.0508,28.1838
+2016-05-05 04:32:55,3551,31.2204,28.1363
+2016-05-05 04:47:50,3550,30.9447,28.1863
+2016-05-05 05:02:44,3550,30.9447,28.1863
+2016-05-05 05:17:39,3549,31.048,28.1863
+2016-05-05 05:32:34,3549,31.1026,28.1363
+2016-05-05 05:47:29,3549,31.1573,28.1363
+2016-05-05 06:02:23,3548,31.1573,28.1363
+2016-05-05 06:17:18,3548,31.1573,28.1863
+2016-05-05 06:32:12,3549,31.1026,28.1363
+2016-05-05 06:47:07,3549,30.9908,28.1363
+2016-05-05 07:02:01,3549,31.1026,28.1863
+2016-05-05 07:16:56,3550,31.1573,28.1363
+2016-05-05 07:31:51,3552,30.9936,28.1338
+2016-05-05 07:46:46,3554,30.7227,28.0864
+2016-05-05 08:01:41,3556,30.8794,28.1363
+2016-05-05 08:16:35,3557,30.7767,28.1863
+2016-05-05 08:31:30,3559,30.7685,28.1363
+2016-05-05 08:46:25,3562,30.9364,28.1388
+2016-05-05 09:01:20,3565,30.7657,28.1363
+2016-05-05 09:16:15,3568,30.8849,28.0864
+2016-05-05 09:31:11,3571,30.9853,28.0839
+2016-05-05 09:46:06,3574,30.9853,28.1338
+2016-05-05 10:01:01,3577,31.0453,28.1338
+2016-05-05 10:15:57,3581,30.9908,28.1338
+2016-05-05 10:30:52,3583,30.9364,28.1363
+2016-05-05 10:45:48,3585,30.9364,28.1338
+2016-05-05 11:00:43,3587,30.9392,28.1363
+2016-05-05 11:15:39,3588,30.9908,28.1338
+2016-05-05 11:30:34,3589,30.9364,28.1338
+2016-05-05 11:45:29,3591,30.9364,28.0839
+2016-05-05 12:00:25,3592,30.9392,28.1363
+2016-05-05 12:15:20,3592,30.9908,28.1363
+2016-05-05 12:30:16,3593,30.9936,28.1338
+2016-05-05 12:45:11,3593,30.9963,28.1363
+2016-05-05 13:00:07,3594,31.0508,28.1338
+2016-05-05 13:15:02,3595,30.9963,28.0864
+2016-05-05 13:29:58,3595,31.0508,28.1363
+2016-05-05 13:44:53,3596,31.1054,28.1338
+2016-05-05 13:59:49,3597,31.0536,28.0864
+2016-05-05 14:14:44,3598,30.9991,28.1363
+2016-05-05 14:29:40,3598,30.9991,28.1338
+2016-05-05 14:44:36,3598,30.9529,28.0839
+2016-05-05 14:59:31,3598,31.0073,28.1338
+2016-05-05 15:14:27,3598,31.1164,28.1363
+2016-05-05 15:29:23,3598,31.0156,28.0864
+2016-05-05 15:44:19,3598,30.9584,28.1338
+2016-05-05 15:59:14,3598,31.0701,28.0839
+2016-05-05 16:14:10,3598,31.0156,28.1363
+2016-05-05 16:29:06,3599,31.0156,28.1338
+2016-05-05 16:44:02,3599,31.0156,28.0864
+2016-05-05 16:58:57,3599,30.9041,28.0839
+2016-05-05 17:13:53,3599,31.0156,28.1338
+2016-05-05 17:28:49,3599,31.0784,28.0864
+2016-05-05 17:43:45,3599,31.0784,28.0839
+2016-05-05 17:58:40,3599,31.0867,28.1338
+2016-05-05 18:13:36,3598,31.0867,28.1363
+2016-05-05 18:28:32,3598,31.0867,28.1363
+2016-05-05 18:43:27,3598,31.0321,28.0839
+2016-05-05 18:58:23,3598,31.0321,28.1338
+2016-05-05 19:13:18,3598,30.9749,28.1363
+2016-05-05 19:28:14,3598,31.0321,28.0839
+2016-05-05 19:43:09,3598,31.0321,28.1838
+2016-05-05 19:58:05,3597,31.0266,28.1363
+2016-05-05 20:13:00,3596,31.0867,28.0864
+2016-05-05 20:27:55,3595,30.9749,28.1363
+2016-05-05 20:42:51,3594,31.0922,28.1363
+2016-05-05 20:57:46,3593,31.0376,28.0839
+2016-05-05 21:12:41,3592,31.1468,28.1363
+2016-05-05 21:27:37,3591,30.9832,28.1363
+2016-05-05 21:42:32,3588,31.0376,28.0839
+2016-05-05 21:57:27,3588,30.9832,28.1338
+2016-05-05 22:12:22,3587,30.9832,28.1363
+2016-05-05 22:27:17,3586,31.0376,28.1363
+2016-05-05 22:42:12,3586,31.0376,28.1363
+2016-05-05 22:57:08,3585,31.0922,28.1338
+2016-05-05 23:12:03,3584,31.0376,28.1363
+2016-05-05 23:26:58,3582,31.0376,28.1363
+2016-05-05 23:41:53,3582,31.0376,28.1363
+2016-05-05 23:56:48,3581,31.0949,28.1338
+2016-05-06 00:11:43,3581,30.9832,28.1363
+2016-05-06 00:26:38,3580,31.0376,28.1363
+2016-05-06 00:41:33,3579,31.1468,28.1438
+2016-05-06 00:56:28,3578,31.0376,28.1438
+2016-05-06 01:11:23,3578,31.0922,28.1914
+2016-05-06 01:26:18,3577,31.0376,28.1939
+2016-05-06 01:41:13,3577,30.9832,28.1438
+2016-05-06 01:56:08,3576,30.9832,28.1413
+2016-05-06 02:11:03,3576,30.9288,28.1413
+2016-05-06 02:25:58,3576,31.0922,28.1413
+2016-05-06 02:40:53,3575,31.0867,28.1438
+2016-05-06 02:55:48,3575,30.9316,28.1413
+2016-05-06 03:10:43,3574,30.9288,28.1413
+2016-05-06 03:25:38,3574,30.9233,28.1413
+2016-05-06 03:40:33,3573,30.9233,28.0939
+2016-05-06 03:55:28,3573,30.9233,28.1438
+2016-05-06 04:10:23,3572,31.0321,28.1939
+2016-05-06 04:25:18,3572,30.9233,28.1438
+2016-05-06 04:40:13,3571,30.8149,28.1438
+2016-05-06 04:55:08,3571,30.8691,28.2014
+2016-05-06 05:10:03,3570,30.9233,28.1438
+2016-05-06 05:24:58,3569,30.9233,28.1438
+2016-05-06 05:39:53,3570,30.9233,28.1438
+2016-05-06 05:54:47,3568,30.8691,28.1438
+2016-05-06 06:09:42,3568,30.9233,28.1513
+2016-05-06 06:24:37,3569,30.8149,28.1989
+2016-05-06 06:39:32,3569,30.8149,28.1488
+2016-05-06 06:54:26,3570,30.9233,28.1438
+2016-05-06 07:09:21,3570,30.9233,28.2014
+2016-05-06 07:24:16,3571,30.7609,28.1513
+2016-05-06 07:39:11,3572,30.9206,28.1513
+2016-05-06 07:54:06,3574,30.8691,28.1014
+2016-05-06 08:09:01,3576,30.8149,28.2014
+2016-05-06 08:23:56,3578,30.9233,28.1014
+2016-05-06 08:38:52,3581,30.9233,28.1014
+2016-05-06 08:53:47,3584,30.8149,28.1513
+2016-05-06 09:08:42,3586,30.9206,28.1014
+2016-05-06 09:23:37,3588,30.9206,28.1513
+2016-05-06 09:38:40,3590,30.8149,28.1513
+2016-05-06 09:53:36,3593,30.8691,28.1014
+2016-05-06 10:08:31,3595,30.9233,28.0939
+2016-05-06 10:23:27,3597,30.9233,28.1513
+2016-05-06 10:38:22,3598,30.9206,28.1488
+2016-05-06 10:53:18,3598,30.9233,28.1513
+2016-05-06 11:08:13,3599,30.9233,28.0964
+2016-05-06 11:23:09,3599,30.9233,28.1014
+2016-05-06 11:38:05,3599,30.9288,28.1014
+2016-05-06 11:53:00,3599,30.9288,28.1014
+2016-05-06 12:07:56,3599,30.9288,28.0989
+2016-05-06 12:22:52,3599,30.9288,28.1513
+2016-05-06 12:37:47,3599,30.9832,28.1014
+2016-05-06 12:52:43,3599,30.937,28.1488
+2016-05-06 13:07:39,3599,30.937,28.0964
+2016-05-06 13:22:35,3599,30.8313,28.0989
+2016-05-06 13:37:31,3599,30.9453,27.9993
+2016-05-06 13:52:27,3599,30.9997,27.9521
+2016-05-06 14:07:23,3599,30.9453,27.9521
+2016-05-06 14:22:19,3599,30.9453,27.9496
+2016-05-06 14:37:15,3599,30.891,27.9496
+2016-05-06 14:52:11,3599,30.9453,27.9471
+2016-05-06 15:07:07,3599,30.9535,27.9521
+2016-05-06 15:22:03,3599,30.8992,28.0018
+2016-05-06 15:36:59,3599,30.9535,27.9993
+2016-05-06 15:51:55,3599,30.9535,27.9496
+2016-05-06 16:06:51,3599,30.9535,27.9496
+2016-05-06 16:21:47,3599,30.9535,27.9993
+2016-05-06 16:36:43,3599,30.959,27.9471
+2016-05-06 16:51:39,3599,30.9618,28.0018
+2016-05-06 17:06:34,3599,30.9618,27.9496
+2016-05-06 17:21:30,3599,30.9673,27.9496
+2016-05-06 17:36:26,3599,30.9673,27.9496
+2016-05-06 17:51:22,3599,30.913,27.9496
+2016-05-06 18:06:18,3599,30.8046,27.9521
+2016-05-06 18:21:14,3599,30.7533,27.9521
+2016-05-06 18:36:10,3599,30.8046,27.9496
+2016-05-06 18:51:05,3599,30.8073,27.9993
+2016-05-06 19:06:01,3599,30.8046,27.9496
+2016-05-06 19:20:57,3599,30.8156,27.9496
+2016-05-06 19:35:53,3599,30.8156,28.0018
+2016-05-06 19:50:48,3599,30.7615,27.9521
+2016-05-06 20:05:44,3599,30.8156,27.9496
+2016-05-06 20:20:39,3599,30.8156,27.9521
+2016-05-06 20:35:35,3599,30.8156,27.957
+2016-05-06 20:50:30,3599,30.7615,28.0067
+2016-05-06 21:05:26,3599,30.8697,28.0067
+2016-05-06 21:20:21,3599,30.8156,27.957
+2016-05-06 21:35:17,3598,30.7615,28.0067
+2016-05-06 21:50:12,3598,30.8156,27.9595
+2016-05-06 22:05:08,3598,30.8156,28.0067
+2016-05-06 22:20:03,3598,30.8156,28.0042
+2016-05-06 22:34:58,3597,30.8752,28.0067
+2016-05-06 22:49:53,3596,30.8128,27.957
+2016-05-06 23:04:49,3595,30.8238,27.9546
+2016-05-06 23:19:44,3595,30.8156,28.0092
+2016-05-06 23:34:39,3594,30.8697,27.957
+2016-05-06 23:49:34,3593,30.8156,27.9546
+2016-05-07 00:04:30,3592,30.8156,28.0092
+2016-05-07 00:19:25,3592,30.8697,27.9595
+2016-05-07 00:34:20,3591,30.8265,27.9595
+2016-05-07 00:49:15,3590,30.867,27.9595
+2016-05-07 01:04:11,3588,30.8697,28.0092
+2016-05-07 01:19:06,3588,30.8156,28.059
+2016-05-07 01:34:01,3588,30.7076,28.0067
+2016-05-07 01:48:56,3587,30.8697,28.0067
+2016-05-07 02:03:51,3587,30.8697,28.0067
+2016-05-07 02:18:47,3586,30.8697,28.0092
+2016-05-07 02:33:42,3586,30.8697,27.957
+2016-05-07 02:48:37,3585,30.8697,27.957
+2016-05-07 03:03:32,3584,30.8697,27.9645
+2016-05-07 03:18:27,3584,30.8697,28.0142
+2016-05-07 03:33:22,3583,30.8156,27.9645
+2016-05-07 03:48:17,3582,30.8156,28.0142
+2016-05-07 04:03:13,3581,30.867,28.0142
+2016-05-07 04:18:08,3582,30.8697,27.9645
+2016-05-07 04:33:10,3580,30.8697,28.0142
+2016-05-07 04:48:05,3581,30.8156,28.0142
+2016-05-07 05:03:00,3581,30.8697,28.064
+2016-05-07 05:17:55,3578,30.8128,28.0615
+2016-05-07 05:32:50,3580,30.8156,28.0167
+2016-05-07 05:47:45,3580,30.6537,28.1238
+2016-05-07 06:02:40,3577,30.8697,28.0142
+2016-05-07 06:17:35,3578,30.8697,28.0142
+2016-05-07 06:32:30,3578,30.8697,27.9719
+2016-05-07 06:47:25,3578,30.8156,28.0241
+2016-05-07 07:02:20,3576,30.8697,28.0142
+2016-05-07 07:17:15,3576,30.8073,28.0216
+2016-05-07 07:32:10,3576,30.8615,27.9744
+2016-05-07 07:47:05,3578,30.8073,27.9719
+2016-05-07 08:02:00,3582,30.7506,28.0216
+2016-05-07 08:16:55,3584,30.8101,28.0216
+2016-05-07 08:31:51,3584,30.8073,28.0241
+2016-05-07 08:46:46,3587,30.9184,27.9719
+2016-05-07 09:01:41,3581,31.0955,28.0216
+2016-05-07 09:16:37,3578,30.867,27.9719
+2016-05-07 09:31:32,3581,30.9783,27.9719
+2016-05-07 09:46:28,3588,30.9157,28.0241
+2016-05-07 10:01:23,3573,31.0355,28.0216
+2016-05-07 10:16:19,3588,30.867,28.0216
+2016-05-07 10:31:14,3592,31.153,27.9719
+2016-05-07 10:46:10,3586,30.8642,27.9744
+2016-05-07 11:01:06,3589,30.867,28.0216
+2016-05-07 11:16:02,3577,31.1038,27.9719
+2016-05-07 11:30:58,3582,31.0983,28.0216
+2016-05-07 11:45:53,3585,31.0983,27.9719
+2016-05-07 12:00:49,3597,30.8724,27.8233
+2016-05-07 12:15:45,3598,30.9294,27.774
+2016-05-07 12:30:41,3598,30.8752,27.8233
+2016-05-07 12:45:37,3595,31.041,27.8728
+2016-05-07 13:00:33,3577,31.2271,27.8233
+2016-05-07 13:15:29,3577,30.8971,27.8233
+2016-05-07 13:30:25,3596,30.9948,27.774
+2016-05-07 13:45:21,3588,31.1066,27.774
+2016-05-07 14:00:17,3591,31.0603,27.7764
+2016-05-07 14:15:13,3588,31.1204,27.8233
+2016-05-07 14:30:09,3560,31.5806,27.8233
+2016-05-07 14:45:04,3542,31.9213,27.774
+2016-05-07 15:00:01,3554,31.7533,27.8209
+2016-05-07 15:14:57,3553,31.6975,27.8209
+2016-05-07 15:29:53,3549,31.8233,27.8728
+2016-05-07 15:44:49,3536,32.2379,27.8703
+2016-05-07 15:59:45,3531,32.0031,27.774
+2016-05-07 16:14:41,3523,32.2407,27.8233
+2016-05-07 16:29:37,3525,32.1669,27.8233
+2016-05-07 16:44:33,3528,32.068000000000005,27.7715
+2016-05-07 16:59:29,3538,31.9412,27.8233
+2016-05-07 17:14:26,3530,32.0145,27.7715
+2016-05-07 17:29:21,3525,32.1331,27.8233
+2016-05-07 17:44:17,3517,32.366,27.8233
+2016-05-07 17:59:13,3514,32.3747,27.8307
+2016-05-07 18:14:09,3508,32.4976,27.8307
+2016-05-07 18:29:05,3506,32.3234,27.8802
+2016-05-07 18:44:01,3533,-9.2931,27.8802
+2016-05-07 18:58:57,3516,32.2012,27.8307
+2016-05-07 19:13:52,3514,32.1445,27.8802
+2016-05-07 19:28:48,3499,32.449,27.8802
+2016-05-07 19:43:44,3498,32.449,27.8307
+2016-05-07 19:58:40,3502,32.2723,27.8802
+2016-05-07 20:13:35,3479,32.6961,27.8802
+2016-05-07 20:28:31,3488,32.389,27.9297
+2016-05-07 20:43:26,3469,32.8146,27.8307
+2016-05-07 20:58:22,3464,32.8117,27.8802
+2016-05-07 21:13:17,3461,32.9306,27.8381
+2016-05-07 21:28:13,3455,32.9335,27.9372
+2016-05-07 21:43:08,3449,33.0559,27.9372
+2016-05-07 21:58:04,3450,32.9365,27.8876
+2016-05-07 22:12:59,3450,32.9306,27.8876
+2016-05-07 22:27:55,3437,33.1788,27.8876
+2016-05-07 22:42:50,3420,33.0559,27.9372
+2016-05-07 22:57:45,3412,33.2552,27.9372
+2016-05-07 23:12:40,3392,33.8639,27.8851
+2016-05-07 23:27:36,3381,34.0535,27.8876
+2016-05-07 23:42:31,3364,34.3753,27.8876
+2016-05-07 23:57:26,3030,-8.1326,-4.0625
+2016-05-08 00:12:22,3030,-6.5776,24.5436
+2016-05-08 00:27:17,3051,-11.7669,
+2016-05-08 00:42:13,3051,-11.7005,
+2016-05-08 00:57:08,3051,-11.6675,
+2016-05-08 01:12:06,3050,-11.6356,
+2016-05-08 01:27:02,3050,-11.6685,
+2016-05-08 01:41:57,3050,-11.6356,
+2016-05-08 01:56:52,3048,-11.6356,
+2016-05-08 02:11:47,3047,-11.6356,
+2016-05-08 02:26:42,3045,-11.6356,
+2016-05-08 02:41:37,3043,-11.6356,
+2016-05-08 02:56:33,3043,-11.6366,
+2016-05-08 03:11:28,3043,-11.6366,
+2016-05-08 03:26:23,3040,-11.6366,
+2016-05-08 03:41:18,3040,-11.6366,
+2016-05-08 03:56:13,3038,-11.6366,
+2016-05-08 04:11:08,3037,-11.6366,
+2016-05-08 04:26:03,3036,-11.6366,
+2016-05-08 04:40:58,3036,-11.6366,
+2016-05-08 04:55:53,3035,-11.6366,
+2016-05-08 05:10:48,3034,-11.6366,
+2016-05-08 05:25:44,3031,-11.6706,
+2016-05-08 05:40:39,3031,-11.6695,
+2016-05-08 05:55:34,3031,-11.7036,
+2016-05-08 06:10:29,3030,-11.6695,
+2016-05-08 06:25:24,3030,-11.7036,
+2016-05-08 06:40:19,3030,-11.7036,
+2016-05-08 06:55:14,3030,-11.6695,
+2016-05-08 07:10:09,3032,-11.7036,
+2016-05-08 07:25:04,3034,-11.7026,
+2016-05-08 07:39:59,3035,-11.7026,
+2016-05-08 07:54:54,3036,-11.6695,
+2016-05-08 08:09:50,3038,-11.6695,
+2016-05-08 08:24:45,3041,-11.6706,
+2016-05-08 08:39:40,3043,-11.6366,
+2016-05-08 08:54:36,3045,-11.6366,
+2016-05-08 09:09:31,3048,-11.6038,
+2016-05-08 09:24:27,3051,-11.571,
+2016-05-08 09:39:22,3053,-11.571,
+2016-05-08 09:54:17,3058,-11.5384,
+2016-05-08 10:09:13,3060,-11.5059,
+2016-05-08 10:24:09,3064,-11.5059,
+2016-05-08 10:39:05,3065,-11.5384,
+2016-05-08 10:54:00,3070,-11.1851,
+2016-05-08 11:09:03,3072,-11.1851,
+2016-05-08 11:23:59,3072,-11.1841,
+2016-05-08 11:38:55,3075,-11.1841,
+2016-05-08 11:53:51,3077,-11.1851,
+2016-05-08 12:08:47,3079,-11.1851,
+2016-05-08 12:23:43,3080,-11.1851,
+2016-05-08 12:38:38,3080,-11.1851,
+2016-05-08 12:53:34,3080,-11.1526,
+2016-05-08 13:08:30,3082,-11.1526,
+2016-05-08 13:23:26,3083,-11.1526,
+2016-05-08 13:38:22,3085,-11.1526,
+2016-05-08 13:53:18,3085,-11.1526,
+2016-05-08 14:08:14,3086,-11.1526,
+2016-05-08 14:23:11,3086,-11.1526,
+2016-05-08 14:38:07,3086,-11.1516,
+2016-05-08 14:53:03,3086,-11.1526,
+2016-05-08 15:07:59,3087,-11.1526,
+2016-05-08 15:22:55,3087,-11.1526,
+2016-05-08 15:37:52,3087,-11.1526,
+2016-05-08 15:52:48,3087,-11.1516,
+2016-05-08 16:07:44,3087,-11.1526,
+2016-05-08 16:22:40,3087,-11.1526,-8.1694
+2016-05-08 16:37:36,3087,-11.1841,-4.835
+2016-05-08 16:52:32,3087,-11.1831,-4.9625
+2016-05-08 17:07:28,3087,-11.1831,-4.8135
+2016-05-08 17:22:24,3086,-11.1831,-4.7917
+2016-05-08 17:37:20,3086,-11.1841,-4.7702
+2016-05-08 17:52:16,3085,-11.1831,-4.962
+2016-05-08 18:07:12,3083,-11.1831,-4.7913
+2016-05-08 18:22:08,3082,-11.1831,-4.9401
+2016-05-08 18:37:04,3081,-11.2156,-4.8968
+2016-05-08 18:52:00,3080,-11.2156,-4.7475
+2016-05-08 19:06:56,3079,-11.2156,-4.7261
+2016-05-08 19:21:52,3077,-11.2156,-4.7694
+2016-05-08 19:36:48,3075,-11.2473,-4.7274
+2016-05-08 19:51:44,3072,-11.2147,-4.7055
+2016-05-08 20:06:40,3072,-11.2473,
+2016-05-08 20:21:50,3070,-11.2473,
+2016-05-08 20:36:45,3067,-11.28,
+2016-05-08 20:51:41,3065,-11.28,
+2016-05-08 21:06:37,3064,-11.28,
+2016-05-08 21:21:32,3061,-11.3119,
+2016-05-08 21:36:28,3058,-11.3119,
+2016-05-08 21:51:24,3058,-11.3119,
+2016-05-08 22:06:19,3055,-11.3448,
+2016-05-08 22:21:15,3053,-11.3448,
+2016-05-08 22:36:10,3051,-11.3438,
+2016-05-08 22:51:06,3051,-11.3438,
+2016-05-08 23:06:01,3050,-11.3448,
+2016-05-08 23:20:56,3048,-11.3768,
+2016-05-08 23:35:52,3047,-11.3768,
+2016-05-08 23:50:47,3046,-11.3768,
+2016-05-09 00:05:43,3045,-11.3768,
+2016-05-09 00:20:38,3044,-11.3768,
+2016-05-09 00:35:34,3043,-11.3758,
+2016-05-09 00:50:29,3043,-11.3758,
+2016-05-09 01:05:32,3043,-11.3758,
+2016-05-09 01:20:27,3043,-11.3768,
+2016-05-09 01:35:30,3042,-11.3758,
+2016-05-09 01:50:25,3041,-11.3768,
+2016-05-09 02:05:21,3039,-11.3758,
+2016-05-09 02:20:16,3038,-11.4089,
+2016-05-09 02:35:12,3037,-11.4089,
+2016-05-09 02:50:07,3036,-11.4089,
+2016-05-09 03:05:02,3036,-11.4089,
+2016-05-09 03:19:58,3035,-11.4089,
+2016-05-09 03:34:53,3033,-11.4089,
+2016-05-09 03:49:49,3031,-11.4079,
+2016-05-09 04:04:44,3030,-11.4412,
+2016-05-09 04:19:39,3030,-11.4089,
+2016-05-09 04:34:34,3029,-11.4422,
+2016-05-09 04:49:30,3029,-11.4089,
+2016-05-09 05:04:25,3026,-11.4412,
+2016-05-09 05:19:20,3026,-11.4089,
+2016-05-09 05:34:16,3025,-11.4079,
+2016-05-09 05:49:11,3023,-11.4412,
+2016-05-09 06:04:06,3023,-11.4412,
+2016-05-09 06:19:01,3023,-11.4412,
+2016-05-09 06:33:57,3022,-11.4412,
+2016-05-09 06:48:52,3023,-11.4412,
+2016-05-09 07:03:47,3023,-11.4412,
+2016-05-09 07:18:42,3023,-11.4412,
+2016-05-09 07:33:37,3025,-11.4089,
+2016-05-09 07:48:32,3028,-11.4089,
+2016-05-09 08:03:28,3029,-11.3758,
+2016-05-09 08:18:23,3031,-11.3768,
+2016-05-09 08:33:18,3034,-11.3768,
+2016-05-09 08:48:14,3035,-11.3448,
+2016-05-09 09:03:09,3038,-11.3119,
+2016-05-09 09:18:05,3041,-11.28,
+2016-05-09 09:33:00,3043,-11.28,
+2016-05-09 09:47:56,3045,-11.28,-5.1088
+2016-05-09 10:02:51,3050,-11.2156,-5.0448
+2016-05-09 10:17:47,3051,-11.1831,8.4257
+2016-05-09 10:32:42,3054,-11.1841,-3.8327
+2016-05-09 10:47:38,3058,-11.2156,55.005
+2016-05-09 11:02:34,3061,-11.1526,23.7992
+2016-05-09 11:17:30,3064,-11.1526,-3.5335
+2016-05-09 11:32:26,3065,-11.1526,-4.7107
+2016-05-09 11:47:22,3066,-11.1526,23.9232
+2016-05-09 12:02:18,3068,-11.1526,25.6107
+2016-05-09 12:17:14,3070,-11.1526,26.7861
+2016-05-09 12:32:10,3071,-11.1526,-4.4504
+2016-05-09 12:47:06,3072,-11.1526,26.2608
+2016-05-09 13:02:02,3072,-11.1526,-5.2364
+2016-05-09 13:16:58,3073,-11.1526,-0.2046
+2016-05-09 13:31:54,3074,-11.1526,1.5813
+2016-05-09 13:46:50,3075,-11.1841,55.3896
+2016-05-09 14:01:46,3076,-11.1841,55.7789
+2016-05-09 14:16:42,3077,-11.1841,55.6503
+2016-05-09 14:31:38,3078,-11.1841,55.7789
+2016-05-09 14:46:34,3079,-11.1841,56.0378
+2016-05-09 15:01:30,3079,-11.1516,55.6503
+2016-05-09 15:16:26,3079,-11.1526,55.6503
+2016-05-09 15:31:22,3080,-11.1841,55.6503
+2016-05-09 15:46:19,3080,-11.2156,
+2016-05-09 16:01:15,3080,-11.1841,56.173
+2016-05-09 16:16:11,3080,-11.1841,56.173
+2016-05-09 16:31:07,3080,-11.1841,56.173
+2016-05-09 16:46:03,3079,-11.2156,
+2016-05-09 17:00:59,3078,-11.2156,49.4429
+2016-05-09 17:15:55,3078,-11.28,56.3038
+2016-05-09 17:30:51,3077,-11.2156,56.3038
+2016-05-09 17:45:47,3076,-11.2156,56.4401
+2016-05-09 18:00:44,3074,-11.2156,56.0527
+2016-05-09 18:15:40,3072,-11.2156,56.0428
+2016-05-09 18:30:36,3072,-11.2147,55.6602
+2016-05-09 18:45:32,3071,-11.2473,56.0478
+2016-05-09 19:00:28,3070,-11.2473,56.7044
+2016-05-09 19:15:24,3067,-11.2473,56.7044
+2016-05-09 19:30:20,3065,-11.28,56.3188
+2016-05-09 19:45:16,3064,-11.28,56.183
+2016-05-09 20:00:11,3061,-11.3119,
+2016-05-09 20:15:07,3059,-11.2473,-7.3898
+2016-05-09 20:30:03,3058,-11.3119,55.5271
+2016-05-09 20:44:59,3054,-11.3438,
+2016-05-09 20:59:55,3052,-11.3119,55.5222
+2016-05-09 21:14:51,3051,-11.28,-3.99
+2016-05-09 21:29:47,3050,-11.28,6.082999999999998
+2016-05-09 21:44:42,3048,-11.28,-4.4631
+2016-05-09 21:59:38,3044,-11.3768,55.5271
+2016-05-09 22:14:34,3043,-11.3758,55.923
+2016-05-09 22:29:29,3042,-11.3758,56.587
+2016-05-09 22:44:25,3039,-11.3758,56.0627
+2016-05-09 22:59:21,3039,-11.4079,54.6763
+2016-05-09 23:14:16,3043,-11.3428,70.125
+2016-05-09 23:29:12,3036,-11.4079,56.4601
+2016-05-09 23:44:07,3044,-11.3758,24.2075
+2016-05-09 23:59:03,3042,-11.5069,22.5775
+2016-05-10 00:13:58,3032,-11.3758,30.5977
+2016-05-10 00:28:53,3030,-11.3758,-0.3133
+2016-05-10 00:43:56,3029,-11.3758,-4.1844
+2016-05-10 00:58:51,3029,-11.3758,-4.6351
+2016-05-10 01:13:47,3027,-11.4089,-4.6372
+2016-05-10 01:28:42,3026,-11.3758,-4.6582
+2016-05-10 01:43:37,3024,-11.4079,-4.467
+2016-05-10 01:58:33,3023,-11.4089,
+2016-05-10 02:13:28,3022,-11.4079,-4.34
+2016-05-10 02:28:23,3020,-11.4089,
+2016-05-10 02:43:19,3020,-11.4089,
+2016-05-10 02:58:14,3019,-11.4079,
+2016-05-10 03:13:09,3016,-11.4079,
+2016-05-10 03:28:04,3015,-11.4079,
+2016-05-10 03:43:00,3014,-11.4079,
+2016-05-10 03:57:55,3014,-11.4412,
+2016-05-10 04:12:50,3012,-11.4412,
+2016-05-10 04:27:46,3011,-11.4079,
+2016-05-10 04:42:41,3010,-11.4412,
+2016-05-10 04:57:36,3009,-11.4412,
+2016-05-10 05:12:31,3008,-11.4412,
+2016-05-10 05:27:27,3007,-11.4402,
+2016-05-10 05:42:22,3006,-11.4402,
+2016-05-10 05:57:17,3005,-11.4402,
+2016-05-10 06:12:12,3004,-11.4402,
+2016-05-10 06:27:08,3003,-11.4402,
+2016-05-10 06:42:03,3005,-11.5059,
+2016-05-10 06:56:58,3005,-11.4079,
+2016-05-10 07:11:54,3004,-11.4412,
+2016-05-10 07:26:49,3005,-11.4412,
+2016-05-10 07:41:44,3007,-11.4412,
+2016-05-10 07:56:39,3008,-11.4735,
+2016-05-10 08:11:35,3009,-11.4089,
+2016-05-10 08:26:30,3010,-11.4089,
+2016-05-10 08:41:25,3012,-11.3758,
+2016-05-10 08:56:21,3015,-11.4079,-8.6262
+2016-05-10 09:11:16,3016,-11.3438,
+2016-05-10 09:26:11,3019,-11.3099,
+2016-05-10 09:41:07,3021,-11.3119,
+2016-05-10 09:56:02,3022,-11.28,
+2016-05-10 10:10:58,3025,-11.2147,
+2016-05-10 10:25:53,3029,-11.2463,
+2016-05-10 10:40:49,3032,-11.1831,
+2016-05-10 10:55:45,3036,-11.2166,
+2016-05-10 11:10:40,3037,-11.1831,-4.6623
+2016-05-10 11:25:36,3038,-11.2156,
+2016-05-10 11:40:32,3040,-11.2156,-4.6623
+2016-05-10 11:55:28,3043,-11.2156,-4.7055
+2016-05-10 12:10:24,3043,-11.2156,-4.7055
+2016-05-10 12:25:20,3045,-11.2156,-4.6841
+2016-05-10 12:40:15,3047,-11.1831,-4.7055
+2016-05-10 12:55:11,3048,-11.1831,-4.7055
+2016-05-10 13:10:07,3050,-11.1841,-4.7055
+2016-05-10 13:25:03,3051,-11.1841,-4.7475
+2016-05-10 13:39:59,3052,-11.1516,-4.8543
+2016-05-10 13:54:55,3053,-11.1526,-4.6413
+2016-05-10 14:09:51,3054,-11.1841,-4.6623
+2016-05-10 14:24:47,3054,-11.1841,-4.8753
+2016-05-10 14:39:43,3055,-11.1841,-4.9392
+2016-05-10 14:54:40,3056,-11.1841,-4.9397
+2016-05-10 15:09:36,3057,-11.1526,-5.0041
+2016-05-10 15:24:32,3058,-11.1841,-4.9182
+2016-05-10 15:39:28,3058,-11.1841,-4.9603
+2016-05-10 15:54:24,3058,-11.1851,-4.8963
+2016-05-10 16:09:20,3058,-11.1841,-4.9178
+2016-05-10 16:24:16,3058,-11.1841,-4.7891
+2016-05-10 16:39:12,3057,-11.1841,-5.1102
+2016-05-10 16:54:08,3057,-11.1841,-4.874
+2016-05-10 17:09:04,3054,-11.1831,-4.8101
+2016-05-10 17:24:00,3054,-11.1841,-4.6191
+2016-05-10 17:38:56,3053,-11.2156,-4.832
+2016-05-10 17:53:52,3052,-11.2156,-4.832
+2016-05-10 18:08:48,3051,-11.2156,-4.832
+2016-05-10 18:23:44,3050,-11.2156,-4.8316
+2016-05-10 18:38:41,3050,-11.2156,-4.7673
+2016-05-10 18:53:36,3047,-11.2483,-4.5969
+2016-05-10 19:08:32,3045,-11.2473,-4.5977
+2016-05-10 19:23:28,3043,-11.28,-4.5981
+2016-05-10 19:38:24,3042,-11.28,-4.5763
+2016-05-10 19:53:20,3039,-11.279000000000002,-4.5981
+2016-05-10 20:08:16,3037,-11.279000000000002,-4.5985
+2016-05-10 20:23:12,3036,-11.3119,-4.5771
+2016-05-10 20:38:08,3034,-11.3119,-4.5767
+2016-05-10 20:53:04,3030,-11.3119,-4.5763
+2016-05-10 21:07:59,3029,-11.3438,-4.5763
+2016-05-10 21:22:55,3027,-11.3438,-4.5981
+2016-05-10 21:37:51,3025,-11.3438,-4.5981
+2016-05-10 21:52:47,3023,-11.3758,-4.5767
+2016-05-10 22:07:43,3022,-11.3758,-4.5557
+2016-05-10 22:22:38,3020,-11.3758,-4.5767
+2016-05-10 22:37:34,3018,-11.3758,-4.5985
+2016-05-10 22:52:30,3015,-11.4089,-4.5557
+2016-05-10 23:07:26,3015,-11.3758,
+2016-05-10 23:22:21,3014,-11.4089,-4.5343
+2016-05-10 23:37:17,3012,-11.4079,-4.5561
+2016-05-10 23:52:13,3011,-11.4079,-4.5561
+2016-05-11 00:07:08,3010,-11.4079,
+2016-05-11 00:22:04,3009,-11.4079,-4.5561
+2016-05-11 00:36:59,3008,-11.4412,-4.5561
+2016-05-11 00:51:55,3007,-11.4412,-11.6284
+2016-05-11 01:06:51,3007,-11.4412,
+2016-05-11 01:21:46,3006,-11.4412,-11.4771
+2016-05-11 01:36:42,3004,-11.4412,
+2016-05-11 01:51:37,3002,-11.4402,
+2016-05-11 02:06:33,3001,-11.4402,
+2016-05-11 02:21:28,3000,-11.4412,
+2016-05-11 02:36:24,2999,-11.4735,
+2016-05-11 02:51:19,2997,-11.4735,
+2016-05-11 03:06:15,2995,-11.4735,
+2016-05-11 03:21:10,2994,-11.4402,
+2016-05-11 03:36:06,2993,-11.5069,
+2016-05-11 03:51:01,2992,-11.5049,
+2016-05-11 04:05:56,2991,-11.5741,
+2016-05-11 04:20:52,2989,-11.5049,
+2016-05-11 04:35:47,2987,-11.5069,
+2016-05-11 04:50:42,2987,-11.5049,
+2016-05-11 05:05:37,2986,-11.5049,
+2016-05-11 05:20:32,2985,-11.5049,
+2016-05-11 05:35:28,2984,-11.5049,
+2016-05-11 05:50:23,2983,-11.5049,
+2016-05-11 06:05:18,2982,-11.5384,
+2016-05-11 06:20:13,2981,-11.5049,
+2016-05-11 06:35:08,2980,-11.5049,
+2016-05-11 06:50:03,2980,-11.5384,
+2016-05-11 07:04:59,2981,-11.5049,
+2016-05-11 07:19:54,2982,-11.5384,-4.4686
+2016-05-11 07:34:49,2983,-11.5049,-4.5113
+2016-05-11 07:49:44,2985,-11.5049,-4.5109
+2016-05-11 08:04:39,2986,-11.5059,-4.659
+2016-05-11 08:19:34,2988,-11.4725,-4.5739
+2016-05-11 08:34:30,2992,-11.4735,-4.9147
+2016-05-11 08:49:25,2993,-11.4402,-4.6804
+2016-05-11 09:04:13,3171,-10.751,64.3117
+2016-05-11 09:19:09,3170,-10.751,64.2946
+2016-05-11 09:34:05,3171,-10.751,63.9632
+2016-05-11 09:49:01,3171,-10.7198,63.9632
+2016-05-11 10:03:57,3171,-10.7207,63.9575
+2016-05-11 10:18:53,3174,-10.6895,63.793
+2016-05-11 10:33:49,3175,-10.6895,63.7987
+2016-05-11 10:48:45,3177,-10.6904,63.6293
+2016-05-11 11:03:41,3178,-10.6593,63.6293
+2016-05-11 11:18:37,3180,-10.6593,63.2987
+2016-05-11 11:33:33,3181,-10.6593,63.4608
+2016-05-11 11:48:29,3183,-10.6593,62.9656
+2016-05-11 12:03:25,3184,-10.6593,63.1373
+2016-05-11 12:18:21,3185,-10.6593,63.1373
+2016-05-11 12:33:18,3185,-10.6602,62.1742
+2016-05-11 12:48:14,3186,-10.6602,61.6972
+2016-05-11 13:03:10,3188,-10.6292,61.6972
+2016-05-11 13:18:06,3188,-10.6292,61.8573
+2016-05-11 13:33:03,3189,-10.6292,61.6972
+2016-05-11 13:47:59,3189,-10.6292,61.5433
+2016-05-11 14:02:55,3190,-10.6292,61.6972
+2016-05-11 14:17:52,3191,-10.6292,61.8573
+2016-05-11 14:32:48,3191,-10.6292,62.1742
+2016-05-11 14:47:44,3191,-10.6292,62.1742
+2016-05-11 15:02:41,3191,-10.6292,62.1742
+2016-05-11 15:17:37,3192,-10.6292,62.342
+2016-05-11 15:32:33,3191,-10.6292,62.331
+2016-05-11 15:47:30,3192,-10.6292,62.331
+2016-05-11 16:02:26,3192,-10.6292,62.331
+2016-05-11 16:17:22,3191,-10.6292,62.1742
+2016-05-11 16:32:18,3191,-10.6602,61.6972
+2016-05-11 16:47:14,3190,-10.6602,61.8573
+2016-05-11 17:02:11,3190,-10.6602,62.6523
+2016-05-11 17:17:07,3189,-10.6593,62.0126
+2016-05-11 17:32:03,3189,-10.6602,62.4885
+2016-05-11 17:46:59,3188,-10.6602,62.331
+2016-05-11 18:01:55,3187,-10.6904,62.331
+2016-05-11 18:16:51,3185,-10.6904,62.1852
+2016-05-11 18:31:47,3184,-10.6904,62.342
+2016-05-11 18:46:44,3184,-10.6904,62.342
+2016-05-11 19:01:40,3184,-10.6904,62.5052
+2016-05-11 19:16:36,3183,-10.7217,62.3476
+2016-05-11 19:31:32,3182,-10.7217,62.342
+2016-05-11 19:46:28,3181,-10.7217,62.342
+2016-05-11 20:01:24,3180,-10.7207,62.342
+2016-05-11 20:16:19,3178,-10.7207,62.0126
+2016-05-11 20:31:15,3177,-10.7207,62.1852
+2016-05-11 20:46:11,3177,-10.751,62.3476
+2016-05-11 21:01:07,3175,-10.751,61.8737
+2016-05-11 21:16:03,3174,-10.751,62.1852
+2016-05-11 21:30:58,3174,-10.751,62.5052
+2016-05-11 21:45:54,3173,-10.751,62.342
+2016-05-11 22:00:50,3172,-10.7824,62.5052
+2016-05-11 22:15:46,3171,-10.7824,62.669
+2016-05-11 22:30:41,3171,-10.7824,62.6635
+2016-05-11 22:45:37,3170,-10.7824,62.5052
+2016-05-11 23:00:33,3169,-10.8139,62.5052
+2016-05-11 23:15:28,3168,-10.8129,62.342
+2016-05-11 23:30:24,3167,-10.8139,62.4996
+2016-05-11 23:45:20,3166,-10.8129,62.0291
+2016-05-12 00:00:15,3165,-10.7815,62.1963
+2016-05-12 00:15:11,3164,-10.8129,62.342
+2016-05-12 00:30:07,3164,-10.8129,62.1852
+2016-05-12 00:45:02,3163,-10.8129,61.401
+2016-05-12 00:59:58,3163,-10.7824,61.7136
+2016-05-12 01:14:53,3163,-10.8129,62.4996
+2016-05-12 01:29:49,3162,-10.8129,62.6635
+2016-05-12 01:44:44,3162,-10.8129,62.8281
+2016-05-12 01:59:40,3162,-10.8129,62.9879
+2016-05-12 02:14:35,3162,-10.8444,63.3155
+2016-05-12 02:29:31,3162,-10.8444,63.6463
+2016-05-12 02:44:26,3162,-10.8435,64.1455
+2016-05-12 02:59:22,3162,-10.8435,64.6522
+2016-05-12 03:14:17,3161,-10.8435,65.1606
+2016-05-12 03:29:13,3160,-10.8435,65.1664
+2016-05-12 03:44:08,3160,-10.8435,65.3549
+2016-05-12 03:59:03,3160,-10.8435,65.8621
+2016-05-12 04:13:59,3160,-10.8435,66.0426
+2016-05-12 04:28:54,3160,-10.8751,66.7562
+2016-05-12 04:43:50,3160,-10.8751,66.7739
+2016-05-12 04:58:45,3159,-10.9048,66.5897
+2016-05-12 05:13:41,3159,-10.9048,66.9532
+2016-05-12 05:28:36,3157,-10.9048,67.4906
+2016-05-12 05:43:31,3157,-10.9048,67.8756
+2016-05-12 05:58:27,3157,-10.9048,68.4397
+2016-05-12 06:13:22,3157,-10.9366,68.6276
+2016-05-12 06:28:18,3156,-10.9366,68.6337
+2016-05-12 06:43:14,3157,-10.9048,69.5952
+2016-05-12 06:58:09,3157,-10.9048,70.1873
+2016-05-12 07:13:05,3157,-10.9366,70.391
+2016-05-12 07:28:01,3158,-10.9048,71.8174
+2016-05-12 07:42:56,3160,-10.9366,71.8174
+2016-05-12 07:57:52,3160,-10.9058,72.4472
+2016-05-12 08:12:47,3162,-10.9058,73.0881
+2016-05-12 08:27:43,3162,-10.9058,72.6639
+2016-05-12 08:42:38,3163,-10.8741,73.0881
+2016-05-12 08:57:34,3166,-10.8435,73.9712
+2016-05-12 09:12:30,3168,-10.8435,74.6326
+2016-05-12 09:27:25,3170,-10.812,75.0868
+2016-05-12 09:42:21,3171,-10.812,76.0121
+2016-05-12 09:57:17,3174,-10.7815,77.4371
+2016-05-12 10:12:13,3174,-10.7198,76.9607
+2016-05-12 10:27:09,3176,-10.7198,75.7753
+2016-05-12 10:42:05,3177,-10.7198,75.7753
+2016-05-12 10:57:01,3179,-10.7198,76.0054
+2016-05-12 11:11:56,3180,-10.7198,75.5466
+2016-05-12 11:26:53,3181,-10.7198,75.0868
+2016-05-12 11:41:49,3182,-10.7198,74.8623
+2016-05-12 11:56:45,3183,-10.7198,75.0868
+2016-05-12 12:11:41,3184,-10.7198,74.8823
+2016-05-12 12:26:37,3184,-10.7198,75.7753
+2016-05-12 12:41:33,3185,-10.7198,76.9539
+2016-05-12 12:56:29,3185,-10.7198,73.3085
+2016-05-12 13:11:26,3185,-10.7198,73.3281
+2016-05-12 13:26:22,3186,-10.7198,73.3085
+2016-05-12 13:41:18,3188,-10.7198,74.4306
+2016-05-12 13:56:14,3188,-10.7198,75.1068
+2016-05-12 14:11:10,3189,-10.7198,76.9539
+2016-05-12 14:26:07,3188,-10.7198,74.1969
+2016-05-12 14:41:03,3189,-10.7198,76.2572
+2016-05-12 14:55:59,3189,-10.7198,77.2119
+2016-05-12 15:10:55,3190,-10.7198,23.9976
+2016-05-12 15:25:51,3190,-10.7198,24.0364
+2016-05-12 15:40:48,3190,-10.751,24.1681
+2016-05-12 15:55:44,3191,-10.7198,24.257
+2016-05-12 16:10:40,3191,-10.751,24.3505
+2016-05-12 16:25:37,3191,-10.751,24.3505
+2016-05-12 16:40:33,3190,-10.751,24.3505
+2016-05-12 16:55:29,3191,-10.751,24.3962
+2016-05-12 17:10:26,3191,-10.751,24.4356
+2016-05-12 17:25:22,3191,-10.751,24.4464
+2016-05-12 17:40:19,3191,-10.752,24.4902
+2016-05-12 17:55:15,3191,-10.752,24.4421
+2016-05-12 18:10:11,3190,-10.7501,24.6241
+2016-05-12 18:25:07,3190,-10.751,24.7123
+2016-05-12 18:40:04,3189,-10.751,24.5801
+2016-05-12 18:55:00,3189,-10.751,24.7587
+2016-05-12 19:09:56,3189,-10.751,24.8983
+2016-05-12 19:24:52,3188,-10.7815,24.8961
+2016-05-12 19:39:55,3186,-10.7815,24.8051
+2016-05-12 19:54:51,3184,-10.8129,24.6241
+2016-05-12 20:09:47,3184,-10.8129,24.8029
+2016-05-12 20:24:43,3184,-10.8129,24.8961
+2016-05-12 20:39:39,3183,-10.8129,24.8961
+2016-05-12 20:54:35,3181,-10.8444,24.4421
+2016-05-12 21:09:31,3178,-10.8435,23.7343
+2016-05-12 21:24:27,3179,-10.8435,24.3112
+2016-05-12 21:39:23,3178,-10.8435,24.2656
+2016-05-12 21:54:18,3177,-10.812,24.4421
+2016-05-12 22:09:14,3177,-10.8435,24.5779
+2016-05-12 22:24:10,3176,-10.8435,24.5779
+2016-05-12 22:39:06,3176,-10.8435,24.7188
+2016-05-12 22:54:02,3175,-10.8435,24.4902
+2016-05-12 23:08:57,3174,-10.8751,24.4443
+2016-05-12 23:23:53,3174,-10.8435,24.2721
+2016-05-12 23:38:49,3174,-10.8751,24.3984
+2016-05-12 23:53:44,3172,-10.8741,24.3591
+2016-05-13 01:08:22,3170,-10.9058,24.4508
+2016-05-13 01:23:18,3169,-10.9048,24.4946
+2016-05-13 01:38:14,3168,-10.9058,24.4508
+2016-05-13 01:53:09,3167,-10.9048,24.4049
+2016-05-13 02:08:05,3168,-10.9048,24.8561
+2016-05-13 02:23:00,3167,-10.9048,24.8583
+2016-05-13 02:37:55,3167,-10.9048,24.9027
+2016-05-13 02:52:59,3166,-10.9048,24.8583
+2016-05-13 03:07:54,3166,-10.9048,24.7652
+2016-05-13 03:22:50,3165,-10.9048,24.8561
+2016-05-13 03:37:45,3164,-10.9048,24.8095
+2016-05-13 03:52:41,3163,-10.9048,24.7652
+2016-05-13 04:07:36,3163,-10.9357,24.8095
+2016-05-13 04:22:31,3163,-10.9357,25.1303
+2016-05-13 04:37:27,3162,-10.9366,25.1326
+2016-05-13 04:52:22,3161,-10.9357,24.994
+2016-05-13 05:07:17,3161,-10.9357,25.2224
+2016-05-13 05:22:13,3160,-10.9357,24.9027
+2016-05-13 05:37:08,3160,-10.9357,25.1303
+2016-05-13 05:52:04,3160,-10.9357,25.2201
+2016-05-13 06:06:59,3160,-10.9357,25.1774
+2016-05-13 06:21:54,3160,-10.9357,25.3644
+2016-05-13 06:36:50,3160,-10.9357,25.2719
+2016-05-13 06:51:45,3160,-10.9357,25.2719
+2016-05-13 07:06:41,3158,-10.9357,25.0878
+2016-05-13 07:21:36,3158,-10.9357,24.9049
+2016-05-13 07:36:31,3159,-10.9357,25.1348
+2016-05-13 07:51:27,3159,-10.9357,25.1348
+2016-05-13 08:06:22,3159,-10.9357,25.0878
+2016-05-13 08:21:18,3160,-10.9357,25.0878
+2016-05-13 08:36:13,3160,-10.9357,24.9516
+2016-05-13 08:51:09,3160,-10.9039,25.0878
+2016-05-13 09:06:04,3161,-10.9048,25.1348
+2016-05-13 09:21:00,3162,-10.9048,25.0408
+2016-05-13 09:35:55,3162,-10.9048,24.9516
+2016-05-13 09:50:51,3163,-10.8732,24.9962
+2016-05-13 10:05:46,3164,-10.8732,24.9049
+2016-05-13 10:20:42,3166,-10.8416,24.9049
+2016-05-13 10:35:38,3167,-10.8425,24.9049
+2016-05-13 10:50:33,3167,-10.8425,24.9494
+2016-05-13 11:05:29,3169,-10.811,24.8583
+2016-05-13 11:20:25,3170,-10.811,24.9494
+2016-05-13 11:35:20,3171,-10.812,24.9962
+2016-05-13 11:50:16,3172,-10.7805,24.9049
+2016-05-13 12:05:12,3173,-10.812,24.7696
+2016-05-13 12:20:08,3174,-10.812,24.7232
+2016-05-13 12:35:04,3174,-10.812,24.4989
+2016-05-13 12:49:59,3175,-10.812,24.453000000000007
+2016-05-13 13:04:55,3176,-10.812,24.5428
+2016-05-13 13:19:51,3177,-10.7805,24.721
+2016-05-13 13:34:46,3178,-10.7805,24.7232
+2016-05-13 13:49:42,3179,-10.7815,24.7674
+2016-05-13 14:04:38,3180,-10.7491,24.7232
+2016-05-13 14:19:34,3181,-10.7501,24.721
+2016-05-13 14:34:30,3181,-10.7491,24.5867
+2016-05-13 14:49:26,3183,-10.7501,24.591
+2016-05-13 15:04:22,3183,-10.7501,24.721
+2016-05-13 15:19:18,3183,-10.7501,24.5889
+2016-05-13 15:34:14,3183,-10.7491,24.4071
+2016-05-13 15:49:10,3184,-10.7501,24.6769
+2016-05-13 16:04:07,3184,-10.7501,24.5867
+2016-05-13 16:19:03,3184,-10.7501,24.6328
+2016-05-13 16:33:59,3184,-10.7501,24.5428
+2016-05-13 16:48:55,3184,-10.7815,24.4071
+2016-05-13 17:03:51,3183,-10.7805,24.2764
+2016-05-13 17:18:48,3183,-10.8454,24.3634
+2016-05-13 17:33:44,3182,-10.7815,24.1853
+2016-05-13 17:48:40,3182,-10.9039,24.0642
+2016-05-13 18:03:36,3181,-10.8129,24.142
+2016-05-13 18:18:32,3181,-10.812,24.0083
+2016-05-13 18:33:29,3179,-10.812,23.8324
+2016-05-13 18:48:25,3178,-10.812,23.8752
+2016-05-13 19:03:21,3177,-10.8435,23.5283
+2016-05-13 19:18:17,3177,-10.8425,23.6555
+2016-05-13 19:33:13,3176,-10.8435,77.2324
+2016-05-13 19:48:09,3174,-10.8425,74.9022
+2016-05-13 20:03:04,3174,-10.8425,74.00399999999998
+2016-05-13 20:18:00,3172,-10.8741,72.9077
+2016-05-13 20:32:56,3171,-10.8741,71.0215
+2016-05-13 20:47:52,3170,-10.8732,71.4394
+2016-05-13 21:02:48,3168,-10.8732,70.416
+2016-05-13 21:17:43,3167,-10.8732,69.4329
+2016-05-13 21:32:39,3166,-10.9048,69.0434
+2016-05-13 21:47:34,3164,-10.9048,67.7217
+2016-05-13 22:02:30,3163,-10.9048,67.7157
+2016-05-13 22:17:25,3162,-10.9048,66.6193
+2016-05-13 22:32:21,3161,-10.9048,67.1632
+2016-05-13 22:47:17,3160,-10.9048,68.2769
+2016-05-13 23:02:12,3160,-10.9048,67.5325
+2016-05-13 23:17:08,3160,-10.9048,67.9058
+2016-05-13 23:32:03,3159,-10.9048,67.5325
+2016-05-13 23:46:59,3159,-10.9048,67.9058
+2016-05-14 00:01:54,3158,-10.9048,67.9058
+2016-05-14 00:16:50,3158,-10.9048,67.9058
+2016-05-14 00:31:46,3158,-10.9048,68.6642
+2016-05-14 00:46:41,3159,-10.9048,69.2407
+2016-05-14 01:01:37,3157,-10.9048,68.8472
+2016-05-14 01:16:32,3157,-10.9048,69.2407
+2016-05-14 01:31:28,3156,-10.9048,70.02199999999999
+2016-05-14 01:46:23,3156,-10.9039,70.2184
+2016-05-14 02:01:19,3156,-10.9357,70.4222
+2016-05-14 02:16:14,3155,-10.9357,70.6209
+2016-05-14 02:31:09,3153,-10.9357,71.2362
+2016-05-14 02:46:05,3152,-10.9357,70.8207
+2016-05-14 03:01:00,3151,-10.9675,71.2299
+2016-05-14 03:15:55,3150,-10.9665,71.2299
+2016-05-14 03:30:50,3150,-10.9665,71.6501
+2016-05-14 03:45:46,3148,-10.9665,71.4394
+2016-05-14 04:00:41,3146,-10.9665,71.0278
+2016-05-14 04:15:36,3146,-10.9985,71.8493
+2016-05-14 04:30:31,3145,-10.9665,72.2703
+2016-05-14 04:45:26,3144,-10.9975,71.6438
+2016-05-14 05:00:22,3143,-10.9985,71.8493
+2016-05-14 05:15:17,3143,-10.9975,70.6271
+2016-05-14 05:30:12,3142,-10.9975,70.416
+2016-05-14 05:45:07,3141,-11.0295,70.02199999999999
+2016-05-14 06:00:02,3141,-11.0295,69.4329
+2016-05-14 06:14:57,3140,-11.0286,69.439
+2016-05-14 06:29:52,3139,-11.0295,69.6261
+2016-05-14 06:44:47,3139,-11.0286,69.6261
+2016-05-14 06:59:42,3138,-11.0286,69.2407
+2016-05-14 07:14:37,3138,-11.0286,69.4329
+2016-05-14 07:29:39,3138,-11.0286,69.4329
+2016-05-14 07:44:34,3138,-11.0286,69.2407
+2016-05-14 07:59:29,3138,-11.0286,69.4329
+2016-05-14 08:14:24,3138,-11.0286,68.0909
+2016-05-14 08:29:19,3138,-11.0295,68.8533
+2016-05-14 08:44:14,3138,-11.0295,67.5385
+2016-05-14 08:59:09,3138,-11.0295,66.8036
+2016-05-14 09:14:04,3138,-10.9975,68.283
+2016-05-14 09:28:59,3138,-10.9975,68.47
+2016-05-14 09:43:54,3139,-10.9975,68.0909
+2016-05-14 09:58:49,3140,-10.9656,67.7217
+2016-05-14 10:13:44,3141,-10.9665,67.9058
+2016-05-14 10:28:39,3141,-10.9665,66.9889
+2016-05-14 10:43:34,3142,-10.9665,66.9829
+2016-05-14 10:58:29,3142,-10.9665,67.1692
+2016-05-14 11:13:24,3143,-10.9665,67.7217
+2016-05-14 11:28:20,3143,-10.9347,67.1692
+2016-05-14 11:43:15,3143,-10.9665,66.8036
+2016-05-14 11:58:10,3143,-10.9347,66.7977
+2016-05-14 12:13:05,3144,-10.9347,67.1692
+2016-05-14 12:28:00,3143,-10.9347,65.9088
+2016-05-14 12:43:03,3143,-10.9347,66.0836
+2016-05-14 12:57:58,3144,-10.9347,66.2652
+2016-05-14 13:12:53,3145,-10.9347,66.0836
+2016-05-14 13:27:48,3145,-10.9347,66.2652
+2016-05-14 13:42:43,3145,-10.9347,65.9088
+2016-05-14 13:57:38,3145,-10.9347,65.7291
+2016-05-14 14:12:34,3145,-10.9347,65.5619
+2016-05-14 14:27:29,3145,-10.9347,65.0364
+2016-05-14 14:42:24,3145,-10.9347,65.2069
+2016-05-14 14:57:19,3145,-10.9347,65.0364
+2016-05-14 15:12:22,3146,-10.9347,65.2069
+2016-05-14 15:27:17,3146,-10.9347,65.5503
+2016-05-14 15:42:12,3146,-10.9347,65.2069
+2016-05-14 15:57:07,3146,-10.9347,64.8668
+2016-05-14 16:12:03,3146,-10.9347,64.8668
+2016-05-14 16:26:58,3146,-10.9347,65.2069
+2016-05-14 16:41:53,3146,-10.9347,64.8668
+2016-05-14 16:56:48,3146,-10.9347,64.5245
+2016-05-14 17:11:44,3145,-10.9347,64.02
+2016-05-14 17:26:39,3145,-10.9347,63.6858
+2016-05-14 17:41:35,3145,-10.9347,64.1854
+2016-05-14 17:56:30,3144,-10.9347,63.6858
+2016-05-14 18:11:25,3143,-10.9665,63.6858
+2016-05-14 18:26:21,3143,-10.9665,63.5228
+2016-05-14 18:41:16,3143,-10.9665,63.0327
+2016-05-14 18:56:11,3143,-10.9656,63.0271
+2016-05-14 19:11:06,3142,-10.9665,62.7135
+2016-05-14 19:26:02,3141,-10.9656,62.708
+2016-05-14 19:40:57,3139,-10.9656,62.708
+2016-05-14 19:55:52,3138,-10.9975,62.2294
+2016-05-14 20:10:47,3138,-10.9965,62.3919
+2016-05-14 20:25:42,3137,-10.9965,62.2294
+2016-05-14 20:40:37,3137,-10.9965,62.708
+2016-05-14 20:55:32,3136,-11.0286,62.3919
+2016-05-14 21:10:27,3135,-11.0286,61.9122
+2016-05-14 21:25:22,3134,-11.0286,61.7574
+2016-05-14 21:40:17,3133,-11.0286,61.7574
+2016-05-14 21:55:12,3132,-11.0286,61.7574
+2016-05-14 22:10:07,3132,-11.0286,61.7574
+2016-05-14 22:25:02,3131,-11.0286,61.7574
+2016-05-14 22:39:57,3130,-11.0286,61.7574
+2016-05-14 22:54:52,3130,-11.0597,61.6034
+2016-05-14 23:09:47,3129,-11.0597,61.292
+2016-05-14 23:24:42,3128,-11.0597,61.292
+2016-05-14 23:39:37,3128,-11.0597,61.2757
+2016-05-14 23:54:32,3127,-11.0597,61.2974
+2016-05-15 00:09:27,3127,-11.0597,61.5925
+2016-05-15 00:24:22,3127,-11.0597,61.2974
+2016-05-15 00:39:17,3126,-11.0597,61.2811
+2016-05-15 00:54:12,3125,-11.0597,61.4337
+2016-05-15 01:09:10,3124,-11.0597,60.9726
+2016-05-15 01:24:05,3124,-11.0597,60.9726
+2016-05-15 01:39:00,3124,-11.0597,61.292
+2016-05-15 01:53:54,3124,-11.0597,61.2811
+2016-05-15 02:08:49,3123,-11.0597,60.9726
+2016-05-15 02:23:44,3123,-11.0597,60.9889
+2016-05-15 02:38:39,3123,-11.0597,60.9726
+2016-05-15 02:53:33,3123,-11.0597,60.8167
+2016-05-15 03:08:28,3123,-11.0587,60.2162
+2016-05-15 03:23:23,3122,-11.0587,60.3639
+2016-05-15 03:38:17,3122,-11.0587,60.3693
+2016-05-15 03:53:12,3122,-11.0587,60.2108
+2016-05-15 04:08:07,3122,-11.0587,59.9172
+2016-05-15 04:23:02,3121,-11.0597,59.7714
+2016-05-15 04:37:57,3121,-11.0587,59.6209
+2016-05-15 04:52:51,3121,-11.0587,59.6209
+2016-05-15 05:07:46,3121,-11.0587,59.6209
+2016-05-15 05:22:41,3121,-11.0587,59.6209
+2016-05-15 05:37:36,3121,-11.0597,59.7555
+2016-05-15 05:52:31,3121,-11.0276,59.9066
+2016-05-15 06:07:25,3121,-11.0597,59.6156
+2016-05-15 06:22:20,3121,-11.0597,59.7714
+2016-05-15 06:37:15,3121,-11.0597,59.7555
+2016-05-15 06:52:10,3121,-11.0597,59.7555
+2016-05-15 07:07:05,3121,-11.0597,59.7555
+2016-05-15 07:22:00,3121,-11.0597,59.6051
+2016-05-15 07:36:55,3121,-11.0597,59.6104
+2016-05-15 07:51:50,3121,-11.0597,59.6051
+2016-05-15 08:06:44,3121,-11.0276,59.3167
+2016-05-15 08:21:39,3121,-11.0276,59.1735
+2016-05-15 08:36:34,3121,-11.0276,58.8837
+2016-05-15 08:51:29,3121,-11.0276,59.1578
+2016-05-15 09:06:24,3121,-11.0276,59.0257
+2016-05-15 09:21:19,3121,-10.9646,58.7267
+2016-05-15 09:36:14,3122,-10.9646,58.7267
+2016-05-15 09:51:09,3122,-11.0276,58.8837
+2016-05-15 10:06:03,3122,-10.9965,58.58600000000001
+2016-05-15 10:20:58,3123,-10.9656,58.7267
+2016-05-15 10:35:53,3123,-10.9347,58.58600000000001
+2016-05-15 10:50:55,3124,-10.9337,58.5808
+2016-05-15 11:05:50,3125,-10.9347,58.5808
+2016-05-15 11:20:46,3127,-10.9347,58.5808
+2016-05-15 11:35:41,3127,-10.9347,58.1623
+2016-05-15 11:50:36,3128,-10.9337,58.1571
+2016-05-15 12:05:31,3128,-10.9347,58.29600000000001
+2016-05-15 12:20:26,3128,-10.9347,58.1623
+2016-05-15 12:35:21,3128,-10.9646,57.7389
+2016-05-15 12:50:16,3128,-10.9337,57.87600000000001
+2016-05-15 13:05:11,3128,-10.9347,57.87600000000001
+2016-05-15 13:20:06,3128,-10.9656,57.7389
+2016-05-15 13:35:01,3128,-10.9347,57.4664
+2016-05-15 13:49:56,3128,-10.9347,57.87600000000001
+2016-05-15 14:04:51,3128,-10.9656,58.0137
+2016-05-15 14:19:46,3128,-10.9347,57.744
+2016-05-15 14:34:42,3128,-10.9337,57.4613
+2016-05-15 14:49:37,3129,-10.9646,57.4664
+2016-05-15 15:04:32,3128,-10.9656,57.4613
+2016-05-15 15:19:27,3128,-10.9665,57.4613
+2016-05-15 15:34:22,3128,-10.9347,57.18600000000001
+2016-05-15 15:49:17,3128,-10.9656,57.0568
+2016-05-15 16:04:12,3128,-10.9347,57.3259
+2016-05-15 16:19:07,3128,-10.9347,56.9029
+2016-05-15 16:34:02,3128,-10.9347,56.7849
+2016-05-15 16:48:57,3127,-10.9347,56.7648
+2016-05-15 17:03:52,3127,-10.9347,56.6423
+2016-05-15 17:18:47,3127,-10.9347,56.5153
+2016-05-15 17:33:42,3127,-10.9965,56.6322
+2016-05-15 17:48:37,3127,-10.9965,56.5002
+2016-05-15 18:03:32,3127,-10.9665,56.7648
+2016-05-15 18:18:27,3127,-10.9665,56.1025
+2016-05-15 18:33:22,3127,-10.9656,55.7145
+2016-05-15 18:48:17,3125,-11.0276,55.9677
+2016-05-15 19:03:12,3125,-10.9347,32.4541
+2016-05-15 19:18:07,3123,-10.9357,32.39
+2016-05-15 19:33:02,3123,-10.9347,0.4184
+2016-05-15 19:47:57,3122,-10.9357,-0.2062
+2016-05-15 20:02:52,3122,-10.9665,9.0834
+2016-05-15 20:17:47,3121,-10.9347,0.2239
+2016-05-15 20:32:42,3121,-10.9665,-1.1253
+2016-05-15 20:47:37,3120,-10.9665,2.6954
+2016-05-15 21:02:32,3119,-10.9665,-0.1846
+2016-05-15 21:17:27,3119,-10.9665,-0.3562
+2016-05-15 21:32:22,3118,-10.9665,-0.4849
+2016-05-15 21:47:17,3118,-10.9665,-0.5062
+2016-05-15 22:02:11,3117,-10.9975,-0.5276
+2016-05-15 22:17:06,3117,-10.9665,-0.6561
+2016-05-15 22:32:01,3116,-10.9656,-2.4215
+2016-05-15 22:46:55,3116,-10.9975,-1.5721
+2016-05-15 23:01:50,3115,-10.9975,-1.0622
+2016-05-15 23:16:44,3115,-11.0597,-1.3818
+2016-05-15 23:31:47,3115,-11.0597,-4.6113
+2016-05-15 23:46:41,3114,-11.0295,-1.7654
+2016-05-16 00:01:36,3114,-11.0276,-1.041
+2016-05-16 00:16:31,3114,-11.0597,-0.9982
+2016-05-16 00:31:25,3113,-11.0286,-0.9767
+2016-05-16 00:46:20,3113,-11.0597,-4.6339
+2016-05-16 01:01:14,3112,-11.0286,-4.6347
+2016-05-16 01:16:09,3111,-11.0286,-4.6134
+2016-05-16 01:31:04,3111,-11.0286,-4.5924
+2016-05-16 01:45:58,3110,-11.0286,-4.5625
+2016-05-16 02:00:53,3110,-11.0286,-4.5497
+2016-05-16 02:15:48,3110,-11.0286,-4.5283
+2016-05-16 02:30:42,3109,-11.0286,-4.5287
+2016-05-16 02:45:37,3109,-11.0286,-4.3153
+2016-05-16 03:00:32,3109,-11.0286,-4.2951
+2016-05-16 03:15:26,3109,-11.0286,-4.5295
+2016-05-16 03:30:21,3109,-11.0919,-4.5291
+2016-05-16 03:45:16,3109,-11.0286,-4.3808
+2016-05-16 04:00:10,3109,-11.0286,-4.3808
+2016-05-16 04:15:05,3109,-11.0607,-4.4452
+2016-05-16 04:30:00,3108,-11.0607,-4.3812
+2016-05-16 04:44:54,3108,-11.0597,-4.3595
+2016-05-16 04:59:49,3109,-10.9965,-4.3595
+2016-05-16 05:14:43,3109,-11.0286,-4.4022
+2016-05-16 05:29:38,3109,-11.0597,-4.4025
+2016-05-16 05:44:32,3109,-11.0286,-4.4022
+2016-05-16 05:59:27,3109,-11.0286,-4.4658
+2016-05-16 06:14:22,3109,-11.0597,-4.5726
+2016-05-16 06:29:17,3110,-11.0597,-4.4674
+2016-05-16 06:44:11,3110,-11.0286,-5.0892
+2016-05-16 06:59:06,3111,-11.0597,-4.8076
+2016-05-16 07:14:01,3112,-11.0597,-4.8513
+2016-05-16 07:28:56,3113,-11.0597,-4.8076
+2016-05-16 07:43:50,3114,-11.0276,-4.959
+2016-05-16 07:58:45,3114,-11.0286,-4.8286
+2016-05-16 08:13:40,3114,-10.9975,-5.4987
+2016-05-16 08:28:34,3114,-11.0286,-4.7203
+2016-05-16 08:43:29,3115,-10.9665,-1.8097
+2016-05-16 08:58:24,3115,-10.9656,-4.7845
+2016-05-16 09:13:18,3116,-10.9665,-4.9134
+2016-05-16 09:28:13,3116,-10.9665,-4.7639
+2016-05-16 09:43:08,3117,-10.9665,-1.8526
+2016-05-16 09:58:03,3118,-10.9665,-2.2352
+2016-05-16 10:12:58,3119,-10.9665,-1.7673
+2016-05-16 10:27:52,3120,-10.9665,-1.7459
+2016-05-16 10:42:47,3121,-10.9357,-1.9162
+2016-05-16 10:57:42,3121,-10.9357,-1.9162
+2016-05-16 11:12:37,3122,-10.9347,-1.7032
+2016-05-16 11:27:32,3122,-10.9347,-1.7244
+2016-05-16 11:42:27,3123,-10.9347,-1.7246
+2016-05-16 11:57:22,3123,-10.9357,-1.703
+2016-05-16 12:12:17,3123,-10.9039,-1.7032
+2016-05-16 12:27:12,3124,-10.9039,-1.5112
+2016-05-16 12:42:07,3125,-10.9039,-2.0646
+2016-05-16 12:57:02,3126,-10.9357,-2.5108
+2016-05-16 13:11:57,3127,-10.9039,-1.6818
+2016-05-16 13:26:52,3127,-10.9048,-1.703
+2016-05-16 13:41:48,3128,-10.7501,-1.6603
+2016-05-16 13:56:43,3128,-10.9048,-1.7452
+2016-05-16 14:11:38,3130,-10.8732,4.1062
+2016-05-16 14:26:33,3131,-10.7198,3.2153
+2016-05-16 14:41:28,3132,-10.8732,5.2922
+2016-05-16 14:56:23,3133,-10.8732,6.27
+2016-05-16 15:11:18,3134,-10.8416,25.4878
+2016-05-16 15:26:13,3134,-10.8416,5.5876
+2016-05-16 15:41:08,3135,-10.8732,7.812
+2016-05-16 15:56:03,3135,-10.8732,6.3079
+2016-05-16 16:10:58,3135,-11.1851,-0.5920000000000001
+2016-05-16 16:25:54,3135,-11.1851,4.2818
+2016-05-16 16:40:49,3135,-10.8416,-1.7442
+2016-05-16 16:55:44,3134,-11.1851,-1.1265
+2016-05-16 17:10:39,3134,-11.2166,-1.4891
+2016-05-16 17:25:34,3133,-11.1851,-1.3613
+2016-05-16 17:40:29,3133,-11.1851,-1.4039
+2016-05-16 17:55:25,3132,-11.28,-1.702
+2016-05-16 18:10:20,3132,-10.9048,-1.5105
+2016-05-16 18:25:15,3131,-10.9048,-1.5316
+2016-05-16 18:40:10,3131,-11.3778,-1.5318
+2016-05-16 18:55:05,3131,-10.9048,-1.5743
+2016-05-16 19:10:00,3131,-10.9048,-5.1088
+2016-05-16 19:24:55,3130,-10.9039,-4.916
+2016-05-16 19:39:50,3129,-11.2156,-4.8727
+2016-05-16 19:54:44,3128,-11.281,-4.8299
+2016-05-16 20:09:39,3128,-11.281,-4.8089
+2016-05-16 20:24:34,3128,-10.9357,-4.8089
+2016-05-16 20:39:29,3127,-11.3778,-4.7664
+2016-05-16 20:54:24,3127,-11.281,-4.5525
+2016-05-16 21:09:19,3126,-11.281,-4.5521
+2016-05-16 21:24:14,3124,-10.9357,-4.5315
+2016-05-16 21:39:09,3123,-11.3129,-4.5311
+2016-05-16 21:54:04,3123,-11.3129,-4.5315
+2016-05-16 22:08:58,3123,-11.3129,-4.5751
+2016-05-16 22:23:53,3122,-10.9357,-4.5751
+2016-05-16 22:38:48,3122,-11.3129,-4.5533
+2016-05-16 22:53:43,3122,-11.3778,-4.5533
+2016-05-16 23:08:38,3121,-10.9665,-4.5751
+2016-05-16 23:23:33,3121,-10.9665,-4.5751
+2016-05-16 23:38:28,3120,-11.3778,-4.5965
+2016-05-16 23:53:23,3119,-11.3778,-4.5969
+2016-05-17 00:08:18,3118,-10.8741,-4.6397
+2016-05-17 00:23:12,3118,-11.3448,-4.5323
+2016-05-17 00:38:07,3117,-11.3458,
+2016-05-17 00:53:02,3116,-11.3778,-4.5327
+2016-05-17 01:07:57,3116,-11.3778,
+2016-05-17 01:22:51,3115,-11.3778,
+2016-05-17 01:37:46,3115,-11.3458,
+2016-05-17 01:52:41,3115,-11.3778,
+2016-05-17 02:07:36,3115,-11.3458,
+2016-05-17 02:22:31,3114,-11.41,
+2016-05-17 02:37:26,3114,-11.3448,
+2016-05-17 02:52:21,3114,-11.3778,
+2016-05-17 03:07:16,3113,-11.3778,
+2016-05-17 03:22:11,3113,-11.3778,
+2016-05-17 03:37:05,3113,-11.3778,
+2016-05-17 03:52:00,3112,-11.3448,
+2016-05-17 04:06:55,3113,-11.3448,
+2016-05-17 04:21:50,3113,-10.9665,
+2016-05-17 04:36:45,3112,-11.3778,
+2016-05-17 04:51:40,3112,-11.3119,
+2016-05-17 05:06:42,3112,-11.3778,
+2016-05-17 05:21:37,3111,-10.9665,
+2016-05-17 05:36:31,3112,-11.3458,
+2016-05-17 05:51:26,3112,-11.3129,
+2016-05-17 06:06:21,3112,-11.3448,
+2016-05-17 06:21:16,3113,-10.9665,
+2016-05-17 06:36:11,3114,-11.3778,
+2016-05-17 06:51:06,3113,-11.3448,
+2016-05-17 07:06:01,3114,-11.3129,
+2016-05-17 07:20:56,3114,-11.3129,
+2016-05-17 07:35:51,3114,-11.3458,
+2016-05-17 07:50:46,3114,-11.3129,
+2016-05-17 08:05:41,3115,-11.3129,
+2016-05-17 08:20:36,3115,-11.3778,-4.6623
+2016-05-17 08:35:30,3116,-10.9357,
+2016-05-17 08:50:26,3117,-11.3448,
+2016-05-17 09:05:21,3118,-11.41,
+2016-05-17 09:20:15,3118,-10.9357,-4.6854
+2016-05-17 09:35:10,3120,-11.28,-4.7064
+2016-05-17 09:50:05,3120,-11.3119,-4.7068
+2016-05-17 10:05:00,3121,-11.3129,-4.7286
+2016-05-17 10:19:56,3122,-11.3129,-4.7496
+2016-05-17 10:34:51,3122,-11.28,-4.7286
+2016-05-17 10:49:46,3122,-11.3778,-4.7286
+2016-05-17 11:04:41,3123,-10.9039,-4.7068
+2016-05-17 11:19:36,3123,-11.281,-4.7286
+2016-05-17 11:34:31,3123,-11.281,-4.7286
+2016-05-17 11:49:26,3124,-11.2483,-4.8779
+2016-05-17 12:04:21,3124,-11.281,-4.7286
+2016-05-17 12:19:16,3125,-10.9048,-4.7715
+2016-05-17 12:34:11,3126,-10.9048,-4.7723
+2016-05-17 12:49:06,3127,-10.9039,-4.7934
+2016-05-17 13:04:01,3127,-11.3129,-5.0497
+2016-05-17 13:18:56,3127,-11.3768,-4.9423
+2016-05-17 13:33:51,3128,-11.2483,-5.0488
+2016-05-17 13:48:46,3128,-10.9048,-4.7278
+2016-05-17 14:03:41,3128,-11.281,-4.9414
+2016-05-17 14:18:36,3128,-10.9048,-4.9419
+2016-05-17 14:33:31,3128,-11.2483,-4.9419
+2016-05-17 14:48:34,3128,-10.9048,-4.9419
+2016-05-17 15:03:29,3128,-10.9048,-4.9633
+2016-05-17 15:18:31,3129,-11.2166,-5.0063
+2016-05-17 15:33:27,3130,-10.8732,-5.0488
+2016-05-17 15:48:22,3130,-10.8732,-5.0919
+2016-05-17 16:03:17,3130,-11.2156,-5.3504
+2016-05-17 16:18:12,3131,-11.2176,-5.1138
+2016-05-17 16:33:08,3131,-10.8732,-2.0226
+2016-05-17 16:48:03,3132,-10.8425,-1.9162
+2016-05-17 17:02:58,3132,-10.8425,-1.8736
+2016-05-17 17:17:53,3133,-10.8425,-1.8522
+2016-05-17 17:32:49,3134,-10.8425,-1.7456
+2016-05-17 17:47:44,3134,-10.8425,2.7018
+2016-05-17 18:02:46,3134,-11.281,-1.5963
+2016-05-17 18:17:41,3134,-10.8425,-1.7029
+2016-05-17 18:32:37,3134,-10.8425,-1.7455
+2016-05-17 18:47:32,3134,-10.8741,-1.7456
+2016-05-17 19:02:27,3134,-10.8425,-1.7029
+2016-05-17 19:17:29,3134,-10.8741,-5.2416
+2016-05-17 19:32:25,3134,-10.8732,-1.916
+2016-05-17 19:47:20,3134,-10.8732,-1.8309
+2016-05-17 20:02:15,3133,-10.8732,-1.8095
+2016-05-17 20:17:11,3132,-10.9048,-5.0923
+2016-05-17 20:32:06,3132,-11.2166,-5.1147
+2016-05-17 20:47:01,3131,-11.2166,-5.1582
+2016-05-17 21:01:57,3131,-10.9048,-5.0937
+2016-05-17 21:16:52,3130,-11.2166,-5.3523
+2016-05-17 21:31:48,3129,-10.9048,-5.1156
+2016-05-17 21:46:43,3129,-10.9048,-5.0941
+2016-05-17 22:01:38,3128,-10.9048,-5.0515
+2016-05-17 22:16:33,3128,-10.9366,-5.073
+2016-05-17 22:31:28,3127,-10.9357,-5.0085
+2016-05-17 22:46:23,3127,-11.3129,-5.009
+2016-05-17 23:01:18,3126,-11.281,-4.9655
+2016-05-17 23:16:13,3124,-10.9357,-5.1161
+2016-05-17 23:31:08,3124,-10.9357,-4.8161
+2016-05-17 23:46:04,3123,-10.9039,-4.7951
+2016-05-18 00:00:59,3123,-10.9357,-4.838
+2016-05-18 00:15:54,3123,-11.1851,-4.8165
+2016-05-18 00:30:49,3122,-11.281,-5.0524
+2016-05-18 00:45:44,3122,-10.9357,-4.8165
+2016-05-18 01:00:39,3122,-10.9357,-4.8165
+2016-05-18 01:15:34,3122,-10.9357,-4.838
+2016-05-18 01:30:29,3121,-11.281,-4.7951
+2016-05-18 01:45:24,3121,-11.3438,-4.817
+2016-05-18 02:00:19,3121,-10.9357,-4.8165
+2016-05-18 02:15:14,3121,-10.9675,-4.817
+2016-05-18 02:30:16,3120,-10.9675,-4.817
+2016-05-18 02:45:11,3119,-10.9675,-4.817
+2016-05-18 03:00:06,3119,-11.3129,-4.8165
+2016-05-18 03:15:01,3118,-10.9675,-4.817
+2016-05-18 03:29:56,3118,-10.9675,-4.817
+2016-05-18 03:44:50,3118,-11.3129,-4.8165
+2016-05-18 03:59:45,3117,-11.3129,-4.8384
+2016-05-18 04:14:40,3117,-10.9665,-4.8388
+2016-05-18 04:29:35,3116,-10.9665,-4.8388
+2016-05-18 04:44:30,3116,-10.9665,-4.8165
+2016-05-18 04:59:25,3116,-11.3129,-4.7316
+2016-05-18 05:14:20,3116,-10.9665,-4.8384
+2016-05-18 05:29:22,3115,-11.3129,-4.8603
+2016-05-18 05:44:17,3115,-10.9665,-4.8603
+2016-05-18 05:59:12,3115,-10.9665,-4.8603
+2016-05-18 06:14:07,3115,-10.9665,-4.8603
+2016-05-18 06:29:02,3115,-11.3129,-4.817
+2016-05-18 06:43:57,3116,-10.9347,-4.8165
+2016-05-18 06:58:52,3116,-10.9357,-4.8165
+2016-05-18 07:13:47,3116,-10.9357,-4.8599
+2016-05-18 07:28:42,3117,-10.9357,-4.8384
+2016-05-18 07:43:37,3118,-10.9357,-4.8599
+2016-05-18 07:58:32,3120,-10.9039,-5.0744
+2016-05-18 08:13:27,3121,-11.281,-4.8809
+2016-05-18 08:28:29,3122,-10.9048,-5.0959
+2016-05-18 08:43:24,3123,-10.8732,-5.0735
+2016-05-18 08:58:20,3124,-10.8732,-5.0735
+2016-05-18 09:13:15,3126,-10.8416,-5.1591
+2016-05-18 09:28:10,3128,-10.8425,-5.0085
+2016-05-18 09:43:06,3129,-11.2483,-5.0305
+2016-05-18 09:58:01,3131,-11.1851,-5.1161
+2016-05-18 10:12:56,3132,-10.812,-5.1807
+2016-05-18 10:27:52,3135,-10.812,-5.4387
+2016-05-18 10:42:47,3136,-11.3448,-2.0867
+2016-05-18 10:57:42,3137,-10.812,-1.9802
+2016-05-18 11:12:37,3138,-10.8129,-1.8948
+2016-05-18 11:27:33,3138,-10.8129,-1.8309
+2016-05-18 11:42:28,3140,-10.7815,-1.7029
+2016-05-18 11:57:24,3141,-10.7815,20.9207
+2016-05-18 12:12:19,3142,-11.0899,23.0836
+2016-05-18 12:27:15,3143,-10.751,5.4557
+2016-05-18 12:42:10,3143,-10.751,24.4509
+2016-05-18 12:57:06,3144,-11.0899,6.9221
+2016-05-18 13:12:01,3146,-10.751,25.6608
+2016-05-18 13:26:57,3146,-11.0587,26.7814
+2016-05-18 13:41:53,3147,-10.751,27.4603
+2016-05-18 13:56:48,3147,-11.0899,27.9077
+2016-05-18 14:11:44,3148,-10.751,28.5648
+2016-05-18 14:26:40,3149,-10.751,28.9765
+2016-05-18 14:41:35,3149,-10.7198,29.3423
+2016-05-18 14:56:31,3149,-10.751,29.552
+2016-05-18 15:11:26,3149,-10.751,29.7659
+2016-05-18 15:26:22,3150,-10.751,29.9816
+2016-05-18 15:41:18,3149,-10.751,30.2529
+2016-05-18 15:56:13,3149,-10.7501,30.3611
+2016-05-18 16:11:09,3148,-11.2483,26.4552
+2016-05-18 16:26:04,3147,-10.7815,30.5842
+2016-05-18 16:41:00,3147,-10.751,30.6937
+2016-05-18 16:55:55,3146,-10.751,30.5869
+2016-05-18 17:10:51,3145,-10.7815,30.9195
+2016-05-18 17:25:47,3145,-10.751,30.9749
+2016-05-18 17:40:42,3144,-10.7815,9.7681
+2016-05-18 17:55:38,3144,-10.7815,10.6283
+2016-05-18 18:10:33,3143,-10.7815,30.8036
+2016-05-18 18:25:29,3143,-10.8129,31.0276
+2016-05-18 18:40:24,3143,-11.1222,9.8182
+2016-05-18 18:55:20,3143,-10.7815,10.1655
+2016-05-18 19:10:15,3143,-10.7815,9.923
+2016-05-18 19:25:11,3143,-10.7815,8.8431
+2016-05-18 19:40:06,3142,-10.8129,9.5487
+2016-05-18 19:55:02,3141,-10.8129,8.7903
+2016-05-18 20:09:57,3140,-10.8129,0.3963
+2016-05-18 20:24:52,3139,-10.8129,0.1162
+2016-05-18 20:39:48,3138,-10.812,-0.1202
+2016-05-18 20:54:43,3138,-10.812,-0.206
+2016-05-18 21:09:39,3138,-10.812,-0.3132
+2016-05-18 21:24:34,3137,-10.8435,-0.4418
+2016-05-18 21:39:30,3136,-10.812,-0.5274
+2016-05-18 21:54:25,3136,-10.8435,-0.5488
+2016-05-18 22:09:20,3136,-10.8435,-0.5915
+2016-05-18 22:24:16,3136,-10.8435,-0.6557
+2016-05-18 22:39:11,3135,-11.2493,-0.6984
+2016-05-18 22:54:07,3135,-10.8435,-0.7625
+2016-05-18 23:09:02,3133,-10.8435,-4.4765
+2016-05-18 23:23:58,3133,-10.8435,-4.3932
+2016-05-18 23:38:53,3132,-10.8425,-4.4149
+2016-05-18 23:53:49,3132,-10.8751,-4.6282
+2016-05-19 00:08:44,3131,-10.8751,-4.4579
+2016-05-19 00:23:39,3131,-10.8435,-4.4808
+2016-05-19 00:38:35,3130,-10.8741,-4.5662
+2016-05-19 00:53:30,3130,-10.8741,-4.5239
+2016-05-19 01:08:26,3129,-10.8741,-4.6327
+2016-05-19 01:23:21,3128,-10.8741,-4.5034
+2016-05-19 01:38:16,3128,-11.2483,-4.5682
+2016-05-19 01:53:12,3127,-10.9048,-4.505
+2016-05-19 02:08:07,3127,-10.9048,-4.5477
+2016-05-19 02:23:02,3127,-11.2166,-4.5485
+2016-05-19 02:37:57,3126,-10.9048,-4.5493
+2016-05-19 02:52:53,3125,-11.2166,-4.4002
+2016-05-19 03:07:48,3124,-10.9048,-4.592
+2016-05-19 03:22:43,3124,-10.9048,-4.4219
+2016-05-19 03:37:38,3123,-10.9048,-4.465
+2016-05-19 03:52:33,3123,-10.9048,-4.4872
+2016-05-19 04:07:29,3123,-10.9048,-4.6368
+2016-05-19 04:22:24,3123,-11.2166,-4.68
+2016-05-19 04:37:19,3122,-10.9048,-4.5311
+2016-05-19 04:52:14,3122,-10.9048,-4.5525
+2016-05-19 05:07:09,3122,-10.9048,-4.5315
+2016-05-19 05:22:04,3122,-10.9048,-4.5743
+2016-05-19 05:36:59,3121,-10.9048,-4.5747
+2016-05-19 05:51:54,3122,-10.9048,-4.5537
+2016-05-19 06:06:49,3121,-10.9048,-4.5327
+2016-05-19 06:21:44,3121,-10.9366,-4.5541
+2016-05-19 06:36:39,3122,-10.9048,-4.6183
+2016-05-19 06:51:35,3122,-10.9048,-4.79
+2016-05-19 07:06:30,3122,-10.9048,-4.79
+2016-05-19 07:21:25,3122,-10.9048,-4.8329
+2016-05-19 07:36:21,3122,-10.9048,-4.6623
+2016-05-19 07:51:16,3122,-10.9048,-4.9191
+2016-05-19 08:06:11,3123,-10.9058,-4.8775
+2016-05-19 08:21:06,3124,-10.8741,-4.962
+2016-05-19 08:36:01,3126,-10.8435,-4.9616
+2016-05-19 08:50:57,3127,-10.8435,-5.048
+2016-05-19 09:05:52,3128,-10.812,-1.8728
+2016-05-19 09:20:47,3129,-10.812,-1.7452
+2016-05-19 09:35:43,3131,-10.7805,-1.6812
+2016-05-19 09:50:38,3133,-10.7815,-1.6385
+2016-05-19 10:05:34,3135,-11.0899,-1.5961
+2016-05-19 10:20:29,3136,-10.751,-1.5105
+2016-05-19 10:35:25,3138,-10.751,-1.5745
+2016-05-19 10:50:21,3139,-10.7198,22.9612
+2016-05-19 11:05:16,3142,-11.0276,25.2575
+2016-05-19 11:20:12,3143,-10.7198,26.6012
+2016-05-19 11:35:08,3143,-10.7198,27.5213
+2016-05-19 11:50:03,3145,-10.7198,28.2167
+2016-05-19 12:04:59,3147,-10.7207,28.6792
+2016-05-19 12:19:55,3148,-10.6895,29.0435
+2016-05-19 12:34:51,3150,-10.6904,29.2536
+2016-05-19 12:49:47,3151,-10.6904,29.623
+2016-05-19 13:04:42,3152,-10.6904,29.8375
+2016-05-19 13:19:38,3154,-10.6904,29.6732
+2016-05-19 13:34:34,3155,-10.6904,30.1074
+2016-05-19 13:49:30,3156,-10.6593,30.5431
+2016-05-19 14:04:26,3156,-10.6904,30.71
+2016-05-19 14:19:22,3156,-11.0899,30.7677
+2016-05-19 14:34:18,3156,-10.6904,30.8228
+2016-05-19 14:49:14,3156,-10.6904,30.936
+2016-05-19 15:04:10,3156,-11.0286,30.3719
+2016-05-19 15:19:06,3156,-10.6904,31.0469
+2016-05-19 15:34:01,3156,-10.6904,31.0469
+2016-05-19 15:49:05,3156,-10.6904,31.161
+2016-05-19 16:04:01,3156,-10.6904,31.2169
+2016-05-19 16:18:56,3156,-11.0597,31.2728
+2016-05-19 16:33:52,3155,-10.6904,31.3317
+2016-05-19 16:48:48,3156,-10.6904,31.3907
+2016-05-19 17:03:44,3156,-11.0286,31.3317
+2016-05-19 17:18:40,3155,-11.0899,28.4861
+2016-05-19 17:33:36,3155,-10.6904,31.3907
+2016-05-19 17:48:32,3155,-10.7217,31.447
+2016-05-19 18:03:27,3154,-11.1536,30.595
+2016-05-19 18:18:23,3152,-11.0899,18.5652
+2016-05-19 18:33:19,3152,-10.7207,31.447
+2016-05-19 18:48:15,3152,-10.7207,31.5062
+2016-05-19 19:03:11,3151,-11.0276,31.6195
+2016-05-19 19:18:06,3150,-10.7207,31.5628
+2016-05-19 19:33:02,3149,-10.751,31.5062
+2016-05-19 19:47:58,3150,-10.752,-4.1554
+2016-05-19 20:02:54,3148,-10.751,30.5977
+2016-05-19 20:17:50,3147,-10.751,30.4805
+2016-05-19 20:32:45,3146,-10.7824,31.3879
+2016-05-19 20:47:41,3144,-11.0597,6.1344
+2016-05-19 21:02:37,3143,-10.7824,31.2728
+2016-05-19 21:17:33,3143,-10.7824,31.447
+2016-05-19 21:32:29,3142,-10.7824,31.5034
+2016-05-19 21:47:24,3140,-10.8129,8.1569
+2016-05-19 22:02:20,3138,-11.0909,0.6993
+2016-05-19 22:17:16,3138,-10.8129,0.2886
+2016-05-19 22:32:11,3138,-10.8129,6.7606
+2016-05-19 22:47:07,3137,-10.8129,30.7595
+2016-05-19 23:02:03,3137,-10.8129,0.2886
+2016-05-19 23:16:58,3136,-10.8129,-0.0558
+2016-05-19 23:31:54,3136,-10.8129,-0.0343
+2016-05-19 23:46:50,3136,-10.8129,-0.2704
+2016-05-20 00:01:45,3135,-10.8129,8.8198
+2016-05-20 00:16:41,3135,-11.0597,0.0302
+2016-05-20 00:31:37,3135,-10.8129,-0.2275
+2016-05-20 00:46:32,3134,-10.8129,-0.3347
+2016-05-20 01:01:28,3134,-10.8129,-0.4419
+2016-05-20 01:16:23,3134,-10.8129,-0.4204
+2016-05-20 01:31:19,3133,-10.8435,5.7477
+2016-05-20 01:46:15,3132,-10.8435,-0.1846
+2016-05-20 02:01:10,3132,-10.8435,8.1017
+2016-05-20 02:16:06,3131,-10.8435,2.3385
+2016-05-20 02:31:01,3131,-10.812,-0.1631
+2016-05-20 02:45:57,3130,-10.8435,-0.4418
+2016-05-20 03:00:52,3129,-10.8435,6.5902
+2016-05-20 03:15:48,3128,-10.8435,-0.206
+2016-05-20 03:30:43,3128,-10.8435,-0.4633
+2016-05-20 03:45:39,3128,-10.8435,5.1573
+2016-05-20 04:00:34,3127,-10.8425,-4.4137
+2016-05-20 04:15:29,3127,-10.8435,-4.5026
+2016-05-20 04:30:25,3127,-10.8435,-1.0834
+2016-05-20 04:45:20,3127,-10.8435,-1.0194
+2016-05-20 05:00:15,3125,-10.8435,-4.546
+2016-05-20 05:15:10,3125,-10.8435,-4.6335
+2016-05-20 05:30:05,3124,-10.8751,-4.8907
+2016-05-20 05:45:01,3124,-11.1536,-4.8697
+2016-05-20 05:59:56,3124,-10.8741,-4.934
+2016-05-20 06:14:51,3125,-10.8741,-4.847
+2016-05-20 06:29:47,3124,-10.8435,-1.5314
+2016-05-20 06:44:42,3124,-10.8435,-4.6549
+2016-05-20 06:59:37,3124,-11.1851,-4.6343
+2016-05-20 07:14:33,3125,-10.8435,-4.9992
+2016-05-20 07:29:28,3125,-10.8435,-1.5953
+2016-05-20 07:44:24,3127,-10.812,3.2105
+2016-05-20 07:59:19,3127,-10.812,-1.1684
+2016-05-20 08:14:15,3127,-10.8129,-1.2965
+2016-05-20 08:29:10,3128,-10.8129,-1.318
+2016-05-20 08:44:06,3128,-10.7815,-1.2967
+2016-05-20 08:59:01,3130,-10.7815,6.1811
+2016-05-20 09:14:04,3132,-10.7501,26.7315
+2016-05-20 09:28:59,3134,-10.7501,28.6665
+2016-05-20 09:43:55,3135,-10.7198,22.8401
+2016-05-20 09:58:51,3137,-10.7198,30.0431
+2016-05-20 10:13:46,3138,-10.7198,30.3719
+2016-05-20 10:28:42,3141,-10.6895,30.6498
+2016-05-20 10:43:38,3143,-10.6282,29.8775
+2016-05-20 10:58:34,3144,-10.6593,30.71
+2016-05-20 11:13:29,3146,-10.6904,31.0524
+2016-05-20 11:28:25,3148,-10.6904,31.108
+2016-05-20 11:43:21,3149,-10.6904,31.2224
+2016-05-20 11:58:17,3150,-10.6593,30.5977
+2016-05-20 12:13:13,3151,-10.6904,30.7677
+2016-05-20 12:28:08,3152,-10.6904,29.7739
+2016-05-20 12:43:04,3152,-10.6593,31.3907
+2016-05-20 12:58:00,3152,-10.6593,31.3962
+2016-05-20 13:12:56,3152,-10.6904,31.2811
+2016-05-20 13:27:52,3153,-10.6904,29.8828
+2016-05-20 13:42:48,3154,-10.6593,26.9939
+2016-05-20 13:57:44,3154,-10.6904,31.1665
+2016-05-20 14:12:40,3155,-10.7198,
+2016-05-20 14:27:36,3154,-10.7198,54.3022
+2016-05-20 14:42:32,3153,-10.751,54.3166
+2016-05-20 14:57:27,3152,-10.751,54.4399
+2016-05-20 15:12:30,3152,-10.7198,54.1939
+2016-05-20 15:27:26,3151,-10.751,54.3166
+2016-05-20 15:42:22,3151,-10.751,54.0667
+2016-05-20 15:57:18,3150,-10.751,54.3166
+2016-05-20 16:12:13,3150,-10.751,54.18899999999999
+2016-05-20 16:27:09,3150,-10.751,53.9449
+2016-05-20 16:42:05,3149,-10.751,53.9449
+2016-05-20 16:57:00,3149,-10.751,54.0667
+2016-05-20 17:11:56,3149,-10.751,54.0667
+2016-05-20 17:26:52,3148,-10.7824,54.18899999999999
+2016-05-20 17:41:48,3148,-10.751,53.9449
+2016-05-20 17:56:44,3147,-10.7815,53.5822
+2016-05-20 18:11:39,3146,-10.7815,53.7027
+2016-05-20 18:26:35,3146,-10.7815,53.9449
+2016-05-20 18:41:31,3145,-10.7815,53.8236
+2016-05-20 18:56:27,3145,-10.8129,53.7027
+2016-05-20 19:11:22,3145,-10.8129,54.3166
+2016-05-20 19:26:18,3143,-10.751,27.1356
+2016-05-20 19:41:14,3143,-10.751,27.7299
+2016-05-20 19:56:09,3142,-10.751,31.1135
+2016-05-20 20:11:05,3142,-10.7815,18.918
+2016-05-20 20:26:01,3140,-10.8435,
+2016-05-20 20:40:57,3138,-10.7824,-0.4422
+2016-05-20 20:55:52,3138,-10.7824,9.1577
+2016-05-20 21:10:48,3138,-10.7824,31.34
+2016-05-20 21:25:44,3137,-10.8129,31.5712
+2016-05-20 21:40:40,3137,-10.7815,31.4582
+2016-05-20 21:55:35,3136,-10.7824,31.6903
+2016-05-20 22:10:31,3136,-10.7815,31.6931
+2016-05-20 22:25:26,3135,-10.8129,31.8099
+2016-05-20 22:40:22,3135,-10.8129,31.8099
+2016-05-20 22:55:18,3135,-10.8435,31.9329
+2016-05-20 23:10:13,3134,-10.8129,31.8099
+2016-05-20 23:25:09,3133,-10.8129,31.7529
+2016-05-20 23:40:04,3132,-10.8129,31.6959
+2016-05-20 23:55:00,3132,-10.8129,31.8127
+2016-05-21 00:09:56,3131,-10.8129,31.6959
+2016-05-21 00:24:51,3131,-10.8129,31.8127
+2016-05-21 00:39:47,3130,-10.8129,31.8127
+2016-05-21 00:54:42,3130,-10.8129,31.8127
+2016-05-21 01:09:38,3129,-10.8129,31.8127
+2016-05-21 01:24:33,3129,-10.812,31.8127
+2016-05-21 01:39:28,3128,-10.8129,31.8127
+2016-05-21 01:54:24,3128,-10.8129,31.8728
+2016-05-21 02:09:19,3128,-10.812,10.147
+2016-05-21 02:24:15,3128,-10.812,10.1976
+2016-05-21 02:39:10,3128,-10.8444,31.4665
+2016-05-21 02:54:06,3128,-10.8435,31.6391
+2016-05-21 03:09:01,3127,-10.812,31.6959
+2016-05-21 03:23:57,3127,-10.812,29.4626
+2016-05-21 03:38:52,3127,-10.8435,10.7723
+2016-05-21 03:53:48,3127,-10.8435,0.7211
+2016-05-21 04:08:43,3127,-10.8129,0.1809
+2016-05-21 04:23:39,3127,-10.8435,9.0278
+2016-05-21 04:38:34,3127,-10.8129,31.1721
+2016-05-21 04:53:29,3127,-10.812,31.5202
+2016-05-21 05:08:25,3127,-10.812,31.6391
+2016-05-21 05:23:20,3126,-10.8435,31.6391
+2016-05-21 05:38:15,3127,-10.8435,31.6391
+2016-05-21 05:53:11,3127,-10.8435,31.6987
+2016-05-21 06:08:06,3126,-10.8435,31.7585
+2016-05-21 06:23:01,3126,-10.8435,31.6987
+2016-05-21 06:37:57,3126,-10.812,31.7585
+2016-05-21 06:52:52,3126,-10.8129,31.8184
+2016-05-21 07:07:47,3127,-10.8129,31.8756
+2016-05-21 07:22:43,3127,-10.8129,31.7613
+2016-05-21 07:37:38,3128,-10.8129,31.8756
+2016-05-21 07:52:33,3128,-10.7815,31.9329
+2016-05-21 08:07:29,3128,-10.8129,31.8756
+2016-05-21 08:22:24,3130,-10.7501,31.8784
+2016-05-21 08:37:20,3130,-10.7501,31.8756
+2016-05-21 08:52:15,3132,-10.7501,31.9329
+2016-05-21 09:07:11,3134,-10.751,31.8784
+2016-05-21 09:22:06,3135,-10.7198,31.9932
+2016-05-21 09:37:02,3136,-10.7198,31.8784
+2016-05-21 09:51:57,3138,-10.6895,31.9932
+2016-05-21 10:06:53,3138,-10.6895,31.9357
+2016-05-21 10:21:48,3141,-10.6904,31.8184
+2016-05-21 10:36:44,3143,-10.6904,31.5286
+2016-05-21 10:51:40,3144,-10.7198,54.3311
+2016-05-21 11:06:36,3146,-10.6885,54.2083
+2016-05-21 11:21:32,3147,-10.6895,54.4544
+2016-05-21 11:36:27,3149,-10.6895,54.583
+2016-05-21 11:51:23,3150,-10.6904,54.3359
+2016-05-21 12:06:19,3151,-10.6895,54.4592
+2016-05-21 12:21:15,3152,-10.6904,54.583
+2016-05-21 12:36:10,3152,-10.6904,54.3359
+2016-05-21 12:51:06,3155,-10.6904,54.5782
+2016-05-21 13:06:02,3155,-10.6593,54.7122
+2016-05-21 13:20:59,3156,-10.6593,54.5879
+2016-05-21 13:35:55,3156,-10.6593,54.5879
+2016-05-21 13:50:51,3158,-10.6593,54.8369
+2016-05-21 14:05:47,3160,-10.6292,55.606
+2016-05-21 14:20:43,3160,-10.6282,54.4592
+2016-05-21 14:35:39,3161,-10.6292,54.9622
+2016-05-21 14:50:35,3161,-10.6292,54.8418
+2016-05-21 15:05:31,3161,-10.6904,
+2016-05-21 15:20:27,3161,-10.6282,54.4544
+2016-05-21 15:35:23,3161,-10.6602,54.0859
+2016-05-21 15:50:19,3161,-10.6602,54.0859
+2016-05-21 16:05:15,3161,-10.6602,54.7122
+2016-05-21 16:20:11,3161,-10.6602,54.5879
+2016-05-21 16:35:07,3160,-10.6292,54.3359
+2016-05-21 16:50:03,3160,-10.6593,-3.3299
+2016-05-21 17:04:59,3160,-10.6292,-4.0506
+2016-05-21 17:19:55,3160,-10.6593,54.2083
+2016-05-21 17:34:51,3159,-10.6593,54.0859
+2016-05-21 17:49:47,3159,-10.6904,54.3359
+2016-05-21 18:04:43,3158,-10.6904,54.3311
+2016-05-21 18:19:39,3158,-10.6292,29.2205
+2016-05-21 18:34:35,3156,-10.6914,54.4641
+2016-05-21 18:49:31,3156,-10.6292,27.8189
+2016-05-21 19:04:27,3155,-10.7217,53.8427
+2016-05-21 19:19:23,3154,-10.6904,54.3407
+2016-05-21 19:34:19,3152,-10.6914,31.8671
+2016-05-21 19:49:15,3152,-10.7207,
+2016-05-21 20:04:11,3152,-10.6914,31.4609
+2016-05-21 20:19:07,3151,-10.6904,31.9216
+2016-05-21 20:34:02,3150,-10.6904,31.8728
+2016-05-21 20:48:58,3149,-10.6904,31.8756
+2016-05-21 21:03:54,3148,-10.6904,31.9329
+2016-05-21 21:18:50,3146,-10.7217,31.9272
+2016-05-21 21:33:45,3144,-10.7217,31.8212
+2016-05-21 21:48:41,3143,-10.7217,31.8784
+2016-05-21 22:03:37,3143,-10.7207,31.8784
+2016-05-21 22:18:33,3142,-10.7217,31.9386
+2016-05-21 22:33:29,3138,-10.7207,0.2673
+2016-05-21 22:48:24,3138,-11.0899,0.1163
+2016-05-21 23:03:20,3138,-10.7207,7.4266
+2016-05-21 23:18:16,3137,-10.7207,4.6462
+2016-05-21 23:33:11,3136,-11.1536,-0.3133
+2016-05-21 23:48:07,3136,-10.751,24.9911
+2016-05-22 00:03:03,3136,-11.0899,-0.6347
+2016-05-22 00:17:59,3135,-10.751,-0.2919
+2016-05-22 00:32:54,3135,-10.752,-0.3776
+2016-05-22 00:47:50,3135,-10.751,7.7726
+2016-05-22 01:02:49,3135,-10.751,-0.1631
+2016-05-22 01:17:44,3134,-10.751,-0.4204
+2016-05-22 01:32:40,3133,-10.751,-0.5916
+2016-05-22 01:47:35,3133,-10.751,-0.613
+2016-05-22 02:02:31,3133,-10.751,-0.677
+2016-05-22 02:17:26,3132,-10.751,-0.6984
+2016-05-22 02:32:22,3132,-10.751,-4.5879
+2016-05-22 02:47:18,3131,-10.751,-4.6976
+2016-05-22 03:02:13,3131,-10.751,-4.6775
+2016-05-22 03:17:09,3130,-10.751,-4.6356
+2016-05-22 03:32:05,3130,-11.0899,-4.7425
+2016-05-22 03:47:00,3129,-11.0909,-4.6796
+2016-05-22 04:01:56,3128,-11.1222,-4.8513
+2016-05-22 04:16:52,3128,-11.0889,-4.5735
+2016-05-22 04:31:47,3128,-11.1212,-4.9379
+2016-05-22 04:46:43,3127,-11.0899,-4.5735
+2016-05-22 05:01:38,3127,-11.0899,-4.5952
+2016-05-22 05:16:34,3126,-11.0899,-4.6166
+2016-05-22 05:31:29,3125,-11.0899,-4.6384
+2016-05-22 05:46:25,3126,-11.2176,-4.638
+2016-05-22 06:01:20,3126,-11.0899,-4.9169
+2016-05-22 06:16:16,3125,-11.0899,-4.874
+2016-05-22 06:31:11,3125,-11.0899,-4.8745
+2016-05-22 06:46:07,3125,-11.0899,-4.9384
+2016-05-22 07:01:02,3125,-11.0899,-4.703
+2016-05-22 07:15:58,3126,-11.0909,-4.9174
+2016-05-22 07:30:53,3127,-11.0909,-4.9817
+2016-05-22 07:45:49,3127,-11.0597,-4.8093
+2016-05-22 08:00:44,3128,-11.0597,-4.7677
+2016-05-22 08:15:40,3129,-11.0276,-4.6816
+2016-05-22 08:30:36,3130,-11.0276,-4.703
+2016-05-22 08:45:32,3132,-10.9965,-4.7026
+2016-05-22 09:00:28,3134,-10.9636,-4.8959
+2016-05-22 09:15:24,3135,-10.6282,-5.0242
+2016-05-22 09:30:19,3136,-10.6282,-5.0444
+2016-05-22 09:45:15,3138,-10.5982,-5.2384
+2016-05-22 10:00:11,3141,-10.5982,-5.1939
+2016-05-22 10:15:07,3143,-10.5682,-1.8506
+2016-05-22 10:30:03,3145,-10.9,-1.7653
+2016-05-22 10:44:59,3147,-10.5682,-1.6372
+2016-05-22 10:59:55,3150,-10.8416,-1.6586
+2016-05-22 11:14:52,3151,-10.8406,-1.6586
+2016-05-22 11:29:48,3153,-10.8416,-1.6159
+2016-05-22 11:44:44,3156,-10.81,-1.6159
+2016-05-22 11:59:40,3158,-10.81,-1.5947
+2016-05-22 12:14:36,3160,-10.811,-1.5947
+2016-05-22 12:29:33,3162,-10.811,-1.5734
+2016-05-22 12:44:29,3164,-10.7805,3.7504
+2016-05-22 12:59:25,3167,-10.7805,5.1779
+2016-05-22 13:14:21,3168,-10.7491,-0.9972
+2016-05-22 13:29:25,3171,-10.7491,-0.5893
+2016-05-22 13:44:22,3174,-10.449000000000002,25.7661
+2016-05-22 13:59:18,3174,-10.449000000000002,28.0867
+2016-05-22 14:14:14,3176,-10.449000000000002,29.1886
+2016-05-22 14:29:11,3176,-10.449000000000002,29.734
+2016-05-22 14:44:07,3177,-10.4499,30.1692
+2016-05-22 14:59:04,3178,-10.449000000000002,30.445
+2016-05-22 15:14:00,3179,-10.811,30.614
+2016-05-22 15:28:57,3180,-10.4499,30.7841
+2016-05-22 15:43:53,3181,-10.8416,30.9026
+2016-05-22 15:58:50,3181,-10.7491,30.9579
+2016-05-22 16:13:47,3181,-10.7501,31.0717
+2016-05-22 16:28:43,3181,-10.7805,31.1329
+2016-05-22 16:43:40,3181,-10.812,31.1329
+2016-05-22 16:58:36,3181,-10.4194,31.2473
+2016-05-22 17:13:33,3181,-10.812,31.2501
+2016-05-22 17:28:29,3181,-10.7501,31.2529
+2016-05-22 17:43:25,3181,-10.7501,31.2501
+2016-05-22 17:58:22,3179,-10.7805,31.3089
+2016-05-22 18:13:18,3178,-10.7805,31.3089
+2016-05-22 18:28:15,3177,-10.7805,31.3678
+2016-05-22 18:43:11,3177,-10.7805,31.3678
+2016-05-22 18:58:08,3176,-10.812,31.3678
+2016-05-22 19:13:04,3174,-10.812,31.2556
+2016-05-22 19:28:01,3174,-10.8425,31.3706
+2016-05-22 19:42:57,3172,-10.8416,31.3734
+2016-05-22 19:57:53,3170,-10.8425,31.3706
+2016-05-22 20:12:49,3169,-10.8425,9.2209
+2016-05-22 20:27:46,3167,-10.8425,0.181
+2016-05-22 20:42:42,3165,-10.8732,-0.0773
+2016-05-22 20:57:38,3163,-10.8732,-0.2276
+2016-05-22 21:12:34,3162,-10.8732,-0.3134
+2016-05-22 21:27:31,3160,-10.9048,-0.4633
+2016-05-22 21:42:27,3159,-10.9039,-0.5062
+2016-05-22 21:57:23,3156,-10.9039,-0.5918
+2016-05-22 22:12:19,3156,-10.9357,3.5258
+2016-05-22 22:27:15,3154,-10.9039,-0.4204
+2016-05-22 22:42:11,3154,-10.6001,-0.3777
+2016-05-22 22:57:07,3152,-10.9665,-0.6132
+2016-05-22 23:12:03,3151,-10.632,-0.7629
+2016-05-22 23:26:58,3150,-10.9347,-0.8057
+2016-05-22 23:41:54,3150,-10.9665,-4.8029
+2016-05-22 23:56:50,3148,-10.9656,-4.9336
+2016-05-23 01:11:29,3138,-10.9665,-4.467
+2016-05-23 01:26:25,3138,-10.9665,-4.5315
+2016-05-23 01:41:21,3137,-10.9985,-4.8955
+2016-05-23 01:56:17,3136,-10.9975,-4.9165
+2016-05-23 02:11:13,3136,-10.9975,-4.5747
+2016-05-23 02:26:08,3135,-10.9965,-4.5965
+2016-05-23 02:41:04,3134,-11.0286,-4.7883
+2016-05-23 02:56:00,3134,-11.0286,-4.5747
+2016-05-23 03:10:55,3132,-10.9694,-4.6829
+2016-05-23 03:25:50,3131,-11.0286,-4.6623
+2016-05-23 03:40:46,3131,-11.0305,-4.6397
+2016-05-23 03:55:53,3130,-11.0286,-4.6405
+2016-05-23 04:11:27,3130,-11.0276,-4.5965
+2016-05-23 04:26:22,3130,-11.0276,-4.5537
+2016-05-23 04:41:18,3130,-10.6904,-4.9384
+2016-05-23 04:56:13,3130,-11.0597,-5.1107
+2016-05-23 05:11:09,3129,-11.1536,-4.9603
+2016-05-23 05:26:04,3128,-11.0587,-4.8749
+2016-05-23 05:41:00,3128,-11.0286,-4.8757
+2016-05-23 05:55:56,3128,-11.0276,-4.8976
+2016-05-23 06:10:51,3128,-11.0286,-4.7265
+2016-05-23 06:25:47,3128,-11.0286,-4.7055
+2016-05-23 06:40:42,3128,-11.0276,-4.7274
+2016-05-23 06:55:38,3128,-11.0286,-4.7488
+2016-05-23 07:10:34,3128,-11.0286,-4.7274
+2016-05-23 07:25:29,3128,-11.0286,-4.7274
+2016-05-23 07:40:25,3128,-10.9965,-4.7702
+2016-05-23 07:55:20,3128,-10.9965,-4.7278
+2016-05-23 08:10:16,3128,-11.0276,-4.7917
+2016-05-23 08:25:12,3129,-11.0286,-5.0278
+2016-05-23 08:40:07,3129,-10.9646,-5.0059
+2016-05-23 08:55:03,3130,-10.9965,-4.9195
+2016-05-23 09:09:58,3130,-10.9975,-4.9195
+2016-05-23 09:24:54,3131,-10.9646,-4.9406
+2016-05-23 09:39:50,3132,-10.9656,-5.005
+2016-05-23 09:54:45,3132,-10.9646,-4.962
+2016-05-23 10:09:41,3132,-11.0305,-4.7261
+2016-05-23 10:24:37,3133,-10.9646,-4.6598
+2016-05-23 10:39:33,3135,-10.9347,-4.6841
+2016-05-23 10:54:28,3136,-10.9347,-5.0054
+2016-05-23 11:09:24,3136,-10.9029,-4.92
+2016-05-23 11:24:20,3136,-10.902,-4.8976
+2016-05-23 11:39:16,3137,-10.9347,-4.962
+2016-05-23 11:54:12,3138,-10.9029,-4.9195
+2016-05-23 12:09:08,3138,-10.9337,-4.9844
+2016-05-23 12:24:03,3138,-10.9675,-5.1129
+2016-05-23 12:38:59,3139,-11.0577,-5.5463
+2016-05-23 12:53:55,3141,-10.9029,-4.9187
+2016-05-23 13:08:51,3141,-10.9347,-5.1770000000000005
+2016-05-23 13:23:46,3141,-10.9029,-4.9191
+2016-05-23 13:38:42,3142,-10.9029,-5.0265
+2016-05-23 13:53:38,3142,-11.0325,-4.9401
+2016-05-23 14:08:34,3143,-10.9347,-5.134
+2016-05-23 14:23:30,3143,-10.9048,-5.069
+2016-05-23 14:38:25,3143,-10.9048,-5.0914
+2016-05-23 14:53:21,3144,-10.8406,-5.3931
+2016-05-23 15:08:17,3145,-10.9366,-5.3499
+2016-05-23 15:23:13,3145,-10.8454,-5.091
+2016-05-23 15:38:09,3145,-10.8406,-5.1147
+2016-05-23 15:53:05,3146,-10.9357,-5.3499
+2016-05-23 16:08:07,3146,-10.9039,-2.0226
+2016-05-23 16:23:03,3145,-10.9039,-1.916
+2016-05-23 16:37:59,3146,-10.9039,-2.3833
+2016-05-23 16:52:55,3146,-10.9029,-1.8521
+2016-05-23 17:07:51,3146,-10.9656,-4.811
+2016-05-23 17:22:47,3146,-10.9328,4.642
+2016-05-23 17:37:43,3146,-10.9347,-6.3712
+2016-05-23 17:52:39,3148,-10.8722,-2.0224
+2016-05-23 18:07:35,3147,-10.4787,-4.482
+2016-05-23 18:22:31,3152,-10.6602,60.5984
+2016-05-23 18:37:27,3150,-10.6904,57.4379
+2016-05-23 18:52:23,3150,-10.6593,57.7047
+2016-05-23 19:07:19,3148,-10.6904,56.4839
+2016-05-23 19:22:14,3147,-10.6904,56.3528
+2016-05-23 19:37:10,3146,-10.6904,56.3528
+2016-05-23 19:52:06,3145,-10.7207,55.9673
+2016-05-23 20:07:02,3144,-10.7198,55.176
+2016-05-23 20:21:58,3143,-10.751,55.0647
+2016-05-23 20:37:01,3142,-10.6904,29.673
+2016-05-23 20:51:57,3140,-10.9646,-3.3454
+2016-05-23 21:06:52,3137,-11.0276,17.6128
+2016-05-23 21:21:48,3136,-10.9646,9.2703
+2016-05-23 21:36:44,3136,-10.9347,-3.069
+2016-05-23 21:51:40,3135,-10.9656,6.09
+2016-05-23 22:06:36,3135,-11.0286,18.6657
+2016-05-23 22:21:31,3135,-11.0237,-3.0706
+2016-05-23 22:36:27,3136,-10.9646,-4.563
+2016-05-23 22:51:23,3143,-10.4203,
+2016-05-23 23:06:18,3141,-10.751,62.3178
+2016-05-23 23:21:14,3155,-10.6311,24.9379
+2016-05-23 23:36:09,3155,-10.6612,25.5297
+2016-05-23 23:51:05,3148,-10.6914,24.6654
+2016-05-24 00:06:00,3149,-10.6914,25.1188
+2016-05-24 00:20:56,3139,-10.8139,22.4914
+2016-05-24 00:35:51,3147,-10.6914,25.2108
+2016-05-24 00:50:47,3143,-10.7824,23.8188
+2016-05-24 01:05:42,3148,-10.5673,25.491
+2016-05-24 01:20:38,3146,-10.7226,25.3054
+2016-05-24 01:35:33,3144,-10.7529,24.8445
+2016-05-24 01:50:29,3144,-10.752,24.4371
+2016-05-24 02:05:24,3144,-10.752,25.1121
+2016-05-24 02:20:19,3145,-10.7207,25.4366
+2016-05-24 02:35:15,3144,-10.752,25.2469
+2016-05-24 02:50:10,3143,-10.751,25.016
+2016-05-24 03:05:06,3145,-10.7217,25.9929
+2016-05-24 03:20:01,3143,-10.751,25.3823
+2016-05-24 03:34:56,3143,-10.751,25.4275
+2016-05-24 03:49:59,3140,-10.7815,24.4176
+2016-05-24 04:04:55,3139,-10.7815,24.4198
+2016-05-24 04:19:50,3143,-10.751,25.2852
+2016-05-24 04:34:46,3140,-10.8129,24.6413
+2016-05-24 04:49:41,3141,-10.7824,25.0965
+2016-05-24 05:04:37,3143,-10.7207,26.0786
+2016-05-24 05:19:32,3145,-10.6895,26.4664
+2016-05-24 05:34:27,3144,-10.8444,26.2648
+2016-05-24 05:49:23,3140,-10.7815,25.1862
+2016-05-24 06:04:18,3143,-10.751,26.0786
+2016-05-24 06:19:13,3143,-10.7198,26.41
+2016-05-24 06:34:09,3142,-10.751,26.3116
+2016-05-24 06:49:04,3141,-10.7815,25.9768
+2016-05-24 07:03:59,3142,-10.7501,26.3116
+2016-05-24 07:18:54,3141,-10.7815,26.2158
+2016-05-24 07:33:49,3143,-10.751,26.6457
+2016-05-24 07:48:45,3143,-10.751,26.3701
+2016-05-24 08:03:40,3138,-10.8435,25.7856
+2016-05-24 08:18:35,3138,-10.812,25.8799
+2016-05-24 08:33:37,3138,-10.812,25.6939
+2016-05-24 08:48:33,3139,-10.8129,26.3092
+2016-05-24 09:03:28,3135,-10.8425,24.5929
+2016-05-24 09:18:23,3135,-10.8425,24.8203
+2016-05-24 09:33:19,3138,-10.812,26.1158
+2016-05-24 09:48:14,3138,-10.812,26.0694
+2016-05-24 10:03:09,3143,-10.7207,27.0008
+2016-05-24 10:18:04,3142,-10.751,26.9528
+2016-05-24 10:33:00,3143,-10.6895,26.905
+2016-05-24 10:47:55,3140,-10.7501,26.9505
+2016-05-24 11:02:50,3140,-10.7501,26.9481
+2016-05-24 11:17:45,3139,-10.7805,26.8525
+2016-05-24 11:32:41,3143,-10.7198,27.0032
+2016-05-24 11:47:36,3142,-10.7198,27.1474
+2016-05-24 12:02:31,3138,-10.8454,26.6552
+2016-05-24 12:17:26,3136,-10.8416,25.7902
+2016-05-24 12:32:22,3140,-10.7188,27.1933
+2016-05-24 12:47:17,3138,-10.81,26.8001
+2016-05-24 13:02:12,3138,-10.8129,26.7977
+2016-05-24 13:17:08,3139,-10.7501,27.1933
+2016-05-24 13:32:03,3136,-10.811,26.5064
+2016-05-24 13:46:58,3138,-10.7805,26.9481
+2016-05-24 14:01:53,3136,-10.8425,26.504
+2016-05-24 14:16:48,3138,-10.7501,27.0944
+2016-05-24 14:31:44,3135,-10.8425,26.0253
+2016-05-24 14:46:39,3136,-10.812,26.5512
+2016-05-24 15:01:34,3137,-10.812,26.8477
+2016-05-24 15:16:29,3137,-10.7491,26.8477
+2016-05-24 15:31:25,3135,-10.8425,26.3116
+2016-05-24 15:46:20,3129,-10.9039,24.2261
+2016-05-24 16:01:15,3130,-10.8722,24.6788
+2016-05-24 16:16:10,3129,-10.9039,24.0467
+2016-05-24 16:31:05,3129,-10.9039,24.2261
+2016-05-24 16:46:00,3128,-10.9039,23.9155
+2016-05-24 17:00:55,3127,-10.9039,75.4867
+2016-05-24 17:15:50,3127,-10.9039,71.5791
+2016-05-24 17:30:46,3131,-10.8732,24.765
+2016-05-24 17:45:41,3125,-10.9029,70.5599
+2016-05-24 18:00:36,3128,-10.9039,23.9155
+2016-05-24 18:15:31,3127,-10.9039,73.0697
+2016-05-24 18:30:27,3127,-10.9039,72.8383
+2016-05-24 18:45:22,3127,-10.9039,74.5827
+2016-05-24 19:00:17,3127,-10.9039,73.27
+2016-05-24 19:15:13,3131,-10.8732,24.9025
+2016-05-24 19:30:08,3127,-10.8722,74.7851
+2016-05-24 19:45:03,3127,-10.9039,71.5918
+2016-05-24 19:59:58,3128,-10.8732,76.8952
+2016-05-24 20:14:53,3127,-10.9048,73.27
+2016-05-24 20:29:49,3127,-10.9048,74.1355
+2016-05-24 20:44:44,3123,-10.9039,66.2128
+2016-05-24 20:59:39,3123,-10.9039,66.9282
+2016-05-24 21:14:34,3127,-10.9048,75.9572
+2016-05-24 21:29:30,3123,-10.9039,68.7984
+2016-05-24 21:44:25,3123,-10.9039,67.848
+2016-05-24 21:59:20,3123,-10.9039,67.1079
+2016-05-24 22:14:15,3127,-10.9048,75.9572
+2016-05-24 22:29:11,3123,-10.9039,67.2945
+2016-05-24 22:44:06,3123,-10.9029,69.5686
+2016-05-24 22:59:01,3123,-10.9357,68.5978
+2016-05-24 23:13:56,3123,-10.9039,67.848
+2016-05-24 23:28:51,3123,-10.9357,68.0506
+2016-05-24 23:43:46,3124,-10.9039,74.1619
+2016-05-24 23:58:41,3122,-10.9357,66.2128
+2016-05-25 00:13:43,3122,-10.9357,66.0318
+2016-05-25 00:28:38,3122,-10.9357,65.8517
+2016-05-25 00:43:34,3123,-10.9039,72.8318
+2016-05-25 00:58:29,3122,-10.9357,64.8303
+2016-05-25 01:13:24,3121,-10.9347,64.4889
+2016-05-25 01:28:19,3121,-10.9347,65.3343
+2016-05-25 01:43:14,3120,-10.9347,63.3113
+2016-05-25 01:58:09,3120,-10.9347,63.4731
+2016-05-25 02:13:04,3121,-10.9347,64.1394
+2016-05-25 02:27:59,3121,-10.9347,66.2128
+2016-05-25 02:42:54,3121,-10.9347,65.5001
+2016-05-25 02:57:49,3121,-10.9347,66.3888
+2016-05-25 03:12:44,3121,-10.9347,66.2128
+2016-05-25 03:27:39,3121,-10.9347,66.2128
+2016-05-25 03:42:34,3121,-10.9347,66.0259
+2016-05-25 03:57:29,3121,-10.9665,67.848
+2016-05-25 04:12:24,3121,-10.9347,68.0325
+2016-05-25 04:27:19,3121,-10.9347,68.0385
+2016-05-25 04:42:14,3122,-10.9357,70.3619
+2016-05-25 04:57:09,3121,-10.8091,69.7622
+2016-05-25 05:12:04,3121,-10.9337,69.9569
+2016-05-25 05:26:59,3122,-10.9347,70.5599
+2016-05-25 05:41:54,3121,-10.9347,69.7684
+2016-05-25 05:56:49,3121,-10.9347,68.994
+2016-05-25 06:11:44,3121,-10.9665,69.9569
+2016-05-25 06:26:38,3121,-10.9665,69.9631
+2016-05-25 06:41:33,3122,-10.9665,71.3628
+2016-05-25 06:56:28,3122,-10.9347,71.1604
+2016-05-25 07:11:23,3122,-10.9347,72.2033
+2016-05-25 07:26:18,3122,-10.9347,72.2033
+2016-05-25 07:41:13,3121,-10.9347,72.2033
+2016-05-25 07:56:08,3121,-10.9347,71.7838
+2016-05-25 08:11:03,3121,-10.9347,71.1667
+2016-05-25 08:25:58,3121,-10.9347,70.3619
+2016-05-25 08:40:53,3121,-10.9665,71.5791
+2016-05-25 08:55:48,3122,-10.9347,76.1878
+2016-05-25 09:10:43,3121,-10.9347,70.7589
+2016-05-25 09:25:38,3121,-10.9347,72.2033
+2016-05-25 09:40:33,3122,-10.9347,71.5791
+2016-05-25 09:55:28,3122,-10.9039,73.27
+2016-05-25 10:10:23,3122,-10.9347,73.2635
+2016-05-25 10:25:18,3123,-10.9357,23.8256
+2016-05-25 10:40:13,3123,-10.9347,23.736
+2016-05-25 10:55:08,3123,-10.9357,23.8684
+2016-05-25 11:10:03,3124,-10.9039,23.9585
+2016-05-25 11:24:58,3125,-10.9039,24.2197
+2016-05-25 11:39:53,3124,-10.9039,23.8642
+2016-05-25 11:54:49,3127,-10.9039,24.1785
+2016-05-25 12:09:44,3127,-10.9039,24.2631
+2016-05-25 12:24:39,3127,-10.9039,24.2674
+2016-05-25 12:39:34,3127,-10.9039,24.224
+2016-05-25 12:54:29,3127,-10.9039,24.0015
+2016-05-25 13:09:24,3128,-10.9048,24.0856
+2016-05-25 13:24:20,3128,-10.9048,24.2631
+2016-05-25 13:39:15,3129,-10.8732,24.2652
+2016-05-25 13:54:10,3130,-10.9048,24.3152
+2016-05-25 14:09:05,3131,-10.8732,24.4002
+2016-05-25 14:24:01,3132,-10.8732,24.4877
+2016-05-25 14:38:56,3133,-10.8416,24.4899
+2016-05-25 14:53:51,3134,-10.8732,24.4418
+2016-05-25 15:08:46,3135,-10.8416,24.7142
+2016-05-25 15:23:42,3136,-10.8416,24.7606
+2016-05-25 15:38:37,3136,-10.8732,24.5337
+2016-05-25 15:53:32,3136,-10.8416,24.7142
+2016-05-25 16:08:27,3136,-10.8416,24.4921
+2016-05-25 16:23:23,3136,-10.8732,24.4024
+2016-05-25 16:38:18,3136,-10.8732,24.4877
+2016-05-25 16:53:13,3135,-10.8722,24.0856
+2016-05-25 17:08:08,3135,-10.8416,24.2197
+2016-05-25 17:23:03,3134,-10.8732,23.8663
+2016-05-25 17:37:58,3132,-10.9048,74.1224
+2016-05-25 17:52:53,3132,-10.8732,74.7984
+2016-05-25 18:07:49,3132,-10.8732,74.5695
+2016-05-25 18:22:44,3131,-10.8722,72.6017
+2016-05-25 18:37:39,3132,-10.9039,74.342
+2016-05-25 18:52:34,3132,-10.8732,75.48
+2016-05-25 19:07:29,3130,-10.9039,72.184
+2016-05-25 19:22:24,3130,-10.8722,73.2505
+2016-05-25 19:37:19,3129,-10.9039,72.3923
+2016-05-25 19:52:14,3129,-10.9039,71.977
+2016-05-25 20:07:10,3128,-10.9048,71.56
+2016-05-25 20:22:05,3128,-10.9039,72.3923
+2016-05-25 20:37:00,3128,-10.9039,73.0373
+2016-05-25 20:51:55,3128,-10.9039,72.1776
+2016-05-25 21:06:50,3128,-10.8722,72.8512
+2016-05-25 21:21:45,3128,-10.9039,73.2505
+2016-05-25 21:36:40,3127,-10.9039,73.4714
+2016-05-25 21:51:36,3127,-10.9039,73.0373
+2016-05-25 22:06:31,3127,-10.9357,23.3005
+2016-05-25 22:21:26,3125,-10.9039,71.1478
+2016-05-25 22:36:21,3124,-10.9357,71.1478
+2016-05-25 22:51:16,3124,-10.9029,71.56
+2016-05-25 23:06:12,3123,-10.9347,68.0204
+2016-05-25 23:21:07,3123,-10.9347,69.9382
+2016-05-25 23:36:02,3123,-10.9357,70.7464
+2016-05-25 23:50:57,3123,-10.9029,70.5411
+2016-05-26 00:05:52,3123,-10.9029,69.5501
+2016-05-26 00:20:47,3122,-10.9665,67.1019
+2016-05-26 00:35:42,3122,-10.9029,67.2766
+2016-05-26 00:50:37,3121,-10.9347,66.377
+2016-05-26 01:05:32,3121,-10.9347,66.1893
+2016-05-26 01:20:27,3121,-10.9347,67.6465
+2016-05-26 01:35:22,3121,-10.9347,67.2706
+2016-05-26 01:50:17,3120,-10.9347,64.8016
+2016-05-26 02:05:12,3121,-10.9347,66.0142
+2016-05-26 02:20:07,3121,-10.9029,66.1952
+2016-05-26 02:35:02,3120,-10.9347,64.128
+2016-05-26 02:49:57,3118,-10.9675,63.7876
+2016-05-26 03:04:52,3118,-10.9347,63.4562
+2016-05-26 03:19:48,3118,-10.9347,63.4562
+2016-05-26 03:34:43,3118,-10.9347,62.8138
+2016-05-26 03:49:38,3118,-10.9347,63.7876
+2016-05-26 04:04:33,3117,-10.9347,62.6496
+2016-05-26 04:19:28,3117,-10.9347,62.6496
+2016-05-26 04:34:23,3117,-10.9347,62.1724
+2016-05-26 04:49:18,3115,-10.9347,60.4712
+2016-05-26 05:04:13,3115,-10.9347,61.2324
+2016-05-26 05:19:08,3116,-10.9347,62.3289
+2016-05-26 05:34:03,3115,-10.9347,61.6964
+2016-05-26 05:48:58,3115,-10.9665,61.081
+2016-05-26 06:03:53,3115,-10.9347,61.081
+2016-05-26 06:18:48,3115,-10.9347,61.2324
+2016-05-26 06:33:43,3114,-10.9347,60.6253
+2016-05-26 06:48:38,3114,-10.9347,60.4819
+2016-05-26 07:03:33,3115,-10.9347,62.9677
+2016-05-26 07:18:28,3115,-10.9347,62.6496
+2016-05-26 07:33:23,3116,-10.9347,62.4916
+2016-05-26 07:48:18,3116,-10.9039,63.1335
+2016-05-26 08:03:13,3118,-10.9039,64.1166
+2016-05-26 08:18:09,3118,-10.9039,63.1279
+2016-05-26 08:33:04,3119,-10.9039,62.644
+2016-05-26 08:47:59,3121,-10.8722,64.4603
+2016-05-26 09:02:54,3122,-10.8722,63.2944
+2016-05-26 09:17:50,3124,-10.8416,65.84
+2016-05-26 09:32:45,3123,-10.8416,63.6243
+2016-05-26 09:47:41,3124,-10.8416,63.7876
+2016-05-26 10:02:36,3127,-10.811,64.9706
+2016-05-26 10:17:31,3127,-10.81,63.3001
+2016-05-26 10:32:27,3128,-10.81,63.4562
+2016-05-26 10:47:22,3130,-10.811,63.4618
+2016-05-26 11:02:18,3131,-10.8416,
+2016-05-26 11:17:13,3133,-10.7805,63.7876
+2016-05-26 11:32:09,3134,-10.7805,64.1223
+2016-05-26 11:47:04,3135,-10.7805,60.9302
+2016-05-26 12:02:00,3136,-10.7491,62.6496
+2016-05-26 12:16:56,3138,-10.7501,62.3289
+2016-05-26 12:31:51,3135,-10.7491,57.1835
+2016-05-26 12:46:47,3136,-10.7501,57.7303
+2016-05-26 13:01:42,3136,-10.7188,56.3728
+2016-05-26 13:16:38,3136,-10.7188,56.2471
+2016-05-26 13:31:33,3137,-10.7188,57.0495
+2016-05-26 13:46:29,3137,-10.7188,55.8482
+2016-05-26 14:01:25,3137,-10.7188,55.2005
+2016-05-26 14:16:20,3137,-10.7501,54.8191
+2016-05-26 14:31:16,3138,-10.812,55.4689
+2016-05-26 14:46:11,3142,-10.7188,56.7832
+2016-05-26 15:01:07,3143,-10.7188,56.6458
+2016-05-26 15:16:02,3176,-10.181,26.9002
+2016-05-26 15:30:58,3166,-10.5682,26.9457
+2016-05-26 15:46:05,3166,-10.5682,27.186
+2016-05-26 16:01:12,3160,-10.6885,26.0138
+2016-05-26 16:16:27,3163,-10.6282,26.7905
+2016-05-26 16:31:22,3162,-10.6895,26.5465
+2016-05-26 16:46:29,3161,-10.6593,26.5512
+2016-05-26 17:02:42,3162,-10.6282,26.7929
+2016-05-26 17:17:38,3162,-10.6282,27.0848
+2016-05-26 17:32:33,3162,-10.6292,27.1354
+2016-05-26 17:47:29,3156,-10.7198,25.6894
+2016-05-26 18:02:24,3156,-10.6885,26.067
+2016-05-26 18:17:20,3155,-10.7188,25.6414
+2016-05-26 18:32:15,3152,-10.7188,25.5025
+2016-05-26 18:47:11,3152,-10.7501,25.22
+2016-05-26 19:02:06,3151,-10.7501,25.2672
+2016-05-26 19:17:02,3150,-10.7501,25.3123
+2016-05-26 19:31:57,3150,-10.7188,25.5479
+2016-05-26 19:46:53,3149,-10.7501,25.265
+2016-05-26 20:01:48,3149,-10.7188,25.5912
+2016-05-26 20:16:44,3146,-10.7501,24.9893
+2016-05-26 20:31:39,3152,-10.5982,27.092
+2016-05-26 20:46:35,3152,-10.5673,27.1884
+2016-05-26 21:01:30,3154,-10.5374,27.1884
+2016-05-26 21:16:26,3153,-10.5066,27.0464
+2016-05-26 21:31:21,3146,-10.7179,26.8859
+2016-05-26 21:46:17,3143,-10.7179,26.4499
+2016-05-26 22:01:12,3146,-10.6574,27.186
+2016-05-26 22:16:08,3138,-10.811,25.1279
+2016-05-26 22:31:03,3138,-10.811,25.6825
+2016-05-26 22:45:58,3141,-10.7491,26.3537
+2016-05-26 23:00:54,3138,-10.7805,25.6894
+2016-05-26 23:15:49,3136,-10.811,25.1279
+2016-05-26 23:30:44,3131,-10.8416,23.6042
+2016-05-26 23:45:47,3130,-10.8416,23.6042
+2016-05-27 00:00:42,3132,-10.811,24.4877
+2016-05-27 00:15:37,3134,-10.811,24.8071
+2016-05-27 00:30:33,3130,-10.8416,24.1309
+2016-05-27 00:45:28,3129,-10.8416,23.8663
+2016-05-27 01:00:30,3129,-10.8416,23.7807
+2016-05-27 01:15:26,3139,-10.6885,27.1836
+2016-05-27 01:30:21,3138,-10.7198,27.1836
+2016-05-27 01:45:16,3138,-10.7188,27.0368
+2016-05-27 02:00:11,3136,-10.7491,26.5441
+2016-05-27 02:15:06,3129,-10.8406,24.582
+2016-05-27 02:30:01,3130,-10.8406,24.4921
+2016-05-27 02:44:56,3131,-10.8416,25.4525
+2016-05-27 02:59:51,3135,-10.7482,26.5961
+2016-05-27 03:14:47,3131,-10.81,25.9168
+2016-05-27 03:29:42,3128,-10.8406,25.1324
+2016-05-27 03:44:37,3133,-10.7805,26.8286
+2016-05-27 03:59:32,3132,-10.81,26.1599
+2016-05-27 04:14:27,3134,-10.7491,26.9361
+2016-05-27 04:29:23,3128,-10.8406,25.4593
+2016-05-27 04:44:18,3132,-10.7796,26.5937
+2016-05-27 04:59:13,3133,-10.812,26.8382
+2016-05-27 05:14:08,3134,-10.8416,26.9888
+2016-05-27 05:29:04,3132,-10.811,26.6386
+2016-05-27 05:43:59,3129,-10.8406,25.3101
+2016-05-27 05:58:54,3128,-10.8406,25.362
+2016-05-27 06:13:49,3127,-10.8406,24.7164
+2016-05-27 06:28:45,3117,-10.9328,62.3344
+2016-05-27 06:43:40,3118,-10.9029,61.8617
+2016-05-27 06:58:35,3118,-10.9029,61.3955
+2016-05-27 07:13:30,3118,-10.8722,60.4712
+2016-05-27 07:28:26,3119,-10.8406,60.6253
+2016-05-27 07:43:21,3121,-10.8722,62.6496
+2016-05-27 07:58:16,3123,-10.8406,65.3111
+2016-05-27 08:13:11,3123,-10.8722,63.6243
+2016-05-27 08:28:07,3127,-10.8091,69.9445
+2016-05-27 08:43:02,3128,-10.81,72.6017
+2016-05-27 08:57:57,3131,-10.8416,23.2943
+2016-05-27 09:12:53,3130,-10.81,23.2585
+2016-05-27 09:27:48,3134,-10.7805,24.2197
+2016-05-27 09:42:44,3135,-10.7805,24.4921
+2016-05-27 09:57:39,3140,-10.6885,26.3537
+2016-05-27 10:12:35,3136,-10.7805,24.5359
+2016-05-27 10:27:30,3139,-10.7501,25.6825
+2016-05-27 10:42:25,3138,-10.7491,24.8093
+2016-05-27 10:57:21,3138,-10.7491,24.9893
+2016-05-27 11:12:16,3143,-10.6885,26.4476
+2016-05-27 11:27:12,3144,-10.6876,26.3584
+2016-05-27 11:42:08,3145,-10.6885,26.2135
+2016-05-27 11:57:03,3144,-10.6876,25.9237
+2016-05-27 12:11:59,3150,-10.5973,26.9912
+2016-05-27 12:26:54,3150,-10.5973,27.1426
+2016-05-27 12:41:50,3151,-10.5664,27.1836
+2016-05-27 12:56:45,3152,-10.5673,27.2852
+2016-05-27 13:11:41,3145,-10.7179,24.8558
+2016-05-27 13:26:36,3143,-10.7491,23.6105
+2016-05-27 13:41:32,3141,-10.7491,69.5501
+2016-05-27 13:56:27,3142,-10.7179,70.1339
+2016-05-27 14:11:23,3145,-10.6876,77.35600000000002
+2016-05-27 14:26:19,3145,-10.6876,23.6042
+2016-05-27 14:41:14,3148,-10.6574,24.3109
+2016-05-27 14:56:10,3148,-10.6885,23.7465
+2016-05-27 15:11:06,3149,-10.6885,23.9155
+2016-05-27 15:26:01,3148,-10.6885,23.5701
+2016-05-27 15:40:57,3146,-10.7198,72.184
+2016-05-27 15:55:53,3148,-10.6885,23.4772
+2016-05-27 16:10:48,3149,-10.6885,23.6997
+2016-05-27 16:25:44,3150,-10.6885,23.7465
+2016-05-27 16:40:39,3151,-10.6574,24.3152
+2016-05-27 16:55:35,3150,-10.6574,24.0962
+2016-05-27 17:10:31,3152,-10.6273,24.7252
+2016-05-27 17:25:26,3152,-10.6885,24.4067
+2016-05-27 17:40:22,3151,-10.6574,24.6369
+2016-05-27 17:55:17,3152,-10.6273,25.507
+2016-05-27 18:10:13,3150,-10.6885,24.4176
+2016-05-27 18:25:08,3149,-10.6885,24.3674
+2016-05-27 18:40:04,3150,-10.6264,24.9069
+2016-05-27 18:54:59,3150,-10.6264,25.2739
+2016-05-27 19:09:55,3150,-10.6264,25.2312
+2016-05-27 19:24:50,3150,-10.6264,25.371
+2016-05-27 19:39:46,3150,-10.6264,25.4661
+2016-05-27 19:54:41,3147,-10.6574,25.1324
+2016-05-27 20:09:37,3143,-10.6876,23.879
+2016-05-27 20:24:33,3141,-10.7188,23.357
+2016-05-27 20:39:28,3140,-10.6876,23.3991
+2016-05-27 20:54:24,3138,-10.7179,23.1831
+2016-05-27 21:09:19,3136,-10.7179,70.9591
+2016-05-27 21:24:15,3135,-10.7491,68.9879
+2016-05-27 21:39:10,3133,-10.7491,67.8541
+2016-05-27 21:54:06,3131,-10.7491,65.8517
+2016-05-27 22:09:02,3130,-10.7482,64.9936
+2016-05-27 22:23:57,3129,-10.7491,64.6448
+2016-05-27 22:38:53,3128,-10.7482,63.3113
+2016-05-27 22:53:48,3127,-10.7796,61.7129
+2016-05-27 23:08:44,3127,-10.7482,61.8727
+2016-05-27 23:23:39,3127,-10.7796,63.3113
+2016-05-27 23:38:35,3124,-10.7786,60.3392
+2016-05-27 23:53:30,3123,-10.7796,60.3392
+2016-05-28 00:08:26,3122,-10.81,58.7317
+2016-05-28 00:23:21,3122,-10.81,58.0251
+2016-05-28 00:38:17,3121,-10.81,56.6559
+2016-05-28 00:53:12,3120,-10.81,56.1268
+2016-05-28 01:08:08,3119,-10.81,55.6014
+2016-05-28 01:23:03,3118,-10.81,55.3467
+2016-05-28 01:37:58,3117,-10.81,54.7091
+2016-05-28 01:52:54,3117,-10.7491,31.4207
+2016-05-28 02:07:49,3115,-10.7491,31.6497
+2016-05-28 02:22:44,3115,-10.7805,31.6553
+2016-05-28 02:37:39,3114,-10.812,31.542
+2016-05-28 02:52:34,3114,-10.7796,31.6581
+2016-05-28 03:07:30,3114,-10.811,31.5448
+2016-05-28 03:22:25,3115,-10.7805,31.6609
+2016-05-28 03:37:20,3114,-10.7805,31.6042
+2016-05-28 03:52:15,3113,-10.811,31.4855
+2016-05-28 04:07:10,3114,-10.7805,31.6042
+2016-05-28 04:22:05,3114,-10.812,31.6609
+2016-05-28 04:37:01,3114,-10.7805,31.6609
+2016-05-28 04:51:56,3114,-10.7805,31.6609
+2016-05-28 05:06:51,3114,-10.7805,31.6609
+2016-05-28 05:21:46,3114,-10.7805,31.6042
+2016-05-28 05:36:41,3114,-10.7805,31.6609
+2016-05-28 05:51:37,3114,-10.812,31.7178
+2016-05-28 06:06:32,3114,-10.7805,31.7747
+2016-05-28 06:21:27,3115,-10.7491,31.7178
+2016-05-28 06:36:22,3115,-10.7805,31.7234
+2016-05-28 06:51:18,3115,-10.7805,31.7178
+2016-05-28 07:06:13,3115,-10.7805,31.4911
+2016-05-28 07:21:09,3116,-10.7491,31.542
+2016-05-28 07:36:04,3116,-10.7491,31.7206
+2016-05-28 07:50:59,3116,-10.7491,31.7747
+2016-05-28 08:05:55,3117,-10.7491,31.6553
+2016-05-28 08:20:50,3118,-10.7491,7.9954
+2016-05-28 08:35:46,3119,-10.7501,31.7663
+2016-05-28 08:50:42,3120,-10.7501,31.7776
+2016-05-28 09:05:37,3121,-10.7188,31.5448
+2016-05-28 09:20:33,3121,-10.7188,31.0266
+2016-05-28 09:35:28,3122,-10.7188,31.7776
+2016-05-28 09:50:24,3123,-10.7198,31.2551
+2016-05-28 10:05:19,3125,-10.6885,-0.1828
+2016-05-28 10:20:15,3126,-10.7198,31.6441
+2016-05-28 10:35:11,3128,-10.6885,31.7719
+2016-05-28 10:50:06,3129,-10.6885,31.5986
+2016-05-28 11:05:02,3130,-10.6885,31.7776
+2016-05-28 11:19:58,3130,-10.6584,31.7178
+2016-05-28 11:34:53,3132,-10.6584,31.3701
+2016-05-28 11:49:49,3133,-10.6282,31.0239
+2016-05-28 12:04:45,3134,-10.6282,31.7776
+2016-05-28 12:19:40,3134,-10.6584,29.1493
+2016-05-28 12:34:36,3135,-10.6282,31.6637
+2016-05-28 12:49:32,3135,-10.6282,31.7206
+2016-05-28 13:04:27,3136,-10.5973,31.7804
+2016-05-28 13:19:23,3137,-10.5982,31.6637
+2016-05-28 13:34:19,3138,-10.5982,31.4291
+2016-05-28 13:49:14,3138,-10.5982,31.8975
+2016-05-28 14:04:10,3138,-10.5982,31.7804
+2016-05-28 14:19:05,3138,-10.5982,31.7206
+2016-05-28 14:34:01,3138,-10.5982,31.7206
+2016-05-28 14:48:57,3138,-10.5982,31.7234
+2016-05-28 15:03:52,3138,-10.5982,26.408
+2016-05-28 15:18:48,3139,-10.5982,31.7804
+2016-05-28 15:33:43,3139,-10.5982,28.8374
+2016-05-28 15:48:39,3139,-10.5982,26.502
+2016-05-28 16:03:35,3140,-10.5982,25.2909
+2016-05-28 16:18:30,3140,-10.5982,25.2392
+2016-05-28 16:33:26,3141,-10.5982,27.4188
+2016-05-28 16:48:22,3139,-10.5682,31.4375
+2016-05-28 17:03:18,3139,-10.5682,26.121
+2016-05-28 17:18:13,3138,-10.5682,26.691
+2016-05-28 17:33:09,3138,-10.5682,27.4188
+2016-05-28 17:48:05,3138,-10.5973,25.1046
+2016-05-28 18:03:00,3138,-10.5973,26.4973
+2016-05-28 18:17:56,3136,-10.6282,27.5188
+2016-05-28 18:32:52,3136,-10.5673,24.2133
+2016-05-28 18:47:47,3135,-11.1536,23.2641
+2016-05-28 19:02:43,3135,-10.5982,24.4329
+2016-05-28 19:17:39,3135,-10.6282,6.6579
+2016-05-28 19:32:34,3134,-10.5982,22.4931
+2016-05-28 19:47:30,3132,-10.5982,7.619
+2016-05-28 20:02:26,3132,-10.6292,21.597
+2016-05-28 20:17:21,3131,-11.1212,0.5036
+2016-05-28 20:32:17,3130,-10.5973,-4.9515
+2016-05-28 20:47:12,3129,-10.5973,-5.4281
+2016-05-28 21:02:08,3128,-10.6282,-4.1132
+2016-05-28 21:17:03,3127,-11.0889,-3.5572
+2016-05-28 21:31:59,3127,-11.0256,-5.0408
+2016-05-28 21:46:54,3124,-11.0577,17.0977
+2016-05-28 22:01:50,3124,-10.6282,-4.7832
+2016-05-28 22:16:46,3123,-11.0577,-5.1743
+2016-05-28 22:31:41,3122,-11.0577,-5.1752
+2016-05-28 22:46:37,3123,-11.0577,-5.1752
+2016-05-28 23:01:32,3122,-11.0577,-5.0883
+2016-05-28 23:16:28,3122,-11.0577,-5.0668
+2016-05-28 23:31:23,3122,-11.0266,-5.1097
+2016-05-28 23:46:19,3121,-11.0577,-5.0448
+2016-05-29 00:01:14,3121,-11.0577,-5.1962
+2016-05-29 00:16:10,3121,-11.0577,-5.2604
+2016-05-29 00:31:06,3119,-11.0577,-5.2604
+2016-05-29 00:46:01,3118,-11.0256,-5.1532
+2016-05-29 01:01:00,3118,-11.0266,-5.1308
+2016-05-29 01:15:56,3118,-11.0577,-5.0892
+2016-05-29 01:30:51,3118,-11.0577,-5.1317
+2016-05-29 01:45:47,3117,-11.0256,-5.1532
+2016-05-29 02:00:42,3117,-11.0256,-5.1962
+2016-05-29 02:15:38,3117,-11.0266,-5.1111
+2016-05-29 02:30:33,3117,-11.0266,-5.0247
+2016-05-29 02:45:29,3116,-11.0266,-5.1107
+2016-05-29 03:00:24,3116,-10.9956,-5.1532
+2016-05-29 03:15:20,3115,-11.0256,-5.0037
+2016-05-29 03:30:15,3115,-11.0266,-5.2402
+2016-05-29 03:45:11,3115,-10.9956,-5.0883
+2016-05-29 04:00:06,3115,-11.0266,-5.1747
+2016-05-29 04:15:01,3115,-11.0256,-5.2608
+2016-05-29 04:29:57,3115,-11.0256,-5.0457
+2016-05-29 04:44:52,3114,-11.0577,-5.1747
+2016-05-29 04:59:47,3114,-11.0276,-5.0896
+2016-05-29 05:14:43,3114,-11.0276,-4.8101
+2016-05-29 05:29:38,3114,-11.0276,-4.8101
+2016-05-29 05:44:33,3114,-11.0276,-5.1312
+2016-05-29 05:59:29,3114,-11.0256,-4.9379
+2016-05-29 06:14:24,3113,-11.0266,-4.9392
+2016-05-29 06:29:19,3113,-11.0266,-4.9616
+2016-05-29 06:44:15,3113,-11.0266,-4.8757
+2016-05-29 06:59:10,3112,-10.9956,-4.9187
+2016-05-29 07:14:05,3113,-10.9956,-5.1766
+2016-05-29 07:29:00,3112,-10.9956,-5.0045
+2016-05-29 07:43:55,3112,-10.9956,-4.9831
+2016-05-29 07:58:58,3113,-10.9956,-4.9616
+2016-05-29 08:13:53,3112,-10.9636,-4.8757
+2016-05-29 08:28:49,3113,-11.0276,-4.8757
+2016-05-29 08:43:44,3113,-10.9646,-4.9191
+2016-05-29 08:58:39,3113,-10.9965,-4.9191
+2016-05-29 09:13:42,3114,-10.9646,-4.7904
+2016-05-29 09:28:37,3114,-10.9646,-4.9401
+2016-05-29 09:43:33,3114,-10.9646,-4.7904
+2016-05-29 09:58:28,3114,-10.9656,-4.8766
+2016-05-29 10:13:24,3115,-10.9646,-4.9401
+2016-05-29 10:28:19,3116,-10.9656,-4.9191
+2016-05-29 10:43:15,3116,-10.9337,-4.8123
+2016-05-29 10:58:10,3117,-10.9337,-4.7904
+2016-05-29 11:13:06,3120,-10.9337,-4.9406
+2016-05-29 11:28:01,3121,-10.9337,-5.0265
+2016-05-29 11:42:57,3121,-10.9029,-4.877
+2016-05-29 11:57:52,3122,-10.9029,-4.9195
+2016-05-29 12:12:47,3123,-10.9039,-5.0045
+2016-05-29 12:27:43,3124,-10.8722,-5.0265
+2016-05-29 12:42:38,3127,-10.9405,-5.0076
+2016-05-29 12:57:34,3127,-10.8406,-4.9195
+2016-05-29 13:12:29,3128,-10.8732,-4.941
+2016-05-29 13:27:25,3130,-10.8416,-4.8131
+2016-05-29 13:42:21,3131,-10.81,-4.8131
+2016-05-29 13:57:16,3132,-10.8416,-4.9625
+2016-05-29 14:12:12,3133,-10.81,-4.8127
+2016-05-29 14:27:08,3134,-10.811,-4.8127
+2016-05-29 14:42:03,3135,-10.7796,-4.835
+2016-05-29 14:57:06,3135,-10.7805,-4.8341
+2016-05-29 15:12:02,3136,-10.7491,-4.8569
+2016-05-29 15:26:57,3136,-10.7805,-4.8569
+2016-05-29 15:42:01,3138,-10.7805,-4.9419
+2016-05-29 15:56:56,3138,-10.7482,-5.0278
+2016-05-29 16:11:52,3138,-10.7491,-5.0488
+2016-05-29 16:26:48,3138,-10.7491,-5.1569
+2016-05-29 16:41:44,3140,-10.7491,-4.8783
+2016-05-29 16:56:40,3140,-10.7491,-5.0928
+2016-05-29 17:11:35,3140,-10.7491,-4.8569
+2016-05-29 17:26:31,3139,-10.7491,-5.1143
+2016-05-29 17:41:27,3139,-10.7796,-5.0923
+2016-05-29 17:56:23,3138,-10.7805,-5.1358
+2016-05-29 18:11:18,3138,-10.7491,-4.8569
+2016-05-29 18:26:14,3138,-10.7491,-5.0488
+2016-05-29 18:41:10,3137,-10.7805,-4.9853
+2016-05-29 18:56:06,3136,-10.7796,-4.8569
+2016-05-29 19:11:01,3136,-10.7796,-4.8354
+2016-05-29 19:25:57,3135,-10.7796,-4.8144
+2016-05-29 19:40:53,3135,-10.7796,-4.9221
+2016-05-29 19:55:49,3134,-10.7796,-4.9436
+2016-05-29 20:10:44,3132,-10.7796,
+2016-05-29 20:25:40,3131,-10.811,-4.8792
+2016-05-29 20:40:35,3130,-10.811,
+2016-05-29 20:55:31,3129,-10.81,
+2016-05-29 21:10:27,3128,-10.81,
+2016-05-29 21:25:22,3127,-10.81,-4.9226
+2016-05-29 21:40:18,3124,-10.81,
+2016-05-29 21:55:13,3124,-10.8416,
+2016-05-29 22:10:09,3123,-10.8416,
+2016-05-29 22:25:04,3122,-10.8416,
+2016-05-29 22:40:00,3122,-10.8416,
+2016-05-29 22:54:55,3121,-10.8416,
+2016-05-29 23:09:51,3121,-10.8416,
+2016-05-29 23:24:46,3120,-10.8416,
+2016-05-29 23:39:42,3119,-10.8416,
+2016-05-29 23:54:37,3118,-10.8406,
+2016-05-30 00:09:33,3118,-10.8416,
+2016-05-30 00:24:29,3117,-10.8406,
+2016-05-30 00:39:24,3117,-10.8732,
+2016-05-30 00:54:20,3116,-10.8416,
+2016-05-30 01:09:15,3115,-10.8406,
+2016-05-30 01:24:11,3115,-10.8416,
+2016-05-30 01:39:06,3115,-10.8416,
+2016-05-30 01:54:02,3115,-10.8722,
+2016-05-30 02:08:57,3115,-10.8722,
+2016-05-30 02:23:53,3114,-10.8406,
+2016-05-30 02:38:49,3114,-10.8406,
+2016-05-30 02:53:44,3114,-10.8722,
+2016-05-30 03:08:39,3114,-10.8722,
+2016-05-30 03:23:35,3114,-10.8722,
+2016-05-30 03:38:30,3114,-10.8722,
+2016-05-30 03:53:26,3114,-10.8722,
+2016-05-30 04:08:21,3114,-10.8406,
+2016-05-30 04:23:17,3114,-10.8732,
+2016-05-30 04:38:12,3114,-10.8722,
+2016-05-30 04:53:07,3113,-10.8722,
+2016-05-30 05:08:03,3113,-10.8406,
+2016-05-30 05:22:58,3113,-10.8416,
+2016-05-30 05:37:54,3114,-10.8406,
+2016-05-30 05:52:49,3114,-10.8722,
+2016-05-30 06:07:45,3114,-10.8406,
+2016-05-30 06:22:41,3114,-10.8416,
+2016-05-30 06:37:36,3114,-10.8732,
+2016-05-30 06:52:32,3114,-10.8416,
+2016-05-30 07:07:27,3115,-10.8416,
+2016-05-30 07:22:23,3115,-10.8416,
+2016-05-30 07:37:19,3115,-10.8416,
+2016-05-30 07:52:14,3116,-10.8416,
+2016-05-30 08:07:10,3117,-10.811,
+2016-05-30 08:22:06,3117,-10.811,
+2016-05-30 08:37:01,3118,-10.811,
+2016-05-30 08:51:57,3120,-10.811,
+2016-05-30 09:06:53,3121,-10.812,
+2016-05-30 09:21:48,3122,-10.812,
+2016-05-30 09:36:44,3123,-10.7805,
+2016-05-30 09:51:40,3123,-10.7805,
+2016-05-30 10:06:35,3124,-10.7805,
+2016-05-30 10:21:31,3126,-10.7805,
+2016-05-30 10:36:26,3127,-10.7491,
+2016-05-30 10:51:22,3127,-10.7501,
+2016-05-30 11:06:18,3128,-10.7501,
+2016-05-30 11:21:14,3129,-10.7501,
+2016-05-30 11:36:09,3130,-10.7501,
+2016-05-30 11:51:05,3131,-10.7188,
+2016-05-30 12:06:01,3133,-10.7188,
+2016-05-30 12:20:57,3134,-10.7188,
+2016-05-30 12:35:53,3135,-10.7188,
+2016-05-30 12:50:48,3136,-10.7198,
+2016-05-30 13:05:44,3136,-10.7198,
+2016-05-30 13:20:40,3136,-10.7198,
+2016-05-30 13:35:43,3137,-10.7188,
+2016-05-30 13:50:39,3136,-10.7188,
+2016-05-30 14:05:35,3136,-10.7188,
+2016-05-30 14:20:31,3135,-10.7188,
+2016-05-30 14:35:27,3135,-10.7188,
+2016-05-30 14:50:23,3133,-10.7188,
+2016-05-30 15:05:19,3132,-10.7188,
+2016-05-30 15:20:14,3130,-10.7198,
+2016-05-30 15:35:10,3130,-10.7188,
+2016-05-30 15:50:06,3131,-10.7501,
+2016-05-30 16:05:02,3131,-10.7501,
+2016-05-30 16:19:58,3131,-10.7501,
+2016-05-30 16:34:54,3131,-10.7188,
+2016-05-30 16:49:50,3131,-10.7188,
+2016-05-30 17:04:45,3132,-10.7188,
+2016-05-30 17:19:41,3132,-10.7188,
+2016-05-30 17:34:37,3132,-10.7188,
+2016-05-30 17:49:33,3132,-10.7188,
+2016-05-30 18:04:29,3132,-10.7501,
+2016-05-30 18:19:25,3132,-10.7501,
+2016-05-30 18:34:21,3132,-10.7188,
+2016-05-30 18:49:17,3131,-10.7501,
+2016-05-30 19:04:13,3130,-10.7501,
+2016-05-30 19:19:09,3130,-10.7491,
+2016-05-30 19:34:05,3130,-10.7491,
+2016-05-30 19:49:01,3129,-10.7491,
+2016-05-30 20:03:57,3128,-10.7491,
+2016-05-30 20:18:53,3127,-10.7491,
+2016-05-30 20:33:49,3127,-10.7805,
+2016-05-30 20:48:44,3126,-10.7805,
+2016-05-30 21:03:40,3124,-10.7805,
+2016-05-30 21:18:36,3123,-10.7805,
+2016-05-30 21:33:32,3123,-10.811,
+2016-05-30 21:48:27,3122,-10.811,
+2016-05-30 22:03:23,3122,-10.81,
+2016-05-30 22:18:19,3121,-10.81,
+2016-05-30 22:33:15,3121,-10.81,
+2016-05-30 22:48:10,3120,-10.8416,
+2016-05-30 23:03:06,3119,-10.8416,
+2016-05-30 23:18:02,3117,-10.8416,
+2016-05-30 23:32:58,3116,-10.8416,
+2016-05-30 23:47:53,3116,-10.8416,
+2016-05-31 00:02:49,3115,-10.8416,
+2016-05-31 00:17:45,3115,-10.8722,
+2016-05-31 00:32:41,3114,-10.8406,
+2016-05-31 00:47:36,3114,-10.8396,
+2016-05-31 01:02:32,3113,-10.8732,
+2016-05-31 01:17:28,3112,-10.8712,
+2016-05-31 01:32:23,3110,-10.8722,
+2016-05-31 01:47:19,3110,-10.9058,
+2016-05-31 02:02:14,3110,-10.8349,
+2016-05-31 02:17:10,3109,-10.9386,
+2016-05-31 02:32:06,3109,-10.7757,
+2016-05-31 02:47:02,3109,-10.9694,
+2016-05-31 03:01:57,3108,-11.0325,
+2016-05-31 03:16:53,3108,-10.9029,
+2016-05-31 03:31:48,3107,-10.9328,
+2016-05-31 03:46:44,3107,-10.9039,
+2016-05-31 04:01:39,3107,-10.9357,
+2016-05-31 04:16:35,3107,-10.9347,
+2016-05-31 04:31:30,3106,-10.9039,
+2016-05-31 04:46:26,3106,-10.9029,
+2016-05-31 05:01:21,3106,-10.9029,
+2016-05-31 05:16:17,3106,-10.9347,
+2016-05-31 05:31:12,3106,-10.9029,
+2016-05-31 05:46:08,3106,-10.9337,
+2016-05-31 06:01:03,3105,-10.9039,
+2016-05-31 06:15:59,3106,-10.9029,
+2016-05-31 06:30:54,3106,-10.9039,
+2016-05-31 06:45:50,3106,-10.9039,
+2016-05-31 07:00:46,3106,-10.9039,
+2016-05-31 07:15:41,3107,-10.7453,
+2016-05-31 07:30:36,3107,-10.7815,
+2016-05-31 07:45:32,3107,-10.7188,
+2016-05-31 08:00:27,3109,-10.7179,
+2016-05-31 08:15:23,3109,-10.8139,
+2016-05-31 08:30:19,3110,-10.8072,
+2016-05-31 08:45:14,3112,-10.8406,
+2016-05-31 09:00:10,3114,-10.8732,
+2016-05-31 09:15:06,3115,-10.8416,
+2016-05-31 09:30:01,3116,-10.81,
+2016-05-31 09:44:57,3117,-10.81,
+2016-05-31 09:59:53,3120,-10.811,
+2016-05-31 10:14:49,3121,-10.811,
+2016-05-31 10:29:44,3122,-10.812,
+2016-05-31 10:44:40,3123,-10.7805,
+2016-05-31 10:59:36,3124,-10.7805,
+2016-05-31 11:14:31,3125,-10.7805,
+2016-05-31 11:29:27,3127,-10.7805,
+2016-05-31 11:44:23,3128,-10.7805,
+2016-05-31 11:59:19,3128,-10.7501,
+2016-05-31 12:14:14,3130,-10.7491,
+2016-05-31 12:29:10,3131,-10.7501,
+2016-05-31 12:44:06,3132,-10.7501,
+2016-05-31 12:59:02,3134,-10.7501,
+2016-05-31 13:13:57,3134,-10.7501,
+2016-05-31 13:28:53,3135,-10.7501,
+2016-05-31 13:43:49,3136,-10.7501,
+2016-05-31 13:58:45,3136,-10.7501,
+2016-05-31 14:13:41,3136,-10.7188,
+2016-05-31 14:28:37,3137,-10.7198,
+2016-05-31 14:43:40,3138,-10.7188,
+2016-05-31 14:58:36,3138,-10.7188,
+2016-05-31 15:13:32,3138,-10.7188,
+2016-05-31 15:28:28,3138,-10.7188,-4.8792
+2016-05-31 15:43:24,3138,-10.7198,-4.8788
+2016-05-31 15:58:20,3138,-10.7188,-4.8573
+2016-05-31 16:13:16,3139,-10.7188,-4.8573
+2016-05-31 16:28:12,3139,-10.7198,-4.8792
+2016-05-31 16:43:08,3140,-10.7198,
+2016-05-31 16:58:04,3139,-10.7188,-4.8792
+2016-05-31 17:13:00,3139,-10.7198,
+2016-05-31 17:27:56,3138,-10.7188,
+2016-05-31 17:42:52,3138,-10.7188,
+2016-05-31 17:57:48,3138,-10.7188,-5.2879999999999985
+2016-05-31 18:12:44,3137,-10.7188,-5.3523
+2016-05-31 18:27:40,3136,-10.7188,
+2016-05-31 18:42:36,3135,-10.7188,
+2016-05-31 18:57:32,3134,-10.7491,-5.2439
+2016-05-31 19:12:28,3132,-10.7491,
+2016-05-31 19:27:24,3130,-10.7805,-5.2444
+2016-05-31 19:42:19,3128,-10.7491,
+2016-05-31 19:57:15,3127,-10.811,-5.1802
+2016-05-31 20:12:11,3124,-10.811,-5.1802
+2016-05-31 20:27:07,3123,-10.811,-5.1802
+2016-05-31 20:42:03,3122,-10.81,-4.987
+2016-05-31 20:56:58,3121,-10.8368,
+2016-05-31 21:12:01,3120,-10.8435,-5.0905
+2016-05-31 21:26:56,3118,-10.7434,-5.1367
+2016-05-31 21:41:52,3117,-10.8416,
+2016-05-31 21:56:48,3116,-10.9039,-5.1811
+2016-05-31 22:11:43,3115,-10.8081,-5.0502
+2016-05-31 22:26:39,3115,-10.8732,
+2016-05-31 22:41:34,3115,-10.8396,
+2016-05-31 22:56:30,3114,-10.8377,
+2016-05-31 23:11:25,3113,-10.8416,
+2016-05-31 23:26:21,3113,-10.8712,
+2016-05-31 23:41:16,3113,-10.9039,
+2016-05-31 23:56:12,3111,-10.9029,
+2016-06-01 00:11:07,3110,-10.9029,-4.9682
+2016-06-01 00:26:03,3110,-10.9029,-4.9226
+2016-06-01 00:40:58,3109,-10.9347,-4.9226
+2016-06-01 00:55:54,3109,-10.9337,-4.8998
+2016-06-01 01:10:49,3109,-10.9029,
+2016-06-01 01:25:45,3109,-10.9347,-4.923
+2016-06-01 01:40:40,3109,-10.9029,
+2016-06-01 01:55:36,3108,-10.9029,
+2016-06-01 02:10:31,3108,-10.9029,
+2016-06-01 02:25:27,3107,-10.9347,
+2016-06-01 02:40:23,3107,-10.9029,
+2016-06-01 02:55:18,3107,-10.9337,
+2016-06-01 03:10:14,3106,-10.9039,
+2016-06-01 03:25:09,3106,-10.9029,
+2016-06-01 03:40:05,3107,-10.9347,
+2016-06-01 03:55:00,3107,-10.9347,
+2016-06-01 04:09:55,3107,-10.9347,
+2016-06-01 04:24:51,3106,-10.9337,
+2016-06-01 04:39:46,3107,-10.9337,
+2016-06-01 04:54:42,3107,-10.9337,
+2016-06-01 05:09:37,3107,-10.9337,
+2016-06-01 05:24:33,3107,-10.9337,
+2016-06-01 05:39:28,3107,-10.9337,
+2016-06-01 05:54:24,3107,-10.9337,
+2016-06-01 06:09:19,3107,-10.9337,
+2016-06-01 06:24:14,3107,-10.9337,
+2016-06-01 06:39:10,3107,-10.9347,
+2016-06-01 06:54:05,3107,-10.9337,
+2016-06-01 07:09:08,3107,-10.9347,
+2016-06-01 07:24:03,3108,-10.9347,
+2016-06-01 07:38:59,3108,-10.9347,
+2016-06-01 07:53:54,3109,-10.9347,
+2016-06-01 08:08:49,3109,-10.9347,
+2016-06-01 08:23:45,3109,-10.9347,
+2016-06-01 08:38:40,3109,-10.9029,
+2016-06-01 08:53:36,3109,-10.9029,
+2016-06-01 09:08:31,3109,-10.9029,
+2016-06-01 09:23:27,3109,-10.9029,
+2016-06-01 09:38:22,3109,-10.9029,
+2016-06-01 09:53:18,3109,-10.9029,
+2016-06-01 10:08:13,3110,-10.9029,
+2016-06-01 10:23:09,3110,-10.9029,
+2016-06-01 10:38:05,3110,-10.9029,
+2016-06-01 10:53:00,3110,-10.8712,
+2016-06-01 11:07:56,3110,-10.8722,
+2016-06-01 11:22:51,3111,-10.8712,
+2016-06-01 11:37:47,3112,-10.8722,
+2016-06-01 11:52:42,3113,-10.8722,
+2016-06-01 12:07:38,3113,-10.8722,
+2016-06-01 12:22:33,3113,-10.9039,
+2016-06-01 12:37:29,3113,-10.8406,
+2016-06-01 12:52:25,3114,-10.8406,
+2016-06-01 13:07:20,3114,-10.8406,
+2016-06-01 13:22:16,3114,-10.8406,
+2016-06-01 13:37:11,3115,-10.8406,
+2016-06-01 13:52:07,3115,-10.8722,
+2016-06-01 14:07:02,3115,-10.8416,
+2016-06-01 14:21:58,3116,-10.8406,
+2016-06-01 14:36:54,3116,-10.8416,
+2016-06-01 14:51:49,3116,-10.8406,
+2016-06-01 15:06:45,3117,-10.8406,
+2016-06-01 15:21:40,3117,-10.8416,
+2016-06-01 15:36:36,3118,-10.8416,
+2016-06-01 15:51:32,3118,-10.8416,
+2016-06-01 16:06:27,3118,-10.8416,
+2016-06-01 16:21:23,3119,-10.8416,
+2016-06-01 16:36:19,3120,-10.81,
+2016-06-01 16:51:15,3120,-10.8416,
+2016-06-01 17:06:10,3120,-10.8416,-4.9449
+2016-06-01 17:21:06,3120,-10.81,-4.923
+2016-06-01 17:36:02,3121,-10.8091,-4.9234
+2016-06-01 17:50:58,3121,-10.8091,-4.923
+2016-06-01 18:05:53,3121,-10.8091,-4.9011
+2016-06-01 18:20:49,3121,-10.811,-4.902
+2016-06-01 18:35:45,3121,-10.7786,
+2016-06-01 18:50:40,3121,-10.81,
+2016-06-01 19:05:36,3121,-10.8416,-4.9015
+2016-06-01 19:20:32,3120,-10.81,
+2016-06-01 19:35:27,3120,-10.8406,
+2016-06-01 19:50:23,3119,-10.8091,
+2016-06-01 20:05:19,3118,-10.81,
+2016-06-01 20:20:14,3118,-10.8091,
+2016-06-01 20:35:10,3117,-10.8416,
+2016-06-01 20:50:06,3116,-10.8406,
+2016-06-01 21:05:01,3116,-10.8406,
+2016-06-01 21:19:57,3115,-10.8406,
+2016-06-01 21:34:53,3115,-10.8406,
+2016-06-01 21:49:48,3114,-10.8406,
+2016-06-01 22:04:44,3114,-10.8722,
+2016-06-01 22:19:40,3114,-10.8722,
+2016-06-01 22:34:35,3113,-10.8722,
+2016-06-01 22:49:31,3113,-10.8406,
+2016-06-01 23:04:27,3111,-10.8712,
+2016-06-01 23:19:22,3110,-10.8712,
+2016-06-01 23:34:18,3110,-10.9029,
+2016-06-01 23:49:13,3110,-10.8712,
+2016-06-02 01:03:51,3108,-10.9029,
+2016-06-02 01:18:47,3108,-10.9029,
+2016-06-02 01:33:42,3108,-10.9029,
+2016-06-02 01:48:38,3107,-10.9029,
+2016-06-02 02:03:33,3107,-10.9029,
+2016-06-02 02:18:29,3107,-10.9029,
+2016-06-02 02:33:25,3107,-10.9029,
+2016-06-02 02:48:20,3107,-10.9337,
+2016-06-02 03:03:16,3106,-10.9029,
+2016-06-02 03:18:11,3106,-10.9337,
+2016-06-02 03:33:07,3106,-10.9029,
+2016-06-02 03:48:02,3106,-10.9337,
+2016-06-02 04:02:57,3106,-10.9347,
+2016-06-02 04:17:53,3106,-10.9029,
+2016-06-02 04:32:48,3106,-10.9337,
+2016-06-02 04:47:44,3106,-10.9337,
+2016-06-02 05:02:39,3106,-10.9337,
+2016-06-02 05:17:35,3106,-10.9337,
+2016-06-02 05:32:31,3106,-10.9337,
+2016-06-02 05:47:26,3106,-10.9337,
+2016-06-02 06:02:22,3106,-10.9337,
+2016-06-02 06:17:17,3105,-10.902,
+2016-06-02 06:32:13,3106,-10.9029,
+2016-06-02 06:47:08,3106,-10.9337,
+2016-06-02 07:02:04,3107,-10.9029,
+2016-06-02 07:16:59,3107,-10.9029,
+2016-06-02 07:32:02,3107,-10.9029,
+2016-06-02 07:46:57,3109,-10.9029,
+2016-06-02 08:01:53,3107,-10.8081,
+2016-06-02 08:16:48,3109,-10.8722,
+2016-06-02 08:31:44,3109,-10.8377,
+2016-06-02 08:46:39,3109,-11.0325,
+2016-06-02 09:01:35,3109,-10.9087,
+2016-06-02 09:16:30,3113,-10.8406,
+2016-06-02 09:31:26,3114,-10.8712,
+2016-06-02 09:46:21,3114,-10.8712,
+2016-06-02 10:01:17,3114,-10.8712,
+2016-06-02 10:16:13,3114,-10.8712,
+2016-06-02 10:31:08,3114,-10.8712,
+2016-06-02 10:46:04,3115,-10.8712,
+2016-06-02 11:00:59,3114,-10.8722,
+2016-06-02 11:15:55,3115,-10.8712,-4.8577
+2016-06-02 11:30:50,3130,-10.3006,74.4192
+2016-06-02 11:45:46,3132,-10.4194,26.8525
+2016-06-02 12:00:42,3134,-10.4194,27.4968
+2016-06-02 12:15:37,3132,-10.4796,27.0488
+2016-06-02 12:30:33,3133,-10.4796,27.1474
+2016-06-02 12:45:28,3135,-10.449000000000002,27.3993
+2016-06-02 13:00:24,3136,-10.4796,27.3993
+2016-06-02 13:15:19,3136,-10.4796,27.4992
+2016-06-02 13:30:15,3138,-10.449000000000002,27.5995
+2016-06-02 13:45:11,3138,-10.449000000000002,27.4017
+2016-06-02 14:00:06,3139,-10.4796,27.7001
+2016-06-02 14:15:02,3140,-10.4796,27.3993
+2016-06-02 14:29:58,3139,-10.4787,27.0056
+2016-06-02 14:44:53,3141,-10.4787,27.008000000000006
+2016-06-02 14:59:49,3142,-10.4787,26.9074
+2016-06-02 15:14:45,3142,-10.4481,26.9576
+2016-06-02 15:29:41,3143,-10.4787,26.862
+2016-06-02 15:44:37,3143,-10.4796,26.9576
+2016-06-02 15:59:32,3143,-10.4787,27.008000000000006
+2016-06-02 16:14:28,3143,-10.4787,27.008000000000006
+2016-06-02 16:29:24,3143,-10.4787,26.9576
+2016-06-02 16:44:20,3143,-10.4796,26.96
+2016-06-02 16:59:16,3143,-10.4787,27.0056
+2016-06-02 17:14:11,3144,-10.4481,27.1547
+2016-06-02 17:29:07,3144,-10.4787,27.056
+2016-06-02 17:44:03,3143,-10.4787,27.0056
+2016-06-02 17:58:59,3143,-10.4787,27.056
+2016-06-02 18:13:55,3143,-10.4787,27.1041
+2016-06-02 18:28:50,3141,-10.4787,26.9576
+2016-06-02 18:43:46,3140,-10.4778,26.9576
+2016-06-02 18:58:42,3138,-10.4778,26.8096
+2016-06-02 19:13:38,3138,-10.5085,26.7691
+2016-06-02 19:28:33,3137,-10.4778,26.812
+2016-06-02 19:43:29,3136,-10.5085,26.7667
+2016-06-02 19:58:25,3136,-10.5085,27.0104
+2016-06-02 20:13:21,3135,-10.5085,26.96
+2016-06-02 20:28:16,3134,-10.5076,26.9672
+2016-06-02 20:43:12,3132,-10.5383,26.96
+2016-06-02 20:58:08,3131,-10.5383,26.8644
+2016-06-02 21:13:04,3130,-10.5692,26.9122
+2016-06-02 21:27:59,3128,-10.5682,26.9122
+2016-06-02 21:42:55,3128,-10.5682,26.8692
+2016-06-02 21:57:51,3127,-10.5682,26.9672
+2016-06-02 22:12:46,3126,-10.5682,26.7739
+2016-06-02 22:27:42,3125,-10.5682,26.8692
+2016-06-02 22:42:38,3124,-10.5682,26.8692
+2016-06-02 22:57:33,3123,-10.5682,26.8716
+2016-06-02 23:12:29,3123,-10.5682,26.7739
+2016-06-02 23:27:24,3123,-10.5982,26.8215
+2016-06-02 23:42:20,3122,-10.5982,26.8191
+2016-06-02 23:57:16,3122,-10.5982,26.7739
+2016-06-03 00:12:11,3122,-10.5982,26.7715
+2016-06-03 00:27:07,3121,-10.5982,26.7739
+2016-06-03 00:42:03,3121,-10.5982,26.8191
+2016-06-03 00:56:58,3121,-10.5982,26.6765
+2016-06-03 01:11:54,3121,-10.5982,26.7691
+2016-06-03 01:26:49,3120,-10.5982,26.6765
+2016-06-03 01:41:45,3120,-10.5982,26.6268
+2016-06-03 01:56:40,3120,-10.5982,26.724
+2016-06-03 02:11:36,3120,-10.5982,26.8215
+2016-06-03 02:26:31,3119,-10.5982,26.6315
+2016-06-03 02:41:26,3119,-10.5982,26.5795
+2016-06-03 02:56:22,3119,-10.5982,26.5842
+2016-06-03 03:11:17,3119,-10.5982,26.6315
+2016-06-03 03:26:13,3119,-10.5982,26.6292
+2016-06-03 03:41:08,3119,-10.5982,26.5819
+2016-06-03 03:56:04,3119,-10.5982,26.6268
+2016-06-03 04:10:59,3119,-10.5982,26.6315
+2016-06-03 04:25:55,3119,-10.5982,26.4852
+2016-06-03 04:40:50,3119,-10.5982,26.5819
+2016-06-03 04:55:46,3120,-10.5982,26.5323
+2016-06-03 05:10:41,3120,-10.5982,26.6789
+2016-06-03 05:25:37,3120,-10.5982,26.6315
+2016-06-03 05:40:32,3120,-10.5982,26.5795
+2016-06-03 05:55:28,3120,-10.5982,26.6315
+2016-06-03 06:10:23,3120,-10.5982,26.5819
+2016-06-03 06:25:19,3121,-10.5982,26.7763
+2016-06-03 06:40:15,3121,-10.5982,26.7263
+2016-06-03 06:55:10,3122,-10.5982,26.8239
+2016-06-03 07:10:05,3121,-10.5982,26.3396
+2016-06-03 07:25:01,3122,-10.5982,26.1948
+2016-06-03 07:39:56,3122,-10.5982,26.2438
+2016-06-03 07:54:52,3123,-10.5982,26.1948
+2016-06-03 08:09:47,3123,-10.5673,26.0995
+2016-06-03 08:24:43,3125,-10.5982,26.2415
+2016-06-03 08:39:38,3127,-10.5682,26.2438
+2016-06-03 08:54:34,3128,-10.5682,26.1506
+2016-06-03 09:09:30,3128,-10.5682,26.2484
+2016-06-03 09:24:25,3130,-10.5682,26.1995
+2016-06-03 09:39:21,3131,-10.5383,26.2952
+2016-06-03 09:54:17,3133,-10.5383,26.2018
+2016-06-03 10:09:12,3135,-10.5076,26.2461
+2016-06-03 10:24:08,3136,-10.5085,26.342
+2016-06-03 10:39:04,3136,-10.5085,26.2952
+2016-06-03 10:54:00,3138,-10.5085,26.2484
+2016-06-03 11:08:56,3138,-10.4778,26.342
+2016-06-03 11:23:51,3139,-10.4778,26.2952
+2016-06-03 11:38:47,3141,-10.4778,26.3912
+2016-06-03 11:53:43,3142,-10.4481,26.4381
+2016-06-03 12:08:39,3143,-10.4787,26.3022
+2016-06-03 12:23:35,3143,-10.4787,26.3045
+2016-06-03 12:38:31,3145,-10.4787,26.2998
+2016-06-03 12:53:27,3145,-10.4481,26.2998
+2016-06-03 13:08:23,3146,-10.4481,26.2998
+2016-06-03 13:23:19,3146,-10.4175,26.349
+2016-06-03 13:38:15,3148,-10.4185,26.3959
+2016-06-03 13:53:11,3148,-10.4185,26.2088
+2016-06-03 14:08:07,3149,-10.4185,26.2554
+2016-06-03 14:23:03,3150,-10.4185,26.2554
+2016-06-03 14:37:59,3150,-10.4194,26.2554
+2016-06-03 14:52:55,3151,-10.4185,26.5842
+2016-06-03 15:07:51,3152,-10.4194,26.641
+2016-06-03 15:22:47,3152,-10.3889,26.596
+2016-06-03 15:37:43,3152,-10.4185,26.067
+2016-06-03 15:52:39,3152,-10.3889,26.6528
+2016-06-03 16:07:36,3152,-10.3889,26.596
+2016-06-03 16:22:32,3152,-10.3889,26.5017
+2016-06-03 16:37:35,3152,-10.3889,26.5559
+2016-06-03 16:52:31,3151,-10.4194,26.118
+2016-06-03 17:07:27,3151,-10.4185,26.0739
+2016-06-03 17:22:23,3150,-10.4185,26.1645
+2016-06-03 17:37:19,3150,-10.4185,26.0739
+2016-06-03 17:52:15,3150,-10.3889,26.5559
+2016-06-03 18:07:11,3150,-10.3889,26.4546
+2016-06-03 18:22:08,3150,-10.388,26.4146
+2016-06-03 18:37:04,3149,-10.388,26.4616
+2016-06-03 18:52:00,3148,-10.388,26.5087
+2016-06-03 19:06:56,3146,-10.4175,26.4123
+2016-06-03 19:21:52,3145,-10.4175,26.4616
+2016-06-03 19:36:48,3145,-10.4175,26.4616
+2016-06-03 19:51:51,3143,-10.4175,26.4123
+2016-06-03 20:06:46,3143,-10.4175,26.4616
+2016-06-03 20:21:42,3140,-10.4472,25.8406
+2016-06-03 20:36:38,3138,-10.4472,25.8406
+2016-06-03 20:51:34,3138,-10.4778,25.8867
+2016-06-03 21:06:30,3136,-10.4778,25.7465
+2016-06-03 21:21:26,3135,-10.4778,25.8406
+2016-06-03 21:36:22,3134,-10.4769,25.836
+2016-06-03 21:51:18,3132,-10.5085,25.6984
+2016-06-03 22:06:14,3131,-10.4769,25.7465
+2016-06-03 22:21:10,3130,-10.5076,25.8406
+2016-06-03 22:36:05,3129,-10.5076,25.7465
+2016-06-03 22:51:01,3128,-10.5076,25.7465
+2016-06-03 23:05:57,3128,-10.5066,25.7947
+2016-06-03 23:20:52,3127,-10.5066,25.7465
+2016-06-03 23:35:48,3126,-10.5066,25.8406
+2016-06-03 23:50:44,3124,-10.5374,25.7007
+2016-06-04 00:05:39,3123,-10.5374,25.7947
+2016-06-04 00:20:35,3123,-10.5374,25.7007
+2016-06-04 00:35:30,3122,-10.5682,25.7007
+2016-06-04 00:50:26,3122,-10.5673,25.607
+2016-06-04 01:05:22,3122,-10.5682,25.7465
+2016-06-04 01:20:17,3121,-10.5682,25.8406
+2016-06-04 01:35:13,3122,-10.5682,26.0786
+2016-06-04 01:50:08,3121,-10.5673,26.0786
+2016-06-04 02:05:03,3121,-10.5673,26.0786
+2016-06-04 02:19:59,3119,-10.5673,25.7465
+2016-06-04 02:34:54,3119,-10.5673,25.8406
+2016-06-04 02:49:50,3118,-10.5673,25.8406
+2016-06-04 03:04:46,3118,-10.5982,25.8406
+2016-06-04 03:19:41,3117,-10.5982,25.797
+2016-06-04 03:34:37,3117,-10.5982,25.8429
+2016-06-04 03:49:32,3116,-10.5982,25.8867
+2016-06-04 04:04:28,3116,-10.5673,26.1343
+2016-06-04 04:19:23,3116,-10.5973,26.0345
+2016-06-04 04:34:19,3116,-10.5982,26.1738
+2016-06-04 04:49:14,3115,-10.5982,26.2204
+2016-06-04 05:04:09,3115,-10.5982,26.2764
+2016-06-04 05:19:05,3115,-10.5973,26.132
+2016-06-04 05:34:00,3115,-10.5973,25.9813
+2016-06-04 05:48:55,3115,-10.5982,26.4146
+2016-06-04 06:03:51,3114,-10.5973,26.1273
+2016-06-04 06:18:46,3115,-10.5973,26.3185
+2016-06-04 06:33:41,3115,-10.5973,26.2718
+2016-06-04 06:48:37,3115,-10.5982,26.3677
+2016-06-04 07:03:32,3115,-10.5982,26.1273
+2016-06-04 07:18:28,3116,-10.5982,26.2718
+2016-06-04 07:33:23,3116,-10.5673,26.3677
+2016-06-04 07:48:18,3118,-10.5673,26.4146
+2016-06-04 08:03:14,3118,-10.5673,26.4146
+2016-06-04 08:18:09,3120,-10.5673,26.5111
+2016-06-04 08:33:05,3121,-10.5682,26.5606
+2016-06-04 08:48:00,3122,-10.5374,26.6576
+2016-06-04 09:02:56,3123,-10.5066,26.8024
+2016-06-04 09:17:51,3125,-10.5066,26.6576
+2016-06-04 09:32:47,3127,-10.5076,26.7073
+2016-06-04 09:47:43,3129,-10.5076,26.6079
+2016-06-04 10:02:38,3131,-10.5076,26.5158
+2016-06-04 10:17:34,3133,-10.4778,26.5229
+2016-06-04 10:32:30,3136,-10.4778,26.5701
+2016-06-04 10:47:25,3137,-10.4778,26.6599
+2016-06-04 11:02:21,3138,-10.4472,26.6599
+2016-06-04 11:17:17,3140,-10.4472,26.6623
+2016-06-04 11:32:13,3143,-10.4481,26.6599
+2016-06-04 11:47:08,3143,-10.4175,26.5677
+2016-06-04 12:02:04,3145,-10.4175,26.5748
+2016-06-04 12:17:00,3146,-10.4175,26.615
+2016-06-04 12:31:56,3148,-10.4175,26.2788
+2016-06-04 12:46:53,3149,-10.388,26.5748
+2016-06-04 13:01:49,3150,-10.388,26.4781
+2016-06-04 13:16:45,3150,-10.388,26.6718
+2016-06-04 13:31:41,3150,-10.388,26.622
+2016-06-04 13:46:37,3151,-10.3889,26.6623
+2016-06-04 14:01:34,3151,-10.3889,26.4781
+2016-06-04 14:16:30,3151,-10.3585,26.4334
+2016-06-04 14:31:26,3151,-10.3585,26.4734
+2016-06-04 14:46:22,3151,-10.3585,26.4734
+2016-06-04 15:01:18,3151,-10.3889,26.2858
+2016-06-04 15:16:14,3152,-10.3585,26.3841
+2016-06-04 15:31:11,3152,-10.3585,26.4334
+2016-06-04 15:46:07,3152,-10.3585,26.4264
+2016-06-04 16:01:03,3152,-10.3585,26.2391
+2016-06-04 16:15:59,3152,-10.3585,26.4334
+2016-06-04 16:30:55,3152,-10.3585,26.4334
+2016-06-04 16:45:51,3152,-10.3585,26.4334
+2016-06-04 17:00:47,3152,-10.3585,26.3841
+2016-06-04 17:15:43,3152,-10.3585,26.4805
+2016-06-04 17:30:39,3152,-10.329,27.0656
+2016-06-04 17:45:36,3152,-10.329,27.0656
+2016-06-04 18:00:32,3152,-10.329,27.068
+2016-06-04 18:15:28,3152,-10.3281,27.0656
+2016-06-04 18:30:24,3151,-10.3585,26.9696
+2016-06-04 18:45:20,3150,-10.3585,26.9194
+2016-06-04 19:00:16,3150,-10.3585,26.9672
+2016-06-04 19:15:12,3149,-10.3585,27.0176
+2016-06-04 19:30:08,3148,-10.3585,26.9218
+2016-06-04 19:45:05,3147,-10.3585,27.0152
+2016-06-04 20:00:01,3145,-10.3576,26.917
+2016-06-04 20:14:57,3144,-10.3576,26.917
+2016-06-04 20:29:53,3143,-10.3871,26.8191
+2016-06-04 20:44:49,3142,-10.3871,26.8692
+2016-06-04 20:59:45,3139,-10.3871,26.7715
+2016-06-04 21:14:41,3138,-10.3871,26.8692
+2016-06-04 21:29:37,3138,-10.4175,26.6741
+2016-06-04 21:44:33,3136,-10.4175,26.7715
+2016-06-04 21:59:29,3135,-10.4175,26.8191
+2016-06-04 22:14:24,3135,-10.4166,26.724
+2016-06-04 22:29:20,3133,-10.4166,26.6741
+2016-06-04 22:44:16,3132,-10.4166,26.8215
+2016-06-04 22:59:12,3131,-10.4166,26.7715
+2016-06-04 23:14:08,3130,-10.4166,26.724
+2016-06-04 23:29:04,3129,-10.4166,26.8215
+2016-06-04 23:44:00,3128,-10.4472,26.8215
+2016-06-04 23:58:56,3128,-10.4462,26.8191
+2016-06-05 00:13:52,3127,-10.4769,26.7715
+2016-06-05 00:28:48,3127,-10.4769,26.724
+2016-06-05 00:43:43,3127,-10.4462,26.7715
+2016-06-05 00:58:39,3126,-10.4778,26.7786
+2016-06-05 01:13:39,3124,-10.4769,26.724
+2016-06-05 01:28:35,3124,-10.4769,26.724
+2016-06-05 01:43:31,3123,-10.4769,26.8191
+2016-06-05 01:58:27,3123,-10.4769,26.6765
+2016-06-05 02:13:22,3123,-10.4769,26.9218
+2016-06-05 02:28:18,3123,-10.4472,27.0272
+2016-06-05 02:43:14,3123,-10.4769,27.0272
+2016-06-05 02:58:10,3122,-10.4778,27.0296
+2016-06-05 03:13:05,3122,-10.4769,26.874
+2016-06-05 03:28:01,3122,-10.4759,26.8811
+2016-06-05 03:42:57,3121,-10.4769,26.8811
+2016-06-05 03:57:53,3121,-10.4759,26.8811
+2016-06-05 04:12:49,3121,-10.4759,27.0272
+2016-06-05 04:27:45,3120,-10.4759,26.9792
+2016-06-05 04:42:40,3120,-10.4759,26.9768
+2016-06-05 04:57:36,3119,-10.4759,26.9289
+2016-06-05 05:12:32,3119,-10.4759,26.9313
+2016-06-05 05:27:27,3118,-10.4759,26.831
+2016-06-05 05:42:23,3118,-10.4759,26.9289
+2016-06-05 05:57:18,3118,-10.4759,26.8811
+2016-06-05 06:12:14,3119,-10.4759,26.9289
+2016-06-05 06:27:10,3118,-10.4759,26.9768
+2016-06-05 06:42:05,3119,-10.4759,27.0272
+2016-06-05 06:57:01,3120,-10.4759,27.0776
+2016-06-05 07:11:57,3121,-10.4759,27.0776
+2016-06-05 07:26:52,3121,-10.4769,27.174
+2016-06-05 07:41:48,3122,-10.4462,27.0776
+2016-06-05 07:56:44,3123,-10.4472,27.2271
+2016-06-05 08:11:40,3123,-10.4166,27.2755
+2016-06-05 08:26:36,3124,-10.4166,27.2271
+2016-06-05 08:41:31,3127,-10.4166,27.375
+2016-06-05 08:56:27,3128,-10.4166,27.375
+2016-06-05 09:11:23,3130,-10.4166,27.375
+2016-06-05 09:26:19,3131,-10.3871,27.2296
+2016-06-05 09:41:15,3134,-10.3871,27.278
+2016-06-05 09:56:10,3136,-10.3871,27.278
+2016-06-05 10:11:06,3138,-10.3576,27.2296
+2016-06-05 10:26:02,3139,-10.3576,27.3265
+2016-06-05 10:40:58,3143,-10.3576,27.4797
+2016-06-05 10:55:54,3144,-10.3281,27.4797
+2016-06-05 11:10:50,3145,-10.3272,27.5237
+2016-06-05 11:25:46,3148,-10.3281,27.4334
+2016-06-05 11:40:42,3150,-10.2988,27.4822
+2016-06-05 11:55:38,3150,-10.2988,27.4822
+2016-06-05 12:10:34,3152,-10.2988,27.5824
+2016-06-05 12:25:30,3152,-10.2686,27.58
+2016-06-05 12:40:26,3155,-10.2988,27.6339
+2016-06-05 12:55:22,3155,-10.2686,27.6314
+2016-06-05 13:10:18,3156,-10.2686,27.6339
+2016-06-05 13:25:14,3157,-10.2695,27.6363
+2016-06-05 13:40:10,3159,-10.2393,27.7347
+2016-06-05 13:55:14,3160,-10.2393,27.683000000000003
+2016-06-05 14:10:10,3160,-10.2393,27.7815
+2016-06-05 14:25:06,3161,-10.2393,27.784
+2016-06-05 14:40:02,3162,-10.2402,27.8358
+2016-06-05 14:54:58,3162,-10.2393,27.6363
+2016-06-05 15:09:55,3163,-10.2101,27.6388
+2016-06-05 15:24:51,3163,-10.2101,27.6855
+2016-06-05 15:39:47,3163,-10.2402,27.6879
+2016-06-05 15:54:43,3163,-10.2101,27.6363
+2016-06-05 16:09:40,3163,-10.2101,27.5873
+2016-06-05 16:24:36,3163,-10.2101,27.6363
+2016-06-05 16:39:32,3163,-10.2101,27.6363
+2016-06-05 16:54:28,3163,-10.2402,27.6879
+2016-06-05 17:09:24,3162,-10.1801,27.6879
+2016-06-05 17:24:21,3162,-10.2101,27.5359
+2016-06-05 17:39:17,3161,-10.2092,27.5898
+2016-06-05 17:54:13,3160,-10.2402,27.4968
+2016-06-05 18:09:09,3160,-10.1801,27.5457
+2016-06-05 18:24:05,3160,-10.2092,27.4968
+2016-06-05 18:39:02,3158,-10.2092,27.4456
+2016-06-05 18:53:58,3157,-10.2092,27.4968
+2016-06-05 19:08:54,3156,-10.2393,27.4481
+2016-06-05 19:23:51,3155,-10.2393,27.2974
+2016-06-05 19:38:47,3153,-10.2393,27.3945
+2016-06-05 19:53:43,3152,-10.2393,27.4968
+2016-06-05 20:08:39,3151,-10.2384,27.4968
+2016-06-05 20:23:36,3150,-10.2384,27.5482
+2016-06-05 20:38:32,3149,-10.2384,27.4968
+2016-06-05 20:53:35,3147,-10.2686,27.5482
+2016-06-05 21:08:31,3145,-10.2686,27.5457
+2016-06-05 21:23:28,3144,-10.2686,27.5457
+2016-06-05 21:38:24,3143,-10.2988,27.5042
+2016-06-05 21:53:27,3142,-10.2677,27.5457
+2016-06-05 22:08:23,3140,-10.2979,27.4993
+2016-06-05 22:23:19,3138,-10.2979,27.5457
+2016-06-05 22:38:15,3138,-10.2979,27.4481
+2016-06-05 22:53:11,3136,-10.2979,27.4456
+2016-06-05 23:08:07,3136,-10.2969,27.4042
+2016-06-05 23:23:03,3135,-10.2969,27.4529
+2016-06-05 23:37:59,3134,-10.3272,27.4554
+2016-06-05 23:52:55,3133,-10.3272,27.4554
+2016-06-06 00:07:51,3132,-10.3272,27.4529
+2016-06-06 00:22:46,3131,-10.3567,27.4554
+2016-06-06 00:37:41,3130,-10.3567,27.4529
+2016-06-06 00:52:45,3130,-10.3263,27.4529
+2016-06-06 01:07:41,3128,-10.3567,27.4042
+2016-06-06 01:22:36,3128,-10.3567,27.4529
+2016-06-06 01:37:32,3128,-10.3567,27.4529
+2016-06-06 01:52:28,3127,-10.3567,27.4529
+2016-06-06 02:07:24,3127,-10.3567,27.4505
+2016-06-06 02:22:19,3125,-10.3567,27.4018
+2016-06-06 02:37:15,3125,-10.3557,27.4529
+2016-06-06 02:52:10,3124,-10.3861,27.4042
+2016-06-06 03:07:06,3123,-10.3861,27.4042
+2016-06-06 03:22:01,3123,-10.3861,27.4018
+2016-06-06 03:36:57,3123,-10.3861,27.4042
+2016-06-06 03:51:53,3122,-10.3861,27.4554
+2016-06-06 04:06:48,3122,-10.3861,27.4018
+2016-06-06 04:21:44,3122,-10.3861,27.4042
+2016-06-06 04:36:39,3121,-10.3861,27.4018
+2016-06-06 04:51:35,3121,-10.3861,27.4018
+2016-06-06 05:06:31,3120,-10.3861,27.4042
+2016-06-06 05:21:27,3120,-10.3861,27.4042
+2016-06-06 05:36:22,3119,-10.3852,27.3532
+2016-06-06 05:51:18,3119,-10.4166,27.4529
+2016-06-06 06:06:13,3118,-10.4157,27.2538
+2016-06-06 06:21:09,3118,-10.4166,27.3605
+2016-06-06 06:36:04,3119,-10.3861,27.4042
+2016-06-06 06:51:00,3119,-10.3861,27.4115
+2016-06-06 07:05:55,3119,-10.3861,27.4529
+2016-06-06 07:20:51,3120,-10.3861,27.4505
+2016-06-06 07:35:47,3120,-10.3861,27.3022
+2016-06-06 07:50:42,3121,-10.3861,27.3532
+2016-06-06 08:05:38,3122,-10.3861,27.4529
+2016-06-06 08:20:33,3123,-10.3861,27.4554
+2016-06-06 08:35:29,3123,-10.3861,27.5091
+2016-06-06 08:50:24,3125,-10.3557,27.5115
+2016-06-06 09:05:20,3127,-10.3567,27.5531
+2016-06-06 09:20:16,3128,-10.3567,27.5604
+2016-06-06 09:35:12,3130,-10.3567,27.5604
+2016-06-06 09:50:07,3132,-10.3576,27.5164
+2016-06-06 10:05:03,3135,-10.3272,27.5653
+2016-06-06 10:19:59,3136,-10.2979,27.5653
+2016-06-06 10:34:55,3138,-10.2979,27.6168
+2016-06-06 10:49:58,3138,-10.2677,27.6658
+2016-06-06 11:04:54,3141,-10.2677,27.6658
+2016-06-06 11:19:50,3143,-10.2677,27.6634
+2016-06-06 11:34:45,3143,-10.2686,27.715
+2016-06-06 11:49:41,3145,-10.2384,27.7175
+2016-06-06 12:04:37,3146,-10.2384,27.6168
+2016-06-06 12:19:33,3148,-10.2384,27.6168
+2016-06-06 12:34:29,3149,-10.2384,27.6192
+2016-06-06 12:49:25,3150,-10.2384,27.6168
+2016-06-06 13:04:22,3150,-10.2384,27.6168
+2016-06-06 13:19:18,3151,-10.2384,27.6168
+2016-06-06 13:34:14,3152,-10.2092,27.821
+2016-06-06 13:49:10,3152,-10.1792,27.821
+2016-06-06 14:04:13,3153,-10.1792,27.821
+2016-06-06 14:19:09,3154,-10.1792,27.8705
+2016-06-06 14:34:05,3155,-10.1792,27.821
+2016-06-06 14:49:01,3155,-10.1801,27.821
+2016-06-06 15:03:57,3156,-10.1792,27.8729
+2016-06-06 15:18:53,3156,-10.1792,27.8729
+2016-06-06 15:33:49,3156,-10.1792,27.8729
+2016-06-06 15:48:45,3156,-10.1792,27.7224
+2016-06-06 16:03:41,3156,-10.1792,27.7224
+2016-06-06 16:18:38,3156,-10.1792,27.7224
+2016-06-06 16:33:34,3156,-10.1792,27.6732
+2016-06-06 16:48:30,3155,-10.1792,27.6732
+2016-06-06 17:03:26,3155,-10.1792,27.6732
+2016-06-06 17:18:22,3154,-10.1792,27.6241
+2016-06-06 17:33:18,3153,-10.1792,27.6708
+2016-06-06 17:48:14,3152,-10.1792,27.6732
+2016-06-06 18:03:10,3152,-10.1792,27.6732
+2016-06-06 18:18:06,3152,-10.1792,27.6732
+2016-06-06 18:33:02,3150,-10.2092,27.6217
+2016-06-06 18:47:58,3150,-10.2083,27.6217
+2016-06-06 19:02:54,3149,-10.2083,27.5727
+2016-06-06 19:17:50,3147,-10.2083,27.6217
+2016-06-06 19:32:46,3146,-10.2384,27.5727
+2016-06-06 19:47:42,3145,-10.2384,27.5237
+2016-06-06 20:02:38,3143,-10.2384,27.5727
+2016-06-06 20:17:34,3142,-10.2677,27.5213
+2016-06-06 20:32:30,3140,-10.2677,27.5237
+2016-06-06 20:47:26,3138,-10.2677,27.5237
+2016-06-06 21:02:21,3138,-10.2677,27.5213
+2016-06-06 21:17:17,3136,-10.2677,27.4213
+2016-06-06 21:32:13,3135,-10.2667,27.4213
+2016-06-06 21:47:09,3134,-10.2969,27.4213
+2016-06-06 22:02:04,3132,-10.2969,27.4213
+2016-06-06 22:17:00,3131,-10.296,27.3726
+2016-06-06 22:31:56,3130,-10.296,27.4213
+2016-06-06 22:46:51,3128,-10.3263,27.4213
+2016-06-06 23:01:47,3128,-10.3263,27.3726
+2016-06-06 23:16:42,3127,-10.3263,27.4262
+2016-06-06 23:31:38,3126,-10.3263,27.3216
+2016-06-06 23:46:34,3124,-10.3263,27.3313
+2016-06-07 00:01:29,3123,-10.3557,27.3241
+2016-06-07 00:16:25,3123,-10.3557,27.4213
+2016-06-07 00:31:21,3123,-10.3557,27.4286
+2016-06-07 00:46:16,3122,-10.3557,27.4213
+2016-06-07 01:01:12,3122,-10.3557,27.4286
+2016-06-07 01:16:07,3121,-10.3861,27.4286
+2016-06-07 01:31:03,3121,-10.3861,27.4213
+2016-06-07 01:45:59,3120,-10.3852,27.4286
+2016-06-07 02:00:54,3118,-10.3852,27.3799
+2016-06-07 02:15:50,3118,-10.3852,27.3799
+2016-06-07 02:30:46,3116,-10.4148,26.9816
+2016-06-07 02:45:41,3116,-10.4166,27.3241
+2016-06-07 03:00:37,3115,-10.3843,27.3313
+2016-06-07 03:15:32,3115,-10.4148,27.3313
+2016-06-07 03:30:28,3114,-10.4157,27.3289
+2016-06-07 03:45:23,3114,-10.4139,27.3799
+2016-06-07 04:00:19,3113,-10.4148,27.3289
+2016-06-07 04:15:14,3112,-10.4148,27.3313
+2016-06-07 04:30:10,3110,-10.4148,27.2829
+2016-06-07 04:45:05,3110,-10.4741,27.3799
+2016-06-07 05:00:01,3109,-10.4157,27.2829
+2016-06-07 05:14:56,3109,-10.4157,27.3313
+2016-06-07 05:29:51,3109,-10.4778,27.2853
+2016-06-07 05:44:47,3109,-10.5916,27.2707
+2016-06-07 05:59:42,3109,-10.4148,27.6781
+2016-06-07 06:14:38,3107,-10.5057,26.7834
+2016-06-07 06:29:33,3108,-10.4778,27.3241
+2016-06-07 06:44:29,3109,-10.4139,27.4773
+2016-06-07 06:59:25,3109,-10.4139,27.536
+2016-06-07 07:14:20,3109,-10.4148,27.5384
+2016-06-07 07:29:16,3110,-10.4148,27.629
+2016-06-07 07:44:12,3112,-10.4157,27.4286
+2016-06-07 07:59:08,3113,-10.3502,27.3848
+2016-06-07 08:14:15,3114,-10.4148,27.431
+2016-06-07 08:29:21,3115,-10.4453,26.984
+2016-06-07 08:44:23,3115,-10.4759,26.641
+2016-06-07 08:59:19,3116,-10.4759,26.4922
+2016-06-07 09:14:15,3119,-10.4759,26.5464
+2016-06-07 09:29:11,3121,-10.4453,26.5417
+2016-06-07 09:44:07,3122,-10.4453,26.4052
+2016-06-07 09:59:02,3124,-10.4157,26.4969
+2016-06-07 10:14:09,3127,-10.4157,26.4451
+2016-06-07 10:29:16,3131,-10.3567,27.2417
+2016-06-07 10:44:18,3134,-10.3567,27.2901
+2016-06-07 10:59:14,3136,-10.3263,27.2829
+2016-06-07 11:14:10,3136,-10.3567,26.8955
+2016-06-07 11:29:06,3138,-10.2969,27.0921
+2016-06-07 11:44:03,3140,-10.2969,26.9936
+2016-06-07 11:58:58,3142,-10.2969,26.984
+2016-06-07 12:14:05,3143,-10.3263,26.5464
+2016-06-07 12:29:13,3143,-10.3567,26.1133
+2016-06-07 12:44:15,3144,-10.3263,26.3044
+2016-06-07 12:59:11,3146,-10.2969,26.4545
+2016-06-07 13:14:07,3148,-10.2677,26.8406
+2016-06-07 13:29:03,3147,-10.2969,26.4029
+2016-06-07 13:43:59,3147,-10.2969,26.3044
+2016-06-07 13:58:55,3148,-10.2979,26.3068
+2016-06-07 14:14:02,3149,-10.2979,26.1644
+2016-06-07 14:29:10,3150,-10.2979,26.2157
+2016-06-07 14:44:24,3150,-10.2677,26.4052
+2016-06-07 14:59:20,3150,-10.2677,26.5016
+2016-06-07 15:14:16,3150,-10.2979,26.3091
+2016-06-07 15:29:13,3150,-10.2677,26.1644
+2016-06-07 15:44:09,3150,-10.2677,26.3091
+2016-06-07 15:59:05,3150,-10.2677,26.26
+2016-06-07 16:14:02,3150,-10.2677,26.2133
+2016-06-07 16:29:09,3150,-10.2979,26.211
+2016-06-07 16:44:16,3150,-10.2677,26.3091
+2016-06-07 16:59:18,3150,-10.2677,26.4052
+2016-06-07 17:14:15,3150,-10.2979,26.0738
+2016-06-07 17:29:11,3149,-10.2979,26.211
+2016-06-07 17:44:07,3151,-10.1783,27.2926
+2016-06-07 17:59:04,3150,-10.2083,27.2417
+2016-06-07 18:14:00,3150,-10.1783,27.3897
+2016-06-07 18:29:07,3150,-10.1783,27.3897
+2016-06-07 18:44:14,3144,-10.2969,26.211
+2016-06-07 18:59:16,3147,-10.1783,27.4993
+2016-06-07 19:14:12,3142,-10.296,26.1644
+2016-06-07 19:29:09,3147,-10.2092,27.997
+2016-06-07 19:44:05,3139,-10.296,26.3138
+2016-06-07 19:59:01,3138,-10.3263,26.1644
+2016-06-07 20:13:57,3136,-10.3567,26.1226
+2016-06-07 20:29:04,3136,-10.3263,26.3138
+2016-06-07 20:44:11,3134,-10.3557,26.0274
+2016-06-07 20:59:13,3132,-10.3557,26.0205
+2016-06-07 21:14:10,3131,-10.3557,26.0645
+2016-06-07 21:29:06,3129,-10.3557,25.9788
+2016-06-07 21:44:02,3128,-10.3852,26.1156
+2016-06-07 21:58:58,3127,-10.3861,25.928
+2016-06-07 22:13:54,3127,-10.3861,26.0228
+2016-06-07 22:29:00,3125,-10.4157,25.9719
+2016-06-07 22:44:07,3129,-10.2677,27.5923
+2016-06-07 22:59:09,3123,-10.4157,25.8313
+2016-06-07 23:14:05,3122,-10.4148,25.8382
+2016-06-07 23:29:02,3122,-10.3852,26.218000000000004
+2016-06-07 23:43:58,3122,-10.4148,26.0297
+2016-06-07 23:58:54,3121,-10.4148,25.9326
+2016-06-08 00:13:49,3120,-10.4453,25.8382
+2016-06-08 00:28:56,3121,-10.3852,26.6528
+2016-06-08 00:44:03,3119,-10.4148,26.2203
+2016-06-08 00:59:05,3116,-10.4759,25.7899
+2016-06-08 01:14:01,3113,-10.5664,24.6339
+2016-06-08 01:28:57,3106,-10.6565,64.0267
+2016-06-08 01:43:53,3109,-10.5654,71.408
+2016-06-08 01:58:49,3109,-10.5664,24.0087
+2016-06-08 02:13:44,3109,-10.5654,73.0805
+2016-06-08 02:28:51,3107,-10.5963,73.2991
+2016-06-08 02:43:58,3107,-10.5963,71.8142
+2016-06-08 02:59:00,3106,-10.5954,72.4387
+2016-06-08 03:13:56,3107,-10.5654,74.6112
+2016-06-08 03:28:51,3108,-10.5664,24.6339
+2016-06-08 03:43:47,3107,-10.5664,24.7244
+2016-06-08 03:58:43,3107,-10.5355,24.9551
+2016-06-08 04:13:39,3106,-10.5963,24.4101
+2016-06-08 04:28:46,3105,-10.5954,24.0972
+2016-06-08 04:43:53,3107,-10.5355,25.2351
+2016-06-08 04:58:54,3109,-10.4759,26.5016
+2016-06-08 05:13:50,3103,-10.6264,23.8755
+2016-06-08 05:28:46,3102,-10.6264,73.7269
+2016-06-08 05:43:42,3102,-10.6264,23.8369
+2016-06-08 05:58:38,3101,-10.5954,73.9493
+2016-06-08 06:13:33,3102,-10.5954,73.9493
+2016-06-08 06:28:40,3102,-10.5954,74.4047
+2016-06-08 06:43:46,3103,-10.5963,24.3206
+2016-06-08 06:58:48,3101,-10.6264,72.4387
+2016-06-08 07:13:44,3104,-10.5954,74.6112
+2016-06-08 07:28:40,3104,-10.5954,23.9248
+2016-06-08 08:22:40,3104,-10.5057,24.2358
+2016-06-08 09:16:40,3107,-10.4453,24.2793
+2016-06-08 10:10:41,3114,-10.3861,24.5437
+2016-06-08 11:04:53,3120,-10.3263,24.8595
+2016-06-08 11:59:05,3125,-10.2677,25.143
+2016-06-08 12:53:12,3129,-10.2677,24.9172
+2016-06-08 13:47:14,3132,-10.2384,25.1474
+2016-06-08 14:41:15,3134,-10.2384,24.875
+2016-06-08 15:35:17,3135,-10.2384,24.7774
+2016-06-08 16:29:19,3132,-10.2384,24.9128
+2016-06-08 17:23:20,3128,-10.2686,24.915
+2016-06-08 18:17:33,3123,-10.2979,24.9195
+2016-06-08 19:11:44,3117,-10.3272,24.8661
+2016-06-08 20:05:51,3112,-10.3567,24.9195
+2016-06-08 20:59:51,3107,-10.3871,24.9596
+2016-06-08 21:53:52,3101,-10.4166,24.8284
+2016-06-08 22:47:51,3099,-10.4157,24.7353
+2016-06-08 23:41:51,3095,-10.4453,24.7375
+2016-06-09 00:35:50,3096,-10.3861,25.8082
+2016-06-09 01:30:01,3094,-10.3567,26.3442
+2016-06-09 02:24:12,3087,-10.5057,24.603
+2016-06-09 03:18:20,3086,-10.5355,24.4669
+2016-06-09 04:12:19,3086,-10.5673,24.603
+2016-06-09 05:06:29,3085,-10.5664,24.647
+2016-06-09 06:00:39,3084,-10.5664,24.6933
+2016-06-09 06:54:44,3085,-10.5664,24.5568
+2016-06-09 07:48:44,3087,-10.5057,24.875
+2016-06-09 08:42:43,3087,-10.5057,25.1497
+2016-06-09 09:36:43,3089,-10.5057,24.9195
+2016-06-09 10:30:42,3091,-10.5057,24.8728
+2016-06-09 11:24:41,3095,-10.4759,25.1003
+2016-06-09 12:18:52,3100,-10.4157,25.1026
+2016-06-09 13:13:03,3106,-10.3861,25.5202
+2016-06-09 14:07:09,3109,-10.3272,26.039
+2016-06-09 15:01:10,3109,-10.3871,24.9172
+2016-06-09 15:55:11,3113,-10.3871,24.964
+2016-06-09 16:49:12,3117,-10.2686,26.5158
+2016-06-09 17:43:12,3115,-10.2979,26.0436
+2016-06-09 18:37:13,3113,-10.3567,25.4702
+2016-06-09 19:31:24,3114,-10.2677,27.0512
+2016-06-09 20:25:36,3106,-10.3861,25.1407
+2016-06-09 21:19:42,3104,-10.4166,25.3298
+2016-06-09 22:13:42,3102,-10.3861,25.9395
+2016-06-09 23:07:42,3099,-10.3861,25.9419
+2016-06-10 00:01:42,3096,-10.4166,25.7027
+2016-06-10 00:55:42,3094,-10.4462,25.7991
+2016-06-10 01:49:42,3093,-10.4759,25.7991
+2016-06-10 02:43:52,3090,-10.4759,25.7509
+2016-06-10 03:38:03,3085,-10.5973,24.9128
+2016-06-10 04:32:08,3080,-10.6264,24.0993
+2016-06-10 05:26:08,3081,-10.6273,24.9551
+2016-06-10 06:20:07,3078,-10.6574,24.2771
+2016-06-10 07:14:07,3078,-10.6264,24.5481
+2016-06-10 08:08:06,3080,-10.5963,24.9975
+2016-06-10 09:02:05,3080,-10.5963,24.904
+2016-06-10 09:56:15,3083,-10.5664,24.9106
+2016-06-10 10:50:25,3088,-10.5365,24.9084
+2016-06-10 11:44:31,3094,-10.4462,25.4634
+2016-06-10 12:38:31,3103,-10.4166,25.8427
+2016-06-10 13:32:32,3110,-10.4166,25.4657
+2016-06-10 14:26:33,3108,-10.3567,24.8728
+2016-06-10 15:20:34,3114,-10.3871,24.8217
+2016-06-10 16:14:35,3116,-10.3871,24.8261
+2016-06-10 17:08:47,3116,-10.3567,25.0533
+2016-06-10 18:02:59,3115,-10.3567,25.1452
+2016-06-10 18:57:06,3115,-10.2979,25.9881
+2016-06-10 19:51:07,3110,-10.3861,25.0064
+2016-06-10 20:45:08,3108,-10.4166,25.0981
+2016-06-10 21:39:09,3105,-10.4166,25.3773
+2016-06-10 22:33:09,3101,-10.4462,25.1474
+2016-06-10 23:27:09,3099,-10.4462,25.3231
+2016-06-11 00:21:20,3096,-10.4769,25.3231
+2016-06-11 01:15:31,3095,-10.4462,25.7945
+2016-06-11 02:09:37,3094,-10.4769,25.7005
+2016-06-11 03:03:37,3090,-10.5057,25.2396
+2016-06-11 03:57:37,3087,-10.5673,24.8706
+2016-06-11 04:51:37,3087,-10.5664,25.2779
+2016-06-11 05:45:37,3086,-10.5365,25.5111
+2016-06-11 06:39:36,3082,-10.5963,24.7708
+2016-06-11 07:33:46,3083,-10.5664,24.8639
+2016-06-11 08:27:57,3085,-10.5973,24.3664
+2016-06-11 09:22:02,3087,-10.5664,24.7752
+2016-06-11 10:16:02,3094,-10.5066,25.2306
+2016-06-11 11:10:02,3097,-10.5066,25.0467
+2016-06-11 12:04:02,3101,-10.4769,25.1879
+2016-06-11 12:58:02,3102,-10.4462,25.1385
+2016-06-11 13:52:03,3106,-10.4166,25.0981
+2016-06-11 14:46:14,3107,-10.4166,25.0042
+2016-06-11 15:40:25,3109,-10.3871,25.4679
+2016-06-11 16:34:31,3106,-10.4759,23.9248
+2016-06-11 17:28:33,3107,-10.4462,24.0993
+2016-06-11 18:22:33,3109,-10.3861,24.964
+2016-06-11 19:16:34,3107,-10.4157,24.5986
+2016-06-11 20:10:35,3106,-10.4166,24.9618
+2016-06-11 21:04:35,3102,-10.4759,24.964
+2016-06-11 21:58:35,3099,-10.5066,24.773000000000003
+2016-06-11 22:52:46,3094,-10.5057,24.6448
+2016-06-11 23:46:57,3093,-10.5057,24.8683
+2016-06-12 01:35:02,3090,-10.5664,24.7266
+2016-06-12 02:29:02,3087,-10.5973,24.8151
+2016-06-12 03:23:02,3087,-10.5973,24.6339
+2016-06-12 04:17:01,3085,-10.5973,24.6383
+2016-06-12 05:11:00,3083,-10.5963,24.7708
+2016-06-12 06:05:10,3082,-10.5963,24.6383
+2016-06-12 06:59:20,3082,-10.5973,24.7287
+2016-06-12 07:53:25,3084,-10.5664,24.7266
+2016-06-12 08:47:25,3087,-10.5673,24.7287
+2016-06-12 09:41:24,3093,-10.5057,25.0467
+2016-06-12 10:35:24,3096,-10.5066,24.9084
+2016-06-12 11:29:25,3103,-10.4462,25.0064
+2016-06-12 12:23:18,3109,-10.3871,25.3298
+2016-06-12 13:17:23,3111,-10.3871,24.9128
+2016-06-12 14:11:23,3113,-10.3861,25.0981
+2016-06-12 15:05:24,3109,-10.3861,24.9062
+2016-06-12 15:59:25,3107,-10.4166,24.9551
+2016-06-12 16:53:26,3105,-10.4166,24.8151
+2016-06-12 17:47:27,3103,-10.4166,24.9084
+2016-06-12 18:41:39,3100,-10.4462,24.8617
+2016-06-12 19:35:50,3099,-10.4759,24.6802
+2016-06-12 20:29:56,3094,-10.5066,24.7708
+2016-06-12 21:23:56,3094,-10.5066,25.0444
+2016-06-12 22:17:55,3092,-10.5057,24.9529
+2016-06-12 23:11:55,3090,-10.5365,24.9529
+2016-06-13 00:05:55,3087,-10.5673,24.9084
+2016-06-13 00:59:55,3086,-10.5673,24.6802
+2016-06-13 01:54:05,3085,-10.5664,24.6339
+2016-06-13 02:48:15,3083,-10.5973,24.4538
+2016-06-13 03:42:21,3082,-10.5973,24.6339
+2016-06-13 04:36:20,3083,-10.5973,24.8595
+2016-06-13 05:30:20,3082,-10.5973,24.5899
+2016-06-13 06:24:20,3082,-10.5973,24.7244
+2016-06-13 07:18:19,3084,-10.5664,24.9084
+2016-06-13 08:12:19,3086,-10.5664,24.8617
+2016-06-13 09:06:29,3087,-10.5365,24.9998
+2016-06-13 10:00:40,3090,-10.5057,24.8151
+2016-06-13 10:54:47,3094,-10.5066,24.9529
+2016-06-13 11:48:47,3095,-10.4759,24.9529
+2016-06-13 12:42:48,3098,-10.4759,24.9975
+2016-06-13 13:36:48,3100,-10.4769,25.0444
+2016-06-13 14:30:48,3101,-10.4166,24.8639
+2016-06-13 15:24:49,3106,-10.4166,25.0959
+2016-06-13 16:19:01,3104,-10.5066,25.0914
+2016-06-13 17:13:12,3095,-10.5066,25.3118
+2016-06-13 18:07:18,3092,-10.5066,25.6776
+2016-06-13 19:01:18,3090,-10.5066,25.6274
+2016-06-13 19:55:18,3091,-10.5066,25.7234
+2016-06-13 20:49:18,3090,-10.5057,25.7188
+2016-06-13 21:43:19,3089,-10.5057,25.7166
+2016-06-13 22:37:19,3087,-10.5365,25.8083
+2016-06-13 23:31:30,3087,-10.5673,25.8083
+2016-06-14 00:25:41,3085,-10.5673,25.9974
+2016-06-14 01:19:46,3082,-10.5664,25.9004
+2016-06-14 02:13:46,3081,-10.5973,25.852
+2016-06-14 03:07:46,3081,-10.5973,25.8981
+2016-06-14 04:01:46,3080,-10.5973,25.7601
+2016-06-14 04:55:46,3081,-10.5973,25.852
+2016-06-14 05:49:46,3082,-10.5973,25.7578
+2016-06-14 06:43:57,3082,-10.5664,25.9465
+2016-06-14 07:38:07,3084,-10.5664,25.9951
+2016-06-14 08:32:13,3086,-10.5664,26.0924
+2016-06-14 09:26:13,3086,-10.5673,25.7578
+2016-06-14 10:20:13,3087,-10.5365,26.0414
+2016-06-14 11:14:13,3090,-10.5066,26.0437
+2016-06-14 12:08:14,3094,-10.4759,26.3279
+2016-06-14 13:02:14,3095,-10.4472,26.3794
+2016-06-14 13:56:25,3096,-10.4166,26.6149
+2016-06-14 14:50:36,3100,-10.4166,26.6623
+2016-06-14 15:44:42,3100,-10.4166,26.8072
+2016-06-14 16:38:43,3101,-10.4166,26.8549
+2016-06-14 17:32:43,3100,-10.4166,26.7572
+2016-06-14 18:26:43,3100,-10.4166,26.7596
+2016-06-14 19:20:44,3100,-10.4166,26.7097
+2016-06-14 20:14:44,3099,-10.4166,26.905
+2016-06-14 21:08:54,3096,-10.4148,27.1475
+2016-06-14 22:03:05,3094,-10.4472,26.9002
+2016-06-14 22:57:10,3091,-10.632,27.008000000000006
+2016-06-14 23:51:10,3088,-10.4759,26.8525
+2016-06-15 00:45:09,3087,-10.5066,27.2949
+2016-06-15 01:39:08,3087,-10.4472,27.6462
+2016-06-15 02:33:07,3085,-10.5057,27.1475
+2016-06-15 03:27:07,3082,-10.5365,26.9505
+2016-06-15 04:21:17,3082,-10.5673,27.0512
+2016-06-15 05:15:27,3081,-10.5673,26.9026
+2016-06-15 06:09:33,3082,-10.5365,27.0512
+2016-06-15 07:03:33,3084,-10.5673,27.0488
+2016-06-15 07:57:32,3087,-10.5048,26.9505
+2016-06-15 08:51:31,3090,-10.4759,27.1982
+2016-06-15 09:45:31,3093,-10.4769,27.3945
+2016-06-15 10:39:30,3093,-10.2979,27.1957
+2016-06-15 11:33:41,3095,-10.3861,27.2974
+2016-06-15 12:27:52,3101,-10.3272,27.3507
+2016-06-15 13:21:59,3107,-10.2969,27.4505
+2016-06-15 14:16:00,3112,-10.4166,27.3046
+2016-06-15 15:10:01,3106,-10.3567,27.0512
+2016-06-15 16:04:01,3100,-10.3576,27.3046
+2016-06-15 16:58:02,3099,-10.3871,27.2538
+2016-06-15 17:52:02,3096,-10.3567,27.3022
+2016-06-15 18:46:13,3095,-10.3852,27.1017
+2016-06-15 19:40:24,3094,-10.3861,27.2489
+2016-06-15 20:34:30,3092,-10.4157,27.0536
+2016-06-15 21:28:30,3087,-10.4462,27.0536
+2016-06-15 22:22:29,3085,-10.5066,26.8955
+2016-06-15 23:16:28,3080,-10.4732,26.7525
+2016-06-16 00:10:27,3079,-10.412,26.7644
+2016-06-16 01:04:26,3084,-10.4157,28.4757
+2016-06-16 01:58:36,3074,-10.5963,26.6647
+2016-06-16 02:52:46,3072,-10.5973,26.5582
+2016-06-16 03:46:51,3071,-10.6264,26.5158
+2016-06-16 04:40:49,3069,-10.6282,26.7121
+2016-06-16 05:34:48,3069,-10.5963,26.9505
+2016-06-16 06:28:46,3071,-10.5664,26.8072
+2016-06-16 07:22:46,3072,-10.5664,26.667
+2016-06-16 08:16:45,3078,-10.5365,26.7168
+2016-06-16 09:10:54,3083,-10.5057,27.1089
+2016-06-16 10:05:05,3088,-10.4148,27.2586
+2016-06-16 10:59:10,3094,-10.4166,26.9098
+2016-06-16 11:53:11,3099,-10.2375,27.1041
+2016-06-16 12:47:11,3101,-10.3861,26.9098
+2016-06-16 13:41:11,3104,-10.4175,26.8096
+2016-06-16 14:35:11,3106,-10.3852,27.0464
+2016-06-16 15:29:11,3107,-10.2667,26.9696
+2016-06-16 16:23:22,3106,-10.4796,26.6884
+2016-06-16 17:17:33,3104,-10.3567,27.0608
+2016-06-16 18:11:39,3100,-10.4166,26.7667
+2016-06-16 19:05:40,3100,-10.3567,27.2102
+2016-06-16 19:59:40,3097,-10.3861,27.0104
+2016-06-16 20:53:40,3095,-10.3861,26.7667
+2016-06-16 21:47:40,3094,-10.4166,26.9122
+2016-06-16 22:41:39,3092,-10.4166,26.8143
+2016-06-16 23:35:50,3090,-10.4462,26.7667
+2016-06-17 00:30:00,3088,-10.4157,26.8143
+2016-06-17 01:24:06,3087,-10.4759,26.7168
+2016-06-17 02:18:05,3087,-10.4759,26.6694
+2016-06-17 03:12:05,3086,-10.4759,26.7667
+2016-06-17 04:06:04,3086,-10.4759,26.9122
+2016-06-17 05:00:03,3082,-10.4759,26.6647
+2016-06-17 05:54:03,3082,-10.5066,26.9601
+2016-06-17 06:48:13,3081,-10.475,26.7121
+2016-06-17 07:42:24,3081,-10.4759,26.862
+2016-06-17 08:36:30,3082,-10.4759,26.7691
+2016-06-17 09:30:30,3084,-10.4759,26.8167
+2016-06-17 10:24:30,3087,-10.4759,26.6718
+2016-06-17 11:18:30,3087,-10.4759,26.7192
+2016-06-17 12:12:30,3088,-10.3861,26.7168
+2016-06-17 13:06:31,3094,-10.4426,26.7287
+2016-06-17 14:00:42,3094,-10.4769,26.7168
+2016-06-17 14:54:53,3091,-10.4166,26.8692
+2016-06-17 15:49:00,3091,-10.4157,26.8692
+2016-06-17 16:43:00,3089,-10.4462,26.7739
+2016-06-17 17:36:59,3087,-10.4157,26.8143
+2016-06-17 18:30:59,3087,-10.4759,26.8644
+2016-06-17 19:24:59,3087,-10.475,26.7644
+2016-06-17 20:18:59,3082,-10.5057,26.7192
+2016-06-17 21:13:10,3080,-10.5365,26.5748
+2016-06-17 22:07:20,3079,-10.5057,26.8167
+2016-06-17 23:01:26,3076,-10.5365,26.7691
+2016-06-17 23:55:25,3076,-10.5057,27.2562
+2016-06-18 00:49:25,3072,-10.5664,27.0608
+2016-06-18 01:43:24,3072,-10.5664,26.8143
+2016-06-18 02:37:23,3071,-10.5963,26.8143
+2016-06-18 03:31:22,3070,-10.5963,26.862
+2016-06-18 04:25:32,3067,-10.6264,26.862
+2016-06-18 05:19:41,3066,-10.6264,26.7667
+2016-06-18 06:13:46,3066,-10.5954,26.8167
+2016-06-18 07:07:45,3070,-10.5664,27.1089
+2016-06-18 08:01:45,3072,-10.5355,27.1113
+2016-06-18 08:55:44,3078,-10.5057,26.9625
+2016-06-18 09:49:44,3083,-10.4759,27.1523
+2016-06-18 10:43:44,3089,-10.4157,27.2538
+2016-06-18 11:37:56,3094,-10.3861,27.2127
+2016-06-18 12:32:07,3100,-10.3567,27.2151
+2016-06-18 13:26:14,3105,-10.2969,27.3095
+2016-06-18 14:20:15,3108,-10.3272,27.261
+2016-06-18 15:14:16,3107,-10.3309,27.2949
+2016-06-18 16:08:17,3107,-10.2667,27.3095
+2016-06-18 17:02:18,3108,-10.2969,27.3095
+2016-06-18 17:56:19,3106,-10.2969,27.3095
+2016-06-18 18:50:31,3104,-10.2667,27.4091
+2016-06-18 19:44:43,3103,-10.296,27.4067
+2016-06-18 20:38:50,3100,-10.3567,27.358
+2016-06-18 21:32:51,3095,-10.3557,27.261
+2016-06-18 22:26:52,3094,-10.3861,27.656
+2016-06-18 23:20:53,3090,-10.3861,27.2078
+2016-06-19 00:14:53,3087,-10.4157,27.1089
+2016-06-19 01:08:53,3084,-10.4453,27.1162
+2016-06-19 02:03:04,3080,-10.4759,26.9625
+2016-06-19 02:57:15,3084,-10.296,28.2792
+2016-06-19 03:51:21,3078,-10.5057,26.9194
+2016-06-19 04:45:22,3077,-10.5057,27.0176
+2016-06-19 05:39:22,3076,-10.475,27.1186
+2016-06-19 06:33:22,3076,-10.475,27.068
+2016-06-19 07:27:23,3079,-10.4453,27.3168
+2016-06-19 08:21:23,3080,-10.4453,27.1668
+2016-06-19 09:15:24,3083,-10.4148,27.2659
+2016-06-19 10:09:35,3087,-10.3861,27.2151
+2016-06-19 11:03:46,3089,-10.4157,26.972
+2016-06-19 11:57:53,3090,-10.4157,27.4188
+2016-06-19 12:51:53,3087,-10.3861,27.3192
+2016-06-19 13:45:53,3088,-10.3557,27.2707
+2016-06-19 14:39:54,3093,-10.3557,27.2199
+2016-06-19 15:33:55,3095,-10.3567,27.2199
+2016-06-19 16:27:56,3099,-10.3263,27.2804
+2016-06-19 17:22:08,3097,-10.3567,27.2248
+2016-06-19 18:16:20,3095,-10.3557,27.0753
+2016-06-19 19:10:27,3094,-10.3852,27.1258
+2016-06-19 20:04:27,3091,-10.3852,27.2248
+2016-06-19 20:58:27,3087,-10.4148,27.0777
+2016-06-19 21:52:27,3081,-10.475,27.1258
+2016-06-19 22:46:27,3076,-10.5057,27.0704
+2016-06-19 23:40:26,3072,-10.5954,26.7739
+2016-06-20 00:34:37,3065,-10.6245,25.5339
+2016-06-20 01:28:47,3064,-10.6565,26.0043
+2016-06-20 02:22:53,3064,-10.5954,26.9744
+2016-06-20 03:16:52,3062,-10.6254,26.6789
+2016-06-20 04:10:52,3058,-10.6867,26.0066
+2016-06-20 05:04:51,3061,-10.6264,27.121
+2016-06-20 05:58:50,3060,-10.6254,26.7763
+2016-06-20 06:52:49,3064,-10.1475,28.0318
+2016-06-20 07:46:59,3064,-10.5954,26.2951
+2016-06-20 08:41:10,3071,-10.5963,26.724
+2016-06-20 09:35:16,3080,-10.4759,27.2732
+2016-06-20 10:29:17,3087,-10.4148,27.2732
+2016-06-20 11:23:18,3094,-10.4472,26.3396
+2016-06-20 12:17:19,3101,-10.3567,27.1789
+2016-06-20 13:11:21,3106,-10.3861,26.2928
+2016-06-20 14:05:22,3110,-10.3567,26.2928
+2016-06-20 14:59:35,3115,-10.296,26.3888
+2016-06-20 15:53:48,3121,-10.2083,27.2732
+2016-06-20 16:47:56,3118,-10.2969,26.1994
+2016-06-20 17:41:58,3119,-10.1783,27.3241
+2016-06-20 18:36:00,3112,-10.3263,26.1994
+2016-06-20 19:30:01,3110,-10.2375,27.1258
+2016-06-20 20:24:03,3104,-10.3861,26.104
+2016-06-20 21:18:03,3101,-10.3852,26.5842
+2016-06-20 22:12:15,3099,-10.296,27.3241
+2016-06-20 23:06:26,3093,-10.3861,26.7335
+2016-06-21 00:00:32,3087,-10.3852,27.0248
+2016-06-21 00:54:32,3082,-10.4157,26.9289
+2016-06-21 01:48:32,3081,-10.4453,27.2175
+2016-06-21 02:42:31,3076,-10.5048,26.3888
+2016-06-21 03:36:30,3075,-10.5057,26.3911
+2016-06-21 04:30:30,3073,-10.5664,26.0136
+2016-06-21 05:24:41,3075,-10.5057,26.5346
+2016-06-21 06:18:51,3075,-10.5057,26.4428
+2016-06-21 07:12:57,3077,-10.475,26.3982
+2016-06-21 08:06:56,3080,-10.4148,26.9864
+2016-06-21 09:00:56,3083,-10.3557,27.4286
+2016-06-21 09:54:56,3087,-10.4157,26.7882
+2016-06-21 10:48:57,3091,-10.4157,26.7335
+2016-06-21 11:42:58,3094,-10.3548,26.4475
+2016-06-21 12:37:10,3100,-10.2375,27.3313
+2016-06-21 13:31:22,3106,-10.2384,27.1354
+2016-06-21 14:25:29,3110,-10.1792,27.3362
+2016-06-21 15:19:31,3115,-10.1493,27.2393
+2016-06-21 16:13:33,3118,-10.0616,27.4384
+2016-06-21 17:07:35,3117,-10.1493,27.0897
+2016-06-21 18:01:37,3117,-10.1194,27.2393
+2016-06-21 18:55:39,3114,-10.1783,26.8501
+2016-06-21 19:49:51,3113,-10.1783,26.843000000000004
+2016-06-21 20:44:04,3109,-10.2083,26.9003
+2016-06-21 21:38:11,3106,-10.1484,27.2465
+2016-06-21 22:32:11,3101,-10.1783,27.2465
+2016-06-21 23:26:12,3100,-10.1493,27.2974
+2016-06-22 01:14:14,3094,-10.2375,27.1982
+2016-06-22 02:08:14,3094,-10.2375,27.3459
+2016-06-22 03:02:25,3091,-10.2375,27.2465
+2016-06-22 03:56:36,3088,-10.2366,27.1958
+2016-06-22 04:50:43,3087,-10.3254,26.8501
+2016-06-22 05:44:43,3090,-9.9736,27.4481
+2016-06-22 06:38:44,3088,-10.3254,26.6504
+2016-06-22 07:32:44,3093,-10.2375,27.1475
+2016-06-22 08:26:45,3094,-10.2375,27.1041
+2016-06-22 09:20:46,3099,-10.1783,27.0488
+2016-06-22 10:14:58,3104,-10.1783,27.056
+2016-06-22 11:09:11,3110,-10.1212,27.3047
+2016-06-22 12:03:19,3115,-10.0616,27.2514
+2016-06-22 12:57:21,3122,-10.004,27.3047
+2016-06-22 13:51:23,3128,-9.9745,27.2079
+2016-06-22 14:45:26,3132,-9.9458,27.2079
+2016-06-22 15:39:29,3137,-9.9467,26.7763
+2016-06-22 16:33:32,3138,-9.9467,27.0633
+2016-06-22 17:27:46,3138,-9.9467,27.0633
+2016-06-22 18:21:59,3137,-9.8594,27.3557
+2016-06-22 19:16:07,3132,-9.9467,27.1114
+2016-06-22 20:10:10,3127,-9.9745,27.1114
+2016-06-22 21:04:12,3121,-10.0328,27.0152
+2016-06-22 21:58:14,3116,-10.0616,27.1138
+2016-06-22 22:52:15,3112,-10.0328,27.1186
+2016-06-22 23:46:16,3108,-10.0625,27.0705
+2016-06-23 00:40:29,3106,-10.0914,26.9793
+2016-06-23 01:34:40,3103,-10.1203,26.9793
+2016-06-23 02:28:48,3101,-10.0896,27.1234
+2016-06-23 03:22:49,3099,-10.1203,27.1813
+2016-06-23 04:16:51,3096,-10.1493,27.121
+2016-06-23 05:10:52,3094,-10.1493,27.1307
+2016-06-23 06:04:54,3095,-10.1194,27.1307
+2016-06-23 06:58:55,3099,-10.1203,27.1307
+2016-06-23 07:53:08,3104,-10.0328,27.1331
+2016-06-23 08:47:21,3110,-10.0032,27.1331
+2016-06-23 09:41:28,3121,-9.9467,27.1813
+2016-06-23 10:35:31,3131,-9.8594,27.0392
+2016-06-23 11:29:34,3141,-9.8603,26.9888
+2016-06-23 12:23:38,3150,-9.8035,26.8931
+2016-06-23 13:17:42,3157,-9.746,26.8931
+2016-06-23 14:11:47,3163,-9.717,26.8454
+2016-06-23 15:06:02,3167,-9.6888,26.8931
+2016-06-23 16:00:18,3170,-9.6897,26.8931
+2016-06-23 16:54:28,3170,-9.6607,26.8931
+2016-06-23 17:48:33,3166,-9.6888,26.8454
+2016-06-23 18:42:37,3163,-9.6607,26.7572
+2016-06-23 19:36:42,3160,-9.7178,26.7596
+2016-06-23 20:30:46,3154,-9.746,26.8525
+2016-06-23 21:24:50,3149,-9.7743,26.812
+2016-06-23 22:18:53,3145,-9.7743,26.8597
+2016-06-23 23:13:08,3142,-9.8035,26.8597
+2016-06-24 00:07:22,3137,-9.8318,26.8191
+2016-06-24 01:01:31,3132,-9.8611,26.8191
+2016-06-24 01:55:34,3128,-9.8603,26.8669
+2016-06-24 02:49:37,3124,-9.8594,26.8669
+2016-06-24 03:43:39,3123,-9.9181,26.8669
+2016-06-24 04:37:41,3119,-9.9173,26.8191
+2016-06-24 05:31:42,3115,-9.9458,26.8191
+2016-06-24 06:25:54,3111,-9.9458,26.8191
+2016-06-24 07:20:07,3111,-9.9467,26.8239
+2016-06-24 08:14:15,3115,-9.8887,26.8239
+2016-06-24 09:08:18,3123,-9.8603,26.8335
+2016-06-24 10:02:21,3131,-9.8026,26.7382
+2016-06-24 10:56:24,3142,-9.746,26.7858
+2016-06-24 11:50:28,3150,-9.6607,26.7858
+2016-06-24 12:44:31,3156,-9.7187,26.596
+2016-06-24 13:38:46,3163,-9.6616,26.6008
+2016-06-24 14:33:02,3168,-9.6327,26.6008
+2016-06-24 15:27:12,3171,-9.6327,26.6031
+2016-06-24 16:21:17,3171,-9.6047,26.4616
+2016-06-24 17:15:22,3174,-9.24,26.6055
+2016-06-24 18:09:25,3152,-9.7187,26.4616
+2016-06-24 19:03:28,3132,-9.8035,26.4592
+2016-06-24 19:57:40,3121,-9.8026,26.4075
+2016-06-24 20:51:53,3115,-9.8594,26.3137
+2016-06-24 21:46:01,3110,-9.9173,26.3114
+2016-06-24 22:40:02,3108,-9.9467,26.4146
+2016-06-24 23:34:04,3104,-9.9458,26.3676
+2016-06-25 00:28:05,3101,-9.9458,26.5134
+2016-06-25 01:22:07,3098,-9.9745,26.4686
+2016-06-25 02:16:08,3095,-9.9745,26.471
+2016-06-25 03:10:20,3094,-9.9736,26.471
+2016-06-25 04:04:32,3094,-9.9736,26.5252
+2016-06-25 04:58:39,3094,-9.9736,26.5252
+2016-06-25 05:52:40,3093,-10.0032,26.4287
+2016-06-25 06:46:42,3094,-9.9736,26.4781
+2016-06-25 07:40:43,3099,-9.945,26.4287
+2016-06-25 08:34:45,3104,-9.9458,26.4781
+2016-06-25 09:28:46,3106,-9.9736,26.2343
+2016-06-25 10:22:59,3109,-9.9745,26.2366
+2016-06-25 11:17:12,3113,-9.9467,26.3325
+2016-06-25 12:11:20,3115,-9.9173,26.4287
+2016-06-25 13:05:22,3118,-9.9173,26.3348
+2016-06-25 13:59:24,3118,-9.9173,26.288
+2016-06-25 14:53:25,3117,-9.8594,26.431
+2016-06-25 15:47:28,3117,-9.8594,26.3371
+2016-06-25 16:41:29,3119,-9.8594,26.384
+2016-06-25 17:35:32,3121,-9.831,26.4357
+2016-06-25 18:29:34,3121,-9.8594,26.3371
+2016-06-25 19:23:36,3118,-9.8887,26.3371
+2016-06-25 20:17:49,3114,-9.9467,26.2413
+2016-06-25 21:12:01,3109,-9.9754,26.3371
+2016-06-25 22:06:09,3096,-10.0328,26.2413
+2016-06-25 23:00:10,3087,-10.0914,26.1434
+2016-06-25 23:54:11,3079,-10.1194,26.2413
+2016-06-26 00:48:12,3073,-10.1783,26.0018
+2016-06-26 01:42:12,3072,-10.1783,26.141
+2016-06-26 02:36:12,3070,-10.2375,26.141
+2016-06-26 03:30:23,3067,-10.2375,26.1876
+2016-06-26 04:24:34,3065,-10.2658,26.0946
+2016-06-26 05:18:40,3064,-10.2658,26.141
+2016-06-26 06:12:40,3064,-10.2658,26.0946
+2016-06-26 07:06:40,3065,-10.2366,26.0087
+2016-06-26 08:00:40,3072,-10.0896,26.6268
+2016-06-26 08:54:40,3072,-10.2074,26.0969
+2016-06-26 09:48:40,3078,-10.1484,26.1923
+2016-06-26 10:42:52,3082,-10.1194,26.288
+2016-06-26 11:37:04,3089,-10.1203,26.2413
+2016-06-26 12:31:11,3095,-10.0616,26.2413
+2016-06-26 13:25:12,3100,-10.0616,26.2413
+2016-06-26 14:19:14,3103,-10.0616,26.0969
+2016-06-26 15:13:15,3106,-10.0328,26.1015
+2016-06-26 16:07:28,3106,-10.0319,26.1015
+2016-06-26 17:01:41,3104,-10.0319,26.0528
+2016-06-26 17:55:48,3103,-10.0319,26.1015
+2016-06-26 18:49:50,3100,-10.0616,26.0528
+2016-06-26 19:43:52,3096,-10.0896,25.9117
+2016-06-26 20:37:53,3092,-10.1484,25.8172
+2016-06-26 21:31:54,3086,-10.1783,25.8149
+2016-06-26 22:25:55,3080,-10.2375,25.7208
+2016-06-26 23:20:07,3075,-10.2366,25.529
+2016-06-27 00:14:18,3072,-10.2658,25.6225
+2016-06-27 01:08:25,3071,-10.296,25.529
+2016-06-27 02:02:26,3068,-10.296,25.6225
+2016-06-27 02:56:26,3065,-10.3557,25.529
+2016-06-27 03:50:26,3064,-10.3557,25.529
+2016-06-27 04:44:25,3062,-10.3548,25.5768
+2016-06-27 05:38:24,3060,-10.3852,25.5746
+2016-06-27 06:32:35,3059,-10.3548,25.5768
+2016-06-27 07:26:46,3061,-10.3557,25.6225
+2016-06-27 08:20:52,3065,-10.3557,25.5746
+2016-06-27 09:14:52,3071,-10.296,25.5335
+2016-06-27 10:08:53,3076,-10.2677,25.5814
+2016-06-27 11:02:53,3081,-10.2375,25.4404
+2016-06-27 11:56:55,3082,-10.3263,24.025
+2016-06-27 12:50:56,3094,-10.1493,25.8518
+2016-06-27 13:45:10,3099,-10.1194,25.7621
+2016-06-27 14:39:23,3100,-10.1203,25.7621
+2016-06-27 15:33:31,3100,-10.1194,25.714
+2016-06-27 16:27:33,3099,-10.1203,25.3928
+2016-06-27 17:21:35,3100,-10.1194,25.529
+2016-06-27 18:15:37,3099,-10.1493,25.3928
+2016-06-27 19:09:39,3096,-10.1493,25.3928
+2016-06-27 20:03:40,3090,-10.2677,24.2045
+2016-06-27 20:57:53,3087,-10.2357,24.7458
+2016-06-27 21:52:05,3084,-10.296,24.383000000000006
+2016-06-27 22:46:12,3080,-10.4713,24.2785
+2016-06-27 23:40:14,3079,-10.3263,24.5562
+2016-06-28 00:34:15,3075,-10.3254,24.5627
+2016-06-28 01:28:16,3072,-10.3557,24.5562
+2016-06-28 02:22:17,3071,-10.3843,24.0596
+2016-06-28 03:16:18,3068,-10.4148,23.9712
+2016-06-28 04:10:30,3067,-10.475,24.2372
+2016-06-28 05:04:41,3065,-10.4148,24.285
+2016-06-28 05:58:48,3062,-10.4741,24.1028
+2016-06-28 06:52:48,3063,-10.4444,24.2872
+2016-06-28 07:46:48,3058,-10.5355,22.8409
+2016-06-28 08:40:49,3064,-10.4453,24.3307
+2016-06-28 09:34:50,3065,-10.3852,24.5101
+2016-06-28 10:28:50,3070,-10.3861,24.6465
+2016-06-28 11:23:03,3074,-10.3263,24.6972
+2016-06-28 12:17:15,3080,-10.2667,24.8789
+2016-06-28 13:11:23,3080,-10.3557,23.4911
+2016-06-28 14:05:25,3085,-10.2375,23.6267
+2016-06-28 14:59:27,3092,-10.2083,25.0552
+2016-06-28 15:53:29,3091,-10.296,23.6183
+2016-06-28 16:47:31,3092,-10.296,23.7077
+2016-06-28 17:41:33,3090,-10.1783,25.1493
+2016-06-28 18:35:46,3087,-10.1774,23.8423
+2016-06-28 19:29:58,3087,-10.2375,25.0149
+2016-06-28 20:24:05,3082,-10.2667,24.8767
+2016-06-28 21:18:07,3078,-10.296,24.8301
+2016-06-28 22:12:08,3072,-10.3254,24.9636
+2016-06-28 23:06:08,3067,-10.3557,24.8723
+2016-06-29 00:00:08,3065,-10.3557,24.9636
+2016-06-29 00:54:08,3061,-10.475,23.6183
+2016-06-29 01:48:19,3058,-10.5355,23.3602
+2016-06-29 02:42:29,3057,-10.5654,23.0566
+2016-06-29 03:36:35,3054,-10.5654,23.1004
+2016-06-29 04:30:34,3053,-10.5654,23.1422
+2016-06-29 05:24:34,3053,-10.5654,23.0984
+2016-06-29 06:18:33,3055,-10.4148,24.737
+2016-06-29 07:12:32,3061,-10.4148,24.5144
+2016-06-29 08:06:32,3065,-10.4157,24.5123
+2016-06-29 09:00:44,3068,-10.3852,24.4684
+2016-06-29 09:54:55,3071,-10.3861,24.4684
+2016-06-29 10:49:02,3075,-10.3567,24.5584
+2016-06-29 11:43:03,3080,-10.296,24.6046
+2016-06-29 12:37:05,3088,-10.2677,24.5606
+2016-06-29 13:31:06,3093,-10.2083,24.6928
+2016-06-29 14:25:08,3094,-10.2074,24.5144
+2016-06-29 15:19:10,3096,-10.2384,24.4246
+2016-06-29 16:13:23,3095,-10.2375,24.2936
+2016-06-29 17:07:36,3094,-10.2375,24.2501
+2016-06-29 18:01:44,3093,-10.2375,24.2501
+2016-06-29 18:55:45,3090,-10.2667,24.2936
+2016-06-29 19:49:47,3087,-10.2979,24.2067
+2016-06-29 20:43:48,3081,-10.3621,23.9797
+2016-06-29 21:37:49,3076,-10.3834,24.025
+2016-06-29 22:31:50,3071,-10.3852,23.9367
+2016-06-29 23:26:02,3065,-10.3861,23.9367
+2016-06-30 00:20:13,3064,-10.3843,23.9388
+2016-06-30 01:14:20,3062,-10.4148,23.8487
+2016-06-30 02:08:20,3062,-10.4148,23.8508
+2016-06-30 03:02:20,3060,-10.4148,23.9346
+2016-06-30 03:56:20,3059,-10.4444,23.9346
+2016-06-30 04:50:20,3058,-10.475,23.8895
+2016-06-30 05:44:20,3058,-10.475,23.8466
+2016-06-30 06:38:31,3058,-10.475,23.8466
+2016-06-30 07:32:43,3061,-10.4139,23.8058
+2016-06-30 08:26:49,3062,-10.4148,23.8487
+2016-06-30 09:20:50,3065,-10.3843,23.8508
+2016-06-30 10:14:51,3067,-10.4148,23.808000000000003
+2016-06-30 11:08:51,3071,-10.5664,23.9367
+2016-06-30 12:02:52,3077,-10.3263,23.808000000000003
+2016-06-30 12:56:53,3081,-10.296,24.0228
+2016-06-30 13:51:06,3087,-10.2667,23.8101
+2016-06-30 14:45:18,3090,-10.2375,23.8101
+2016-06-30 15:39:26,3092,-10.2083,23.8101
+2016-06-30 16:33:28,3093,-10.2375,23.808000000000003
+2016-06-30 17:27:29,3092,-10.2375,23.898000000000003
+2016-06-30 18:21:31,3089,-10.2677,23.7652
+2016-06-30 19:15:32,3087,-10.2667,23.6777
+2016-06-30 20:09:33,3082,-10.296,23.633000000000006
+2016-06-30 21:03:45,3079,-10.3263,23.546
+2016-06-30 21:57:57,3075,-10.3557,23.4592
+2016-06-30 22:52:03,3072,-10.3861,23.4149
+2016-06-30 23:46:03,3068,-10.3861,23.3706
+2016-07-01 00:40:04,3066,-10.4148,23.2844
+2016-07-01 01:34:04,3065,-10.4148,23.2844
+2016-07-01 02:28:04,3064,-10.4444,23.2005
+2016-07-01 03:22:03,3062,-10.4444,23.2005
+2016-07-01 04:16:14,3059,-10.475,23.1566
+2016-07-01 05:10:26,3058,-10.475,23.1545
+2016-07-01 06:04:32,3058,-10.4444,23.1545
+2016-07-01 06:58:32,3061,-10.4148,23.1566
+2016-07-01 07:52:33,3065,-10.3852,23.2005
+2016-07-01 08:46:34,3070,-10.3557,23.2844
+2016-07-01 09:40:36,3076,-10.296,23.2844
+2016-07-01 10:34:37,3084,-10.2969,23.3285
+2016-07-01 11:28:51,3087,-10.2677,23.4592
+2016-07-01 12:23:04,3092,-10.2083,23.4592
+2016-07-01 13:17:12,3095,-10.1792,23.4592
+2016-07-01 14:11:15,3100,-10.1792,23.4592
+2016-07-01 15:05:18,3101,-10.1203,23.3327
+2016-07-01 15:59:21,3106,-10.1203,23.3327
+2016-07-01 16:53:24,3106,-10.1203,90.4855
+2016-07-01 17:47:27,3103,-10.0905,23.1607
+2016-07-01 18:41:30,3100,-10.1493,23.1168
+2016-07-01 19:35:43,3095,-10.1792,22.9877
+2016-07-01 20:29:57,3092,-10.2083,22.8592
+2016-07-01 21:24:05,3087,-10.2677,22.8179
+2016-07-01 22:18:07,3081,-10.2677,22.8179
+2016-07-01 23:12:08,3078,-10.2658,22.7313
+2016-07-02 01:00:12,3072,-10.296,22.6962
+2016-07-02 01:54:14,3071,-10.3557,22.653
+2016-07-02 02:48:26,3068,-10.3557,22.653
+2016-07-02 03:42:39,3066,-10.3557,22.612
+2016-07-02 04:36:46,3067,-10.3557,22.569000000000006
+2016-07-02 05:30:47,3067,-10.3557,22.569000000000006
+2016-07-02 06:24:49,3068,-10.3254,22.5261
+2016-07-02 07:18:51,3071,-10.3263,22.4832
+2016-07-02 08:12:53,3072,-10.3861,22.4832
+2016-07-02 09:06:54,3071,-10.3861,22.4076
+2016-07-02 10:01:07,3065,-10.4453,22.2758
+2016-07-02 10:55:19,3059,-10.5057,22.1988
+2016-07-02 11:49:26,3052,-10.5048,22.1544
+2016-07-02 12:43:27,3050,-10.5048,79.2416
+2016-07-02 13:37:27,3048,-10.5355,77.8061
+2016-07-02 14:31:27,3046,-10.5654,22.0317
+2016-07-02 15:25:27,3045,-10.5664,22.0336
+2016-07-02 16:19:27,3047,-10.5355,22.0738
+2016-07-02 17:13:37,3049,-10.5664,22.0719
+2016-07-02 18:07:48,3051,-10.5963,22.0719
+2016-07-02 19:01:54,3048,-10.6574,21.9915
+2016-07-02 19:55:53,3043,-10.6565,21.9495
+2016-07-02 20:49:53,3039,-10.6876,21.9094
+2016-07-02 21:43:52,3036,-10.6867,21.9075
+2016-07-02 22:37:51,3034,-10.7179,75.8981
+2016-07-02 23:31:50,3032,-10.7472,76.5941
+2016-07-03 00:26:00,3029,-10.7786,76.3426
+2016-07-03 01:20:10,3029,-10.8091,75.8779
+2016-07-03 02:14:15,3026,-10.8406,75.65100000000002
+2016-07-03 03:08:14,3024,-10.8396,75.65100000000002
+2016-07-03 04:02:13,3023,-10.8712,74.9653
+2016-07-03 04:56:11,3022,-10.9029,74.7439
+2016-07-03 05:50:10,3022,-10.9029,74.5171
+2016-07-03 06:44:08,3022,-10.8396,74.0611
+2016-07-03 07:38:18,3024,-10.8406,74.0611
+2016-07-03 08:32:29,3030,-10.81,74.0611
+2016-07-03 09:26:35,3037,-10.7482,74.0611
+2016-07-03 10:20:35,3043,-10.7179,73.8384
+2016-07-03 11:14:35,3051,-10.6876,74.0611
+2016-07-03 12:08:36,3056,-10.6574,74.0611
+2016-07-03 13:02:38,3061,-10.6264,73.4099
+2016-07-03 13:56:39,3063,-10.5954,73.1909
+2016-07-03 14:50:51,3064,-10.5973,72.5547
+2016-07-03 15:45:03,3065,-10.5664,71.7251
+2016-07-03 16:39:10,3065,-10.5654,70.4995
+2016-07-03 17:33:12,3065,-10.5664,69.5168
+2016-07-03 18:27:13,3064,-10.5954,68.5602
+2016-07-03 19:21:15,3059,-10.5963,67.6224
+2016-07-03 20:15:16,3054,-10.6264,66.5263
+2016-07-03 21:09:16,3050,-10.6876,65.1231
+2016-07-03 22:03:27,3043,-10.6867,64.2767
+2016-07-03 22:57:38,3037,-10.7169,63.2897
+2016-07-03 23:51:44,3034,-10.7472,62.3248
+2016-07-04 00:45:44,3030,-10.8091,61.7018
+2016-07-04 01:39:44,3027,-10.8406,61.5437
+2016-07-04 02:33:43,3025,-10.8406,60.9348
+2016-07-04 03:27:42,3023,-10.8396,60.3312
+2016-07-04 04:21:41,3022,-10.9029,59.8882
+2016-07-04 05:15:51,3021,-10.8712,59.8723
+2016-07-04 06:10:01,3022,-10.8396,60.1633
+2016-07-04 07:04:06,3023,-10.8406,60.1633
+2016-07-04 07:58:06,3029,-10.81,60.1633
+2016-07-04 08:52:06,3034,-10.7482,60.4677
+2016-07-04 09:46:06,3039,-10.7179,60.4677
+2016-07-04 10:40:06,3048,-10.6574,60.9185
+2016-07-04 11:34:07,3055,-10.6904,60.9239
+2016-07-04 12:28:20,3058,-10.5954,60.6155
+2016-07-04 13:22:32,3059,-10.5973,60.3045
+2016-07-04 14:16:39,3062,-10.5664,59.8563
+2016-07-04 15:10:41,3063,-10.5057,58.418
+2016-07-04 16:04:43,3065,-10.5057,57.7215
+2016-07-04 16:58:45,3065,-10.5365,57.0344
+2016-07-04 17:52:46,3065,-10.5365,59.1294
+2016-07-04 18:46:48,3065,-10.475,62.4588
+2016-07-04 19:41:00,3059,-10.5954,57.58600000000001
+2016-07-04 20:35:12,3058,-10.5963,60.0068
+2016-07-04 21:29:19,3050,-10.7169,55.8494
+2016-07-04 22:23:19,3047,-10.6867,58.1366
+2016-07-04 23:17:20,3051,-10.5654,72.2957
+2016-07-05 00:11:20,3050,-10.5954,71.8653
+2016-07-05 01:05:21,3043,-10.6264,73.1324
+2016-07-05 01:59:22,3047,-10.6565,71.6678
+2016-07-05 02:53:33,3044,-10.6565,71.8717
+2016-07-05 03:47:45,3043,-10.6867,70.8511
+2016-07-05 04:41:51,3042,-10.7179,72.2893
+2016-07-05 05:35:51,3039,-10.7179,72.0767
+2016-07-05 06:29:51,3039,-10.6876,72.4967
+2016-07-05 07:23:51,3043,-10.6264,72.7117
+2016-07-05 08:17:52,3047,-10.6264,72.0831
+2016-07-05 09:11:52,3051,-10.5963,69.4613
+2016-07-05 10:06:05,3058,-10.5664,68.1349
+2016-07-05 11:00:17,3065,-10.5057,70.2534
+2016-07-05 11:54:24,3071,-10.4759,70.8511
+2016-07-05 12:48:26,3076,-10.4157,71.2572
+2016-07-05 13:42:28,3080,-10.3861,70.6528
+2016-07-05 14:36:30,3081,-10.3557,71.6678
+2016-07-05 15:30:32,3082,-10.3567,70.4494
+2016-07-05 16:24:34,3082,-10.3861,70.2534
+2016-07-05 17:18:47,3080,-10.3567,68.8859
+2016-07-05 18:12:59,3078,-10.3567,70.0522
+2016-07-05 19:07:07,3072,-10.4166,66.8622
+2016-07-05 20:01:09,3067,-10.4462,66.8681
+2016-07-05 20:55:10,3063,-10.4759,66.8681
+2016-07-05 21:49:11,3058,-10.5057,66.6849
+2016-07-05 22:43:11,3051,-10.5355,66.1526
+2016-07-05 23:37:12,3047,-10.5963,65.9788
+2016-07-06 00:31:23,3040,-10.6867,63.9313
+2016-07-06 01:25:34,3030,-10.7786,-16.6124
+2016-07-06 02:19:39,3029,-10.7472,55.3406
+2016-07-06 03:13:39,3026,-10.7179,28.0267
+2016-07-06 04:07:38,3023,-10.7482,-1.8323
+2016-07-06 05:01:38,3022,-10.7472,-4.9839
+2016-07-06 05:55:38,3021,-10.7786,-5.0045
+2016-07-06 06:49:48,3021,-10.7482,-2.4286
+2016-07-06 07:43:59,3022,-10.7482,-2.4286
+2016-07-06 08:38:05,3023,-10.7491,-2.4074
+2016-07-06 09:32:05,3027,-10.7188,25.4353
+2016-07-06 10:26:16,3030,-10.7179,26.6199
+2016-07-06 11:20:27,3033,-10.6574,27.2513
+2016-07-06 11:40:48,3037,-10.6574,27.9844
+2016-07-06 11:54:11,3042,-10.6885,28.5568
+2016-07-06 12:04:14,3043,-10.7188,29.2452
+2016-07-06 12:14:17,3046,-10.7188,29.3546
+2016-07-06 12:29:52,3046,-10.6885,29.2478
+2016-07-06 12:49:22,3047,-10.6885,29.3546
+2016-07-06 13:02:25,3050,-10.7188,55.3456
+2016-07-06 13:15:48,3051,-10.7188,55.472
+2016-07-06 13:26:19,3051,-10.7188,55.3456
+2016-07-06 13:36:22,3053,-10.7491,55.6039
+2016-07-06 13:48:17,3053,-10.7188,55.472
+2016-07-06 14:00:44,3054,-10.7188,55.6039
+2016-07-06 14:10:54,3054,-10.7491,55.472
+2016-07-06 14:22:40,3055,-10.7491,55.472
+2016-07-06 14:33:31,3056,-10.7491,55.2196
+2016-07-06 14:48:05,3055,-10.7188,55.3456
+2016-07-06 15:02:29,3056,-10.6885,27.7395
+2016-07-06 15:15:53,3055,-10.6876,55.3456
+2016-07-06 15:28:28,3056,-10.7188,55.2196
+2016-07-06 15:45:53,3057,-10.6885,27.5872
+2016-07-06 16:11:51,3057,-10.6264,27.4846
+2016-07-06 16:24:26,3057,-10.6574,27.1861
+2016-07-06 16:49:01,3058,-10.6885,27.1378
+2016-07-06 16:59:04,3054,-10.7188,54.9595
+2016-07-06 17:09:06,3057,-10.6885,29.5222
+2016-07-06 17:19:09,3058,-10.6885,29.5301
+2016-07-06 17:29:12,3057,-10.7188,29.8021
+2016-07-06 17:39:15,3057,-10.7188,27.9993
+2016-07-06 17:49:17,3057,-10.7188,28.0018
+2016-07-06 17:59:20,3057,-10.7188,29.6909
+2016-07-06 18:09:23,3056,-10.7188,29.3677
+2016-07-06 18:19:26,3055,-10.7188,29.749
+2016-07-06 18:29:28,3055,-10.7188,30.3595
+2016-07-06 18:39:31,3055,-10.7188,30.3595
+2016-07-06 18:49:34,3054,-10.7501,30.5851
+2016-07-06 18:59:36,3054,-10.7491,30.5851
+2016-07-06 19:09:39,3054,-10.7501,30.6398
+2016-07-06 19:19:42,3053,-10.7491,30.5879
+2016-07-06 19:29:45,3052,-10.7491,30.5879
+2016-07-06 19:39:47,3052,-10.7805,30.6452
+2016-07-06 19:49:50,3051,-10.7805,30.7
+2016-07-06 19:59:53,3051,-10.7805,30.7027
+2016-07-06 20:09:55,3050,-10.7805,30.7054
+2016-07-06 20:19:58,3050,-10.812,30.7054
+2016-07-06 20:30:01,3050,-10.811,30.7603
+2016-07-06 20:40:03,3049,-10.811,30.7549
+2016-07-06 20:50:06,3048,-10.811,30.7054
+2016-07-06 21:00:08,3046,-10.8722,30.7082
+2016-07-06 21:10:11,3045,-10.8416,30.7631
+2016-07-06 21:20:14,3043,-10.8416,30.7082
+2016-07-06 21:30:16,3045,-10.9337,58.2848
+2016-07-06 21:40:19,3045,-10.9646,59.4352
+2016-07-06 21:50:22,3044,-10.9337,60.0122
+2016-07-06 22:00:24,3043,-10.9337,60.4623
+2016-07-06 22:10:27,3043,-10.9328,60.9023
+2016-07-06 22:20:29,3044,-10.9347,64.59100000000001
+2016-07-06 22:30:32,3043,-10.9646,19.0471
+2016-07-06 22:40:34,3041,-10.9646,60.3098
+2016-07-06 22:50:37,3040,-10.9646,60.4569
+2016-07-06 23:00:39,3051,-10.7491,86.0274
+2016-07-06 23:10:42,3039,-10.9646,61.6744
+2016-07-06 23:20:44,3038,-10.9646,61.0629
+2016-07-06 23:30:47,3037,-10.9956,60.9131
+2016-07-06 23:40:49,3037,-10.9646,62.6268
+2016-07-06 23:50:52,3037,-11.0276,61.2134
+2016-07-07 00:00:54,3037,-10.9956,63.9199
+2016-07-07 00:10:57,3036,-11.0276,61.0629
+2016-07-07 00:20:59,3035,-11.0276,60.9131
+2016-07-07 00:31:02,3035,-11.0276,61.37
+2016-07-07 00:41:04,3034,-11.0276,61.37
+2016-07-07 00:51:07,3033,-11.0587,61.37
+2016-07-07 01:01:09,3032,-11.0587,60.9131
+2016-07-07 01:11:12,3031,-11.0577,61.37
+2016-07-07 01:21:14,3030,-11.0587,61.0683
+2016-07-07 01:31:17,3030,-11.0899,61.3646
+2016-07-07 01:41:19,3030,-11.0899,61.0629
+2016-07-07 01:51:21,3029,-11.0899,60.6047
+2016-07-07 02:01:24,3028,-11.0899,60.7586
+2016-07-07 02:11:26,3027,-11.0899,64.4306
+2016-07-07 02:21:29,3027,-11.0899,61.6799
+2016-07-07 02:31:31,3026,-11.1212,61.8386
+2016-07-07 02:41:33,3024,-11.0889,61.6744
+2016-07-07 02:51:36,3023,-11.1212,60.7586
+2016-07-07 03:01:38,3023,-11.0889,62.3027
+2016-07-07 03:11:41,3023,-11.1212,61.2188
+2016-07-07 03:21:43,3023,-11.1212,61.5219
+2016-07-07 03:31:45,3022,-11.1212,61.0629
+2016-07-07 03:41:48,3022,-11.1536,60.9131
+2016-07-07 03:51:50,3022,-11.1536,60.7586
+2016-07-07 04:01:52,3022,-11.1536,60.7586
+2016-07-07 04:11:55,3021,-11.1536,60.9131
+2016-07-07 04:21:57,3021,-11.1536,60.9131
+2016-07-07 04:31:59,3020,-11.1536,61.37
+2016-07-07 04:42:02,3020,-11.1536,60.9131
+2016-07-07 04:52:04,3020,-11.1536,61.5219
+2016-07-07 05:02:06,3020,-11.1536,61.6744
+2016-07-07 05:12:09,3020,-11.1536,61.6799
+2016-07-07 05:22:11,3020,-11.1526,61.3646
+2016-07-07 05:32:13,3020,-11.1526,61.8332
+2016-07-07 05:42:16,3020,-11.1526,61.8386
+2016-07-07 05:52:18,3020,-11.1526,61.0629
+2016-07-07 06:02:21,3020,-11.1526,61.37
+2016-07-07 06:12:23,3019,-11.1526,60.9131
+2016-07-07 06:22:25,3020,-11.1536,61.37
+2016-07-07 06:32:28,3019,-11.1526,61.37
+2016-07-07 06:42:30,3019,-11.1526,61.37
+2016-07-07 06:52:32,3020,-11.1536,61.37
+2016-07-07 07:02:35,3020,-11.1536,61.8386
+2016-07-07 07:12:37,3020,-11.1212,61.37
+2016-07-07 07:22:39,3021,-11.1212,61.5219
+2016-07-07 07:32:42,3021,-11.1212,60.9131
+2016-07-07 07:42:44,3022,-11.0889,61.37
+2016-07-07 07:52:46,3022,-11.0899,61.5273
+2016-07-07 08:02:49,3023,-11.0899,61.37
+2016-07-07 08:12:51,3023,-11.0889,61.37
+2016-07-07 08:22:54,3024,-11.0899,61.5219
+2016-07-07 08:32:56,3025,-11.0577,61.6744
+2016-07-07 08:42:59,3026,-11.0899,61.6799
+2016-07-07 08:53:01,3028,-11.0899,61.6799
+2016-07-07 09:03:04,3029,-11.0577,61.8386
+2016-07-07 09:13:06,3030,-11.0577,62.4644
+2016-07-07 09:23:08,3030,-11.0577,62.3082
+2016-07-07 09:33:11,3031,-11.0587,61.9926
+2016-07-07 09:43:14,3033,-11.0266,62.1473
+2016-07-07 09:53:16,3034,-11.0266,62.1473
+2016-07-07 10:03:19,3035,-11.0276,61.8386
+2016-07-07 10:13:21,3036,-11.0276,61.9926
+2016-07-07 10:23:24,3037,-11.0276,62.3082
+2016-07-07 10:33:27,3038,-10.9956,62.3082
+2016-07-07 10:43:29,3039,-10.9956,62.9427
+2016-07-07 10:53:32,3040,-11.0276,62.4644
+2016-07-07 11:03:34,3043,-10.9646,63.2673
+2016-07-07 11:13:37,3043,-10.9646,62.3082
+2016-07-07 11:23:40,3043,-10.9328,63.2729
+2016-07-07 11:33:42,3043,-10.9328,63.113
+2016-07-07 11:43:45,3046,-10.9337,63.9256
+2016-07-07 11:53:48,3047,-10.9337,63.757
+2016-07-07 12:03:50,3049,-10.9337,63.5949
+2016-07-07 12:13:53,3050,-10.9347,64.4134
+2016-07-07 12:23:55,3050,-10.9337,63.9256
+2016-07-07 12:33:56,3051,-10.8712,62.7788
+2016-07-07 12:43:59,3051,-10.9029,63.5949
+2016-07-07 12:54:01,3052,-10.8712,63.757
+2016-07-07 13:04:04,3054,-10.9029,64.2538
+2016-07-07 13:14:07,3056,-10.8712,64.4248
+2016-07-07 13:24:10,3057,-10.8712,64.4248
+2016-07-07 13:34:13,3058,-10.8712,64.4306
+2016-07-07 13:44:15,3058,-10.8406,64.5967
+2016-07-07 13:54:18,3058,-10.8091,63.2616
+2016-07-07 14:04:21,3059,-10.8091,65.2752
+2016-07-07 14:14:24,3059,-10.8406,64.0893
+2016-07-07 14:24:27,3061,-10.8406,63.9256
+2016-07-07 14:34:30,3062,-10.8091,63.757
+2016-07-07 14:44:32,3063,-10.81,64.9314
+2016-07-07 14:54:35,3064,-10.81,64.9314
+2016-07-07 15:04:38,3064,-10.7786,63.1018
+2016-07-07 15:14:41,3065,-10.7786,64.9314
+2016-07-07 15:24:43,3065,-10.7786,64.0893
+2016-07-07 15:34:46,3065,-10.7472,62.9371
+2016-07-07 15:44:49,3065,-10.81,63.757
+2016-07-07 15:54:52,3066,-10.7786,64.7636
+2016-07-07 16:04:55,3066,-10.81,64.5967
+2016-07-07 16:14:58,3067,-10.7786,64.9372
+2016-07-07 16:25:01,3067,-10.7786,65.2752
+2016-07-07 16:35:04,3066,-10.7472,63.5949
+2016-07-07 16:45:06,3067,-10.81,65.1057
+2016-07-07 16:55:09,3067,-10.81,65.2752
+2016-07-07 17:05:12,3067,-10.7786,65.1057
+2016-07-07 17:15:15,3066,-10.81,64.4248
+2016-07-07 17:25:18,3065,-10.7472,62.7788
+2016-07-07 17:35:20,3068,-10.7491,69.486
+2016-07-07 17:45:23,3065,-10.81,64.5967
+2016-07-07 17:55:26,3065,-10.81,64.7636
+2016-07-07 18:05:29,3065,-10.7786,63.1018
+2016-07-07 18:15:31,3065,-10.8091,63.7627
+2016-07-07 18:25:34,3065,-10.81,64.4191
+2016-07-07 18:35:37,3065,-10.81,65.7943
+2016-07-07 18:45:40,3063,-10.81,63.1074
+2016-07-07 18:55:42,3064,-10.8091,65.9671
+2016-07-07 19:05:45,3061,-10.81,62.3027
+2016-07-07 19:15:48,3060,-10.81,62.6212
+2016-07-07 19:25:50,3058,-10.8091,62.4588
+2016-07-07 19:35:53,3058,-10.8091,62.4588
+2016-07-07 19:45:56,3058,-10.8406,62.3027
+2016-07-07 19:55:58,3057,-10.8091,62.3027
+2016-07-07 20:06:01,3055,-10.8406,61.8332
+2016-07-07 20:16:03,3053,-10.8406,61.8277
+2016-07-07 20:26:06,3052,-10.8406,61.3646
+2016-07-07 20:36:09,3051,-10.8396,61.3646
+2016-07-07 20:46:11,3050,-10.8396,61.3646
+2016-07-07 20:56:14,3049,-10.8712,61.2134
+2016-07-07 21:06:16,3047,-10.8712,61.0629
+2016-07-07 21:16:19,3045,-10.8712,60.9077
+2016-07-07 21:26:21,3043,-10.902,60.7532
+2016-07-07 21:36:24,3043,-10.902,60.7586
+2016-07-07 21:46:26,3042,-10.902,60.6047
+2016-07-07 21:56:29,3040,-10.902,60.6047
+2016-07-07 22:06:32,3039,-10.901,60.4569
+2016-07-07 22:16:34,3038,-10.901,60.3045
+2016-07-07 22:26:37,3037,-10.901,60.1526
+2016-07-07 22:36:39,3036,-10.9328,59.8616
+2016-07-07 22:46:41,3036,-10.9328,59.8616
+2016-07-07 22:56:44,3035,-10.9328,60.0122
+2016-07-07 23:06:46,3034,-10.9328,59.7171
+2016-07-07 23:16:49,3033,-10.9328,59.4246
+2016-07-07 23:26:51,3033,-10.9328,59.7171
+2016-07-07 23:36:54,3031,-10.9318,59.7171
+2016-07-07 23:46:56,3030,-10.9318,59.7171
+2016-07-07 23:56:58,3030,-10.9318,59.5732
+2016-07-08 00:07:01,3030,-10.9318,59.4246
+2016-07-08 00:17:03,3030,-10.9636,59.4299
+2016-07-08 00:27:06,3029,-10.9636,59.4246
+2016-07-08 00:37:08,3029,-10.9636,59.4246
+2016-07-08 00:47:11,3029,-10.9636,59.1347
+2016-07-08 00:57:13,3029,-10.9636,59.1347
+2016-07-08 01:07:15,3028,-10.9636,58.8472
+2016-07-08 01:17:25,3028,-10.9636,58.707
+2016-07-08 01:27:28,3027,-10.9636,58.8472
+2016-07-08 01:37:30,3026,-10.9636,58.707
+2016-07-08 01:47:33,3026,-10.9627,58.707
+2016-07-08 01:57:35,3026,-10.9636,58.8472
+2016-07-08 02:07:38,3026,-10.9946,58.5674
+2016-07-08 02:17:40,3025,-10.9946,58.5674
+2016-07-08 02:27:42,3025,-10.9946,58.5674
+2016-07-08 02:37:45,3024,-10.9946,58.5674
+2016-07-08 02:47:47,3024,-10.9946,58.5674
+2016-07-08 02:57:50,3023,-10.9946,58.707
+2016-07-08 03:07:52,3023,-11.0266,58.5674
+2016-07-08 03:17:55,3023,-10.9946,58.4232
+2016-07-08 03:27:57,3023,-11.0266,58.2848
+2016-07-08 03:37:59,3023,-11.0256,58.2848
+2016-07-08 03:48:02,3022,-11.0256,58.2848
+2016-07-08 03:58:04,3022,-11.0266,58.147
+2016-07-08 04:08:07,3022,-11.0266,58.147
+2016-07-08 04:18:09,3022,-11.0256,58.0046
+2016-07-08 04:28:12,3022,-11.0256,58.147
+2016-07-08 04:38:14,3022,-11.0256,58.147
+2016-07-08 04:48:17,3022,-11.0256,58.0046
+2016-07-08 04:58:19,3022,-11.0256,58.147
+2016-07-08 05:08:22,3022,-11.0256,58.0046
+2016-07-08 05:18:24,3022,-11.0256,57.7267
+2016-07-08 05:28:27,3021,-11.0256,57.9994
+2016-07-08 05:38:29,3021,-11.0256,57.5912
+2016-07-08 05:48:32,3021,-11.0256,57.4511
+2016-07-08 05:58:34,3022,-11.0256,57.4562
+2016-07-08 06:08:37,3022,-11.0266,57.4562
+2016-07-08 06:18:39,3022,-11.0256,57.4511
+2016-07-08 06:28:42,3022,-11.0266,57.7215
+2016-07-08 06:38:44,3022,-10.9946,57.58600000000001
+2016-07-08 06:48:47,3022,-10.9946,57.3167
+2016-07-08 06:58:49,3022,-10.9946,57.4511
+2016-07-08 07:08:52,3023,-10.9946,57.3167
+2016-07-08 07:18:54,3023,-10.9946,57.3116
+2016-07-08 07:28:57,3023,-10.9636,57.1829
+2016-07-08 07:38:59,3024,-10.9636,57.3167
+2016-07-08 07:49:02,3024,-10.9636,57.3167
+2016-07-08 07:59:04,3024,-10.9318,57.3167
+2016-07-08 08:09:07,3026,-10.9318,57.7267
+2016-07-08 08:19:09,3026,-10.9318,57.7267
+2016-07-08 08:29:12,3028,-10.9328,57.7267
+2016-07-08 08:39:14,3029,-10.901,57.4511
+2016-07-08 08:49:17,3029,-10.9328,57.7267
+2016-07-08 08:59:19,3030,-10.901,57.7267
+2016-07-08 09:09:22,3031,-10.8722,57.7215
+2016-07-08 09:19:25,3033,-10.9048,57.3218
+2016-07-08 09:29:28,3034,-10.8712,57.5963
+2016-07-08 09:39:30,3036,-10.902,57.4511
+2016-07-08 09:49:33,3037,-10.8712,57.4511
+2016-07-08 09:59:36,3038,-10.8712,57.4511
+2016-07-08 10:09:38,3040,-10.8406,57.8628
+2016-07-08 10:19:41,3042,-10.8406,57.8628
+2016-07-08 10:29:44,3043,-10.8406,58.2848
+2016-07-08 10:39:47,3043,-10.8406,57.8628
+2016-07-08 10:49:49,3046,-10.8091,58.0046
+2016-07-08 10:59:52,3048,-10.8091,58.1418
+2016-07-08 11:09:55,3049,-10.8091,58.0046
+2016-07-08 11:19:57,3050,-10.81,58.0046
+2016-07-08 11:30:00,3051,-10.81,58.147
+2016-07-08 11:40:03,3051,-10.81,58.147
+2016-07-08 11:50:05,3051,-10.7786,58.2848
+2016-07-08 12:00:08,3053,-10.7786,58.2848
+2016-07-08 12:10:11,3053,-10.7786,58.0046
+2016-07-08 12:20:14,3054,-10.7786,58.2848
+2016-07-08 12:30:16,3054,-10.7786,58.0046
+2016-07-08 12:40:19,3054,-10.7472,58.147
+2016-07-08 12:50:22,3055,-10.7472,58.0046
+2016-07-08 13:00:24,3055,-10.7472,57.9994
+2016-07-08 13:10:27,3055,-10.7472,57.9994
+2016-07-08 13:20:30,3055,-10.7786,57.7267
+2016-07-08 13:30:33,3055,-10.7786,57.7267
+2016-07-08 13:40:43,3055,-10.7472,57.8679
+2016-07-08 13:50:46,3054,-10.7472,57.7267
+2016-07-08 14:00:48,3054,-10.7472,57.5912
+2016-07-08 14:10:51,3054,-10.7472,57.7267
+2016-07-08 14:20:54,3053,-10.7472,57.7267
+2016-07-08 14:30:56,3053,-10.7472,57.3167
+2016-07-08 14:40:59,3053,-10.7472,57.4511
+2016-07-08 14:51:02,3054,-10.716,57.7267
+2016-07-08 15:01:05,3054,-10.7482,57.3167
+2016-07-08 15:11:08,3054,-10.716,57.5912
+2016-07-08 15:21:11,3054,-10.7169,57.58600000000001
+2016-07-08 15:31:13,3054,-10.7472,57.4511
+2016-07-08 15:41:16,3054,-10.7472,57.4511
+2016-07-08 15:51:20,3054,-10.716,57.4511
+2016-07-08 16:01:23,3054,-10.716,57.3167
+2016-07-08 16:11:25,3054,-10.7169,57.3167
+2016-07-08 16:21:28,3054,-10.716,57.1829
+2016-07-08 16:31:31,3054,-10.716,57.3167
+2016-07-08 16:41:34,3054,-10.7472,57.0445
+2016-07-08 16:51:36,3054,-10.716,57.1778
+2016-07-08 17:01:39,3054,-10.7169,57.1778
+2016-07-08 17:11:42,3053,-10.716,57.0445
+2016-07-08 17:21:45,3053,-10.7472,57.0445
+2016-07-08 17:31:48,3053,-10.7472,56.7746
+2016-07-08 17:41:51,3052,-10.7786,57.0395
+2016-07-08 17:51:53,3051,-10.7786,56.7746
+2016-07-08 18:01:56,3051,-10.7786,56.9068
+2016-07-08 18:11:59,3051,-10.7472,56.7746
+2016-07-08 18:22:02,3050,-10.7776,56.643
+2016-07-08 18:32:05,3049,-10.7463,56.643
+2016-07-08 18:42:08,3047,-10.7776,56.5068
+2016-07-08 18:52:10,3046,-10.7776,56.3763
+2016-07-08 19:02:13,3044,-10.7776,56.6379
+2016-07-08 19:12:16,3043,-10.8091,56.5068
+2016-07-08 19:22:19,3043,-10.8091,56.2413
+2016-07-08 19:32:21,3043,-10.8081,56.2463
+2016-07-08 19:42:24,3042,-10.8081,56.2463
+2016-07-08 19:52:27,3040,-10.8081,56.2463
+2016-07-08 20:02:30,3039,-10.8396,56.2463
+2016-07-08 20:12:33,3038,-10.8396,56.1118
+2016-07-08 20:22:35,3037,-10.8396,55.9778
+2016-07-08 20:32:38,3036,-10.8396,55.8494
+2016-07-08 20:42:40,3036,-10.8396,55.7214
+2016-07-08 20:52:43,3035,-10.8387,55.4621
+2016-07-08 21:02:45,3034,-10.8387,55.5891
+2016-07-08 21:12:48,3033,-10.8387,55.7214
+2016-07-08 21:22:51,3031,-10.8703,55.7165
+2016-07-08 21:32:53,3030,-10.8693,55.4621
+2016-07-08 21:42:56,3030,-10.901,55.0795
+2016-07-08 21:52:59,3029,-10.901,54.9546
+2016-07-08 22:03:02,3029,-10.901,54.9546
+2016-07-08 22:13:04,3028,-10.901,55.0795
+2016-07-08 22:23:07,3026,-10.9328,55.0795
+2016-07-08 22:33:10,3026,-10.8703,27.6485
+2016-07-08 22:43:12,3025,-10.902,28.0615
+2016-07-08 22:53:15,3024,-10.902,29.8685
+2016-07-08 23:03:17,3023,-10.901,30.3703
+2016-07-08 23:13:20,3022,-10.901,30.3162
+2016-07-08 23:23:22,3022,-10.901,30.1491
+2016-07-08 23:33:25,3020,-10.9328,30.6507
+2016-07-08 23:43:28,3019,-10.9328,30.6534
+2016-07-08 23:53:30,3017,-10.9318,30.6534
+2016-07-09 00:03:33,3015,-10.9318,30.7658
+2016-07-09 00:13:35,3015,-10.9318,30.7685
+2016-07-09 00:23:38,3014,-10.9318,30.7658
+2016-07-09 00:33:40,3014,-10.9318,30.774
+2016-07-09 00:43:43,3012,-10.9636,30.7713
+2016-07-09 00:53:45,3012,-10.9636,30.774
+2016-07-09 01:03:48,3010,-10.9636,30.7685
+2016-07-09 01:13:50,3010,-10.9318,30.829
+2016-07-09 01:23:53,3009,-10.9636,30.8841
+2016-07-09 01:33:55,3009,-10.9,30.8208
+2016-07-09 01:43:58,3007,-10.9946,8.0469
+2016-07-09 01:54:00,3007,-10.9946,9.9502
+2016-07-09 02:04:03,3007,-10.9946,30.3162
+2016-07-09 02:14:05,3007,-10.9936,30.4843
+2016-07-09 02:24:08,3005,-11.0256,10.6325
+2016-07-09 02:34:10,3005,-10.9936,30.4272
+2016-07-09 02:44:13,3005,-11.0256,30.4897
+2016-07-09 02:54:15,3005,-11.0256,30.7082
+2016-07-09 03:04:18,3003,-11.0948,30.4843
+2016-07-09 03:14:20,3002,-10.9946,30.7109
+2016-07-09 03:24:23,3002,-11.0256,30.7713
+2016-07-09 03:34:25,3002,-11.0256,30.7713
+2016-07-09 03:44:28,3002,-11.0256,30.7658
+2016-07-09 03:54:30,3002,-11.0256,30.487
+2016-07-09 04:04:33,3001,-11.0256,29.9297
+2016-07-09 04:14:35,3001,-11.0889,55.4621
+2016-07-09 04:24:38,3009,-10.9318,73.3577
+2016-07-09 04:34:40,3009,-10.9636,75.5838
+2016-07-09 04:44:43,3009,-10.9636,74.8987
+2016-07-09 04:54:45,3009,-10.9636,76.0385
+2016-07-09 05:04:48,3009,-10.9318,75.8105
+2016-07-09 05:14:50,3009,-10.9318,76.0453
+2016-07-09 05:24:52,3009,-10.9318,76.7448
+2016-07-09 05:34:55,3009,-10.9318,75.8105
+2016-07-09 05:44:57,3009,-10.9308,75.8172
+2016-07-09 05:55:00,3009,-10.9627,75.8105
+2016-07-09 06:05:02,3009,-10.9636,75.1346
+2016-07-09 06:15:05,3009,-10.9627,76.0453
+2016-07-09 06:25:07,3009,-10.9627,76.5056
+2016-07-09 06:35:10,3009,-10.9627,75.8172
+2016-07-09 06:45:12,3010,-10.9308,75.5771
+2016-07-09 06:55:15,3010,-10.9308,75.8105
+2016-07-09 07:05:17,3009,-10.9627,75.3519
+2016-07-09 07:15:20,3010,-10.9318,75.8105
+2016-07-09 07:25:22,3011,-10.9318,75.8105
+2016-07-09 07:35:25,3011,-10.9636,75.3519
+2016-07-09 07:45:27,3012,-10.9318,75.3519
+2016-07-09 07:55:30,3012,-10.9318,74.9054
+2016-07-09 08:05:33,3013,-10.9318,75.5838
+2016-07-09 08:15:35,3013,-10.9318,74.6841
+2016-07-09 08:25:38,3014,-10.9318,75.8105
+2016-07-09 08:35:40,3014,-10.9,75.5838
+2016-07-09 08:45:43,3017,-10.9328,76.0453
+2016-07-09 08:55:45,3018,-10.901,75.5838
+2016-07-09 09:05:48,3019,-10.901,74.9054
+2016-07-09 09:15:50,3020,-10.901,74.45100000000002
+2016-07-09 09:25:53,3021,-10.901,74.9054
+2016-07-09 09:35:55,3022,-10.8693,75.1279
+2016-07-09 09:45:58,3022,-10.8693,75.3585
+2016-07-09 09:56:00,3024,-10.8703,75.5838
+2016-07-09 10:06:03,3024,-10.8693,73.5777
+2016-07-09 10:16:05,3026,-10.8387,74.0151
+2016-07-09 10:26:08,3029,-10.8396,74.23899999999998
+2016-07-09 10:36:11,3029,-10.8387,73.79899999999998
+2016-07-09 10:46:13,3030,-10.8396,74.0151
+2016-07-09 10:56:16,3030,-10.8396,74.0151
+2016-07-09 11:06:19,3030,-10.8396,74.2324
+2016-07-09 11:16:21,3032,-10.8396,74.4576
+2016-07-09 11:26:24,3033,-10.8081,74.4576
+2016-07-09 11:36:27,3034,-10.8081,73.79899999999998
+2016-07-09 11:46:29,3034,-10.8091,74.45100000000002
+2016-07-09 11:56:32,3035,-10.7776,73.79899999999998
+2016-07-09 12:06:34,3036,-10.8091,74.4576
+2016-07-09 12:16:37,3036,-10.8091,74.6841
+2016-07-09 12:26:40,3037,-10.7776,74.6775
+2016-07-09 12:36:42,3037,-10.7776,74.23899999999998
+2016-07-09 12:46:45,3038,-10.7776,74.6841
+2016-07-09 12:56:48,3038,-10.7776,74.45100000000002
+2016-07-09 13:06:51,3039,-10.7463,74.6775
+2016-07-09 13:16:53,3039,-10.7463,75.3519
+2016-07-09 13:26:56,3039,-10.7463,74.6841
+2016-07-09 13:36:59,3040,-10.715,74.9054
+2016-07-09 13:47:02,3040,-10.7463,74.4576
+2016-07-09 13:57:04,3041,-10.7472,73.79899999999998
+2016-07-09 14:07:07,3043,-10.716,75.5838
+2016-07-09 14:17:10,3043,-10.716,74.0151
+2016-07-09 14:27:13,3043,-10.716,73.3642
+2016-07-09 14:37:16,3043,-10.716,73.5777
+2016-07-09 14:47:18,3043,-10.716,73.7925
+2016-07-09 14:57:21,3043,-10.7472,73.79899999999998
+2016-07-09 15:07:24,3043,-10.716,73.3642
+2016-07-09 15:17:27,3044,-10.6848,73.79899999999998
+2016-07-09 15:27:30,3044,-10.716,73.3642
+2016-07-09 15:37:32,3044,-10.6848,73.3642
+2016-07-09 15:47:35,3045,-10.6848,73.79899999999998
+2016-07-09 15:57:38,3045,-10.6848,74.0151
+2016-07-09 16:07:41,3046,-10.6848,74.0216
+2016-07-09 16:17:43,3046,-10.716,73.79899999999998
+2016-07-09 16:27:46,3045,-10.716,73.3642
+2016-07-09 16:37:49,3045,-10.716,72.516
+2016-07-09 16:47:52,3044,-10.716,71.8844
+2016-07-09 16:57:54,3044,-10.716,72.5095
+2016-07-09 17:07:57,3044,-10.716,72.5095
+2016-07-09 17:18:00,3044,-10.716,72.0895
+2016-07-09 17:28:03,3044,-10.716,72.2957
+2016-07-09 17:38:06,3044,-10.716,72.0959
+2016-07-09 17:48:08,3043,-10.716,72.3021
+2016-07-09 17:58:11,3043,-10.716,72.0895
+2016-07-09 18:08:14,3043,-10.715,71.6742
+2016-07-09 18:18:17,3043,-10.715,71.0693
+2016-07-09 18:28:19,3043,-10.7463,71.4714
+2016-07-09 18:38:22,3041,-10.7463,71.0693
+2016-07-09 18:48:25,3040,-10.7463,70.8637
+2016-07-09 18:58:28,3039,-10.7463,70.2659
+2016-07-09 19:08:30,3038,-10.7463,70.2659
+2016-07-09 19:18:32,3037,-10.7776,69.4798
+2016-07-09 19:28:35,3037,-10.7767,69.289
+2016-07-09 19:38:38,3036,-10.7767,68.5176
+2016-07-09 19:48:40,3035,-10.7767,68.8982
+2016-07-09 19:58:43,3034,-10.8081,68.3318
+2016-07-09 20:08:46,3032,-10.8081,67.9511
+2016-07-09 20:18:48,3030,-10.8072,67.9511
+2016-07-09 20:28:51,3029,-10.8072,67.7683
+2016-07-09 20:38:54,3029,-10.8072,67.7683
+2016-07-09 20:48:56,3027,-10.8072,67.5864
+2016-07-09 20:58:59,3026,-10.8072,67.7683
+2016-07-09 21:09:02,3025,-10.8377,67.5864
+2016-07-09 21:19:04,3023,-10.8377,67.2194
+2016-07-09 21:29:07,3023,-10.8377,67.0403
+2016-07-09 21:39:10,3022,-10.8377,67.0403
+2016-07-09 21:49:12,3022,-10.8377,66.8562
+2016-07-09 21:59:15,3021,-10.8377,66.8622
+2016-07-09 22:09:18,3019,-10.8684,66.679
+2016-07-09 22:19:20,3018,-10.8693,66.679
+2016-07-09 22:29:23,3016,-10.8684,67.2194
+2016-07-09 22:39:26,3015,-10.8684,66.679
+2016-07-09 22:49:28,3015,-10.9,66.679
+2016-07-09 22:59:31,3014,-10.9,66.8562
+2016-07-09 23:09:34,3014,-10.9,67.0403
+2016-07-09 23:19:36,3014,-10.9,66.4967
+2016-07-09 23:29:39,3012,-10.9,65.806
+2016-07-09 23:39:42,3012,-10.9,66.1584
+2016-07-09 23:49:44,3011,-10.9,66.1584
+2016-07-09 23:59:47,3010,-10.9,65.9847
+2016-07-10 00:09:49,3010,-10.8991,65.9671
+2016-07-10 00:19:52,3009,-10.8991,65.6165
+2016-07-10 00:29:55,3008,-10.8991,65.1
+2016-07-10 00:39:57,3008,-10.9308,65.1173
+2016-07-10 00:50:00,3008,-10.9308,64.7694
+2016-07-10 01:00:02,3007,-10.9299,65.2868
+2016-07-10 01:10:05,3007,-10.9299,64.9314
+2016-07-10 01:20:08,3006,-10.9617,64.6082
+2016-07-10 01:30:10,3006,-10.9617,64.9429
+2016-07-10 01:40:13,3005,-10.9617,65.2752
+2016-07-10 01:50:15,3005,-10.9617,65.4629
+2016-07-10 02:00:18,3004,-10.9617,65.2868
+2016-07-10 02:10:21,3002,-10.9617,64.271
+2016-07-10 02:20:23,3002,-10.9617,64.1064
+2016-07-10 02:30:26,3002,-10.9617,65.634
+2016-07-10 02:40:28,3001,-10.9617,65.1115
+2016-07-10 02:50:31,3001,-10.9617,64.7751
+2016-07-10 03:00:33,3001,-10.9617,64.7751
+2016-07-10 03:10:36,3000,-10.9617,64.4363
+2016-07-10 03:20:39,3000,-10.9926,63.2785
+2016-07-10 03:30:41,2999,-10.9926,63.2785
+2016-07-10 03:40:44,2999,-10.9926,63.937
+2016-07-10 03:50:46,2999,-10.9926,63.937
+2016-07-10 04:00:49,2997,-11.0247,62.9539
+2016-07-10 04:10:53,2997,-10.9926,63.2785
+2016-07-10 04:20:56,2997,-11.0247,62.9539
+2016-07-10 04:30:59,2997,-11.0247,63.6062
+2016-07-10 04:41:01,2996,-11.0247,63.1186
+2016-07-10 04:51:04,2995,-11.0247,63.1186
+2016-07-10 05:01:07,2994,-11.0247,62.6379
+2016-07-10 05:11:09,2994,-11.0247,62.9539
+2016-07-10 05:21:12,2994,-11.0247,62.4755
+2016-07-10 05:31:14,2993,-11.0247,62.0036
+2016-07-10 05:41:17,2993,-11.0247,62.3138
+2016-07-10 05:51:20,2993,-11.0237,62.1583
+2016-07-10 06:01:22,2993,-11.0247,61.8496
+2016-07-10 06:11:25,2993,-11.0247,62.1583
+2016-07-10 06:21:27,2993,-11.0247,61.8441
+2016-07-10 06:31:30,2993,-10.9926,61.8441
+2016-07-10 06:41:32,2993,-10.9926,62.1583
+2016-07-10 06:51:35,2993,-10.9926,62.0036
+2016-07-10 07:01:38,2993,-10.9607,62.3138
+2016-07-10 07:11:40,2993,-10.9607,61.8441
+2016-07-10 07:21:43,2994,-10.9617,61.6908
+2016-07-10 07:31:46,2994,-10.9299,62.0036
+2016-07-10 07:41:48,2995,-10.9617,62.1583
+2016-07-10 07:51:51,2996,-10.9617,62.3138
+2016-07-10 08:01:54,2998,-10.9299,62.481
+2016-07-10 08:11:56,2998,-10.9299,61.5328
+2016-07-10 08:21:59,3000,-10.9299,62.1583
+2016-07-10 08:32:02,3001,-10.9308,62.3138
+2016-07-10 08:42:05,3002,-10.9308,61.8441
+2016-07-10 08:52:07,3004,-10.9,62.4755
+2016-07-10 09:02:10,3006,-10.9,62.7955
+2016-07-10 09:12:13,3007,-10.9,62.9539
+2016-07-10 09:22:16,3008,-10.9,62.0036
+2016-07-10 09:32:18,3009,-10.9,62.3193
+2016-07-10 09:42:21,3011,-10.8693,62.3138
+2016-07-10 09:52:24,3013,-10.8377,62.6323
+2016-07-10 10:02:27,3014,-10.8377,62.9707
+2016-07-10 10:12:30,3015,-10.8081,62.3138
+2016-07-10 10:22:32,3016,-10.8081,62.6379
+2016-07-10 10:32:35,3019,-10.8081,62.9539
+2016-07-10 10:42:38,3021,-10.7767,62.649
+2016-07-10 10:52:41,3023,-10.7767,63.6118
+2016-07-10 11:02:44,3024,-10.7463,62.4755
+2016-07-10 11:12:47,3026,-10.7463,63.2785
+2016-07-10 11:22:50,3029,-10.7463,63.937
+2016-07-10 11:32:53,3029,-10.7463,61.8496
+2016-07-10 11:42:56,3030,-10.715,63.95399999999999
+2016-07-10 11:52:59,3032,-10.6848,63.7853
+2016-07-10 12:03:02,3034,-10.6848,63.1354
+2016-07-10 12:13:05,3036,-10.6848,62.9707
+2016-07-10 12:23:08,3037,-10.6857,63.1354
+2016-07-10 12:33:11,3038,-10.6546,63.1354
+2016-07-10 12:43:14,3039,-10.6848,62.8123
+2016-07-10 12:53:17,3042,-10.6546,63.4448
+2016-07-10 13:03:20,3043,-10.6546,62.9707
+2016-07-10 13:13:23,3046,-10.6245,63.2954
+2016-07-10 13:23:26,3047,-10.6245,62.8123
+2016-07-10 13:33:29,3048,-10.6245,63.1186
+2016-07-10 13:43:32,3047,-10.6245,62.0036
+2016-07-10 13:53:35,3048,-10.5935,62.9539
+2016-07-10 14:03:38,3048,-10.5935,63.1354
+2016-07-10 14:13:41,3048,-10.5945,63.7853
+2016-07-10 14:23:44,3048,-10.5945,63.1354
+2016-07-10 14:33:48,3049,-10.5626,63.1354
+2016-07-10 14:43:51,3050,-10.5636,63.456
+2016-07-10 14:53:54,3050,-10.5636,63.1354
+2016-07-10 15:03:57,3050,-10.5636,62.4921
+2016-07-10 15:14:00,3050,-10.5636,63.6062
+2016-07-10 15:24:03,3050,-10.5636,62.4977
+2016-07-10 15:34:07,3051,-10.5636,62.8123
+2016-07-10 15:44:10,3051,-10.5327,63.6231
+2016-07-10 15:54:13,3051,-10.5636,63.456
+2016-07-10 16:04:16,3051,-10.5636,63.2954
+2016-07-10 16:14:19,3051,-10.5636,62.8123
+2016-07-10 16:24:22,3052,-10.5327,63.7853
+2016-07-10 16:34:25,3051,-10.5327,62.8123
+2016-07-10 16:44:29,3052,-10.5327,63.9483
+2016-07-10 16:54:32,3051,-10.5636,62.4921
+2016-07-10 17:04:35,3051,-10.5636,62.8123
+2016-07-10 17:14:38,3051,-10.5636,62.3304
+2016-07-10 17:24:41,3051,-10.5636,62.8123
+2016-07-10 17:34:44,3050,-10.5636,61.8606
+2016-07-10 17:44:47,3050,-10.5636,62.4866
+2016-07-10 17:54:51,3048,-10.5626,61.7073
+2016-07-10 18:04:54,3048,-10.5636,62.8123
+2016-07-10 18:14:57,3047,-10.5626,62.4921
+2016-07-10 18:25:00,3045,-10.5626,62.3304
+2016-07-10 18:35:03,3044,-10.5626,62.1749
+2016-07-10 18:45:06,3043,-10.5626,62.8123
+2016-07-10 18:55:09,3043,-10.5935,62.6546
+2016-07-10 19:05:12,3042,-10.5935,61.8661
+2016-07-10 19:15:15,3040,-10.5935,61.5492
+2016-07-10 19:25:18,3039,-10.5926,61.7073
+2016-07-10 19:35:21,3037,-10.5926,61.3918
+2016-07-10 19:45:25,3037,-10.6235,61.09
+2016-07-10 19:55:28,3036,-10.6235,61.09
+2016-07-10 20:05:31,3035,-10.6226,61.4082
+2016-07-10 20:15:34,3033,-10.6226,61.09
+2016-07-10 20:25:37,3031,-10.6226,61.1063
+2016-07-10 20:35:40,3030,-10.6226,60.8018
+2016-07-10 20:45:43,3029,-10.6226,60.951
+2016-07-10 20:55:46,3029,-10.6536,60.7856
+2016-07-10 21:05:49,3026,-10.6536,61.1063
+2016-07-10 21:15:51,3030,-10.6226,61.1063
+2016-07-10 21:25:54,3029,-10.6536,60.8018
+2016-07-10 21:35:57,3028,-10.6838,60.4999
+2016-07-10 21:46:00,3025,-10.6838,59.9042
+2016-07-10 21:56:03,3024,-10.6838,60.2007
+2016-07-10 22:06:06,3023,-10.6838,59.9095
+2016-07-10 22:16:09,3022,-10.6838,59.9095
+2016-07-10 22:26:11,3022,-10.6838,59.7596
+2016-07-10 22:36:14,3021,-10.6838,59.7596
+2016-07-10 22:46:16,3020,-10.715,59.7596
+2016-07-10 22:56:19,3018,-10.7141,59.7596
+2016-07-10 23:06:22,3017,-10.7141,60.0548
+2016-07-10 23:16:25,3015,-10.7141,59.4669
+2016-07-10 23:26:27,3015,-10.7141,59.4669
+2016-07-10 23:36:30,3014,-10.7141,59.182
+2016-07-10 23:46:33,3014,-10.7141,60.3473
+2016-07-10 23:56:36,3013,-10.7453,59.7596
+2016-07-11 00:06:39,3012,-10.7444,59.1767
+2016-07-11 00:16:41,3011,-10.7453,60.2007
+2016-07-11 00:26:44,3010,-10.7453,59.7596
+2016-07-11 00:36:47,3009,-10.7444,59.4669
+2016-07-11 00:46:50,3009,-10.7757,59.182
+2016-07-11 00:56:53,3008,-10.7444,59.4669
+2016-07-11 01:06:56,3008,-10.7444,59.4669
+2016-07-11 01:16:58,3007,-10.7757,59.182
+2016-07-11 01:27:01,3007,-10.7748,59.0352
+2016-07-11 01:37:04,3007,-10.7757,59.7596
+2016-07-11 01:47:07,3006,-10.7748,59.9042
+2016-07-11 01:57:09,3006,-10.7757,60.2007
+2016-07-11 02:07:12,3005,-10.7748,59.4669
+2016-07-11 02:17:15,3004,-10.8062,59.7596
+2016-07-11 02:27:17,3002,-10.8062,59.6155
+2016-07-11 02:37:20,3000,-10.8062,58.8943
+2016-07-11 02:47:23,3000,-10.8062,59.6155
+2016-07-11 02:57:26,3000,-10.8062,59.6155
+2016-07-11 03:07:29,2999,-10.8062,59.7596
+2016-07-11 03:17:31,2998,-10.8053,59.6155
+2016-07-11 03:27:34,2997,-10.8053,59.4669
+2016-07-11 03:37:37,2996,-10.8368,59.4669
+2016-07-11 03:47:40,2996,-10.8368,59.6155
+2016-07-11 03:57:42,2995,-10.8368,59.7596
+2016-07-11 04:07:45,2994,-10.8368,59.1767
+2016-07-11 04:17:47,2994,-10.8368,59.7596
+2016-07-11 04:27:50,2994,-10.8368,59.4669
+2016-07-11 04:37:53,2993,-10.8368,59.6102
+2016-07-11 04:47:55,2993,-10.8368,59.0352
+2016-07-11 04:57:58,2992,-10.8684,59.182
+2016-07-11 05:08:01,2992,-10.8368,59.4669
+2016-07-11 05:18:03,2992,-10.8368,59.3241
+2016-07-11 05:28:06,2992,-10.8368,59.9202
+2016-07-11 05:38:09,2992,-10.8684,59.4669
+2016-07-11 05:48:11,2991,-10.8368,59.3347
+2016-07-11 05:58:14,2991,-10.8368,59.7755
+2016-07-11 06:08:17,2989,-10.8684,59.0509
+2016-07-11 06:18:19,2989,-10.8684,59.1925
+2016-07-11 06:28:29,2988,-10.8684,59.1925
+2016-07-11 06:38:32,2988,-10.8684,59.3347
+2016-07-11 06:48:34,2987,-10.8684,59.0457
+2016-07-11 06:58:37,2987,-10.8368,58.6247
+2016-07-11 07:08:39,2987,-10.8368,59.4827
+2016-07-11 07:18:42,2988,-10.8368,59.6314
+2016-07-11 07:28:46,2988,-10.8368,59.7755
+2016-07-11 07:38:50,2988,-10.8368,59.9202
+2016-07-11 07:48:52,2988,-10.8368,59.9202
+2016-07-11 07:58:55,2988,-10.8368,60.2168
+2016-07-11 08:08:58,2988,-10.8368,60.2168
+2016-07-11 08:19:01,2989,-10.8368,60.3634
+2016-07-11 08:29:03,2988,-10.8368,59.7755
+2016-07-11 08:39:06,2990,-10.8368,59.9202
+2016-07-11 08:49:09,2989,-10.8368,59.7755
+2016-07-11 08:59:11,2990,-10.8368,59.9202
+2016-07-11 09:09:14,2990,-10.8053,59.3347
+2016-07-11 09:19:17,2991,-10.8053,59.6261
+2016-07-11 09:29:20,2992,-10.8062,59.9202
+2016-07-11 09:39:22,2992,-10.8053,60.2168
+2016-07-11 09:49:25,2992,-10.8062,59.4933
+2016-07-11 09:59:28,2993,-10.8062,59.7861
+2016-07-11 10:09:30,2993,-10.7748,60.3634
+2016-07-11 10:19:33,2994,-10.7748,60.3795
+2016-07-11 10:29:36,2994,-10.7748,60.6801
+2016-07-11 10:39:39,2995,-10.7444,60.5322
+2016-07-11 10:49:42,2996,-10.7444,60.2328
+2016-07-11 10:59:44,2997,-10.7444,60.2328
+2016-07-11 11:09:47,2998,-10.7453,59.7914
+2016-07-11 11:19:49,3000,-10.7453,60.5322
+2016-07-11 11:30:00,3001,-10.7453,60.8288
+2016-07-11 11:40:03,3001,-10.7453,60.6748
+2016-07-11 11:50:06,3002,-10.7141,60.5322
+2016-07-11 12:00:09,3002,-10.7453,61.2896
+2016-07-11 12:10:12,3002,-10.7453,60.5268
+2016-07-11 12:20:15,3004,-10.7453,61.128
+2016-07-11 12:30:17,3005,-10.7141,61.1389
+2016-07-11 12:40:20,3005,-10.7141,60.8288
+2016-07-11 12:50:23,3005,-10.7141,60.9835
+2016-07-11 13:00:26,3005,-10.7141,60.5322
+2016-07-11 13:10:29,3005,-10.7141,60.5268
+2016-07-11 13:20:32,3006,-10.7141,61.1335
+2016-07-11 13:30:35,3005,-10.7453,60.6963
+2016-07-11 13:40:38,3005,-10.7453,60.3956
+2016-07-11 13:50:41,3005,-10.7453,60.3956
+2016-07-11 14:00:44,3003,-10.7141,60.2328
+2016-07-11 14:10:47,3003,-10.7141,60.3795
+2016-07-11 14:20:50,3002,-10.7141,60.2328
+2016-07-11 14:30:53,3002,-10.7141,60.8288
+2016-07-11 14:40:56,3002,-10.7141,60.5429
+2016-07-11 14:50:59,3002,-10.7141,60.3795
+2016-07-11 15:01:02,3002,-10.7453,60.3956
+2016-07-11 15:11:05,3002,-10.7141,60.9943
+2016-07-11 15:21:08,3001,-10.7453,60.9943
+2016-07-11 15:31:11,3001,-10.7453,60.8288
+2016-07-11 15:41:14,3001,-10.7141,60.9997
+2016-07-11 15:51:17,3000,-10.7141,60.845
+2016-07-11 16:01:20,2998,-10.7444,60.0975
+2016-07-11 16:11:23,2998,-10.7444,60.2435
+2016-07-11 16:21:26,2997,-10.7444,59.802
+2016-07-11 16:31:29,2996,-10.7444,59.2188
+2016-07-11 16:41:31,2996,-10.7444,59.6526
+2016-07-11 16:51:34,2996,-10.7444,59.9468
+2016-07-11 17:01:37,2996,-10.7131,59.9521
+2016-07-11 17:11:40,2996,-10.7444,59.9468
+2016-07-11 17:21:42,2996,-10.7434,60.3956
+2016-07-11 17:31:46,2995,-10.7434,60.2489
+2016-07-11 17:41:49,2994,-10.7434,60.0975
+2016-07-11 17:51:52,2994,-10.7748,59.6526
+2016-07-11 18:01:55,2993,-10.7748,60.0975
+2016-07-11 18:11:58,2992,-10.8062,59.6579
+2016-07-11 18:22:01,2992,-10.8062,60.0975
+2016-07-11 18:32:04,2991,-10.8062,60.5429
+2016-07-11 18:42:07,2990,-10.8062,60.2435
+2016-07-11 18:52:10,2988,-10.8053,59.9468
+2016-07-11 19:02:13,2987,-10.8053,59.802
+2016-07-11 19:12:15,2986,-10.8053,59.802
+2016-07-11 19:22:18,2985,-10.8368,59.5091
+2016-07-11 19:32:21,2983,-10.8368,59.6579
+2016-07-11 19:42:24,2982,-10.8358,59.9468
+2016-07-11 19:52:27,2980,-10.8358,59.5091
+2016-07-11 20:02:30,2980,-10.8674,59.0772
+2016-07-11 20:12:33,2979,-10.8664,58.9361
+2016-07-11 20:22:35,2978,-10.8664,59.224
+2016-07-11 20:32:38,2975,-10.8981,58.5063
+2016-07-11 20:42:41,2974,-10.8981,59.2188
+2016-07-11 20:52:44,2973,-10.8981,58.6455
+2016-07-11 21:02:46,2971,-10.9289,58.2297
+2016-07-11 21:12:49,2969,-10.9289,57.2642
+2016-07-11 21:22:52,2967,-10.9289,57.2591
+2016-07-11 21:32:54,2967,-10.9289,58.0871
+2016-07-11 21:42:57,2965,-10.9289,57.5328
+2016-07-11 21:53:00,2963,-10.9598,57.2642
+2016-07-11 22:03:03,2963,-10.9598,57.6679
+2016-07-11 22:13:05,2960,-10.9598,57.5328
+2016-07-11 22:23:08,2958,-10.9598,57.6731
+2016-07-11 22:33:11,2958,-10.9598,57.5328
+2016-07-11 22:43:14,2956,-10.9598,57.5328
+2016-07-11 22:53:16,2954,-11.0227,56.8553
+2016-07-11 23:03:19,2953,-10.9907,57.2591
+2016-07-11 23:13:22,2952,-11.0217,56.3263
+2016-07-11 23:23:25,2951,-11.0217,57.1257
+2016-07-11 23:33:27,2949,-11.0217,55.6681
+2016-07-11 23:43:30,2949,-11.0217,56.1916
+2016-07-11 23:53:33,2947,-11.0538,55.8007
+2016-07-12 01:03:51,2941,-11.0528,56.1916
+2016-07-12 01:13:53,2941,-11.0528,56.3212
+2016-07-12 01:23:56,2940,-11.085,56.1916
+2016-07-12 01:33:58,2939,-11.085,55.541
+2016-07-12 01:44:00,2938,-11.0538,30.457
+2016-07-12 01:54:03,2938,-11.085,55.8007
+2016-07-12 02:04:06,2937,-11.085,55.541
+2016-07-12 02:14:08,2936,-11.084,55.4095
+2016-07-12 02:24:11,2936,-11.0538,28.1438
+2016-07-12 02:34:13,2935,-11.1163,55.4095
+2016-07-12 02:44:16,2934,-11.0538,30.5659
+2016-07-12 02:54:18,2934,-11.1163,55.541
+2016-07-12 03:04:21,2934,-11.1163,55.4095
+2016-07-12 03:14:24,2933,-11.0528,30.2918
+2016-07-12 03:24:26,2932,-11.085,28.0416
+2016-07-12 03:34:29,2932,-11.085,30.3485
+2016-07-12 03:44:31,2931,-11.085,30.6915
+2016-07-12 03:54:34,2931,-11.085,29.4749
+2016-07-12 04:04:37,2931,-11.085,27.5407
+2016-07-12 04:14:39,2930,-11.085,30.6915
+2016-07-12 04:24:42,2930,-11.1173,30.5794
+2016-07-12 04:34:44,2928,-11.1163,30.6942
+2016-07-12 04:44:47,2927,-11.1173,28.2565
+2016-07-12 04:54:49,2927,-11.1163,30.6942
+2016-07-12 05:04:52,2926,-11.1487,30.6942
+2016-07-12 05:14:54,2926,-11.1487,30.697
+2016-07-12 05:24:57,2925,-11.1163,30.6942
+2016-07-12 05:34:59,2924,-11.1487,30.7518
+2016-07-12 05:45:02,2924,-11.1487,30.6942
+2016-07-12 05:55:04,2924,-11.1487,30.6997
+2016-07-12 06:05:07,2924,-11.1487,30.7546
+2016-07-12 06:15:09,2923,-11.1487,30.7546
+2016-07-12 06:25:12,2923,-11.1487,30.7546
+2016-07-12 06:35:14,2922,-11.1487,30.7546
+2016-07-12 06:45:17,2922,-11.1487,30.8095
+2016-07-12 06:55:19,2921,-11.1487,29.8603
+2016-07-12 07:05:22,2921,-11.1487,30.4705
+2016-07-12 07:15:24,2921,-11.1487,30.2485
+2016-07-12 07:25:27,2920,-11.1487,30.3566
+2016-07-12 07:35:29,2919,-11.1487,29.9136
+2016-07-12 07:45:32,2919,-11.1163,30.3025
+2016-07-12 07:55:34,2919,-11.1163,30.5821
+2016-07-12 08:05:37,2920,-11.084,30.0257
+2016-07-12 08:15:39,2920,-11.1163,30.5767
+2016-07-12 08:25:42,2920,-11.1467,55.4243
+2016-07-12 08:35:44,2921,-11.1477,55.9437
+2016-07-12 08:45:47,2923,-11.1477,55.6879
+2016-07-12 08:55:50,2923,-11.1163,56.4715
+2016-07-12 09:05:52,2924,-11.1163,55.8155
+2016-07-12 09:16:02,2924,-11.1487,55.6879
+2016-07-12 09:26:05,2926,-11.1163,55.8007
+2016-07-12 09:36:07,2927,-11.084,56.0774
+2016-07-12 09:46:10,2928,-11.085,56.4565
+2016-07-12 09:56:13,2930,-11.0538,29.9056
+2016-07-12 10:06:15,2931,-11.085,55.4243
+2016-07-12 10:16:18,2931,-11.0528,55.8155
+2016-07-12 10:26:21,2934,-11.085,55.8155
+2016-07-12 10:36:23,2936,-11.085,56.3363
+2016-07-12 10:46:27,2937,-11.085,55.9437
+2016-07-12 10:56:31,2937,-11.085,55.9437
+2016-07-12 11:06:34,2938,-11.085,56.2065
+2016-07-12 11:16:36,2939,-11.0528,56.2065
+2016-07-12 11:26:39,2939,-11.0538,56.3363
+2016-07-12 11:36:42,2940,-11.0538,56.2065
+2016-07-12 11:46:44,2941,-11.0538,56.8705
+2016-07-12 11:56:47,2941,-11.0538,56.2065
+2016-07-12 12:06:50,2941,-11.0217,56.7336
+2016-07-12 12:16:52,2943,-11.0217,57.2744
+2016-07-12 12:26:55,2943,-11.0217,56.7336
+2016-07-12 12:36:58,2944,-11.0217,56.4665
+2016-07-12 12:47:01,2945,-10.9907,56.8705
+2016-07-12 12:57:03,2945,-11.0217,56.0774
+2016-07-12 13:07:06,2945,-11.0227,56.7336
+2016-07-12 13:17:09,2946,-10.9907,57.0079
+2016-07-12 13:27:12,2946,-10.9907,56.4665
+2016-07-12 13:37:15,2946,-10.9907,56.3363
+2016-07-12 13:47:18,2946,-10.9897,56.6023
+2016-07-12 13:57:20,2946,-10.9907,56.0724
+2016-07-12 14:07:23,2946,-10.9598,56.2065
+2016-07-12 14:17:26,2946,-10.9588,56.3363
+2016-07-12 14:27:29,2947,-10.9598,56.4715
+2016-07-12 14:37:32,2948,-10.9588,56.6023
+2016-07-12 14:47:35,2949,-10.9588,56.2065
+2016-07-12 14:57:37,2949,-10.9598,57.0029
+2016-07-12 15:07:40,2949,-10.9598,56.3363
+2016-07-12 15:17:43,2949,-10.9598,56.8654
+2016-07-12 15:27:46,2949,-10.9598,56.0774
+2016-07-12 15:37:49,2949,-10.9598,55.9437
+2016-07-12 15:47:52,2949,-10.9588,55.6879
+2016-07-12 15:57:55,2948,-10.9588,55.8155
+2016-07-12 16:07:58,2947,-10.9588,55.683
+2016-07-12 16:18:01,2947,-10.9588,55.8007
+2016-07-12 16:28:04,2946,-10.9907,55.1676
+2016-07-12 16:38:06,2946,-10.9279,28.0939
+2016-07-12 16:48:09,2945,-10.9588,55.2982
+2016-07-12 16:58:12,2945,-10.9279,27.4845
+2016-07-12 17:08:15,2944,-10.9279,29.4696
+2016-07-12 17:18:18,2943,-10.9279,27.6362
+2016-07-12 17:28:21,2942,-10.9897,55.4243
+2016-07-12 17:38:24,2941,-11.0217,55.4243
+2016-07-12 17:48:27,2940,-10.9598,29.4696
+2016-07-12 17:58:30,2939,-10.9598,28.1488
+2016-07-12 18:08:33,2938,-10.9588,28.2064
+2016-07-12 18:18:36,2937,-11.0217,55.683
+2016-07-12 18:28:38,2935,-11.0528,55.4243
+2016-07-12 18:38:41,2934,-11.0528,55.4243
+2016-07-12 18:48:44,2932,-11.0528,55.683
+2016-07-12 18:58:47,2931,-11.0528,55.5509
+2016-07-12 19:08:50,2929,-11.0217,27.4845
+2016-07-12 19:18:53,2927,-11.0519,55.683
+2016-07-12 19:28:56,2926,-11.0519,55.9437
+2016-07-12 19:38:58,2924,-11.084,55.4243
+2016-07-12 19:49:01,2923,-11.084,55.4243
+2016-07-12 19:59:03,2921,-11.084,55.683
+2016-07-12 20:09:06,2920,-11.084,57.0079
+2016-07-12 20:19:09,2918,-11.084,56.6023
+2016-07-12 20:29:11,2918,-11.0831,56.7336
+2016-07-12 20:39:14,2915,-11.1153,56.3363
+2016-07-12 20:49:17,2913,-11.1143,56.0774
+2016-07-12 20:59:19,2912,-11.1467,56.2065
+2016-07-12 21:09:22,2911,-11.1467,55.8155
+2016-07-12 21:19:25,2910,-11.1467,56.2065
+2016-07-12 21:29:27,2908,-11.1467,56.3363
+2016-07-12 21:39:30,2907,-11.1467,56.8654
+2016-07-12 21:49:33,2905,-11.1457,56.8654
+2016-07-12 21:59:35,2904,-11.1782,57.1409
+2016-07-12 22:09:38,2903,-11.1782,57.1409
+2016-07-12 22:19:41,2901,-11.2107,55.6879
+2016-07-12 22:29:43,2898,-11.2097,55.4243
+2016-07-12 22:39:46,2898,-11.1782,29.741
+2016-07-12 22:49:48,2897,-11.1782,30.5767
+2016-07-12 22:59:51,2896,-11.1782,30.023000000000003
+2016-07-12 23:09:54,2894,-11.2107,30.6942
+2016-07-12 23:19:56,2892,-11.2107,30.7546
+2016-07-12 23:29:59,2890,-11.2097,30.7546
+2016-07-12 23:40:01,2890,-11.2097,30.5849
+2016-07-12 23:50:04,2888,-11.2423,30.7573
+2016-07-13 00:00:07,2886,-11.2097,30.2512
+2016-07-13 00:10:09,2883,-11.2423,30.7546
+2016-07-13 00:20:12,2883,-11.2751,30.7573
+2016-07-13 00:30:15,2882,-11.2741,30.7024
+2016-07-13 00:40:17,2881,-11.3378,55.8155
+2016-07-13 00:50:20,2880,-11.3059,11.3113
+2016-07-13 01:00:22,2877,-11.3059,30.593000000000004
+2016-07-13 01:10:25,2875,-11.3378,-4.1119
+2016-07-13 01:20:27,2874,-11.3388,-4.862
+2016-07-13 01:30:30,2873,-11.3708,-4.5589
+2016-07-13 01:40:32,2870,-11.3708,-4.6664
+2016-07-13 01:50:35,2869,-11.3708,6.8695
+2016-07-13 02:00:37,2869,-11.3698,2.2482
+2016-07-13 02:10:40,2868,-11.3708,-4.1484
+2016-07-13 02:20:43,2866,-11.4029,2.0934
+2016-07-13 02:30:45,2866,-11.3708,-4.3851
+2016-07-13 02:40:47,2864,-11.4039,-4.6442
+2016-07-13 02:50:50,2863,-11.4029,-4.8814
+2016-07-13 03:00:52,2862,-11.4029,-4.6883
+2016-07-13 03:10:55,2862,-11.4351,6.9868
+2016-07-13 03:20:57,2861,-11.4019,-3.997
+2016-07-13 03:31:00,2860,-11.4351,-4.3210000000000015
+2016-07-13 03:41:02,2859,-11.4351,-4.6219
+2016-07-13 03:51:05,2857,-11.4351,-4.923
+2016-07-13 04:01:07,2857,-11.4351,-4.7955
+2016-07-13 04:11:10,2855,-11.4341,1.6703
+2016-07-13 04:21:13,2854,-11.4341,-4.4702
+2016-07-13 04:31:15,2854,-11.4351,-4.7085
+2016-07-13 04:41:18,2854,-11.4341,-4.774
+2016-07-13 04:51:20,2854,-11.4674,-4.817
+2016-07-13 05:01:22,2853,-11.4674,-4.7951
+2016-07-13 05:11:24,2852,-11.4674,-4.7946
+2016-07-13 05:21:27,2851,-11.4674,-4.7942
+2016-07-13 05:31:29,2850,-11.4674,-4.7942
+2016-07-13 05:41:32,2849,-11.4674,-4.7942
+2016-07-13 05:51:34,2848,-11.4674,-4.7942
+2016-07-13 06:01:36,2847,-11.4674,-4.8157
+2016-07-13 06:11:39,2847,-11.4674,-4.7942
+2016-07-13 06:21:41,2847,-11.4674,-4.7938
+2016-07-13 06:31:44,2847,-11.4674,-4.7715
+2016-07-13 06:41:46,2847,-11.4341,-4.7719
+2016-07-13 06:51:49,2847,-11.4341,-4.7278
+2016-07-13 07:01:51,2847,-11.4341,-4.7282
+2016-07-13 07:11:54,2847,-11.4341,-4.7496
+2016-07-13 07:21:56,2847,-11.4351,-4.7492
+2016-07-13 07:31:59,2847,-11.4341,-4.7278
+2016-07-13 07:42:02,2847,-11.4351,-4.7711
+2016-07-13 07:52:04,2847,-11.4341,-4.7929
+2016-07-13 08:02:07,2847,-11.4341,-4.7925
+2016-07-13 08:12:09,2848,-11.4351,-4.8144
+2016-07-13 08:22:12,2849,-11.4341,-4.8148
+2016-07-13 08:32:14,2850,-11.5008,-5.0278
+2016-07-13 08:42:17,2850,-11.5333,-4.9638
+2016-07-13 08:52:20,2848,-11.5333,-4.814
+2016-07-13 09:02:22,2847,-11.5333,-4.9638
+2016-07-13 09:12:25,2847,-11.5333,-4.814
+2016-07-13 09:22:27,2845,-11.5333,-4.8573
+2016-07-13 09:32:30,2843,-11.5333,-4.7715
+2016-07-13 09:42:32,2842,-11.4998,-4.9414
+2016-07-13 09:52:35,2840,-11.5008,-4.7484
+2016-07-13 10:02:37,2840,-11.5333,-4.7265
+2016-07-13 10:12:40,2840,-11.4998,-4.7047
+2016-07-13 10:22:42,2839,-11.4674,-4.6829
+2016-07-13 10:32:44,2839,-11.4998,-4.7047
+2016-07-13 10:42:47,2839,-11.5008,-4.6829
+2016-07-13 10:52:49,2839,-11.5008,-4.6619
+2016-07-13 11:02:52,2846,-11.4341,-4.6401
+2016-07-13 11:12:54,2927,-11.1801,55.4243
+2016-07-13 11:22:57,2931,-11.1801,56.2165
+2016-07-13 11:32:59,2933,-11.1811,55.8205
+2016-07-13 11:43:02,2934,-11.1811,55.6929
+2016-07-13 11:53:04,2934,-11.1487,55.5509
+2016-07-13 12:03:07,2934,-11.1487,55.4243
+2016-07-13 12:13:09,2934,-11.086,28.9116
+2016-07-13 12:23:12,2934,-11.085,28.234
+2016-07-13 12:33:14,2935,-11.1173,29.4539
+2016-07-13 12:43:17,2935,-11.1173,29.6723
+2016-07-13 12:53:20,2935,-11.085,29.7834
+2016-07-13 13:03:22,2936,-11.086,29.895
+2016-07-13 13:13:25,2936,-11.086,29.9003
+2016-07-13 13:23:28,2937,-11.1173,29.9536
+2016-07-13 13:33:30,2937,-11.1173,30.015
+2016-07-13 13:43:33,2937,-11.1173,30.015
+2016-07-13 13:53:35,2937,-11.086,30.1302
+2016-07-13 14:03:39,2937,-11.086,29.6353
+2016-07-13 14:13:43,2937,-11.086,29.3623
+2016-07-13 14:23:45,2937,-11.1487,55.5509
+2016-07-13 14:33:48,2937,-11.1487,55.5509
+2016-07-13 14:43:50,2937,-11.086,29.855
+2016-07-13 14:53:53,2936,-11.085,30.0739
+2016-07-13 15:03:56,2935,-11.085,27.7419
+2016-07-13 15:13:58,2935,-11.1173,30.2405
+2016-07-13 15:24:01,2934,-11.1497,30.2458
+2016-07-13 15:34:04,2934,-11.1497,29.9082
+2016-07-13 15:44:06,2934,-11.1497,30.1866
+2016-07-13 15:54:09,2933,-11.1497,30.1302
+2016-07-13 16:04:12,2932,-11.1487,30.1893
+2016-07-13 16:14:14,2931,-11.1487,30.3566
+2016-07-13 16:24:16,2930,-11.1487,30.3025
+2016-07-13 16:34:27,2930,-11.1487,30.3593
+2016-07-13 16:44:29,2928,-11.1811,29.9696
+2016-07-13 16:54:32,2927,-11.1811,30.4108
+2016-07-13 17:04:34,2926,-11.1487,30.3593
+2016-07-13 17:14:37,2925,-11.1801,13.5243
+2016-07-13 17:24:39,2924,-11.1811,30.3106
+2016-07-13 17:34:42,2924,-11.1811,30.3647
+2016-07-13 17:44:44,2922,-11.1801,0.2901
+2016-07-13 17:54:47,2922,-11.1801,30.023000000000003
+2016-07-13 18:04:49,2921,-11.1801,0.2249
+2016-07-13 18:14:52,2920,-11.1801,8.4504
+2016-07-13 18:24:54,2920,-11.1801,29.9563
+2016-07-13 18:34:57,2918,-11.1801,30.2351
+2016-07-13 18:44:59,2918,-11.2117,30.3458
+2016-07-13 18:55:02,2918,-11.2117,-0.1639
+2016-07-13 19:05:05,2918,-11.2117,30.015
+2016-07-13 19:15:07,2916,-11.2117,10.3316
+2016-07-13 19:25:10,2915,-11.2443,-0.293
+2016-07-13 19:35:12,2913,-11.2443,-0.615
+2016-07-13 19:45:15,2913,-11.2443,-0.8935
+2016-07-13 19:55:17,2912,-11.276,-1.0646
+2016-07-13 20:05:20,2911,-11.276,-4.7887
+2016-07-13 20:15:22,2910,-11.276,-1.5346
+2016-07-13 20:25:25,2909,-11.276,-1.4918
+2016-07-13 20:35:27,2908,-11.276,-4.7249
+2016-07-13 20:45:30,2906,-11.3089,-1.8332
+2016-07-13 20:55:32,2905,-11.3079,-1.7473
+2016-07-13 21:05:35,2904,-11.3079,-4.7896
+2016-07-13 21:15:37,2903,-11.3408,2.5596
+2016-07-13 21:25:40,2902,-11.3398,-4.874
+2016-07-13 21:35:42,2900,-11.3718,-5.0699
+2016-07-13 21:45:45,2898,-11.3718,-5.2852
+2016-07-13 21:55:47,2897,-11.4049,-4.9629
+2016-07-13 22:05:50,2896,-11.4049,-4.7698
+2016-07-13 22:15:52,2894,-11.4049,-4.8123
+2016-07-13 22:25:55,2892,-11.4039,-4.7904
+2016-07-13 22:35:57,2890,-11.4049,-4.9625
+2016-07-13 22:45:59,2889,-11.4371,-5.1985
+2016-07-13 22:56:02,2886,-11.4361,8.6022
+2016-07-13 23:06:04,2875,-11.5018,12.7679
+2016-07-13 23:16:06,2872,-11.5018,-3.8162
+2016-07-13 23:26:08,2869,-11.5018,-4.249
+2016-07-13 23:36:10,2869,-11.5008,-4.5521
+2016-07-13 23:46:13,2862,-11.5343,-4.7261
+2016-07-13 23:56:15,2857,-11.5333,-4.7925
+2016-07-14 00:06:17,2856,-11.5333,-5.4377
+2016-07-14 00:16:20,2854,-11.567,-4.9436
+2016-07-14 00:26:22,2847,-11.5649,-5.0506
+2016-07-14 00:36:24,2847,-11.5986,-5.5892
+2016-07-14 00:46:27,2847,-11.5659,-4.8792
+2016-07-14 00:56:29,2846,-11.5659,
+2016-07-14 01:06:31,2846,-11.5659,
+2016-07-14 01:16:34,2844,-11.6325,-4.8788
+2016-07-14 01:26:36,2843,-11.6325,-4.8792
+2016-07-14 01:36:39,2842,-11.6325,-4.9647
+2016-07-14 01:46:41,2840,-11.6325,-4.8135
+2016-07-14 01:56:44,2840,-11.6325,-4.9427
+2016-07-14 02:06:46,2840,-11.6325,
+2016-07-14 02:16:48,2839,-11.6325,
+2016-07-14 02:26:51,2839,-11.6325,
+2016-07-14 02:36:53,2837,-11.6325,
+2016-07-14 02:46:55,2836,-11.6664,
+2016-07-14 02:57:09,2836,-11.6664,
+2016-07-14 03:07:49,2833,-11.6654,
+2016-07-14 03:17:51,2833,-11.6654,
+2016-07-14 03:27:53,2833,-11.6995,
+2016-07-14 03:37:55,2833,-11.6995,
+2016-07-14 03:47:58,2831,-11.6984,-4.9853
+2016-07-14 03:58:00,2830,-11.6984,-4.9853
+2016-07-14 04:08:02,2829,-11.6984,
+2016-07-14 04:18:04,2828,-11.6984,-4.9213
+2016-07-14 04:28:07,2827,-11.6995,
+2016-07-14 04:38:09,2826,-11.6984,
+2016-07-14 04:48:11,2825,-11.7326,
+2016-07-14 04:58:13,2825,-11.7326,
+2016-07-14 05:08:16,2825,-11.7326,
+2016-07-14 05:18:18,2824,-11.7659,-4.9432
+2016-07-14 05:28:20,2823,-11.7326,
+2016-07-14 05:38:22,2822,-11.7659,
+2016-07-14 05:48:32,2820,-11.7659,
+2016-07-14 05:58:34,2819,-11.7669,
+2016-07-14 06:08:36,2818,-11.7669,
+2016-07-14 06:18:38,2818,-11.7659,
+2016-07-14 06:28:40,2818,-11.7659,
+2016-07-14 06:38:43,2818,-11.7659,
+2016-07-14 06:48:45,2818,-11.7659,
+2016-07-14 06:58:47,2816,-11.7648,
+2016-07-14 07:08:49,2815,-11.8003,
+2016-07-14 07:18:52,2814,-11.7648,
+2016-07-14 07:28:54,2814,-11.7992,
+2016-07-14 07:38:56,2813,-11.7316,
+2016-07-14 07:48:59,2811,-11.7992,
+2016-07-14 07:59:01,2811,-11.9084,
+2016-07-14 08:09:03,2811,-11.7659,
+2016-07-14 08:19:05,2811,-11.7982,
+2016-07-14 08:29:07,2810,-11.7326,
+2016-07-14 08:39:09,2809,-11.8611,-5.0072
+2016-07-14 08:49:22,2809,-11.8337,
+2016-07-14 09:00:02,2809,-11.7992,-4.9427
+2016-07-14 09:10:04,2808,-11.7992,
+2016-07-14 09:20:06,2808,-11.7982,-5.0076
+2016-07-14 09:30:09,2807,-11.7982,-5.0502
+2016-07-14 09:40:11,2807,-11.7982,-5.0291
+2016-07-14 09:50:13,2809,-11.7659,
+2016-07-14 10:00:15,2808,-11.7982,7.8662
+2016-07-14 10:10:18,2809,-11.7648,
+2016-07-14 10:20:20,2809,-11.7648,
+2016-07-14 10:30:22,2810,-11.5608,
+2016-07-14 10:40:24,2811,-11.8003,-4.8552
+2016-07-14 10:50:26,2811,-11.7659,-4.9204
+2016-07-14 11:00:29,2812,-11.7659,-4.9427
+2016-07-14 11:10:31,2812,-11.7648,-5.0502
+2016-07-14 11:20:33,2813,-11.7305,-5.0291
+2016-07-14 11:30:35,2814,-11.7648,-5.0721
+2016-07-14 11:40:38,2814,-11.7659,-5.0502
+2016-07-14 11:50:40,2814,-11.7305,-5.3091
+2016-07-14 12:00:42,2815,-11.7316,-4.9436
+2016-07-14 12:10:44,2815,-11.6964,
+2016-07-14 12:20:47,2815,-11.7648,-5.0721
+2016-07-14 12:30:49,2815,-11.6974,-5.0085
+2016-07-14 12:40:52,2817,-11.6284,-5.0506
+2016-07-14 12:50:54,2818,-11.6984,-5.0506
+2016-07-14 13:00:56,2818,-11.6984,-5.0076
+2016-07-14 13:10:59,2818,-11.6984,-5.0305
+2016-07-14 13:21:01,2818,-11.6964,-5.117
+2016-07-14 13:31:03,2819,-11.6984,-5.5232
+2016-07-14 13:41:06,2821,-11.6984,-5.0291
+2016-07-14 13:51:08,2820,-11.6644,-5.0511
+2016-07-14 14:01:11,2819,-11.6974,-5.0085
+2016-07-14 14:11:13,2820,-11.6304,-5.0941
+2016-07-14 14:21:15,2821,-11.6644,-5.1798
+2016-07-14 14:31:18,2820,-11.6294,-5.0937
+2016-07-14 14:41:20,2820,-11.6654,-5.0937
+2016-07-14 14:51:23,2822,-11.6675,-5.0928
+2016-07-14 15:01:25,2822,-11.6356,-5.0072
+2016-07-14 15:11:28,2822,-11.6995,-5.0296
+2016-07-14 15:21:30,2822,-11.6315,-5.0717
+2016-07-14 15:31:33,2822,-11.5956,-5.1573
+2016-07-14 15:41:35,2820,-11.6633,-5.3508
+2016-07-14 15:51:37,2819,-11.6623,-4.9647
+2016-07-14 16:01:40,2818,-11.6644,-4.9217
+2016-07-14 16:11:42,2818,-11.4674,-5.0721
+2016-07-14 16:21:45,2818,-11.6644,-5.0296
+2016-07-14 16:31:47,2818,-11.6654,-5.0502
+2016-07-14 16:41:50,2817,-11.5598,
+2016-07-14 16:51:52,2816,-11.6294,
+2016-07-14 17:01:55,2815,-11.6995,
+2016-07-14 17:11:57,2814,-11.567,-5.0305
+2016-07-14 17:22:00,2814,-11.6644,
+2016-07-14 17:32:04,2813,-11.6644,
+2016-07-14 17:42:07,2812,-11.6964,-5.0506
+2016-07-14 17:52:10,2812,-11.6984,-4.9647
+2016-07-14 18:02:12,2811,-11.7305,-5.2444
+2016-07-14 18:12:15,2811,-11.6964,
+2016-07-14 18:22:17,2809,-11.7305,-5.0076
+2016-07-14 18:32:20,2808,-11.6964,-5.0081
+2016-07-14 18:42:22,2806,-11.7316,
+2016-07-14 18:52:25,2805,-11.7305,
+2016-07-14 19:02:28,2804,-11.7305,
+2016-07-14 19:12:30,2801,-11.7648,
+2016-07-14 19:22:32,2799,-11.7648,
+2016-07-14 19:32:35,2798,-11.7982,
+2016-07-14 19:42:37,2796,-11.7982,
+2016-07-14 19:52:40,2795,-11.7982,
+2016-07-14 20:02:42,2792,-11.7982,
+2016-07-14 20:12:45,2790,-11.7971,
+2016-07-14 20:22:47,2789,-11.8317,
+2016-07-14 20:32:50,2787,-11.8663,
+2016-07-14 20:42:52,2786,-11.8663,
+2016-07-14 20:52:55,2784,-11.8653,
+2016-07-14 21:02:57,2782,-11.8653,
+2016-07-14 21:13:00,2781,-11.9,
+2016-07-14 21:23:02,2777,-11.899,
+2016-07-14 21:33:05,2775,-11.899,
+2016-07-14 21:43:07,2773,-11.9338,
+2016-07-14 21:53:09,2771,-11.9338,
+2016-07-14 22:03:12,2769,-11.9338,
+2016-07-14 22:13:14,2768,-11.9328,
+2016-07-14 22:23:16,2767,-11.9328,
+2016-07-14 22:33:19,2765,-11.9328,
+2016-07-14 22:43:21,2762,-11.9678,
+2016-07-14 22:53:24,2762,-11.9678,
+2016-07-14 23:03:26,2760,-11.9667,
+2016-07-14 23:13:29,2759,-11.9667,
+2016-07-14 23:23:31,2758,-12.0019,
+2016-07-14 23:33:33,2757,-12.0019,
+2016-07-14 23:43:36,2756,-12.0008,
+2016-07-14 23:53:38,2755,-11.9667,
+2016-07-15 00:03:40,2755,-12.0008,
+2016-07-15 00:13:43,2754,-12.0008,
+2016-07-15 00:23:45,2753,-12.0008,
+2016-07-15 00:33:47,2753,-12.0008,
+2016-07-15 00:43:50,2751,-12.0008,
+2016-07-15 00:53:52,2749,-12.0361,
+2016-07-15 01:03:55,2749,-12.0361,
+2016-07-15 01:13:57,2748,-12.035,
+2016-07-15 01:24:00,2747,-12.035,
+2016-07-15 01:34:02,2745,-12.0704,
+2016-07-15 01:44:04,2744,-12.0704,
+2016-07-15 01:54:07,2743,-12.035,
+2016-07-15 02:04:09,2742,-12.0704,
+2016-07-15 02:14:11,2742,-12.0704,
+2016-07-15 02:24:14,2741,-12.0704,
+2016-07-15 02:34:15,2740,-12.0704,
+2016-07-15 02:44:18,2738,-12.0704,
+2016-07-15 02:54:20,2738,-12.0704,
+2016-07-15 03:04:23,2738,-12.0704,
+2016-07-15 03:14:25,2737,-12.0693,
+2016-07-15 03:24:27,2734,-12.1048,
+2016-07-15 03:34:30,2734,-12.1048,
+2016-07-15 03:44:32,2733,-12.1048,
+2016-07-15 03:54:34,2731,-12.1394,
+2016-07-15 04:04:37,2730,-12.1394,
+2016-07-15 04:14:39,2730,-12.1394,
+2016-07-15 04:24:41,2728,-12.1384,
+2016-07-15 04:34:44,2727,-12.1394,
+2016-07-15 04:44:46,2726,-12.1384,
+2016-07-15 04:54:48,2725,-12.1741,
+2016-07-15 05:04:51,2724,-12.1741,
+2016-07-15 05:14:53,2724,-12.1741,
+2016-07-15 05:24:55,2721,-12.1741,
+2016-07-15 05:34:58,2721,-12.1741,
+2016-07-15 05:45:00,2720,-12.1741,
+2016-07-15 05:55:02,2719,-12.1741,
+2016-07-15 06:05:04,2718,-12.1731,
+2016-07-15 06:15:06,2716,-12.1731,
+2016-07-15 06:25:16,2716,-12.2101,
+2016-07-15 06:35:18,2715,-12.209,
+2016-07-15 06:45:21,2714,-12.209,
+2016-07-15 06:55:23,2713,-12.209,
+2016-07-15 07:05:25,2713,-12.209,
+2016-07-15 07:15:27,2712,-12.209,
+2016-07-15 07:25:30,2712,-12.209,
+2016-07-15 07:35:32,2711,-12.209,
+2016-07-15 07:45:34,2710,-12.1731,
+2016-07-15 07:55:36,2710,-12.209,
+2016-07-15 08:05:39,2710,-12.1731,
+2016-07-15 08:15:41,2710,-12.1731,
+2016-07-15 08:25:43,2709,-12.1731,
+2016-07-15 08:35:45,2709,-12.1731,
+2016-07-15 08:45:55,2709,-12.1731,
+2016-07-15 08:55:58,2709,-12.1731,
+2016-07-15 09:06:00,2709,-12.1731,
+2016-07-15 09:16:02,2709,-12.1731,
+2016-07-15 09:26:05,2709,-12.1731,
+2016-07-15 09:36:07,2709,-12.1731,
+2016-07-15 09:46:09,2709,-12.1731,
+2016-07-15 09:56:12,2709,-12.1731,
+2016-07-15 10:06:14,2709,-12.1731,
+2016-07-15 10:16:16,2709,-12.1731,
+2016-07-15 10:26:19,2708,-12.1731,
+2016-07-15 10:36:21,2708,-12.1731,
+2016-07-15 10:46:23,2709,-12.1731,
+2016-07-15 10:56:26,2709,-12.1384,
+2016-07-15 11:06:28,2709,-12.1384,
+2016-07-15 11:16:31,2710,-12.1384,
+2016-07-15 11:26:33,2710,-12.1384,
+2016-07-15 11:36:35,2710,-12.1384,
+2016-07-15 11:46:37,2712,-12.1384,
+2016-07-15 11:56:40,2712,-12.1384,
+2016-07-15 12:06:42,2713,-12.1384,
+2016-07-15 12:16:44,2713,-12.1384,
+2016-07-15 12:26:46,2713,-12.1384,
+2016-07-15 12:36:56,2714,-12.1384,
+2016-07-15 12:46:58,2714,-12.1384,
+2016-07-15 12:57:01,2714,-12.1384,
+2016-07-15 13:07:03,2713,-12.1384,
+2016-07-15 13:17:05,2713,-12.1384,
+2016-07-15 13:27:08,2713,-12.1384,
+2016-07-15 13:37:10,2712,-12.1384,
+2016-07-15 13:47:12,2712,-12.1384,
+2016-07-15 13:57:15,2712,-12.1384,
+2016-07-15 14:07:17,2712,-12.1384,
+2016-07-15 14:17:19,2712,-12.1384,
+2016-07-15 14:27:22,2711,-12.1384,
+2016-07-15 14:37:24,2710,-12.1384,
+2016-07-15 14:47:34,2709,-12.1373,
+2016-07-15 14:57:36,2706,-12.1384,
+2016-07-15 15:07:39,2706,-12.1373,
+2016-07-15 15:17:41,2705,-12.1373,
+2016-07-15 15:27:44,2704,-12.1373,
+2016-07-15 15:37:46,2702,-12.1384,
+2016-07-15 15:47:49,2702,-12.1373,
+2016-07-15 15:57:51,2702,-12.1384,
+2016-07-15 16:07:53,2701,-12.1027,
+2016-07-15 16:17:56,2701,-12.1384,
+2016-07-15 16:27:58,2701,-12.1373,
+2016-07-15 16:38:01,2701,-12.1373,
+2016-07-15 16:48:03,2700,-12.1373,
+2016-07-15 16:58:05,2700,-12.1373,
+2016-07-15 17:08:08,2700,-12.1373,
+2016-07-15 17:18:10,2700,-12.1373,
+2016-07-15 17:28:13,2700,-12.1373,
+2016-07-15 17:38:15,2699,-12.1373,
+2016-07-15 17:48:17,2698,-12.1373,
+2016-07-15 17:58:20,2697,-12.1373,
+2016-07-15 18:08:22,2696,-12.1373,
+2016-07-15 18:18:25,2695,-12.1731,
+2016-07-15 18:28:27,2695,-12.1731,
+2016-07-15 18:38:29,2694,-12.1373,
+2016-07-15 18:48:32,2693,-12.1373,
+2016-07-15 18:58:34,2692,-12.1373,
+2016-07-15 19:08:37,2691,-12.172,
+2016-07-15 19:18:39,2688,-12.172,
+2016-07-15 19:28:41,2687,-12.2079,
+2016-07-15 19:38:44,2685,-12.2068,
+2016-07-15 19:48:46,2684,-12.2068,
+2016-07-15 19:58:49,2682,-12.278,
+2016-07-15 20:08:51,2680,-12.278,
+2016-07-15 20:18:53,2678,-12.278,
+2016-07-15 20:28:56,2676,-12.278,
+2016-07-15 20:38:58,2673,-12.3144,
+2016-07-15 20:49:02,2672,-12.3133,
+2016-07-15 20:59:05,2670,-12.3509,
+2016-07-15 21:09:07,2666,-12.3122,
+2016-07-15 21:19:10,2664,-12.3842,
+2016-07-15 21:29:12,2661,-12.3853,
+2016-07-15 21:39:15,2658,-12.3842,
+2016-07-15 21:49:17,2657,-12.421,
+2016-07-15 21:59:19,2654,-12.4199,
+2016-07-15 22:09:22,2650,-12.4199,
+2016-07-15 22:19:24,2649,-12.4569,
+2016-07-15 22:29:26,2646,-12.4558,
+2016-07-15 22:39:29,2644,-12.4558,
+2016-07-15 22:49:31,2642,-12.4929,
+2016-07-15 22:59:33,2640,-12.4929,
+2016-07-15 23:09:35,2637,-12.5301,
+2016-07-15 23:19:38,2635,-12.529000000000002,
+2016-07-15 23:29:40,2631,-12.5301,
+2016-07-15 23:39:42,2629,-12.529000000000002,
+2016-07-15 23:49:44,2629,-12.529000000000002,
+2016-07-15 23:59:47,2626,-12.5653,
+2016-07-16 00:09:49,2623,-12.5664,
+2016-07-16 00:19:51,2622,-12.5664,
+2016-07-16 00:29:53,2620,-12.5653,
+2016-07-16 00:39:55,2617,-12.6018,
+2016-07-16 00:49:58,2615,-12.6018,
+2016-07-16 01:00:00,2614,-12.6018,
+2016-07-16 01:10:02,2612,-12.6018,
+2016-07-16 01:20:04,2610,-12.6018,
+2016-07-16 01:30:06,2608,-12.6384,
+2016-07-16 01:40:09,2607,-12.6384,
+2016-07-16 01:50:11,2605,-12.6763,
+2016-07-16 02:00:13,2603,-12.6763,
+2016-07-16 02:10:15,2601,-12.6752,
+2016-07-16 02:20:17,2600,-12.6763,
+2016-07-16 02:30:20,2600,-12.6763,
+2016-07-16 02:40:22,2598,-12.6752,
+2016-07-16 02:50:24,2596,-12.6752,
+2016-07-16 03:00:26,2593,-12.7122,
+2016-07-16 03:10:28,2593,-12.7122,
+2016-07-16 03:20:30,2591,-12.7122,
+2016-07-16 03:30:32,2590,-12.7122,
+2016-07-16 03:40:34,2588,-12.7122,
+2016-07-16 03:50:36,2587,-12.7493,-5.9394
+2016-07-16 04:00:39,2586,-12.7493,
+2016-07-16 04:10:41,2584,-12.7493,-5.874
+2016-07-16 04:20:43,2582,-12.7866,-6.0049
+2016-07-16 04:30:45,2580,-12.8253,0.8539
+2016-07-16 04:40:47,2580,-12.7493,
+2016-07-16 04:50:49,2579,-12.7493,
+2016-07-16 05:00:51,2577,-12.7878,
+2016-07-16 05:10:54,2576,-12.7493,
+2016-07-16 05:20:56,2574,-12.7866,
+2016-07-16 05:30:58,2573,-12.7866,
+2016-07-16 05:41:00,2571,-12.7866,
+2016-07-16 05:51:02,2570,-12.7866,
+2016-07-16 06:01:04,2569,-12.7866,
+2016-07-16 06:11:06,2567,-12.8253,
+2016-07-16 06:21:08,2566,-12.7866,
+2016-07-16 06:31:10,2565,-12.7866,
+2016-07-16 06:41:12,2564,-12.7866,
+2016-07-16 06:51:14,2564,-12.7866,
+2016-07-16 07:01:16,2564,-12.7866,
+2016-07-16 07:11:18,2562,-12.7866,
+2016-07-16 07:21:20,2562,-12.7866,
+2016-07-16 07:31:22,2562,-12.7482,
+2016-07-16 07:41:25,2561,-12.7482,
+2016-07-16 07:51:27,2561,-12.7482,
+2016-07-16 08:01:36,2562,-12.7866,
+2016-07-16 08:11:38,2562,-12.7493,
+2016-07-16 08:21:40,2562,-12.7482,
+2016-07-16 08:31:43,2564,-12.7493,
+2016-07-16 08:41:45,2564,-12.7482,
+2016-07-16 08:51:47,2564,-12.7493,
+2016-07-16 09:01:49,2565,-12.7493,
+2016-07-16 09:11:52,2565,-12.7493,
+2016-07-16 09:21:54,2566,-12.7111,
+2016-07-16 09:31:56,2567,-12.7111,
+2016-07-16 09:41:59,2569,-12.7111,
+2016-07-16 09:52:01,2571,-12.7122,
+2016-07-16 10:02:03,2572,-12.6741,
+2016-07-16 10:12:05,2573,-12.6741,
+2016-07-16 10:22:08,2574,-12.6741,
+2016-07-16 10:32:10,2576,-12.6741,
+2016-07-16 10:42:19,2578,-12.6752,
+2016-07-16 10:52:22,2579,-12.6752,
+2016-07-16 11:02:24,2580,-12.6384,
+2016-07-16 11:12:27,2580,-12.6373,
+2016-07-16 11:22:29,2580,-12.6362,
+2016-07-16 11:32:32,2580,-12.6373,
+2016-07-16 11:42:34,2580,-12.6373,
+2016-07-16 11:52:37,2580,-12.6007,
+2016-07-16 12:02:39,2581,-12.5631,
+2016-07-16 12:12:42,2581,-12.5631,
+2016-07-16 12:22:44,2582,-12.5631,
+2016-07-16 12:32:47,2582,-12.5631,
+2016-07-16 12:42:49,2582,-12.5642,
+2016-07-16 12:52:52,2581,-12.5642,
+2016-07-16 13:02:55,2581,-12.5642,
+2016-07-16 13:12:57,2580,-12.6007,
+2016-07-16 13:23:00,2579,-12.5642,
+2016-07-16 13:33:03,2578,-12.5631,
+2016-07-16 13:43:05,2577,-12.5631,
+2016-07-16 13:53:08,2576,-12.5631,
+2016-07-16 14:03:10,2575,-12.5257,
+2016-07-16 14:13:13,2573,-12.5631,
+2016-07-16 14:23:15,2573,-12.5631,
+2016-07-16 14:33:18,2571,-12.5631,
+2016-07-16 14:43:21,2569,-12.5631,
+2016-07-16 14:53:23,2566,-12.5631,
+2016-07-16 15:03:25,2564,-12.5631,
+2016-07-16 15:13:28,2562,-12.5631,
+2016-07-16 15:23:30,2560,-12.562,
+2016-07-16 15:33:33,2558,-12.5984,
+2016-07-16 15:43:36,2557,-12.5609,
+2016-07-16 15:53:38,2554,-12.5609,
+2016-07-16 16:03:41,2552,-12.5984,
+2016-07-16 16:13:44,2549,-12.4437,
+2016-07-16 16:23:46,2546,-12.5213,
+2016-07-16 16:33:48,2543,-12.634,
+2016-07-16 16:43:51,2541,-12.673,
+2016-07-16 16:54:05,2537,-12.6351,
+2016-07-16 17:04:46,2533,-12.6719,
+2016-07-16 17:14:48,2530,-12.6719,
+2016-07-16 17:24:51,2526,-12.7099,
+2016-07-16 17:34:54,2522,-12.7088,
+2016-07-16 17:44:56,2517,-12.7077,
+2016-07-16 17:54:59,2513,-12.7459,
+2016-07-16 18:05:02,2508,-12.7459,
+2016-07-16 18:15:04,2504,-12.7448,
+2016-07-16 18:25:07,2499,-12.7832,
+2016-07-16 18:35:10,2495,-12.7821,
+2016-07-16 18:45:12,2490,-12.8207,
+2016-07-16 18:55:15,2485,-12.8584,
+2016-07-16 19:05:18,2480,-12.8962,-5.8548
+2016-07-16 19:15:20,2475,-12.8962,
+2016-07-16 19:25:23,2468,-12.9343,
+2016-07-16 19:35:26,2461,-13.012,
+2016-07-16 19:45:28,2454,-13.0109,-5.9207
+2016-07-16 19:55:31,2447,-13.0109,
+2016-07-16 20:05:33,2440,-13.0495,
+2016-07-16 20:15:36,2433,-13.0883,
+2016-07-16 20:25:39,2426,-13.1285,
+2016-07-16 20:35:41,2419,-13.1273,
+2016-07-16 20:45:44,2411,-13.1666,
+2016-07-16 20:55:46,2404,-13.206,
+2016-07-16 21:05:49,2397,-13.1619,
+2016-07-16 21:15:52,2389,-13.2457,
+2016-07-16 21:25:54,2380,-13.2855,
+2016-07-16 21:35:57,2371,-13.3257,
+2016-07-16 21:45:59,2362,-13.366,
+2016-07-16 21:56:02,2353,-13.4066,
+2016-07-16 22:06:04,2345,-13.4474,
+2016-07-16 22:16:07,2334,-13.4885,
+2016-07-16 22:26:09,2326,-13.5726,
+2016-07-16 22:36:12,2317,-13.5714,
+2016-07-16 22:46:15,2305,-13.6132,
+2016-07-21 15:18:26,3171,-8.7205,65.4378
+2016-07-21 15:28:29,3170,-7.1864,65.2505
+2016-07-21 15:38:32,3168,-8.1925,58.1387
+2016-07-21 15:48:46,3174,-8.3784,61.6696
+2016-07-21 15:59:27,3179,-8.5638,66.6564
+2016-07-21 16:09:30,3179,-8.8342,65.4261
+2016-07-21 16:19:33,3182,-8.8886,69.2344
+2016-07-21 16:29:37,3599,18.2752,21.0865
+2016-07-21 16:39:40,3599,18.3564,21.1254
+2016-07-21 16:49:42,3599,18.3856,21.0884
+2016-07-21 16:59:45,3599,18.428,21.0865
+2016-07-21 17:09:48,3599,18.4296,21.1273
+2016-07-21 17:19:51,3599,18.428,21.0884
+2016-07-21 17:29:54,3599,18.4296,21.0884
+2016-07-21 17:39:57,3599,18.428,21.1292
+2016-07-21 17:50:01,3599,18.4672,21.0884
+2016-07-21 18:00:03,3599,18.4655,21.0884
+2016-07-21 18:10:07,3599,18.4672,21.0902
+2016-07-21 18:20:10,3599,18.5014,21.133000000000006
+2016-07-21 18:30:13,3599,18.4655,21.1348
+2016-07-21 18:40:16,3599,18.5014,21.133000000000006
+2016-07-21 18:50:19,3599,18.5014,21.094
+2016-07-21 19:00:21,3599,18.5014,21.133000000000006
+2016-07-21 19:10:24,3599,18.5031,21.1273
+2016-07-21 19:20:27,3599,18.539,21.094
+2016-07-21 19:30:30,3599,18.5031,21.0921
+2016-07-21 19:40:33,3599,18.5374,21.1273
+2016-07-21 19:50:43,3599,18.5374,21.094
+2016-07-21 20:00:46,3599,18.5374,21.094
+2016-07-21 20:10:49,3599,18.508,21.0959
+2016-07-21 20:20:52,3599,18.5423,21.133000000000006
+2016-07-21 20:30:54,3599,18.544,21.1348
+2016-07-21 20:40:57,3599,18.544,21.1348
+2016-07-21 20:51:00,3599,18.5783,21.133000000000006
+2016-07-21 21:01:03,3599,18.58,21.133000000000006
+2016-07-21 21:11:06,3599,18.58,21.1348
+2016-07-21 21:21:09,3599,18.5783,21.1348
+2016-07-21 21:31:12,3599,18.58,21.0959
+2016-07-21 21:41:15,3599,18.58,21.133000000000006
+2016-07-21 21:51:17,3599,18.58,21.1348
+2016-07-21 22:01:20,3599,18.58,21.094
+2016-07-21 22:11:23,3599,18.5783,21.0959
+2016-07-21 22:21:26,3599,18.58,21.094
+2016-07-21 22:31:29,3599,18.544,21.1348
+2016-07-21 22:41:31,3599,18.5849,21.1348
+2016-07-21 22:51:34,3599,18.5833,20.983
+2016-07-21 23:01:37,3599,18.58,21.133000000000006
+2016-07-21 23:11:40,3599,18.5833,21.0996
+2016-07-21 23:21:43,3599,18.58,20.9793
+2016-07-21 23:31:45,3599,18.616,20.9774
+2016-07-21 23:41:48,3599,18.58,21.0162
+2016-07-21 23:51:51,3599,18.5783,20.983
+2016-07-22 01:02:11,3599,18.58,20.9849
+2016-07-22 01:12:13,3599,18.616,21.0218
+2016-07-22 01:22:16,3599,18.5783,20.9849
+2016-07-22 01:32:19,3599,18.58,20.9849
+2016-07-22 01:42:22,3599,18.58,20.983
+2016-07-22 01:52:25,3599,18.616,21.0256
+2016-07-22 02:02:27,3599,18.5783,20.9849
+2016-07-22 02:12:30,3599,18.616,20.983
+2016-07-22 02:22:33,3599,18.616,20.983
+2016-07-22 02:32:36,3599,18.5783,20.9849
+2016-07-22 02:42:38,3599,18.58,21.0218
+2016-07-22 02:52:41,3599,18.5783,21.0218
+2016-07-22 03:02:44,3599,18.5783,20.9849
+2016-07-22 03:12:46,3599,18.616,20.9849
+2016-07-22 03:22:49,3599,18.616,20.9849
+2016-07-22 03:32:52,3599,18.5783,20.983
+2016-07-22 03:42:54,3599,18.616,21.0237
+2016-07-22 03:52:57,3599,18.6094,21.0218
+2016-07-22 04:03:00,3599,18.6143,21.0237
+2016-07-22 04:13:03,3599,18.6094,20.983
+2016-07-22 04:23:05,3599,18.6094,21.0218
+2016-07-22 04:33:08,3599,18.5733,21.0237
+2016-07-22 04:43:11,3599,18.611,21.0237
+2016-07-22 04:53:13,3599,18.5733,21.0218
+2016-07-22 05:03:16,3599,18.6094,20.983
+2016-07-22 05:13:19,3599,18.5733,21.0237
+2016-07-22 05:23:22,3599,18.575,21.0237
+2016-07-22 05:33:24,3599,18.5733,20.9849
+2016-07-22 05:43:27,3599,18.6094,20.983
+2016-07-22 05:53:29,3599,18.6094,20.9849
+2016-07-22 06:03:32,3599,18.6094,21.0237
+2016-07-22 06:13:34,3599,18.5733,20.9849
+2016-07-22 06:23:44,3599,18.575,20.983
+2016-07-22 06:33:47,3599,18.611,20.9849
+2016-07-22 06:43:50,3599,18.575,20.9849
+2016-07-22 06:53:52,3599,18.6094,21.0237
+2016-07-22 07:03:55,3599,18.575,20.9849
+2016-07-22 07:13:58,3599,18.5733,21.0237
+2016-07-22 07:24:01,3599,18.5733,20.983
+2016-07-22 07:34:03,3599,18.5733,21.0237
+2016-07-22 07:44:06,3599,18.611,20.983
+2016-07-22 07:54:09,3599,18.5733,20.9849
+2016-07-22 08:04:11,3599,18.611,20.983
+2016-07-22 08:14:14,3599,18.57,21.0237
+2016-07-22 08:24:17,3599,18.5684,20.983
+2016-07-22 08:34:19,3599,18.6061,20.983
+2016-07-22 08:44:22,3599,18.6061,21.0237
+2016-07-22 08:54:25,3599,18.57,20.9849
+2016-07-22 09:04:28,3599,18.5684,21.0237
+2016-07-22 09:14:31,3599,18.6405,21.0237
+2016-07-22 09:24:33,3599,18.57,20.9849
+2016-07-22 09:34:36,3599,18.5341,21.0237
+2016-07-22 09:44:39,3599,18.6044,20.983
+2016-07-22 09:54:42,3599,18.4622,20.983
+2016-07-22 10:04:44,3599,18.5341,20.9849
+2016-07-22 10:14:47,3599,18.4622,21.0237
+2016-07-22 10:24:50,3599,18.4606,21.0218
+2016-07-22 10:34:53,3599,18.4606,20.9849
+2016-07-22 10:44:56,3599,18.4247,20.9849
+2016-07-22 10:54:59,3599,18.3223,21.0237
+2016-07-22 11:05:02,3599,18.3531,20.9849
+2016-07-22 11:15:04,3599,18.3938,21.0218
+2016-07-22 11:25:07,3599,18.3597,20.9849
+2016-07-22 11:35:10,3599,18.2493,20.9849
+2016-07-22 11:45:13,3599,18.2866,21.0237
+2016-07-22 11:55:16,3599,18.2882,20.983
+2016-07-22 12:05:19,3599,18.2169,21.0218
+2016-07-22 12:15:22,3599,18.2509,20.9849
+2016-07-22 12:25:25,3599,18.2526,21.0237
+2016-07-22 12:35:28,3599,18.2866,20.983
+2016-07-22 12:45:31,3599,18.1442,20.9849
+2016-07-22 12:55:34,3599,18.1813,21.0218
+2016-07-22 13:05:37,3599,18.1442,20.983
+2016-07-22 13:15:40,3599,18.0394,20.9849
+2016-07-22 13:25:44,3599,18.0378,21.0218
+2016-07-22 13:35:47,3599,18.0426,21.0237
+2016-07-22 13:45:50,3599,18.0426,21.0237
+2016-07-22 13:55:53,3599,18.0426,20.9849
+2016-07-22 14:05:56,3599,18.0426,20.9849
+2016-07-22 14:15:59,3599,17.9044,20.9849
+2016-07-22 14:26:02,3599,17.9365,20.9055
+2016-07-22 14:36:05,3599,17.9044,20.9074
+2016-07-22 14:46:08,3599,17.906,20.9074
+2016-07-22 14:56:11,3599,17.9397,20.8668
+2016-07-22 15:06:14,3599,17.9044,20.9074
+2016-07-22 15:16:17,3599,17.906,20.8668
+2016-07-22 15:26:20,3599,17.9044,20.9074
+2016-07-22 15:36:23,3599,17.9397,20.8687
+2016-07-22 15:46:26,3599,17.9044,20.9074
+2016-07-22 15:56:29,3599,17.9044,20.8687
+2016-07-22 16:06:32,3599,17.8692,20.9074
+2016-07-22 16:16:35,3599,17.9044,20.8687
+2016-07-22 16:26:38,3599,17.9397,20.9055
+2016-07-22 16:36:41,3599,17.9461,20.9074
+2016-07-22 16:46:44,3599,17.9092,20.9074
+2016-07-22 16:56:47,3599,17.9799,20.9074
+2016-07-22 17:06:50,3599,17.9108,20.9055
+2016-07-22 17:16:53,3599,17.9108,20.8687
+2016-07-22 17:26:55,3599,17.9815,20.9074
+2016-07-22 17:36:58,3599,17.9445,20.8668
+2016-07-22 17:47:01,3599,18.0152,20.9442
+2016-07-22 17:57:04,3599,18.0506,20.9074
+2016-07-22 18:07:07,3599,18.0861,20.9461
+2016-07-22 18:17:09,3599,18.0506,20.8687
+2016-07-22 18:27:12,3599,18.0506,20.9074
+2016-07-22 18:37:15,3599,18.0152,20.9074
+2016-07-22 18:47:18,3599,18.0506,20.9074
+2016-07-22 18:57:20,3599,18.0522,20.9055
+2016-07-22 19:07:30,3599,17.9799,20.9074
+2016-07-22 19:17:33,3599,18.0861,20.8687
+2016-07-22 19:27:36,3599,18.0506,20.9461
+2016-07-22 19:37:38,3599,17.9799,20.9074
+2016-07-22 19:47:41,3599,18.0506,20.9074
+2016-07-22 19:57:44,3599,18.0506,20.9055
+2016-07-22 20:07:47,3599,18.0506,20.9074
+2016-07-22 20:17:49,3599,18.0168,20.9074
+2016-07-22 20:27:52,3599,18.0152,20.9461
+2016-07-22 20:37:55,3599,18.0152,20.9074
+2016-07-22 20:47:58,3599,18.0458,20.8687
+2016-07-22 20:58:00,3599,18.0152,20.8743
+2016-07-22 21:08:03,3599,18.0104,20.9517
+2016-07-22 21:18:06,3599,18.0104,20.9517
+2016-07-22 21:28:08,3599,17.9751,20.9074
+2016-07-22 21:38:11,3599,18.0458,20.9074
+2016-07-22 21:48:14,3599,18.0104,20.9517
+2016-07-22 21:58:16,3599,17.9767,20.9517
+2016-07-22 22:08:19,3599,18.0458,20.9129
+2016-07-22 22:18:22,3599,18.0474,20.9129
+2016-07-22 22:28:25,3599,18.0458,20.9129
+2016-07-22 22:38:27,3599,18.0458,20.9129
+2016-07-22 22:48:30,3599,18.0458,20.9498
+2016-07-22 22:58:33,3599,18.0829,20.9129
+2016-07-22 23:08:35,3599,18.0458,20.8743
+2016-07-22 23:18:38,3599,18.0458,20.9129
+2016-07-22 23:28:41,3599,18.1167,20.9129
+2016-07-22 23:38:43,3599,18.1539,20.9129
+2016-07-22 23:48:46,3599,18.0829,20.8743
+2016-07-22 23:58:49,3599,18.1167,20.9517
+2016-07-23 00:08:51,3599,18.1539,20.9111
+2016-07-23 00:18:54,3599,18.1523,20.9129
+2016-07-23 00:28:56,3599,18.1539,20.9498
+2016-07-23 00:38:59,3599,18.0813,20.9111
+2016-07-23 00:49:02,3599,18.1523,20.9129
+2016-07-23 00:59:04,3599,18.1119,20.9517
+2016-07-23 01:09:07,3599,18.1474,20.9517
+2016-07-23 01:19:10,3599,18.1506,20.9129
+2016-07-23 01:29:12,3599,18.1474,20.9185
+2016-07-23 01:39:15,3599,18.183,20.9111
+2016-07-23 01:49:17,3599,18.1135,20.9129
+2016-07-23 01:59:20,3599,18.183,20.9129
+2016-07-23 02:09:22,3599,18.149,20.9573
+2016-07-23 02:19:25,3599,18.149,20.9129
+2016-07-23 02:29:27,3599,18.2153,20.9129
+2016-07-23 02:39:30,3599,18.1846,20.9498
+2016-07-23 02:49:33,3599,18.1797,20.9517
+2016-07-23 02:59:35,3599,18.1442,20.9129
+2016-07-23 03:09:38,3599,18.1797,20.9517
+2016-07-23 03:19:40,3599,18.1442,20.878
+2016-07-23 03:29:43,3599,18.1797,20.9185
+2016-07-23 03:39:46,3599,18.1781,20.9554
+2016-07-23 03:49:48,3599,18.1781,20.9498
+2016-07-23 03:59:51,3599,18.1442,20.9554
+2016-07-23 04:09:53,3599,18.1797,20.9554
+2016-07-23 04:19:56,3599,18.1442,20.9185
+2016-07-23 04:29:58,3599,18.1442,20.9129
+2016-07-23 04:40:01,3599,18.1797,20.8798
+2016-07-23 04:50:03,3599,18.2153,20.8743
+2016-07-23 05:00:13,3599,18.2153,20.9498
+2016-07-23 05:10:16,3599,18.1442,20.9517
+2016-07-23 05:20:18,3599,18.1797,20.9535
+2016-07-23 05:30:21,3599,18.1813,20.9573
+2016-07-23 05:40:23,3599,18.1442,20.8817
+2016-07-23 05:50:26,3599,18.1749,20.8817
+2016-07-23 06:00:29,3599,18.1393,20.8798
+2016-07-23 06:10:31,3599,18.1749,20.8798
+2016-07-23 06:20:34,3599,18.1749,20.9517
+2016-07-23 06:30:36,3599,18.1749,20.9185
+2016-07-23 06:40:39,3599,18.1749,20.9185
+2016-07-23 06:50:41,3599,18.1765,20.8798
+2016-07-23 07:00:44,3599,18.1749,20.9573
+2016-07-23 07:10:47,3599,18.1749,20.9204
+2016-07-23 07:20:49,3599,18.1749,20.9129
+2016-07-23 07:30:52,3599,18.1038,20.8817
+2016-07-23 07:40:54,3599,18.1055,20.9129
+2016-07-23 07:50:57,3599,18.0684,20.9185
+2016-07-23 08:01:00,3599,18.1393,20.8798
+2016-07-23 08:11:02,3599,18.1038,20.9554
+2016-07-23 08:21:05,3599,18.07,20.8045
+2016-07-23 08:31:07,3599,18.0668,20.8045
+2016-07-23 08:41:10,3599,18.0684,20.7641
+2016-07-23 08:51:13,3599,18.1055,20.8393
+2016-07-23 09:01:15,3599,18.0282,20.8393
+2016-07-23 09:11:18,3599,17.9928,20.8393
+2016-07-23 09:21:20,3599,17.9222,20.8412
+2016-07-23 09:31:23,3599,17.9928,20.8026
+2016-07-23 09:41:25,3599,17.9575,20.8026
+2016-07-23 09:51:28,3599,17.9928,20.8008
+2016-07-23 10:01:31,3599,17.9575,20.8026
+2016-07-23 10:11:33,3599,17.9559,20.8356
+2016-07-23 10:21:36,3599,17.9575,20.8026
+2016-07-23 10:31:39,3599,17.9944,20.8026
+2016-07-23 10:41:42,3599,17.9928,20.7971
+2016-07-23 10:51:44,3599,17.9591,20.8412
+2016-07-23 11:01:47,3599,18.0282,20.8356
+2016-07-23 11:11:50,3599,17.9591,20.8338
+2016-07-23 11:21:53,3599,17.8917,20.7622
+2016-07-23 11:31:56,3599,17.8869,20.7971
+2016-07-23 11:41:58,3599,17.8565,20.6816
+2016-07-23 11:52:01,3599,17.8917,20.6872
+2016-07-23 12:02:04,3599,17.7862,20.6798
+2016-07-23 12:12:07,3599,17.7862,20.6816
+2016-07-23 12:22:09,3599,17.7862,20.6816
+2016-07-23 12:32:12,3599,17.7862,20.6872
+2016-07-23 12:42:15,3599,17.7862,20.6488
+2016-07-23 12:52:18,3599,17.7877,20.6872
+2016-07-23 13:02:21,3599,17.715999999999994,20.6816
+2016-07-23 13:12:23,3599,17.680999999999994,20.6816
+2016-07-23 13:22:26,3599,17.680999999999994,20.6816
+2016-07-23 13:32:29,3599,17.646,20.6816
+2016-07-23 13:42:32,3599,17.715999999999994,20.6853
+2016-07-23 13:52:34,3599,17.680999999999994,20.6816
+2016-07-23 14:02:37,3599,17.646,20.6798
+2016-07-23 14:12:40,3599,17.680999999999994,20.7201
+2016-07-23 14:22:43,3599,17.611,20.647
+2016-07-23 14:32:46,3599,17.5761,20.6816
+2016-07-23 14:42:49,3599,17.5808,20.6816
+2016-07-23 14:52:51,3599,17.6507,20.6488
+2016-07-23 15:02:54,3599,17.5808,20.6872
+2016-07-23 15:12:57,3599,17.5111,20.6798
+2016-07-23 15:23:00,3599,17.5111,20.6798
+2016-07-23 15:33:03,3599,17.5111,20.6816
+2016-07-23 15:43:06,3599,17.5111,20.6816
+2016-07-23 15:53:09,3599,17.4763,20.6798
+2016-07-23 16:03:12,3599,17.4763,20.6816
+2016-07-23 16:13:15,3599,17.5111,20.6816
+2016-07-23 16:23:18,3599,17.4415,20.678
+2016-07-23 16:33:21,3599,17.4778,20.6816
+2016-07-23 16:43:24,3599,17.4083,20.6798
+2016-07-23 16:53:27,3599,17.4067,20.6816
+2016-07-23 17:03:30,3599,17.372,20.6798
+2016-07-23 17:13:33,3599,17.4114,20.6816
+2016-07-23 17:23:35,3599,17.3767,20.5649
+2016-07-23 17:33:38,3599,17.3767,20.5667
+2016-07-23 17:43:41,3599,17.4114,20.5667
+2016-07-23 17:53:44,3599,17.4114,20.5667
+2016-07-23 18:03:47,3599,17.4129,20.5667
+2016-07-23 18:13:50,3599,17.4114,20.5667
+2016-07-23 18:23:53,3599,17.3798,20.5667
+2016-07-23 18:33:56,3599,17.416,20.5722
+2016-07-23 18:43:59,3599,17.3813,20.5667
+2016-07-23 18:54:02,3599,17.416,20.5649
+2016-07-23 19:04:05,3599,17.4145,20.5667
+2016-07-23 19:14:15,3599,17.4145,20.5704
+2016-07-23 19:24:18,3599,17.416,20.605
+2016-07-23 19:34:21,3599,17.4145,20.5649
+2016-07-23 19:44:24,3599,17.3798,20.5722
+2016-07-23 19:54:27,3599,17.4145,20.5722
+2016-07-23 20:04:30,3599,17.416,20.5667
+2016-07-23 20:14:33,3599,17.4145,20.6105
+2016-07-23 20:24:36,3599,17.3813,20.5722
+2016-07-23 20:34:39,3599,17.5189,20.5704
+2016-07-23 20:44:42,3599,17.5189,20.605
+2016-07-23 20:54:44,3599,17.5189,20.5722
+2016-07-23 21:04:47,3599,17.5189,20.5704
+2016-07-23 21:14:50,3599,17.5204,20.5722
+2016-07-23 21:24:53,3599,17.5568,20.5722
+2016-07-23 21:34:56,3599,17.484,20.534
+2016-07-23 21:44:59,3599,17.5204,20.5722
+2016-07-23 21:55:02,3599,17.5204,20.5722
+2016-07-23 22:05:04,3599,17.5204,20.5722
+2016-07-23 22:15:07,3599,17.5189,20.5722
+2016-07-23 22:25:10,3599,17.5204,20.5722
+2016-07-23 22:35:13,3599,17.5189,20.5704
+2016-07-23 22:45:16,3599,17.5204,20.5722
+2016-07-23 22:55:19,3599,17.5537,20.5704
+2016-07-23 23:05:21,3599,17.5553,20.5722
+2016-07-23 23:15:24,3599,17.5204,20.5722
+2016-07-23 23:25:27,3599,17.5204,20.5722
+2016-07-23 23:35:30,3599,17.5189,20.5722
+2016-07-23 23:45:33,3599,17.5189,20.5722
+2016-07-23 23:55:35,3599,17.5553,20.5704
+2016-07-24 00:05:38,3599,17.5189,20.6105
+2016-07-24 00:15:41,3599,17.5553,20.5722
+2016-07-24 00:25:44,3599,17.5553,20.5722
+2016-07-24 00:35:47,3599,17.5189,20.6086
+2016-07-24 00:45:49,3599,17.5189,20.5722
+2016-07-24 00:55:52,3599,17.5204,20.6105
+2016-07-24 01:05:55,3599,17.5189,20.6105
+2016-07-24 01:15:57,3599,17.5204,20.5722
+2016-07-24 01:26:00,3599,17.5553,20.5758
+2016-07-24 01:36:03,3599,17.5189,20.6105
+2016-07-24 01:46:06,3599,17.6251,20.6105
+2016-07-24 01:56:08,3599,17.6601,20.5722
+2016-07-24 02:06:11,3599,17.5886,20.5722
+2016-07-24 02:16:13,3599,17.6601,20.5722
+2016-07-24 02:26:16,3599,17.6236,20.6105
+2016-07-24 02:36:19,3599,17.6601,20.5722
+2016-07-24 02:46:22,3599,17.5886,20.5722
+2016-07-24 02:56:24,3599,17.6904,20.5722
+2016-07-24 03:06:27,3599,17.6554,20.5722
+2016-07-24 03:16:30,3599,17.657,20.5758
+2016-07-24 03:26:32,3599,17.5855,20.5704
+2016-07-24 03:36:35,3599,17.6554,20.5722
+2016-07-24 03:46:37,3599,17.6554,20.5758
+2016-07-24 03:56:40,3599,17.6554,20.6178
+2016-07-24 04:06:43,3599,17.6554,20.5758
+2016-07-24 04:16:45,3599,17.6554,20.6086
+2016-07-24 04:26:48,3599,17.6554,20.5722
+2016-07-24 04:36:51,3599,17.657,20.6141
+2016-07-24 04:46:53,3599,17.5855,20.5758
+2016-07-24 04:56:56,3599,17.6554,20.5722
+2016-07-24 05:06:59,3599,17.6189,20.5777
+2016-07-24 05:17:02,3599,17.5855,20.5758
+2016-07-24 05:27:04,3599,17.6554,20.5758
+2016-07-24 05:37:07,3599,17.5824,20.5758
+2016-07-24 05:47:10,3599,17.6507,20.6105
+2016-07-24 05:57:12,3599,17.6173,20.6141
+2016-07-24 06:07:15,3599,17.6507,20.5758
+2016-07-24 06:17:18,3599,17.6507,20.5758
+2016-07-24 06:27:20,3599,17.6507,20.5758
+2016-07-24 06:37:23,3599,17.6507,20.5777
+2016-07-24 06:47:26,3599,17.6507,20.5704
+2016-07-24 06:57:29,3599,17.6157,20.5758
+2016-07-24 07:07:31,3599,17.6507,20.5758
+2016-07-24 07:17:34,3599,17.6507,20.5758
+2016-07-24 07:27:36,3599,17.6157,20.5758
+2016-07-24 07:37:39,3599,17.6507,20.616
+2016-07-24 07:47:42,3599,17.6157,20.5758
+2016-07-24 07:57:44,3599,17.6157,20.6141
+2016-07-24 08:07:47,3599,17.6507,20.6141
+2016-07-24 08:17:50,3599,17.6507,20.6141
+2016-07-24 08:27:52,3599,17.6157,20.5777
+2016-07-24 08:37:55,3599,17.5459,20.5758
+2016-07-24 08:47:58,3599,17.5459,20.5758
+2016-07-24 08:58:00,3599,17.5111,20.6141
+2016-07-24 09:08:03,3599,17.5459,20.6141
+2016-07-24 09:18:06,3599,17.5459,20.5758
+2016-07-24 09:28:09,3599,17.5459,20.4994
+2016-07-24 09:38:11,3599,17.5459,20.4613
+2016-07-24 09:48:14,3599,17.5459,20.4631
+2016-07-24 09:58:17,3599,17.4415,20.4994
+2016-07-24 10:08:20,3599,17.4067,20.4613
+2016-07-24 10:18:22,3599,17.4083,20.4613
+2016-07-24 10:28:25,3599,17.4067,20.4613
+2016-07-24 10:38:28,3599,17.4067,20.4613
+2016-07-24 10:48:31,3599,17.4067,20.4613
+2016-07-24 10:58:34,3599,17.4067,20.4631
+2016-07-24 11:08:37,3599,17.4067,20.4613
+2016-07-24 11:18:40,3599,17.3374,20.4613
+2016-07-24 11:28:43,3599,17.3028,20.4631
+2016-07-24 11:38:46,3599,17.3012,20.4613
+2016-07-24 11:48:49,3599,17.3028,20.4613
+2016-07-24 11:58:52,3599,17.3028,20.4613
+2016-07-24 12:08:55,3599,17.2682,20.4613
+2016-07-24 12:18:58,3599,17.3389,20.4613
+2016-07-24 12:29:03,3599,17.2682,20.4613
+2016-07-24 12:39:06,3599,17.372,20.3093
+2016-07-24 12:49:09,3599,17.2037,20.3472
+2016-07-24 12:59:12,3599,17.2336,20.387
+2016-07-24 13:09:15,3599,17.2382,20.3472
+2016-07-24 13:19:18,3599,17.2022,20.3472
+2016-07-24 13:29:21,3599,17.2382,20.3852
+2016-07-24 13:39:24,3599,17.2413,20.3472
+2016-07-24 13:49:27,3599,17.2413,20.349
+2016-07-24 13:59:30,3599,17.1393,20.3472
+2016-07-24 14:09:33,3599,17.1034,20.3472
+2016-07-24 14:19:36,3599,17.1723,20.3472
+2016-07-24 14:29:39,3599,17.069000000000006,20.3472
+2016-07-24 14:39:42,3599,17.1378,20.349
+2016-07-24 14:49:45,3599,17.1378,20.3093
+2016-07-24 14:59:48,3599,17.1393,20.3852
+2016-07-24 15:09:51,3599,17.1393,20.387
+2016-07-24 15:19:54,3599,17.1378,20.3472
+2016-07-24 15:29:57,3599,17.1034,20.3472
+2016-07-24 15:40:00,3599,17.1378,20.3472
+2016-07-24 15:50:03,3599,17.1034,20.3472
+2016-07-24 16:00:06,3599,17.1034,20.2732
+2016-07-24 16:10:09,3599,17.1424,20.2714
+2016-07-24 16:20:12,3599,17.1378,20.2335
+2016-07-24 16:30:15,3599,17.1424,20.2732
+2016-07-24 16:40:18,3599,17.0392,20.2714
+2016-07-24 16:50:21,3599,17.0736,20.2714
+2016-07-24 17:00:24,3599,17.0392,20.2335
+2016-07-24 17:10:27,3599,17.0049,20.2714
+2016-07-24 17:20:30,3599,17.0049,20.2335
+2016-07-24 17:30:33,3599,17.0407,20.1958
+2016-07-24 17:40:36,3599,17.0392,20.2335
+2016-07-24 17:50:39,3599,17.0407,20.2335
+2016-07-24 18:00:42,3599,17.0049,20.2714
+2016-07-24 18:10:45,3599,17.0392,20.1958
+2016-07-24 18:20:48,3599,17.0392,20.2714
+2016-07-24 18:30:51,3599,17.0392,20.2714
+2016-07-24 18:40:54,3599,17.0407,20.2714
+2016-07-24 18:50:57,3599,17.0407,20.2335
+2016-07-24 19:01:00,3599,17.0392,20.2732
+2016-07-24 19:11:03,3599,17.0392,20.2714
+2016-07-24 19:21:06,3599,17.0392,20.1958
+2016-07-24 19:31:09,3599,17.0049,20.2714
+2016-07-24 19:41:12,3599,17.0392,20.2714
+2016-07-24 19:51:15,3599,17.0407,20.2714
+2016-07-24 20:01:18,3599,17.0392,20.2714
+2016-07-24 20:11:21,3599,17.0392,20.1958
+2016-07-24 20:21:24,3599,17.0453,20.2335
+2016-07-24 20:31:27,3599,17.0049,20.2335
+2016-07-24 20:41:30,3599,17.0453,20.2353
+2016-07-24 20:51:33,3599,17.0392,20.2714
+2016-07-24 21:01:36,3599,17.1439,20.2714
+2016-07-24 21:11:39,3599,17.1814,20.2335
+2016-07-24 21:21:42,3599,17.1485,20.2714
+2016-07-24 21:31:44,3599,17.1814,20.2335
+2016-07-24 21:41:46,3599,17.1814,20.2335
+2016-07-24 21:51:49,3599,17.1814,20.2714
+2016-07-24 22:01:52,3599,17.147000000000002,20.2714
+2016-07-24 22:11:55,3599,17.1485,20.2714
+2016-07-24 22:21:58,3599,17.1814,20.2714
+2016-07-24 22:32:01,3599,17.147000000000002,20.2714
+2016-07-24 22:42:04,3599,17.1485,20.2714
+2016-07-24 22:52:06,3599,17.1125,20.2714
+2016-07-24 23:02:09,3599,17.1424,20.2732
+2016-07-24 23:12:12,3599,17.1485,20.2335
+2016-07-24 23:22:15,3599,17.183,20.2714
+2016-07-24 23:32:18,3599,17.1814,20.1958
+2016-07-24 23:42:21,3599,17.1769,20.2714
+2016-07-24 23:52:24,3599,17.1485,20.2732
+2016-07-25 00:02:26,3599,17.183,20.2335
+2016-07-25 00:12:29,3599,17.1485,20.2335
+2016-07-25 00:22:32,3599,17.1485,20.2353
+2016-07-25 00:32:35,3599,17.1769,20.2335
+2016-07-25 00:42:38,3599,17.183,20.2714
+2016-07-25 00:52:40,3599,17.1784,20.2714
+2016-07-25 01:02:50,3599,17.1784,20.2335
+2016-07-25 01:12:53,3599,17.1784,20.2335
+2016-07-25 01:22:56,3599,17.1769,20.2714
+2016-07-25 01:32:59,3599,17.1769,20.2335
+2016-07-25 01:43:02,3599,17.1424,20.2714
+2016-07-25 01:53:04,3599,17.1769,20.2714
+2016-07-25 02:03:07,3599,17.1439,20.2732
+2016-07-25 02:13:10,3599,17.1769,20.2714
+2016-07-25 02:23:13,3599,17.1784,20.2714
+2016-07-25 02:33:16,3599,17.1424,20.2714
+2016-07-25 02:43:18,3599,17.1095,20.2714
+2016-07-25 02:53:21,3599,17.1424,20.2732
+2016-07-25 03:03:24,3599,17.1784,20.2714
+2016-07-25 03:13:27,3599,17.1424,20.2714
+2016-07-25 03:23:30,3599,17.1424,20.1958
+2016-07-25 03:33:33,3599,17.1424,20.2335
+2016-07-25 03:43:35,3599,17.1439,20.2714
+2016-07-25 03:53:38,3599,17.2459,20.2732
+2016-07-25 04:03:41,3599,17.2805,20.2353
+2016-07-25 04:13:44,3599,17.2474,20.2732
+2016-07-25 04:23:47,3599,17.2805,20.2335
+2016-07-25 04:33:50,3599,17.2459,20.2335
+2016-07-25 04:43:52,3599,17.2805,20.2714
+2016-07-25 04:53:55,3599,17.2805,20.2714
+2016-07-25 05:03:58,3599,17.2805,20.2714
+2016-07-25 05:14:01,3599,17.3151,20.2714
+2016-07-25 05:24:04,3599,17.2805,20.2714
+2016-07-25 05:34:07,3599,17.2805,20.2714
+2016-07-25 05:44:10,3599,17.3151,20.2732
+2016-07-25 05:54:13,3599,17.282,20.2714
+2016-07-25 06:04:16,3599,17.282,20.2714
+2016-07-25 06:14:19,3599,17.2805,20.2714
+2016-07-25 06:24:21,3599,17.2459,20.2714
+2016-07-25 06:34:24,3599,17.2805,20.2714
+2016-07-25 06:44:26,3599,17.3166,20.2335
+2016-07-25 06:54:36,3599,17.3105,20.2714
+2016-07-25 07:04:39,3599,17.3166,20.2732
+2016-07-25 07:14:42,3599,17.2805,20.2714
+2016-07-25 07:24:45,3599,17.2413,20.2714
+2016-07-25 07:34:47,3599,17.1378,20.2335
+2016-07-25 07:44:50,3599,17.1738,20.2714
+2016-07-25 07:54:53,3599,17.1723,20.2714
+2016-07-25 08:04:56,3599,17.1378,20.2335
+2016-07-25 08:14:59,3599,17.1378,20.2714
+2016-07-25 08:25:02,3599,17.1723,20.2714
+2016-07-25 08:35:05,3599,17.1723,20.2714
+2016-07-25 08:45:08,3599,17.2083,20.2732
+2016-07-25 08:55:10,3599,17.1723,20.2714
+2016-07-25 09:05:13,3599,17.0347,20.2335
+2016-07-25 09:15:16,3599,17.069000000000006,20.2732
+2016-07-25 09:25:19,3599,17.069000000000006,20.2732
+2016-07-25 09:35:22,3599,17.1049,20.2714
+2016-07-25 09:45:25,3599,17.069000000000006,20.2714
+2016-07-25 09:55:28,3599,17.0347,20.2714
+2016-07-25 10:05:31,3599,17.0347,20.2335
+2016-07-25 10:15:34,3599,17.0362,20.2714
+2016-07-25 10:25:37,3599,17.069000000000006,20.2714
+2016-07-25 10:35:40,3599,17.0392,20.1598
+2016-07-25 10:45:43,3599,16.9334,20.158
+2016-07-25 10:55:45,3599,16.9379,20.1203
+2016-07-25 11:05:48,3599,16.9364,20.158
+2016-07-25 11:15:51,3599,17.0049,20.158
+2016-07-25 11:25:54,3599,17.0064,20.158
+2016-07-25 11:35:57,3599,16.9364,20.1203
+2016-07-25 11:46:00,3599,16.868,20.1203
+2016-07-25 11:56:03,3599,16.868,20.1203
+2016-07-25 12:06:06,3599,16.868,20.1203
+2016-07-25 12:16:09,3599,16.7331,20.1203
+2016-07-25 12:26:12,3599,16.7657,20.158
+2016-07-25 12:36:15,3599,16.7998,20.1203
+2016-07-25 12:46:18,3599,16.7657,20.158
+2016-07-25 12:56:21,3599,16.7657,20.0076
+2016-07-25 13:06:24,3599,16.7672,20.0076
+2016-07-25 13:16:27,3599,16.7717,20.0076
+2016-07-25 13:26:30,3599,16.6637,20.0076
+2016-07-25 13:36:33,3599,16.6681,20.0094
+2016-07-25 13:46:36,3599,16.6342,20.0094
+2016-07-25 13:56:39,3599,16.5664,20.0076
+2016-07-25 14:06:42,3599,16.5679,20.0076
+2016-07-25 14:16:45,3599,16.5679,20.0076
+2016-07-25 14:26:49,3599,16.468,20.0094
+2016-07-25 14:36:52,3599,16.4695,20.0076
+2016-07-25 14:46:55,3599,16.4709,20.0094
+2016-07-25 14:56:58,3599,16.4343,19.8952
+2016-07-25 15:07:01,3599,16.4695,19.897
+2016-07-25 15:17:04,3599,16.3684,19.938
+2016-07-25 15:27:07,3599,16.402,19.9326
+2016-07-25 15:37:10,3599,16.4035,19.8952
+2016-07-25 15:47:15,3599,16.3684,19.9326
+2016-07-25 15:57:19,3599,16.3669,19.9326
+2016-07-25 16:07:22,3599,16.3684,19.9362
+2016-07-25 16:17:25,3599,16.3669,19.8952
+2016-07-25 16:27:28,3599,16.3727,19.9006
+2016-07-25 16:37:31,3599,16.3727,19.8952
+2016-07-25 16:47:34,3599,16.3391,19.8632
+2016-07-25 16:57:37,3599,16.4064,19.8952
+2016-07-25 17:07:41,3599,16.3727,19.9344
+2016-07-25 17:17:44,3599,16.4064,19.9326
+2016-07-25 17:27:47,3599,16.3771,19.938
+2016-07-25 17:37:50,3599,16.4108,19.9397
+2016-07-25 17:47:53,3599,16.3771,19.8952
+2016-07-25 17:57:56,3599,16.3771,19.8952
+2016-07-25 18:07:59,3599,16.3756,19.938
+2016-07-25 18:18:02,3599,16.4108,19.9397
+2016-07-25 18:28:05,3599,16.3771,19.938
+2016-07-25 18:38:08,3599,16.4093,19.9006
+2016-07-25 18:48:11,3599,16.3771,19.9006
+2016-07-25 18:58:14,3599,16.4093,19.938
+2016-07-25 19:08:17,3599,16.3771,19.9006
+2016-07-25 19:18:20,3599,16.4108,19.9006
+2016-07-25 19:28:23,3599,16.4445,19.9006
+2016-07-25 19:38:26,3599,16.3756,19.9006
+2016-07-25 19:48:29,3599,16.4108,19.9772
+2016-07-25 19:58:32,3599,16.4152,19.9006
+2016-07-25 20:08:35,3599,16.3771,19.9397
+2016-07-25 20:18:38,3599,16.3478,19.938
+2016-07-25 20:28:41,3599,16.4137,19.9006
+2016-07-25 20:38:44,3599,16.4152,19.938
+2016-07-25 20:48:47,3599,16.3478,19.9006
+2016-07-25 20:58:50,3599,16.3478,19.938
+2016-07-25 21:08:53,3599,16.4152,19.938
+2016-07-25 21:18:56,3599,16.3815,19.9006
+2016-07-25 21:28:59,3599,16.4152,19.9006
+2016-07-25 21:39:02,3599,16.4152,19.9006
+2016-07-25 21:49:05,3599,16.3815,19.9023
+2016-07-25 21:59:07,3599,16.4152,19.9023
+2016-07-25 22:09:10,3599,16.4137,19.9023
+2016-07-25 22:19:13,3599,16.4137,19.9754
+2016-07-25 22:29:16,3599,16.3464,19.938
+2016-07-25 22:39:19,3599,16.4137,19.938
+2016-07-25 22:49:22,3599,16.38,19.938
+2016-07-25 22:59:25,3599,16.3815,19.9076
+2016-07-25 23:09:28,3599,16.4152,19.9006
+2016-07-25 23:19:31,3599,16.4152,19.9059
+2016-07-25 23:29:33,3599,16.4152,19.9023
+2016-07-25 23:39:36,3599,16.38,19.9433
+2016-07-25 23:49:39,3599,16.4152,19.9433
+2016-07-25 23:59:42,3599,16.4152,19.9059
+2016-07-26 00:09:45,3599,16.3478,19.9076
+2016-07-26 00:19:48,3599,16.3815,19.9807
+2016-07-26 00:29:50,3599,16.4489,19.9433
+2016-07-26 00:39:53,3599,16.3815,19.9433
+2016-07-26 00:49:55,3599,16.4137,19.9451
+2016-07-26 00:59:58,3599,16.3478,19.9451
+2016-07-26 01:10:01,3599,16.3815,19.9433
+2016-07-26 01:20:03,3599,16.4152,19.9451
+2016-07-26 01:30:06,3599,16.3478,19.9451
+2016-07-26 01:40:09,3599,16.5503,19.9433
+2016-07-26 01:50:11,3599,16.4826,19.9807
+2016-07-26 02:00:14,3599,16.5488,19.9451
+2016-07-26 02:10:17,3599,16.4474,19.9433
+2016-07-26 02:20:20,3599,16.5164,19.9433
+2016-07-26 02:30:22,3599,16.5164,19.9433
+2016-07-26 02:40:25,3599,16.4826,19.9059
+2016-07-26 02:50:28,3599,16.4474,19.9451
+2016-07-26 03:00:31,3599,16.5164,19.9112
+2016-07-26 03:10:34,3599,16.4782,19.9486
+2016-07-26 03:20:36,3599,16.512,19.9433
+2016-07-26 03:30:39,3599,16.4812,19.9879
+2016-07-26 03:40:42,3599,16.512,19.9112
+2016-07-26 03:50:44,3599,16.512,19.9076
+2016-07-26 04:00:47,3599,16.512,19.9861
+2016-07-26 04:10:50,3599,16.4445,19.9486
+2016-07-26 04:20:53,3599,16.5106,19.9486
+2016-07-26 04:30:55,3599,16.512,19.9504
+2016-07-26 04:40:58,3599,16.5459,19.9861
+2016-07-26 04:51:00,3599,16.4768,19.9486
+2016-07-26 05:01:03,3599,16.5444,19.9486
+2016-07-26 05:11:06,3599,16.512,19.9112
+2016-07-26 05:21:08,3599,16.512,19.9861
+2016-07-26 05:31:11,3599,16.512,19.9504
+2016-07-26 05:41:14,3599,16.4782,19.9504
+2016-07-26 05:51:16,3599,16.512,19.913
+2016-07-26 06:01:19,3599,16.512,19.9486
+2016-07-26 06:11:22,3599,16.512,19.9486
+2016-07-26 06:21:24,3599,16.4782,19.9112
+2016-07-26 06:31:34,3599,16.4782,19.9486
+2016-07-26 06:41:37,3599,16.5106,19.9486
+2016-07-26 06:51:40,3599,16.4782,19.9486
+2016-07-26 07:01:42,3599,16.512,19.9504
+2016-07-26 07:11:45,3599,16.5076,19.913
+2016-07-26 07:21:48,3599,16.4739,19.9112
+2016-07-26 07:31:50,3599,16.5076,19.9504
+2016-07-26 07:41:53,3599,16.5414,19.9861
+2016-07-26 07:51:56,3599,16.4739,19.9861
+2016-07-26 08:01:59,3599,16.5414,19.9112
+2016-07-26 08:12:01,3599,16.5076,19.9486
+2016-07-26 08:22:04,3599,16.4401,19.9861
+2016-07-26 08:32:07,3599,16.4739,19.9861
+2016-07-26 08:42:10,3599,16.54,19.9861
+2016-07-26 08:52:12,3599,16.4343,19.9112
+2016-07-26 09:02:15,3599,16.3347,19.8365
+2016-07-26 09:12:18,3599,16.402,19.9486
+2016-07-26 09:22:20,3599,16.4049,19.8365
+2016-07-26 09:32:23,3599,16.402,19.7992
+2016-07-26 09:42:26,3599,16.3669,19.8382
+2016-07-26 09:52:28,3599,16.4357,19.8365
+2016-07-26 10:02:31,3599,16.402,19.8738
+2016-07-26 10:12:33,3599,16.402,19.8365
+2016-07-26 10:22:36,3599,16.4343,19.7992
+2016-07-26 10:32:39,3599,16.3684,19.8365
+2016-07-26 10:42:42,3599,16.4357,19.8365
+2016-07-26 10:52:45,3599,16.402,19.8365
+2016-07-26 11:02:47,3599,16.3333,19.8365
+2016-07-26 11:12:50,3599,16.402,19.8365
+2016-07-26 11:22:53,3599,16.402,19.8365
+2016-07-26 11:32:56,3599,16.402,19.8382
+2016-07-26 11:42:58,3599,16.2676,19.8365
+2016-07-26 11:53:01,3599,16.3011,19.8365
+2016-07-26 12:03:04,3599,16.3011,19.7992
+2016-07-26 12:13:07,3599,16.3011,19.8382
+2016-07-26 12:23:10,3599,16.2676,19.8365
+2016-07-26 12:33:13,3599,16.3011,19.801
+2016-07-26 12:43:15,3599,16.2676,19.7248
+2016-07-26 12:53:18,3599,16.1671,19.7248
+2016-07-26 13:03:21,3599,16.1671,19.7248
+2016-07-26 13:13:24,3599,16.1656,19.7248
+2016-07-26 13:23:27,3599,16.2005,19.7248
+2016-07-26 13:33:30,3599,16.2005,19.7248
+2016-07-26 13:43:33,3599,16.1671,19.7248
+2016-07-26 13:53:36,3599,16.0669,19.7265
+2016-07-26 14:03:39,3599,16.0655,19.7248
+2016-07-26 14:13:42,3599,16.1045,19.7248
+2016-07-26 14:23:45,3599,16.0712,19.7248
+2016-07-26 14:33:48,3599,16.0712,19.7248
+2016-07-26 14:43:50,3599,16.0712,19.6876
+2016-07-26 14:53:53,3599,16.0712,19.7248
+2016-07-26 15:03:56,3599,16.137999999999998,19.7248
+2016-07-26 15:13:59,3599,16.1045,19.6135
+2016-07-26 15:24:02,3599,16.0697,19.6135
+2016-07-26 15:34:05,3599,16.137999999999998,19.6135
+2016-07-26 15:44:08,3599,16.137999999999998,19.5765
+2016-07-26 15:54:11,3599,16.0712,19.6135
+2016-07-26 16:04:14,3599,16.0712,19.6153
+2016-07-26 16:14:17,3599,16.0712,19.6135
+2016-07-26 16:24:20,3599,16.1045,19.5783
+2016-07-26 16:34:23,3599,16.0421,19.6135
+2016-07-26 16:44:26,3599,16.0088,19.6135
+2016-07-26 16:54:29,3599,16.0045,19.6153
+2016-07-26 17:04:32,3599,16.0074,19.6135
+2016-07-26 17:14:35,3599,16.0088,19.6135
+2016-07-26 17:24:38,3599,16.0421,19.6135
+2016-07-26 17:34:41,3599,16.0421,19.5765
+2016-07-26 17:44:44,3599,16.0088,19.6135
+2016-07-26 17:54:47,3599,16.0074,19.6153
+2016-07-26 18:04:50,3599,16.0074,19.6135
+2016-07-26 18:14:53,3599,16.0074,19.6135
+2016-07-26 18:24:56,3599,16.0074,19.6153
+2016-07-26 18:34:59,3599,16.0074,19.6135
+2016-07-26 18:45:02,3599,16.0088,19.6135
+2016-07-26 18:55:05,3599,16.0088,19.6135
+2016-07-26 19:05:10,3599,16.0088,19.6135
+2016-07-26 19:15:13,3599,16.0088,19.6135
+2016-07-26 19:25:16,3599,16.0421,19.6135
+2016-07-26 19:35:19,3599,15.9741,19.6153
+2016-07-26 19:45:22,3599,16.0088,19.6153
+2016-07-26 19:55:24,3599,16.0088,19.6153
+2016-07-26 20:05:27,3599,16.0088,19.6135
+2016-07-26 20:15:30,3599,16.0088,19.6135
+2016-07-26 20:25:33,3599,16.0088,19.6153
+2016-07-26 20:35:36,3599,16.0088,19.6135
+2016-07-26 20:45:39,3599,16.0421,19.6153
+2016-07-26 20:55:41,3599,16.0074,19.6135
+2016-07-26 21:05:44,3599,16.0088,19.6135
+2016-07-26 21:15:47,3599,16.0088,19.6135
+2016-07-26 21:25:50,3599,16.0421,19.5765
+2016-07-26 21:35:53,3599,16.1074,19.6135
+2016-07-26 21:45:56,3599,16.074,19.6153
+2016-07-26 21:55:58,3599,16.1408,19.6135
+2016-07-26 22:06:01,3599,16.074,19.6153
+2016-07-26 22:16:04,3599,16.1088,19.6506
+2016-07-26 22:26:07,3599,16.0755,19.6153
+2016-07-26 22:36:10,3599,16.0755,19.5765
+2016-07-26 22:46:12,3599,16.1074,19.6135
+2016-07-26 22:56:15,3599,16.0755,19.6135
+2016-07-26 23:06:18,3599,16.1074,19.6506
+2016-07-26 23:16:21,3599,16.0755,19.6135
+2016-07-26 23:26:24,3599,16.0755,19.6153
+2016-07-26 23:36:26,3599,16.0755,19.6135
+2016-07-26 23:46:29,3599,16.074,19.6135
+2016-07-26 23:56:32,3599,16.0755,19.5783
+2016-07-27 00:06:35,3599,16.1423,19.6135
+2016-07-27 00:16:37,3599,16.137999999999998,19.5765
+2016-07-27 00:26:40,3599,16.1088,19.6205
+2016-07-27 00:36:43,3599,16.1771,19.6135
+2016-07-27 00:46:46,3599,16.0712,19.6118
+2016-07-27 00:56:48,3599,16.1045,19.6135
+2016-07-27 01:06:51,3599,16.1365,19.5765
+2016-07-27 01:16:53,3599,16.0712,19.6135
+2016-07-27 01:27:03,3599,16.105999999999998,19.6153
+2016-07-27 01:37:06,3599,16.1365,19.6135
+2016-07-27 01:47:09,3599,16.1045,19.5835
+2016-07-27 01:57:12,3599,16.0712,19.6187
+2016-07-27 02:07:14,3599,16.1045,19.6135
+2016-07-27 02:17:17,3599,16.1045,19.6506
+2016-07-27 02:27:20,3599,16.1017,19.617
+2016-07-27 02:37:22,3599,16.0669,19.5748
+2016-07-27 02:47:25,3599,16.0988,19.6523
+2016-07-27 02:57:28,3599,16.1351,19.617
+2016-07-27 03:07:31,3599,16.0988,19.6153
+2016-07-27 03:17:33,3599,16.0655,19.6205
+2016-07-27 03:27:36,3599,16.0683,19.6205
+2016-07-27 03:37:38,3599,16.1003,19.6135
+2016-07-27 03:47:41,3599,16.1017,19.617
+2016-07-27 03:57:44,3599,16.0669,19.617
+2016-07-27 04:07:46,3599,16.1003,19.6135
+2016-07-27 04:17:48,3599,16.1003,19.6135
+2016-07-27 04:27:51,3599,16.1336,19.6135
+2016-07-27 04:37:54,3599,16.1351,19.617
+2016-07-27 04:47:56,3599,16.1336,19.6558
+2016-07-27 04:57:59,3599,16.0988,19.6205
+2016-07-27 05:08:02,3599,16.1336,19.6187
+2016-07-27 05:18:04,3599,16.1293,19.6205
+2016-07-27 05:28:07,3599,16.1351,19.6205
+2016-07-27 05:38:10,3599,16.1003,19.6135
+2016-07-27 05:48:13,3599,16.1293,19.6135
+2016-07-27 05:58:15,3599,16.0626,19.6205
+2016-07-27 06:08:18,3599,16.17,19.6153
+2016-07-27 06:18:21,3599,16.096,19.6153
+2016-07-27 06:28:23,3599,16.17,19.6523
+2016-07-27 06:38:26,3598,16.0974,19.6187
+2016-07-27 06:48:29,3599,16.0597,19.6187
+2016-07-27 06:58:31,3599,16.0974,19.6187
+2016-07-27 07:08:34,3599,16.0917,19.6523
+2016-07-27 07:18:37,3598,-6.6287,19.6558
+2016-07-27 07:28:39,3599,16.1265,19.6153
+2016-07-27 07:38:42,3599,16.125,19.6558
+2016-07-27 07:48:44,3598,-13.1385,19.6135
+2016-07-27 07:58:47,3599,16.0917,19.6135
+2016-07-27 08:08:50,3599,16.0583,19.6153
+2016-07-27 08:18:52,3599,16.0583,19.6135
+2016-07-27 08:28:55,3599,16.125,19.6576
+2016-07-27 08:38:58,3599,16.0583,19.6205
+2016-07-27 08:49:00,3599,-8.3606,19.6135
+2016-07-27 08:59:03,3597,15.9528,19.6187
+2016-07-27 09:09:06,3599,16.0583,19.6135
+2016-07-27 09:19:08,3599,16.0583,19.6135
+2016-07-27 09:29:11,3599,15.9055,19.6576
+2016-07-27 09:39:13,3599,16.0917,19.6135
+2016-07-27 09:49:16,3599,16.0583,19.6153
+2016-07-27 09:59:19,3599,16.125,19.6135
+2016-07-27 10:09:33,3599,16.125,19.6135
+2016-07-27 10:20:20,3599,16.125,19.6153
+2016-07-27 10:30:23,3599,16.054000000000002,19.6135
+2016-07-27 10:40:25,3599,16.125,19.6135
+2016-07-27 10:50:28,3599,16.0874,19.6135
+2016-07-27 11:00:31,3599,10.6535,19.6135
+2016-07-27 11:10:34,3599,16.1207,19.6135
+2016-07-27 11:20:37,3599,16.1236,19.6205
+2016-07-27 11:30:39,3599,16.0555,19.5044
+2016-07-27 11:40:42,3599,16.1222,19.5765
+2016-07-27 11:50:45,3599,16.1222,19.5027
+2016-07-27 12:00:48,3599,15.986,19.5009
+2016-07-27 12:10:51,3599,16.0207,19.5027
+2016-07-27 12:20:53,3599,16.054000000000002,19.5413
+2016-07-27 12:30:56,3599,15.9875,19.5027
+2016-07-27 12:40:59,3599,15.9875,19.5027
+2016-07-27 12:51:02,3599,16.0207,19.5413
+2016-07-27 13:01:05,3599,15.9585,19.5044
+2016-07-27 13:11:08,3599,15.8879,19.5027
+2016-07-27 13:21:11,3599,15.8921,19.5027
+2016-07-27 13:31:14,3599,15.9253,19.5044
+2016-07-27 13:41:17,3599,15.9253,19.5027
+2016-07-27 13:51:20,3599,15.8921,19.5027
+2016-07-27 14:01:23,3599,15.7928,19.5027
+2016-07-27 14:11:26,3599,15.7928,19.5027
+2016-07-27 14:21:29,3599,15.7928,19.429
+2016-07-27 14:31:32,3599,15.8258,19.429
+2016-07-27 14:41:35,3599,15.7942,19.3555
+2016-07-27 14:51:38,3599,15.7928,19.4307
+2016-07-27 15:01:41,3599,15.7928,19.3922
+2016-07-27 15:11:44,3599,15.8273,19.3922
+2016-07-27 15:21:47,3599,15.7928,19.394
+2016-07-27 15:31:50,3599,15.8258,19.394
+2016-07-27 15:41:53,3599,15.7928,19.429
+2016-07-27 15:51:56,3599,15.7928,19.429
+2016-07-27 16:01:59,3599,15.6937,19.3555
+2016-07-27 16:12:02,3599,15.6937,19.429
+2016-07-27 16:22:05,3599,15.6979,19.4307
+2016-07-27 16:32:08,3599,15.665,19.3922
+2016-07-27 16:42:11,3599,15.6979,19.4307
+2016-07-27 16:52:14,3599,15.6993,19.4307
+2016-07-27 17:02:17,3599,15.7639,19.429
+2016-07-27 17:12:20,3599,15.6664,19.3189
+2016-07-27 17:22:23,3599,15.6979,19.3189
+2016-07-27 17:32:26,3599,15.7309,19.2822
+2016-07-27 17:42:29,3599,15.5992,19.2822
+2016-07-27 17:52:32,3599,15.6334,19.3189
+2016-07-27 18:02:35,3599,15.6362,19.3189
+2016-07-27 18:12:37,3599,15.6019,19.2822
+2016-07-27 18:22:40,3599,15.6348,19.2822
+2016-07-27 18:32:43,3599,-10.969,19.284
+2016-07-27 18:42:46,3599,15.6005,19.2822
+2016-07-27 18:52:49,3599,15.5704,19.284
+2016-07-27 19:02:52,3599,15.6348,19.3189
+2016-07-27 19:12:55,3599,15.5704,19.3189
+2016-07-27 19:22:58,3599,15.6033,19.3206
+2016-07-27 19:33:01,3599,15.7337,19.3189
+2016-07-27 19:43:11,3599,15.7007,19.3189
+2016-07-27 19:53:14,3599,15.7021,19.284
+2016-07-27 20:03:17,3599,15.7007,19.3189
+2016-07-27 20:13:20,3599,15.7007,19.284
+2016-07-27 20:23:23,3599,15.7035,19.3189
+2016-07-27 20:33:26,3599,15.7021,19.3206
+2016-07-27 20:43:28,3599,15.7007,19.2822
+2016-07-27 20:53:31,3599,15.7007,19.3189
+2016-07-27 21:03:34,3599,15.7007,19.3189
+2016-07-27 21:13:37,3599,15.6678,19.3189
+2016-07-27 21:23:40,3599,15.7667,19.2822
+2016-07-27 21:33:43,3599,15.7007,19.2822
+2016-07-27 21:43:46,3599,15.7021,19.3189
+2016-07-27 21:53:48,3599,15.7351,19.3555
+2016-07-27 22:03:51,3599,15.7007,19.2474
+2016-07-27 22:13:54,3599,15.6691,19.3189
+2016-07-27 22:23:57,3599,15.7007,19.3189
+2016-07-27 22:33:59,3599,15.7337,19.2822
+2016-07-27 22:44:02,3599,15.7007,19.2822
+2016-07-27 22:54:05,3599,15.7021,19.3189
+2016-07-27 23:04:08,3599,15.7021,19.2822
+2016-07-27 23:14:11,3599,15.6678,19.3189
+2016-07-27 23:24:14,3599,15.8012,19.3189
+2016-07-27 23:34:16,3599,15.7998,19.3189
+2016-07-27 23:44:19,3599,15.7998,19.3206
+2016-07-27 23:54:22,3599,15.7998,19.284
+2016-07-28 00:04:25,3599,15.8026,19.284
+2016-07-28 00:14:28,3599,15.8012,19.284
+2016-07-28 00:24:30,3599,15.8329,19.284
+2016-07-28 00:34:33,3599,15.7681,19.3206
+2016-07-28 00:44:36,3599,15.8357,19.284
+2016-07-28 00:54:39,3599,15.7998,19.2822
+2016-07-28 01:04:41,3599,15.8012,19.2822
+2016-07-28 01:14:44,3599,15.8012,19.284
+2016-07-28 01:24:47,3599,15.797,19.2822
+2016-07-28 01:34:50,3599,15.797,19.2822
+2016-07-28 01:44:53,3599,15.8315,19.284
+2016-07-28 01:54:55,3598,15.797,19.3189
+2016-07-28 02:04:58,3598,15.7984,19.3206
+2016-07-28 02:15:01,3598,15.7984,19.3189
+2016-07-28 02:25:03,3598,15.797,19.3206
+2016-07-28 02:35:06,3598,15.8301,19.2822
+2016-07-28 02:45:08,3598,15.7928,19.3189
+2016-07-28 02:55:10,3598,15.7984,19.3206
+2016-07-28 03:05:21,3598,15.7928,19.2822
+2016-07-28 03:15:23,3598,15.7928,19.3189
+2016-07-28 03:25:26,3598,15.7942,19.284
+2016-07-28 03:35:28,3597,15.7928,19.2822
+2016-07-28 03:45:31,3597,15.7597,19.3206
+2016-07-28 03:55:34,3596,15.8258,19.2822
+2016-07-28 04:05:36,3596,15.7928,19.3189
+2016-07-28 04:15:39,3596,15.7928,19.284
+2016-07-28 04:25:41,3596,15.7928,19.284
+2016-07-28 04:35:44,3595,15.7928,19.2822
+2016-07-28 04:45:46,3595,15.7942,19.3206
+2016-07-28 04:55:49,3595,15.7928,19.3189
+2016-07-28 05:05:51,3594,15.8258,19.3189
+2016-07-28 05:15:53,3594,15.7942,19.3206
+2016-07-28 05:26:04,3594,15.8258,19.3206
+2016-07-28 05:36:06,3593,15.8216,19.3189
+2016-07-28 05:46:09,3593,15.7886,19.2822
+2016-07-28 05:56:11,3593,15.7886,19.3189
+2016-07-28 06:06:14,3593,15.79,19.3189
+2016-07-28 06:16:16,3593,15.7886,19.3189
+2016-07-28 06:26:19,3592,15.79,19.3189
+2016-07-28 06:36:22,3592,15.7886,19.3189
+2016-07-28 06:46:24,3592,15.7886,19.3206
+2016-07-28 06:56:27,3592,15.7886,19.3189
+2016-07-28 07:06:29,3592,15.8216,19.3206
+2016-07-28 07:16:32,3593,15.7886,19.3206
+2016-07-28 07:26:34,3593,15.8216,19.284
+2016-07-28 07:36:37,3593,15.79,19.3206
+2016-07-28 07:46:39,3594,15.79,19.3206
+2016-07-28 07:56:42,3595,15.8216,19.3189
+2016-07-28 08:06:44,3595,15.7857,19.3206
+2016-07-28 08:16:47,3595,15.8174,19.2822
+2016-07-28 08:26:49,3596,15.8174,19.284
+2016-07-28 08:36:52,3597,15.8188,19.3189
+2016-07-28 08:46:55,3598,15.7857,19.2822
+2016-07-28 08:56:57,3598,15.8174,19.3189
+2016-07-28 09:07:00,3598,15.7857,19.3171
+2016-07-28 09:17:02,3598,15.8174,19.3189
+2016-07-28 09:27:05,3598,15.7857,19.3189
+2016-07-28 09:37:07,3598,15.7843,19.3189
+2016-07-28 09:47:10,3598,15.7513,19.3189
+2016-07-28 09:57:12,3599,15.8174,19.3189
+2016-07-28 10:07:15,3599,15.6881,19.284
+2016-07-28 10:17:18,3599,15.7183,19.3206
+2016-07-28 10:27:20,3599,15.7843,19.2822
+2016-07-28 10:37:23,3599,15.6868,19.3189
+2016-07-28 10:47:25,3599,15.6854,19.3189
+2016-07-28 10:57:28,3599,15.7211,19.3573
+2016-07-28 11:07:31,3599,15.7197,19.3189
+2016-07-28 11:17:34,3599,15.6868,19.3189
+2016-07-28 11:27:36,3599,15.684,19.2822
+2016-07-28 11:37:39,3599,15.6868,19.3189
+2016-07-28 11:47:42,3599,15.7183,19.3189
+2016-07-28 11:57:45,3599,15.6854,19.3189
+2016-07-28 12:07:48,3599,15.6195,19.2822
+2016-07-28 12:17:51,3599,15.6154,19.2822
+2016-07-28 12:27:53,3599,15.6524,19.2822
+2016-07-28 12:37:56,3599,15.6195,19.2091
+2016-07-28 12:47:59,3599,15.6209,19.2091
+2016-07-28 12:58:02,3599,15.5881,19.2091
+2016-07-28 13:08:05,3599,15.6195,19.2108
+2016-07-28 13:18:08,3599,15.4555,19.2108
+2016-07-28 13:28:11,3599,15.521,19.1743
+2016-07-28 13:38:14,3599,15.521,19.1726
+2016-07-28 13:48:17,3599,15.491,19.1743
+2016-07-28 13:58:20,3599,15.4883,19.2457
+2016-07-28 14:08:22,3599,15.521,19.2091
+2016-07-28 14:18:25,3599,15.4883,19.2091
+2016-07-28 14:28:28,3599,15.521,19.1726
+2016-07-28 14:38:31,3599,15.4938,19.2091
+2016-07-28 14:48:34,3599,15.5252,19.2091
+2016-07-28 14:58:37,3599,15.4924,19.2091
+2016-07-28 15:08:40,3599,15.558,19.2091
+2016-07-28 15:18:43,3599,15.3929,19.0998
+2016-07-28 15:28:46,3599,15.3943,19.0652
+2016-07-28 15:38:49,3599,15.3943,19.0635
+2016-07-28 15:48:52,3599,15.3943,19.0635
+2016-07-28 15:58:55,3599,15.3943,19.0635
+2016-07-28 16:08:58,3599,15.4597,19.1379
+2016-07-28 16:19:01,3599,15.4283,19.0998
+2016-07-28 16:29:04,3599,15.3943,19.0635
+2016-07-28 16:39:07,3599,15.4269,19.0998
+2016-07-28 16:49:10,3599,15.4269,19.0652
+2016-07-28 16:59:13,3599,15.3943,19.0635
+2016-07-28 17:09:16,3599,15.4269,19.0998
+2016-07-28 17:19:19,3599,15.4269,19.1015
+2016-07-28 17:29:22,3599,15.3943,19.1015
+2016-07-28 17:39:24,3599,15.4269,19.0998
+2016-07-28 17:49:27,3599,15.3943,19.0652
+2016-07-28 17:59:30,3599,15.1435,19.0998
+2016-07-28 18:09:33,3599,15.3943,19.1362
+2016-07-28 18:19:36,3599,15.3943,19.0635
+2016-07-28 18:29:39,3599,15.4311,19.1379
+2016-07-28 18:39:42,3599,15.4311,19.0652
+2016-07-28 18:49:45,3599,15.3984,19.0652
+2016-07-28 18:59:48,3599,15.4638,19.0652
+2016-07-28 19:09:51,3599,15.4297,19.1015
+2016-07-28 19:19:54,3599,15.3984,19.0618
+2016-07-28 19:29:57,3599,15.3984,19.0618
+2016-07-28 19:40:00,3599,15.5307,19.0635
+2016-07-28 19:50:02,3599,15.4965,19.1379
+2016-07-28 20:00:05,3599,15.5293,19.1032
+2016-07-28 20:10:08,3599,15.4979,19.0635
+2016-07-28 20:20:11,3599,15.5307,19.1015
+2016-07-28 20:30:14,3599,15.5293,19.0998
+2016-07-28 20:40:17,3599,15.5621,19.0652
+2016-07-28 20:50:19,3599,15.4965,19.0635
+2016-07-28 21:00:22,3599,15.5293,19.0635
+2016-07-28 21:10:25,3599,15.5293,19.0635
+2016-07-28 21:20:28,3599,15.5293,19.0998
+2016-07-28 21:30:30,3599,15.5293,19.0998
+2016-07-28 21:40:33,3599,15.4965,19.0635
+2016-07-28 21:50:36,3599,15.5307,19.1362
+2016-07-28 22:00:39,3599,15.5293,19.1379
+2016-07-28 22:10:42,3599,15.5293,19.0998
+2016-07-28 22:20:45,3599,15.5293,19.0635
+2016-07-28 22:30:47,3599,15.4965,19.1015
+2016-07-28 22:40:50,3599,15.4979,19.1015
+2016-07-28 22:50:53,3598,15.5293,19.0635
+2016-07-28 23:00:56,3598,15.5635,19.0635
+2016-07-28 23:10:58,3598,15.5293,19.0998
+2016-07-28 23:21:01,3598,15.5293,19.0635
+2016-07-28 23:31:04,3588,15.4269,19.1015
+2016-07-28 23:41:07,3598,15.4924,19.1379
+2016-07-28 23:51:09,3598,15.5293,19.0998
+2016-07-29 00:01:12,3597,15.4938,19.0998
+2016-07-29 00:11:15,3597,15.5252,19.0635
+2016-07-29 00:21:18,3597,15.5252,19.0998
+2016-07-29 00:31:20,3597,15.5252,19.0998
+2016-07-29 00:41:23,3596,15.5252,19.1015
+2016-07-29 00:51:26,3596,15.5252,19.1362
+2016-07-29 01:01:28,3595,15.5252,19.0998
+2016-07-29 01:11:31,3595,15.5252,19.0635
+2016-07-29 01:21:33,3595,15.4924,19.0998
+2016-07-29 01:31:36,3595,15.5252,19.0652
+2016-07-29 01:41:39,3595,15.5252,19.0998
+2016-07-29 01:51:42,3594,15.5252,19.1015
+2016-07-29 02:01:44,3594,15.4924,19.0652
+2016-07-29 02:11:47,3594,15.5252,19.0998
+2016-07-29 02:21:50,3593,15.5252,19.1379
+2016-07-29 02:31:52,3593,15.5252,19.0635
+2016-07-29 02:41:55,3592,15.5252,19.0635
+2016-07-29 02:51:58,3592,15.5252,19.0652
+2016-07-29 03:02:00,3592,15.5238,19.1362
+2016-07-29 03:12:03,3591,15.5266,19.0635
+2016-07-29 03:22:06,3591,15.558,19.0635
+2016-07-29 03:32:08,3590,15.5238,19.0652
+2016-07-29 03:42:11,3590,15.5252,19.0635
+2016-07-29 03:52:14,3590,15.5252,19.0635
+2016-07-29 04:02:16,3589,15.4924,19.0652
+2016-07-29 04:12:19,3589,15.521,19.0635
+2016-07-29 04:22:21,3588,15.4896,19.0998
+2016-07-29 04:32:24,3588,15.4938,19.1015
+2016-07-29 04:42:27,3588,15.5252,19.0652
+2016-07-29 04:52:29,3588,15.521,19.0652
+2016-07-29 05:02:32,3588,15.5224,19.0652
+2016-07-29 05:12:35,3587,15.5224,19.1015
+2016-07-29 05:22:37,3587,15.5224,19.1362
+2016-07-29 05:32:40,3587,15.5224,19.0998
+2016-07-29 05:42:43,3587,15.521,19.1015
+2016-07-29 05:52:45,3587,15.521,19.0635
+2016-07-29 06:02:48,3587,15.5224,19.0635
+2016-07-29 06:12:51,3586,15.5224,19.1015
+2016-07-29 06:22:53,3586,15.5183,19.0652
+2016-07-29 06:32:56,3587,15.4855,19.0998
+2016-07-29 06:42:59,3586,15.5183,19.0998
+2016-07-29 06:53:02,3587,15.5183,19.0652
+2016-07-29 07:03:04,3587,15.4841,19.0635
+2016-07-29 07:13:07,3587,15.5183,19.0998
+2016-07-29 07:23:10,3587,15.5511,19.0652
+2016-07-29 07:33:13,3587,15.5183,19.1362
+2016-07-29 07:43:15,3587,15.5183,19.1015
+2016-07-29 07:53:18,3588,15.5183,19.0635
+2016-07-29 08:03:21,3588,15.5183,19.0998
+2016-07-29 08:13:23,3588,15.5183,19.1015
+2016-07-29 08:23:26,3589,15.5497,19.1015
+2016-07-29 08:33:29,3590,15.5183,19.0652
+2016-07-29 08:43:31,3591,15.5183,19.0998
+2016-07-29 08:53:34,3591,15.5183,19.0998
+2016-07-29 09:03:37,3591,15.5183,19.0998
+2016-07-29 09:13:40,3592,15.5197,19.0652
+2016-07-29 09:23:43,3592,15.5183,19.0998
+2016-07-29 09:33:45,3592,15.5183,19.1015
+2016-07-29 09:43:48,3592,15.5183,19.0998
+2016-07-29 09:53:51,3593,15.5183,19.0998
+2016-07-29 10:03:54,3593,15.5183,19.0635
+2016-07-29 10:13:57,3592,15.5183,18.9926
+2016-07-29 10:23:59,3594,15.5183,18.9909
+2016-07-29 10:34:02,3595,15.5169,18.9547
+2016-07-29 10:44:04,3595,15.5183,18.9909
+2016-07-29 10:54:07,3596,15.5183,18.9496
+2016-07-29 11:04:10,3597,15.5511,18.9547
+2016-07-29 11:14:12,3598,15.5183,18.9547
+2016-07-29 11:24:15,3598,15.5183,18.9547
+2016-07-29 11:34:18,3598,15.5183,18.9926
+2016-07-29 11:44:21,3598,15.5511,18.9564
+2016-07-29 11:54:24,3599,15.5183,18.9564
+2016-07-29 12:04:26,3598,15.5183,18.9547
+2016-07-29 12:14:29,3599,15.4201,18.9547
+2016-07-29 12:24:32,3599,15.3874,18.9564
+2016-07-29 12:34:35,3599,15.4201,18.9909
+2016-07-29 12:44:38,3599,15.4201,18.9858
+2016-07-29 12:54:41,3599,15.4201,18.9547
+2016-07-29 13:04:44,3599,15.3222,18.9547
+2016-07-29 13:14:47,3599,15.2896,18.9496
+2016-07-29 13:24:50,3599,15.3222,18.9496
+2016-07-29 13:34:52,3599,15.3222,18.9858
+2016-07-29 13:44:55,3599,15.3208,18.8824
+2016-07-29 13:54:58,3599,15.3222,18.8429
+2016-07-29 14:05:01,3599,15.1921,18.8773
+2016-07-29 14:15:04,3599,15.2571,18.8824
+2016-07-29 14:25:07,3599,15.2246,18.8807
+2016-07-29 14:35:10,3599,15.2246,18.8773
+2016-07-29 14:45:13,3599,15.2246,18.9151
+2016-07-29 14:55:16,3599,15.2557,18.9202
+2016-07-29 15:05:19,3599,15.1259,18.879
+2016-07-29 15:15:22,3599,15.1596,18.879
+2016-07-29 15:25:25,3599,15.1637,18.879
+2016-07-29 15:35:28,3599,15.1272,18.9135
+2016-07-29 15:45:31,3599,15.1312,18.8773
+2016-07-29 15:55:34,3599,15.1637,18.879
+2016-07-29 16:05:37,3599,15.1596,18.879
+2016-07-29 16:15:40,3599,15.1299,18.8841
+2016-07-29 16:25:43,3599,15.1623,18.8773
+2016-07-29 16:35:46,3599,15.0975,18.879
+2016-07-29 16:45:49,3599,15.0989,18.9151
+2016-07-29 16:55:52,3599,15.0989,18.8824
+2016-07-29 17:05:55,3599,15.1623,18.8036
+2016-07-29 17:15:58,3599,15.1637,18.9151
+2016-07-29 17:26:01,3599,15.0975,18.879
+2016-07-29 17:36:04,3599,15.1015,18.8413
+2016-07-29 17:46:07,3599,15.1015,18.8773
+2016-07-29 17:56:10,3599,15.1339,18.9135
+2016-07-29 18:06:13,3599,15.1326,18.8773
+2016-07-29 18:16:16,3599,15.1339,18.879
+2016-07-29 18:26:19,3599,-10.203,18.8773
+2016-07-29 18:36:22,3599,15.1339,18.879
+2016-07-29 18:46:25,3599,15.1015,18.8773
+2016-07-29 18:56:27,3599,15.1015,18.8824
+2016-07-29 19:06:30,3599,15.1339,18.879
+2016-07-29 19:16:33,3599,15.1015,18.8773
+2016-07-29 19:26:36,3599,15.1664,18.8773
+2016-07-29 19:36:39,3599,15.1664,18.879
+2016-07-29 19:46:42,3599,15.1339,18.8773
+2016-07-29 19:56:45,3599,15.1664,18.879
+2016-07-29 20:06:48,3599,15.1339,18.8773
+2016-07-29 20:16:50,3599,15.1029,18.8773
+2016-07-29 20:26:53,3599,15.1664,18.8773
+2016-07-29 20:36:56,3599,15.1664,18.8773
+2016-07-29 20:46:59,3599,15.1339,18.879
+2016-07-29 20:57:02,3599,15.1664,18.879
+2016-07-29 21:07:04,3599,15.1015,18.879
+2016-07-29 21:17:07,3599,15.1339,18.8773
+2016-07-29 21:27:10,3599,15.1339,18.8773
+2016-07-29 21:37:13,3599,15.1664,18.8773
+2016-07-29 21:47:15,3598,15.1339,18.879
+2016-07-29 21:57:18,3598,15.1029,18.8773
+2016-07-29 22:07:21,3598,15.1664,18.9135
+2016-07-29 22:17:24,3598,15.1015,18.8773
+2016-07-29 22:27:26,3598,15.1015,18.9151
+2016-07-29 22:37:29,3598,15.1339,18.8773
+2016-07-29 22:47:31,3598,15.1664,18.879
+2016-07-29 22:57:34,3597,15.1339,18.8773
+2016-07-29 23:07:37,3597,15.1339,18.8773
+2016-07-29 23:17:39,3596,15.1339,18.879
+2016-07-29 23:27:42,3596,15.2313,18.879
+2016-07-29 23:37:45,3595,15.2313,18.879
+2016-07-29 23:47:47,3595,15.2313,18.879
+2016-07-29 23:57:50,3595,15.2638,18.8773
+2016-07-30 00:07:52,3594,15.2638,18.9151
+2016-07-30 00:17:55,3593,15.2638,18.8773
+2016-07-30 00:27:57,3593,15.1975,18.9185
+2016-07-30 00:38:00,3593,15.2313,18.8824
+2016-07-30 00:48:02,3592,15.1988,18.8824
+2016-07-30 00:58:05,3592,15.2313,18.879
+2016-07-30 01:08:07,3592,15.2638,18.8773
+2016-07-30 01:18:10,3592,15.2638,18.8773
+2016-07-30 01:28:12,3592,15.2313,18.8773
+2016-07-30 01:38:15,3592,15.2638,18.9135
+2016-07-30 01:48:17,3592,15.2638,18.9151
+2016-07-30 01:58:20,3592,15.2652,18.9202
+2016-07-30 02:08:23,3591,15.2313,18.8773
+2016-07-30 02:18:25,3591,15.2313,18.8841
+2016-07-30 02:28:28,3591,15.2638,18.8824
+2016-07-30 02:38:31,3590,15.2286,18.8824
+2016-07-30 02:48:33,3590,15.2923,18.8841
+2016-07-30 02:58:36,3589,15.2611,18.8773
+2016-07-30 03:08:39,3589,15.1948,18.8773
+2016-07-30 03:18:41,3589,15.2273,18.879
+2016-07-30 03:28:44,3588,15.1948,18.9135
+2016-07-30 03:38:46,3588,15.2611,18.8841
+2016-07-30 03:48:49,3588,15.2273,18.8773
+2016-07-30 03:58:51,3588,15.2273,18.8824
+2016-07-30 04:08:53,3588,15.1948,18.8824
+2016-07-30 04:18:56,3586,15.2246,18.9202
+2016-07-30 04:28:59,3586,15.2598,18.8773
+2016-07-30 04:39:01,3586,15.2571,18.8824
+2016-07-30 04:49:04,3586,15.2571,18.8841
+2016-07-30 04:59:06,3586,15.2571,18.879
+2016-07-30 05:09:09,3585,15.2557,18.879
+2016-07-30 05:19:11,3585,15.2246,18.8841
+2016-07-30 05:29:14,3585,15.2246,18.8824
+2016-07-30 05:39:16,3585,15.2259,18.9185
+2016-07-30 05:49:19,3584,15.2571,18.879
+2016-07-30 05:59:22,3584,15.2571,18.8773
+2016-07-30 06:09:24,3584,15.2571,18.8841
+2016-07-30 06:19:27,3583,15.2232,18.8841
+2016-07-30 06:29:29,3584,15.2246,18.8824
+2016-07-30 06:39:32,3582,15.2246,18.9202
+2016-07-30 06:49:34,3582,15.2571,18.8841
+2016-07-30 06:59:37,3585,15.2584,18.879
+2016-07-30 07:09:40,3584,15.2571,18.8841
+2016-07-30 07:19:42,3584,15.2571,18.8824
+2016-07-30 07:29:45,3585,15.2557,18.8824
+2016-07-30 07:39:47,3585,15.2571,18.9202
+2016-07-30 07:49:50,3585,15.2205,18.9185
+2016-07-30 07:59:52,3586,15.2191,18.879
+2016-07-30 08:09:55,3586,15.2191,18.879
+2016-07-30 08:19:57,3586,15.253,18.8824
+2016-07-30 08:30:00,3587,15.2896,18.8841
+2016-07-30 08:40:03,3587,15.1867,18.8773
+2016-07-30 08:50:05,3588,15.188,18.8824
+2016-07-30 09:00:08,3588,15.253,18.8841
+2016-07-30 09:10:11,3588,15.253,18.8773
+2016-07-30 09:20:15,3589,15.2205,18.879
+2016-07-30 09:30:18,3588,15.253,18.8052
+2016-07-30 09:40:21,3591,15.2191,18.7709
+2016-07-30 09:50:23,3591,15.2205,18.7709
+2016-07-30 10:00:26,3590,15.2205,18.7743
+2016-07-30 10:10:29,3593,15.1542,18.7743
+2016-07-30 10:20:32,3593,15.1232,18.7759
+2016-07-30 10:30:34,3595,15.0895,18.8052
+2016-07-30 10:40:37,3595,15.1218,18.7709
+2016-07-30 10:50:40,3596,15.1556,18.8069
+2016-07-30 11:00:43,3597,15.1218,18.7693
+2016-07-30 11:10:45,3598,15.1218,18.8052
+2016-07-30 11:20:48,3596,15.1232,18.7709
+2016-07-30 11:30:51,3598,15.1556,18.7709
+2016-07-30 11:40:54,3598,15.1556,18.8052
+2016-07-30 11:50:56,3598,15.1542,18.7709
+2016-07-30 12:00:59,3598,15.1218,18.7759
+2016-07-30 12:11:02,3598,15.1556,18.7693
+2016-07-30 12:21:05,3599,15.1218,18.7693
+2016-07-30 12:31:08,3599,15.0261,18.7693
+2016-07-30 12:41:11,3599,15.0261,18.7693
+2016-07-30 12:51:13,3599,15.0571,18.6258
+2016-07-30 13:01:16,3599,15.0301,18.6632
+2016-07-30 13:11:19,3599,15.0585,18.6616
+2016-07-30 13:21:22,3599,15.0261,18.6632
+2016-07-30 13:31:25,3599,14.9979,18.6632
+2016-07-30 13:41:28,3599,15.0301,18.6258
+2016-07-30 13:51:30,3599,15.0625,18.6632
+2016-07-30 14:01:33,3599,14.932,18.6632
+2016-07-30 14:11:36,3599,14.8972,18.6616
+2016-07-30 14:21:39,3599,14.9656,18.6616
+2016-07-30 14:31:42,3599,14.9334,18.6632
+2016-07-30 14:41:45,3599,14.9656,18.6616
+2016-07-30 14:51:48,3599,14.9643,18.6632
+2016-07-30 15:01:51,3599,14.932,18.6632
+2016-07-30 15:11:54,3599,14.9656,18.6616
+2016-07-30 15:21:56,3599,14.8369,18.6274
+2016-07-30 15:31:59,3599,14.8369,18.6632
+2016-07-30 15:42:02,3599,14.8369,18.6632
+2016-07-30 15:52:05,3599,14.869000000000002,18.5559
+2016-07-30 16:02:08,3599,14.8355,18.5543
+2016-07-30 16:12:11,3599,14.8355,18.5543
+2016-07-30 16:22:14,3599,14.8355,18.5543
+2016-07-30 16:32:17,3599,14.8677,18.5543
+2016-07-30 16:42:19,3599,14.8342,18.5543
+2016-07-30 16:52:22,3599,14.869000000000002,18.5543
+2016-07-30 17:02:25,3599,14.869000000000002,18.5543
+2016-07-30 17:12:28,3599,14.8369,18.5543
+2016-07-30 17:22:31,3599,14.8355,18.5559
+2016-07-30 17:32:34,3599,14.8355,18.59
+2016-07-30 17:42:37,3599,14.8677,18.5559
+2016-07-30 17:52:40,3599,14.8369,18.5543
+2016-07-30 18:02:43,3599,14.8369,18.5559
+2016-07-30 18:12:46,3599,14.8716,18.5543
+2016-07-30 18:22:48,3599,14.869000000000002,18.5559
+2016-07-30 18:32:51,3599,14.94,18.5559
+2016-07-30 18:42:54,3599,14.8395,18.5543
+2016-07-30 18:52:57,3599,14.8716,18.5543
+2016-07-30 19:03:00,3599,14.8395,18.5543
+2016-07-30 19:13:03,3599,14.8395,18.5559
+2016-07-30 19:23:06,3598,-11.2629,18.5543
+2016-07-30 19:33:09,3599,14.8395,18.5559
+2016-07-30 19:43:11,3599,14.8395,18.5543
+2016-07-30 19:53:14,3599,14.8716,18.5543
+2016-07-30 20:03:17,3599,14.8716,18.5543
+2016-07-30 20:13:20,3599,14.8395,18.5543
+2016-07-30 20:23:23,3599,14.8716,18.5559
+2016-07-30 20:33:26,3599,14.8395,18.5559
+2016-07-30 20:43:29,3599,14.8395,18.5559
+2016-07-30 20:53:32,3599,14.8408,18.5559
+2016-07-30 21:03:35,3598,14.8395,18.5186
+2016-07-30 21:13:37,3598,14.8716,18.5543
+2016-07-30 21:23:40,3598,14.9038,18.5543
+2016-07-30 21:33:43,3598,14.8395,18.5559
+2016-07-30 21:43:46,3598,14.8716,18.5543
+2016-07-30 21:53:49,3597,14.873,18.5559
+2016-07-30 22:03:52,3597,14.8395,18.5543
+2016-07-30 22:13:54,3596,14.8716,18.5559
+2016-07-30 22:23:57,3595,14.8395,18.5543
+2016-07-30 22:34:00,3594,14.8395,18.5543
+2016-07-30 22:44:02,3595,14.9051,18.5559
+2016-07-30 22:54:05,3593,14.8716,18.5559
+2016-07-30 23:04:08,3593,14.8369,18.5559
+2016-07-30 23:14:11,3592,14.8395,18.5543
+2016-07-30 23:24:13,3591,14.9012,18.5917
+2016-07-30 23:34:16,3592,14.8716,18.5543
+2016-07-30 23:44:19,3592,14.9078,18.5543
+2016-07-30 23:54:21,3590,14.8395,18.5543
+2016-07-31 00:04:24,3588,14.8369,18.5543
+2016-07-31 00:14:27,3588,14.8369,18.5543
+2016-07-31 00:24:29,3589,14.1468,18.5543
+2016-07-31 00:34:32,3586,14.8369,18.5559
+2016-07-31 00:44:35,3587,14.8369,18.5543
+2016-07-31 00:54:37,3586,14.8369,18.5543
+2016-07-31 01:04:40,3587,14.8369,18.59
+2016-07-31 01:14:43,3585,14.8369,18.5559
+2016-07-31 01:24:45,3585,14.869000000000002,18.5917
+2016-07-31 01:34:48,3585,14.869000000000002,18.5559
+2016-07-31 01:44:51,3584,14.869000000000002,18.5917
+2016-07-31 01:54:53,3584,14.8677,18.5559
+2016-07-31 02:04:56,3583,14.8355,18.5543
+2016-07-31 02:14:59,3581,14.8369,18.5559
+2016-07-31 02:25:01,3582,14.869000000000002,18.5559
+2016-07-31 02:35:04,3581,14.8369,18.5559
+2016-07-31 02:45:07,3581,14.8382,18.5559
+2016-07-31 02:55:09,3577,14.869000000000002,18.5559
+2016-07-31 03:05:12,3578,14.8369,18.5559
+2016-07-31 03:15:15,3580,14.8637,18.5559
+2016-07-31 03:25:16,3580,14.8329,18.5543
+2016-07-31 03:35:19,3580,14.8637,18.59
+2016-07-31 03:45:21,3580,14.865,18.5559
+2016-07-31 03:55:24,3579,14.8637,18.5917
+2016-07-31 04:05:26,3580,14.8329,18.5543
+2016-07-31 04:15:29,3579,14.8959,18.5559
+2016-07-31 04:25:31,3580,14.8316,18.5559
+2016-07-31 04:35:34,3580,14.8316,18.5559
+2016-07-31 04:45:36,3580,14.8329,18.5559
+2016-07-31 04:55:39,3580,14.8316,18.5559
+2016-07-31 05:05:41,3580,14.8316,18.5559
+2016-07-31 05:15:44,3580,14.865,18.5543
+2016-07-31 05:25:46,3581,14.8637,18.5559
+2016-07-31 05:35:49,3581,14.9616,18.5543
+2016-07-31 05:45:52,3581,14.9603,18.59
+2016-07-31 05:55:54,3581,14.9294,18.5559
+2016-07-31 06:05:57,3581,14.9576,18.5559
+2016-07-31 06:15:59,3581,14.9616,18.59
+2016-07-31 06:26:02,3581,14.9254,18.5543
+2016-07-31 06:36:04,3581,14.9576,18.5543
+2016-07-31 06:46:07,3581,14.9576,18.5917
+2016-07-31 06:56:09,3581,14.8611,18.5559
+2016-07-31 07:06:12,3581,14.8276,18.5543
+2016-07-31 07:16:14,3581,14.8932,18.5559
+2016-07-31 07:26:17,3581,14.8276,18.5559
+2016-07-31 07:36:19,3585,14.8289,18.5559
+2016-07-31 07:46:22,3581,14.8611,18.5559
+2016-07-31 07:56:24,3582,14.8932,18.5543
+2016-07-31 08:06:27,3582,14.8932,18.5559
+2016-07-31 08:16:29,3583,14.8892,18.5559
+2016-07-31 08:26:39,3585,14.8558,18.5543
+2016-07-31 08:36:42,3585,14.8879,18.5559
+2016-07-31 08:46:45,3585,14.8611,18.5543
+2016-07-31 08:56:47,3586,14.825,18.5153
+2016-07-31 09:06:50,3587,14.8237,18.551
+2016-07-31 09:16:53,3588,14.8237,18.5543
+2016-07-31 09:26:55,3588,14.8571,18.5559
+2016-07-31 09:36:58,3589,14.8558,18.5559
+2016-07-31 09:47:00,3591,14.8237,18.551
+2016-07-31 09:57:03,3592,14.825,18.551
+2016-07-31 10:07:05,3593,14.8237,18.5493
+2016-07-31 10:17:08,3594,14.8571,18.551
+2016-07-31 10:27:11,3595,14.8237,18.5493
+2016-07-31 10:37:13,3596,14.8237,18.5867
+2016-07-31 10:47:16,3597,14.8237,18.551
+2016-07-31 10:57:18,3598,14.825,18.5493
+2016-07-31 11:07:21,3598,14.8571,18.5526
+2016-07-31 11:17:24,3598,14.825,18.551
+2016-07-31 11:27:26,3598,14.8571,18.551
+2016-07-31 11:37:29,3599,14.7608,18.5543
+2016-07-31 11:47:32,3592,14.6557,18.4085
+2016-07-31 11:57:35,3599,14.7288,18.4085
+2016-07-31 12:07:38,3599,14.7608,18.4441
+2016-07-31 12:17:40,3599,14.7648,18.4425
+2016-07-31 12:27:44,3599,14.7314,18.4069
+2016-07-31 12:37:48,3599,14.7327,18.4441
+2016-07-31 12:47:51,3599,14.6688,18.4069
+2016-07-31 12:57:54,3599,14.7007,18.4457
+2016-07-31 13:07:57,3599,14.6688,18.4441
+2016-07-31 13:18:00,3599,14.6368,18.4408
+2016-07-31 13:28:03,3599,14.6368,18.4441
+2016-07-31 13:38:06,3599,14.6688,18.478
+2016-07-31 13:48:08,3599,14.6727,18.478
+2016-07-31 13:58:11,3599,14.6688,18.4425
+2016-07-31 14:08:14,3599,14.6675,18.4797
+2016-07-31 14:18:17,3599,14.6075,18.4441
+2016-07-31 14:28:20,3599,14.6088,18.478
+2016-07-31 14:38:23,3599,14.5756,18.336
+2016-07-31 14:48:26,3599,14.5769,18.3714
+2016-07-31 14:58:29,3599,14.5769,18.3376
+2016-07-31 15:08:32,3599,14.5769,18.336
+2016-07-31 15:18:35,3599,14.5769,18.3714
+2016-07-31 15:28:37,3599,14.6075,18.336
+2016-07-31 15:38:40,3599,14.5769,18.3376
+2016-07-31 15:48:43,3599,14.5756,18.3376
+2016-07-31 15:58:46,3599,14.5769,18.336
+2016-07-31 16:08:49,3599,14.5438,18.373
+2016-07-31 16:18:52,3599,14.5769,18.336
+2016-07-31 16:28:55,3599,14.5756,18.3714
+2016-07-31 16:38:58,3599,14.5438,18.373
+2016-07-31 16:49:01,3599,14.5769,18.336
+2016-07-31 16:59:04,3599,14.5769,18.3714
+2016-07-31 17:09:07,3599,14.5756,18.3376
+2016-07-31 17:19:10,3599,14.5769,18.336
+2016-07-31 17:29:13,3599,14.5756,18.3714
+2016-07-31 17:39:15,3599,14.5795,18.336
+2016-07-31 17:49:18,3599,14.5756,18.3311
+2016-07-31 17:59:21,3599,14.5756,18.336
+2016-07-31 18:09:24,3599,14.5808,18.3376
+2016-07-31 18:19:27,3599,14.5489,18.3311
+2016-07-31 18:29:29,3599,14.5808,18.336
+2016-07-31 18:39:32,3598,14.5808,18.336
+2016-07-31 18:49:35,3598,14.6075,18.336
+2016-07-31 18:59:38,3598,14.5451,18.373
+2016-07-31 19:09:41,3598,14.5808,18.3714
+2016-07-31 19:19:43,3597,14.5808,18.336
+2016-07-31 19:29:46,3595,14.5795,18.3376
+2016-07-31 19:39:49,3593,14.5808,18.3376
+2016-07-31 19:49:52,3593,14.5808,18.3311
+2016-07-31 19:59:54,3593,14.5808,18.3714
+2016-07-31 20:09:57,3590,14.6753,18.3311
+2016-07-31 20:20:00,3590,14.6766,18.3376
+2016-07-31 20:30:03,3589,14.6433,18.373
+2016-07-31 20:40:05,3588,14.6753,18.3327
+2016-07-31 20:50:08,3587,14.6727,18.3714
+2016-07-31 21:00:11,3586,14.6727,18.373
+2016-07-31 21:10:13,3586,14.6727,18.373
+2016-07-31 21:20:16,3585,14.6714,18.3376
+2016-07-31 21:30:19,3585,14.7034,18.3714
+2016-07-31 21:40:20,3583,14.6714,18.373
+2016-07-31 21:50:23,3584,14.6727,18.336
+2016-07-31 22:00:26,3583,14.6714,18.336
+2016-07-31 22:10:28,3583,14.6714,18.3376
+2016-07-31 22:20:31,3581,14.7047,18.373
+2016-07-31 22:30:33,3581,14.6727,18.3392
+2016-07-31 22:40:36,3580,14.6727,18.3681
+2016-07-31 22:50:38,3580,14.7674,18.336
+2016-07-31 23:00:41,3579,14.8008,18.336
+2016-07-31 23:10:43,3578,14.7674,18.3376
+2016-07-31 23:20:46,3578,14.7687,18.3714
+2016-07-31 23:30:48,3576,14.8008,18.3665
+2016-07-31 23:40:51,3575,14.8008,18.336
+2016-07-31 23:50:53,3574,14.8008,18.3714
+2016-08-01 01:01:11,3574,14.7327,18.373
+2016-08-01 01:11:13,3573,14.7968,18.336
+2016-08-01 01:21:16,3573,14.7327,18.3376
+2016-08-01 01:31:18,3572,14.7648,18.336
+2016-08-01 01:41:20,3572,14.7968,18.373
+2016-08-01 01:51:23,3571,14.7608,18.336
+2016-08-01 02:01:25,3568,14.7608,18.3376
+2016-08-01 02:11:28,3567,14.7916,18.3714
+2016-08-01 02:21:30,3568,14.7929,18.336
+2016-08-01 02:31:33,3567,14.7301,18.3376
+2016-08-01 02:41:35,3567,14.7608,18.336
+2016-08-01 02:51:38,3566,14.7275,18.336
+2016-08-01 03:01:40,3566,14.7929,18.373
+2016-08-01 03:11:42,3565,14.7595,18.373
+2016-08-01 03:21:45,3565,14.7929,18.3714
+2016-08-01 03:31:47,3564,14.8879,18.3714
+2016-08-01 03:41:50,3564,14.8892,18.3665
+2016-08-01 03:51:52,3562,14.8531,18.3376
+2016-08-01 04:01:55,3561,14.8518,18.3714
+2016-08-01 04:11:57,3560,14.8853,18.3714
+2016-08-01 04:21:59,3560,14.8531,18.336
+2016-08-01 04:32:02,3560,14.8531,18.3714
+2016-08-01 04:42:05,3560,14.8853,18.3681
+2016-08-01 04:52:07,3556,14.8853,18.373
+2016-08-01 05:02:09,3559,14.8197,18.373
+2016-08-01 05:12:12,3559,14.8531,18.373
+2016-08-01 05:22:14,3559,14.8853,18.3376
+2016-08-01 05:32:17,3559,14.8531,18.3665
+2016-08-01 05:42:19,3559,14.8518,18.336
+2016-08-01 05:52:22,3559,14.8813,18.3714
+2016-08-01 06:02:24,3558,14.8813,18.3681
+2016-08-01 06:12:26,3559,14.8813,18.3714
+2016-08-01 06:22:29,3559,14.8478,18.3327
+2016-08-01 06:32:31,3559,14.8492,18.3327
+2016-08-01 06:42:33,3559,14.8505,18.3747
+2016-08-01 06:52:35,3559,14.8773,18.3665
+2016-08-01 07:02:37,3559,14.8786,18.3327
+2016-08-01 07:12:40,3559,14.8452,18.3665
+2016-08-01 07:22:42,3559,14.8131,18.3327
+2016-08-01 07:32:44,3560,14.8465,18.3311
+2016-08-01 07:42:47,3561,14.8452,18.3311
+2016-08-01 07:52:49,3562,14.8773,18.3665
+2016-08-01 08:02:51,3562,14.8452,18.3311
+2016-08-01 08:12:54,3563,14.8452,18.3665
+2016-08-01 08:22:56,3564,14.8452,18.3311
+2016-08-01 08:32:58,3564,14.8131,18.3681
+2016-08-01 08:43:01,3565,14.8773,18.3327
+2016-08-01 08:53:03,3566,14.8452,18.3665
+2016-08-01 09:03:06,3566,14.8412,18.3327
+2016-08-01 09:13:08,3567,14.8412,18.402
+2016-08-01 09:23:10,3567,14.8412,18.3681
+2016-08-01 09:33:13,3569,14.8105,18.3311
+2016-08-01 09:43:15,3570,14.8734,18.3665
+2016-08-01 09:53:17,3571,14.8092,18.3311
+2016-08-01 10:03:20,3572,14.8426,18.3681
+2016-08-01 10:13:22,3573,14.8734,18.3327
+2016-08-01 10:23:24,3574,14.8412,18.3665
+2016-08-01 10:33:27,3574,14.8412,18.3665
+2016-08-01 10:43:29,3576,14.8426,18.2619
+2016-08-01 10:53:31,3576,14.8426,18.2619
+2016-08-01 11:03:34,3577,14.7464,18.2619
+2016-08-01 11:13:36,3577,14.7451,18.2266
+2016-08-01 11:23:39,3578,14.7451,18.2603
+2016-08-01 11:33:41,3579,14.7451,18.2619
+2016-08-01 11:43:43,3580,14.7451,18.2266
+2016-08-01 11:53:46,3581,14.7784,18.225
+2016-08-01 12:03:48,3581,14.7771,18.2619
+2016-08-01 12:13:51,3581,14.7451,18.2603
+2016-08-01 12:23:53,3584,14.6492,18.2957
+2016-08-01 12:33:56,3584,14.6173,18.2619
+2016-08-01 12:43:58,3585,14.6505,18.2266
+2016-08-01 12:54:01,3585,14.6505,18.2636
+2016-08-01 13:04:04,3586,14.6505,18.2603
+2016-08-01 13:14:06,3586,14.6492,18.2619
+2016-08-01 13:24:09,3586,14.6492,18.2619
+2016-08-01 13:34:11,3587,14.6492,18.225
+2016-08-01 13:44:14,3586,14.6492,18.225
+2016-08-01 13:54:16,3587,14.6811,18.2619
+2016-08-01 14:04:19,3587,14.6492,18.2636
+2016-08-01 14:14:21,3587,14.6492,18.2603
+2016-08-01 14:24:24,3588,14.5536,18.2266
+2016-08-01 14:34:26,3588,14.5536,18.225
+2016-08-01 14:44:29,3588,14.5854,18.2619
+2016-08-01 14:54:32,3588,14.5536,18.225
+2016-08-01 15:04:34,3588,14.5536,18.2619
+2016-08-01 15:14:37,3589,14.5536,18.2266
+2016-08-01 15:24:39,3589,14.5536,18.2619
+2016-08-01 15:34:42,3589,14.5536,18.225
+2016-08-01 15:44:45,3589,14.5536,18.225
+2016-08-01 15:54:49,3589,14.5536,18.2619
+2016-08-01 16:04:52,3590,14.5536,18.2603
+2016-08-01 16:14:54,3590,14.5536,18.2619
+2016-08-01 16:24:57,3591,14.5536,18.2266
+2016-08-01 16:35:00,3590,14.5536,18.2636
+2016-08-01 16:45:02,3591,14.5536,18.225
+2016-08-01 16:55:05,3591,14.5549,18.2266
+2016-08-01 17:05:08,3592,14.5536,18.2266
+2016-08-01 17:15:11,3591,14.5854,18.2266
+2016-08-01 17:25:13,3591,14.5536,18.225
+2016-08-01 17:35:16,3592,14.5536,18.2266
+2016-08-01 17:45:19,3591,14.5536,18.2603
+2016-08-01 17:55:21,3591,14.5549,18.2266
+2016-08-01 18:05:24,3591,14.5854,18.2266
+2016-08-01 18:15:27,3591,14.5536,18.225
+2016-08-01 18:25:29,3591,14.5536,18.2603
+2016-08-01 18:35:32,3591,14.5536,18.2266
+2016-08-01 18:45:34,3590,14.5536,18.2603
+2016-08-01 18:55:37,3590,14.5854,18.2266
+2016-08-01 19:05:40,3589,14.5536,18.225
+2016-08-01 19:15:42,3588,14.5536,18.225
+2016-08-01 19:25:45,3588,14.5854,18.2619
+2016-08-01 19:35:48,3588,14.5536,18.2603
+2016-08-01 19:45:50,3587,14.5854,18.2603
+2016-08-01 19:55:53,3586,14.5549,18.2603
+2016-08-01 20:05:55,3587,14.5575,18.1545
+2016-08-01 20:15:58,3586,14.5536,18.1561
+2016-08-01 20:26:01,3585,14.5575,18.1177
+2016-08-01 20:36:03,3584,14.5536,18.1193
+2016-08-01 20:46:06,3584,14.5893,18.1209
+2016-08-01 20:56:08,3583,14.5575,18.2603
+2016-08-01 21:06:11,3582,14.5536,18.1209
+2016-08-01 21:16:14,3580,14.5575,18.2957
+2016-08-01 21:26:16,3578,14.5575,18.1545
+2016-08-01 21:36:18,3578,14.5536,18.1193
+2016-08-01 21:46:28,3576,14.5549,18.1513
+2016-08-01 21:56:31,3576,14.5854,18.1513
+2016-08-01 22:06:33,3575,14.5536,18.1161
+2016-08-01 22:16:36,3574,14.5536,18.1513
+2016-08-01 22:26:38,3574,14.5536,18.1161
+2016-08-01 22:36:41,3572,14.5867,18.1161
+2016-08-01 22:46:43,3572,14.5549,18.1161
+2016-08-01 22:56:46,3572,14.5536,18.2619
+2016-08-01 23:06:48,3571,14.5854,18.2603
+2016-08-01 23:16:51,3571,14.5854,18.1161
+2016-08-01 23:26:53,3570,14.5854,18.2218
+2016-08-01 23:36:56,3569,14.5854,18.2218
+2016-08-01 23:46:58,3569,14.5854,18.1161
+2016-08-01 23:57:01,3568,14.5536,18.1513
+2016-08-02 00:07:03,3568,14.5536,18.1161
+2016-08-02 00:17:06,3568,14.5854,18.1161
+2016-08-02 00:27:08,3567,14.5536,18.1161
+2016-08-02 00:37:11,3566,14.5854,18.1513
+2016-08-02 00:47:13,3564,14.5829,18.1513
+2016-08-02 00:57:15,3565,14.5536,18.1161
+2016-08-02 01:07:17,3565,14.5854,18.1161
+2016-08-02 01:17:19,3565,14.550999999999998,18.1513
+2016-08-02 01:27:22,3564,14.5829,18.1513
+2016-08-02 01:37:24,3563,14.5497,18.1161
+2016-08-02 01:47:27,3564,14.5816,18.1865
+2016-08-02 01:57:29,3564,14.5497,18.1513
+2016-08-02 02:07:32,3552,-9.8501,18.1513
+2016-08-02 02:17:34,3563,14.5497,18.1865
+2016-08-02 02:27:37,3560,14.5497,18.1513
+2016-08-02 02:37:39,3561,14.5497,18.1513
+2016-08-02 02:47:42,3560,14.5777,18.1513
+2016-08-02 02:57:44,3559,14.5471,18.1865
+2016-08-02 03:07:46,3560,14.5458,18.1161
+2016-08-02 03:17:49,3560,14.5777,18.1161
+2016-08-02 03:27:51,3560,14.5458,18.1161
+2016-08-02 03:37:54,3559,14.579,18.1161
+2016-08-02 03:47:56,3559,14.542,18.1513
+2016-08-02 03:57:59,3559,14.5433,18.1513
+2016-08-02 04:08:01,3559,14.542,18.1496
+2016-08-02 04:18:04,3559,14.6375,18.1513
+2016-08-02 04:28:06,3560,14.6375,18.1865
+2016-08-02 04:38:09,3552,14.5673,18.1464
+2016-08-02 04:48:11,3559,14.6388,18.1161
+2016-08-02 04:58:13,3559,14.6375,18.1112
+2016-08-02 05:08:16,3560,14.6375,18.1513
+2016-08-02 05:18:18,3560,14.6388,18.1529
+2016-08-02 05:28:20,3560,14.6388,18.1464
+2016-08-02 05:38:23,3559,14.6694,18.1112
+2016-08-02 05:48:25,3559,14.6707,18.1464
+2016-08-02 05:58:28,3560,14.6388,18.1112
+2016-08-02 06:08:30,3561,14.6375,18.1464
+2016-08-02 06:18:33,3561,14.6375,18.1464
+2016-08-02 06:28:35,3560,14.6375,18.1112
+2016-08-02 06:38:38,3560,14.6375,18.1464
+2016-08-02 06:48:40,3559,14.6694,18.1464
+2016-08-02 06:58:43,3560,14.6375,18.1096
+2016-08-02 07:08:45,3560,14.6388,18.1464
+2016-08-02 07:18:48,3561,14.6375,18.1464
+2016-08-02 07:28:50,3561,14.6388,18.1816
+2016-08-02 07:38:53,3561,14.6694,18.1112
+2016-08-02 07:48:55,3562,14.6401,18.1112
+2016-08-02 07:58:58,3563,14.6375,18.1464
+2016-08-02 08:09:00,3563,14.6694,18.1464
+2016-08-02 08:19:03,3563,14.6375,18.1464
+2016-08-02 08:29:06,3564,14.6349,18.1096
+2016-08-02 08:39:08,3564,14.6694,18.1112
+2016-08-02 08:49:11,3564,14.6349,18.1464
+2016-08-02 08:59:13,3564,14.6349,18.1464
+2016-08-02 09:09:16,3563,14.6349,18.1464
+2016-08-02 09:19:18,3559,14.6668,18.1816
+2016-08-02 09:29:21,3559,14.6668,18.1112
+2016-08-02 09:39:23,3563,14.6668,18.1112
+2016-08-02 09:49:26,3563,14.6668,18.1112
+2016-08-02 09:59:28,3566,14.6668,18.1816
+2016-08-02 10:09:30,3566,14.6336,18.1112
+2016-08-02 10:19:33,3566,14.6349,18.1816
+2016-08-02 10:29:35,3566,14.6655,18.1816
+2016-08-02 10:39:38,3567,14.6349,18.1048
+2016-08-02 10:49:40,3566,14.6629,18.1416
+2016-08-02 10:59:43,3567,14.6336,18.1416
+2016-08-02 11:09:45,3567,14.630999999999998,18.1112
+2016-08-02 11:19:48,3569,14.6668,18.1464
+2016-08-02 11:29:50,3569,14.6629,18.1112
+2016-08-02 11:39:53,3572,14.630999999999998,18.1416
+2016-08-02 11:49:55,3569,14.6297,18.1752
+2016-08-02 11:59:57,3569,14.6629,18.1064
+2016-08-02 12:10:00,3570,14.6616,18.1752
+2016-08-02 12:20:02,3566,14.630999999999998,18.1064
+2016-08-02 12:30:04,3571,14.6629,18.1768
+2016-08-02 12:40:07,3572,14.630999999999998,18.1416
+2016-08-02 12:50:09,3572,14.6629,18.1416
+2016-08-02 13:00:12,3573,14.630999999999998,18.1064
+2016-08-02 13:10:14,3563,-7.6414,18.1416
+2016-08-02 13:20:17,3574,14.630999999999998,18.1416
+2016-08-02 13:30:20,3586,14.5978,18.1064
+2016-08-02 13:40:22,3579,14.6297,18.1064
+2016-08-02 13:50:25,3581,14.6297,18.1064
+2016-08-02 14:00:27,3581,14.6297,18.1064
+2016-08-02 14:10:30,3581,14.630999999999998,18.1064
+2016-08-02 14:20:33,3582,14.630999999999998,18.1416
+2016-08-02 14:30:35,3582,14.5355,18.1768
+2016-08-02 14:40:38,3582,14.5394,18.1768
+2016-08-02 14:50:41,3571,14.4681,18.1064
+2016-08-02 15:00:43,3584,14.5394,18.1064
+2016-08-02 15:10:46,3584,14.5355,18.1416
+2016-08-02 15:20:48,3586,14.5394,18.1064
+2016-08-02 15:30:51,3581,14.5394,18.1432
+2016-08-02 15:40:53,3587,14.5394,18.1416
+2016-08-02 15:50:56,3584,14.5712,18.14
+2016-08-02 16:00:59,3585,-8.9037,18.1064
+2016-08-02 16:11:01,3589,14.5394,18.1048
+2016-08-02 16:21:04,3587,14.7841,18.1064
+2016-08-02 16:31:06,3589,14.5394,18.1416
+2016-08-02 16:41:09,3588,14.5394,18.1064
+2016-08-02 16:51:11,3588,14.5381,18.1064
+2016-08-02 17:01:14,3589,14.5394,18.1416
+2016-08-02 17:11:17,3589,14.542,18.1416
+2016-08-02 17:21:19,3589,14.542,18.1064
+2016-08-02 17:31:22,3589,14.542,18.1064
+2016-08-02 17:41:24,3590,14.542,18.1416
+2016-08-02 17:51:27,3587,14.5738,18.1064
+2016-08-02 18:01:30,3588,14.5445,18.1064
+2016-08-02 18:11:32,3588,14.542,18.1768
+2016-08-02 18:21:35,3587,14.542,18.1416
+2016-08-02 18:31:38,3588,14.5433,18.14
+2016-08-02 18:41:40,3588,14.542,18.1064
+2016-08-02 18:51:43,3587,14.5433,18.14
+2016-08-02 19:01:46,3588,14.5738,18.1064
+2016-08-02 19:11:50,3588,14.542,18.1064
+2016-08-02 19:21:53,3588,14.5433,18.1064
+2016-08-02 19:31:56,3588,14.542,18.1416
+2016-08-02 19:41:58,3587,14.5738,18.1064
+2016-08-02 19:52:01,3587,14.5738,18.1064
+2016-08-02 20:02:04,3587,14.542,18.1064
+2016-08-02 20:12:06,3585,14.5738,18.1064
+2016-08-02 20:22:09,3586,14.542,18.1064
+2016-08-02 20:32:12,3585,14.542,18.1064
+2016-08-02 20:42:14,3586,14.542,18.1064
+2016-08-02 20:52:17,3584,14.542,18.14
+2016-08-02 21:02:20,3584,14.5738,18.14
+2016-08-02 21:12:23,3584,14.5433,18.1064
+2016-08-02 21:22:25,3583,14.542,18.1064
+2016-08-02 21:32:28,3582,14.5738,18.108
+2016-08-02 21:42:31,3582,14.5738,18.1064
+2016-08-02 21:52:34,3582,14.542,18.1064
+2016-08-02 22:02:36,3582,14.542,18.1064
+2016-08-02 22:12:39,3581,14.5738,18.1064
+2016-08-02 22:22:41,3581,14.542,18.1064
+2016-08-02 22:32:44,3581,14.5433,18.1064
+2016-08-02 22:42:46,3578,14.542,18.1064
+2016-08-02 22:52:49,3578,14.542,18.1048
+2016-08-02 23:02:52,3578,14.542,18.1064
+2016-08-02 23:12:54,3580,14.542,18.1064
+2016-08-02 23:22:57,3577,14.542,18.1416
+2016-08-02 23:33:00,3580,14.542,18.1064
+2016-08-02 23:43:02,3579,14.5738,18.1064
+2016-08-02 23:53:05,3579,14.542,18.1416
+2016-08-03 00:03:07,3579,14.5738,18.1064
+2016-08-03 00:13:10,3579,14.542,18.1064
+2016-08-03 00:23:12,3580,14.5751,18.1416
+2016-08-03 00:33:15,3580,14.542,18.1064
+2016-08-03 00:43:17,3580,14.5738,18.1416
+2016-08-03 00:53:20,3579,14.5433,18.1064
+2016-08-03 01:03:23,3579,14.542,18.1048
+2016-08-03 01:13:25,3579,14.5738,18.1064
+2016-08-03 01:23:28,3579,14.5738,18.1416
+2016-08-03 01:33:30,3578,14.5738,18.1064
+2016-08-03 01:43:33,3579,14.5738,18.1416
+2016-08-03 01:53:35,3578,14.542,18.14
+2016-08-03 02:03:38,3579,14.542,18.1416
+2016-08-03 02:13:40,3577,14.5738,18.14
+2016-08-03 02:23:43,3578,14.5738,18.1064
+2016-08-03 02:33:45,3579,14.542,18.1064
+2016-08-03 02:43:48,3578,14.5433,18.1064
+2016-08-03 02:53:50,3578,14.5433,18.1064
+2016-08-03 03:03:53,3578,14.5738,18.1064
+2016-08-03 03:13:55,3578,14.542,18.108
+2016-08-03 03:23:58,3578,14.542,18.1064
+2016-08-03 03:34:00,3578,14.542,18.1416
+2016-08-03 03:44:10,3577,14.5738,18.1048
+2016-08-03 03:54:12,3578,14.542,18.1064
+2016-08-03 04:04:15,3578,14.542,18.1064
+2016-08-03 04:14:17,3576,14.5738,18.1416
+2016-08-03 04:24:19,3577,14.5433,18.1064
+2016-08-03 04:34:21,3577,14.542,18.1064
+2016-08-03 04:44:24,3578,14.542,18.1416
+2016-08-03 04:54:26,3578,14.5738,18.1416
+2016-08-03 05:04:29,3578,14.6375,18.14
+2016-08-03 05:14:31,3578,14.6375,18.1064
+2016-08-03 05:24:34,3579,14.6694,18.1064
+2016-08-03 05:34:36,3579,14.6388,18.1416
+2016-08-03 05:44:38,3578,14.6388,18.1064
+2016-08-03 05:54:48,3579,14.6388,18.1416
+2016-08-03 06:04:51,3580,14.6375,18.1064
+2016-08-03 06:14:53,3579,14.5738,18.1064
+2016-08-03 06:24:56,3580,14.542,18.1064
+2016-08-03 06:34:58,3580,14.542,18.1064
+2016-08-03 06:45:01,3580,14.542,18.1416
+2016-08-03 06:55:03,3581,14.6375,18.1064
+2016-08-03 07:05:06,3580,14.5738,18.1048
+2016-08-03 07:15:08,3581,14.542,18.1416
+2016-08-03 07:25:11,3581,14.542,18.1064
+2016-08-03 07:35:14,3581,14.542,18.1064
+2016-08-03 07:45:16,3581,14.542,18.1064
+2016-08-03 07:55:19,3581,14.542,18.1064
+2016-08-03 08:05:21,3581,14.5738,18.1416
+2016-08-03 08:15:24,3582,14.542,18.1416
+2016-08-03 08:25:27,3581,14.542,18.14
+2016-08-03 08:35:29,3582,14.542,18.1064
+2016-08-03 08:45:32,3582,14.6388,18.1064
+2016-08-03 08:55:34,3582,14.542,18.1064
+2016-08-03 09:05:37,3583,14.5738,18.1016
+2016-08-03 09:15:39,3584,14.5738,18.1064
+2016-08-03 09:25:42,3584,14.6375,18.1064
+2016-08-03 09:35:45,3584,14.6694,18.1064
+2016-08-03 09:45:47,3584,14.542,18.1048
+2016-08-03 09:55:50,3585,14.5738,18.1416
+2016-08-03 10:05:53,3583,14.6694,18.0362
+2016-08-03 10:15:55,3584,14.5738,18.0012
+2016-08-03 10:25:58,3586,14.5738,18.0362
+2016-08-03 10:36:01,3587,14.5433,18.0012
+2016-08-03 10:46:04,3588,14.5738,18.0012
+2016-08-03 10:56:06,3588,14.542,17.9964
+2016-08-03 11:06:16,3589,14.542,18.0012
+2016-08-03 11:16:19,3591,14.5738,17.9996
+2016-08-03 11:26:22,3591,14.5738,18.0012
+2016-08-03 11:36:24,3591,14.542,18.0362
+2016-08-03 11:46:27,3592,14.6375,18.0012
+2016-08-03 11:56:30,3592,14.542,18.0713
+2016-08-03 12:06:32,3592,14.5738,18.0012
+2016-08-03 12:16:35,3593,14.542,18.0028
+2016-08-03 12:26:38,3593,14.542,18.0012
+2016-08-03 12:36:41,3594,14.542,18.0012
+2016-08-03 12:46:43,3595,14.542,18.0012
+2016-08-03 12:56:46,3595,14.5445,18.0012
+2016-08-03 13:06:49,3596,14.5738,18.033
+2016-08-03 13:16:51,3597,14.5777,17.9964
+2016-08-03 13:27:01,3598,14.514,18.0012
+2016-08-03 13:37:04,3598,14.5458,18.0012
+2016-08-03 13:47:06,3598,14.5458,18.0012
+2016-08-03 13:57:09,3598,14.5458,18.0314
+2016-08-03 14:07:12,3598,14.5777,18.0028
+2016-08-03 14:17:14,3599,14.5458,17.9964
+2016-08-03 14:27:17,3599,14.5497,18.0362
+2016-08-03 14:37:20,3599,14.5829,18.0012
+2016-08-03 14:47:23,3598,14.550999999999998,18.0362
+2016-08-03 14:57:25,3599,14.5816,17.9964
+2016-08-03 15:07:28,3599,14.5497,17.9964
+2016-08-03 15:17:31,3598,14.5497,18.0012
+2016-08-03 15:27:34,3599,14.5536,17.9296
+2016-08-03 15:37:36,3599,14.5523,17.9264
+2016-08-03 15:47:39,3599,14.5536,17.9312
+2016-08-03 15:57:42,3599,14.5854,17.9264
+2016-08-03 16:07:45,3599,14.5549,17.9312
+2016-08-03 16:17:47,3598,14.5536,17.9312
+2016-08-03 16:27:50,3599,14.5854,17.9312
+2016-08-03 16:37:53,3598,14.5536,17.9312
+2016-08-03 16:47:56,3598,14.5536,17.9264
+2016-08-03 16:57:58,3598,14.5536,17.9312
+2016-08-03 17:08:01,3598,14.5536,17.9264
+2016-08-03 17:18:04,3598,14.5536,17.9296
+2016-08-03 17:28:07,3598,14.5523,17.963
+2016-08-03 17:38:09,3598,14.5854,17.9312
+2016-08-03 17:48:12,3598,14.5536,17.9312
+2016-08-03 17:58:15,3598,14.5536,17.9312
+2016-08-03 18:08:18,3598,14.5536,17.9312
+2016-08-03 18:18:20,3598,14.5575,17.9312
+2016-08-03 18:28:23,3598,14.5893,17.8963
+2016-08-03 18:38:26,3598,14.5893,17.9264
+2016-08-03 18:48:28,3596,14.5575,17.9264
+2016-08-03 18:58:31,3598,14.5575,17.9312
+2016-08-03 19:08:34,3596,14.5575,17.9312
+2016-08-03 19:18:37,3597,14.5575,17.9662
+2016-08-03 19:28:39,3597,14.5893,17.9296
+2016-08-03 19:38:42,3596,14.5906,17.9312
+2016-08-03 19:48:45,3596,14.5575,17.9312
+2016-08-03 19:58:47,3596,14.5906,17.9662
+2016-08-03 20:08:50,3595,14.5575,17.9312
+2016-08-03 20:18:53,3594,14.5575,17.9312
+2016-08-03 20:28:55,3595,14.5575,17.9312
+2016-08-03 20:38:58,3595,14.5588,17.9264
+2016-08-03 20:49:00,3594,14.5575,17.9662
+2016-08-03 20:59:03,3594,14.5893,17.9312
+2016-08-03 21:09:05,3594,14.5893,17.9662
+2016-08-03 21:19:08,3592,14.5575,17.9312
+2016-08-03 21:29:11,3592,14.5893,17.9662
+2016-08-03 21:39:13,3592,14.5575,17.9312
+2016-08-03 21:49:16,3593,14.5575,17.9312
+2016-08-03 21:59:18,3592,14.5893,17.9312
+2016-08-03 22:09:21,3592,14.5893,17.9662
+2016-08-03 22:19:24,3592,14.5575,17.9312
+2016-08-03 22:29:28,3592,14.5575,17.9312
+2016-08-03 22:39:31,3592,14.5575,17.9312
+2016-08-03 22:49:34,3592,14.5906,17.9312
+2016-08-03 22:59:36,3592,14.5893,17.9296
+2016-08-03 23:09:39,3591,14.5893,17.9296
+2016-08-03 23:19:42,3592,14.5575,17.9312
+2016-08-03 23:29:44,3591,14.5614,17.9312
+2016-08-03 23:39:47,3591,14.5893,17.9312
+2016-08-03 23:49:50,3591,14.5893,17.9312
+2016-08-03 23:59:52,3591,14.5575,17.9312
+2016-08-04 00:09:55,3590,14.5614,17.9312
+2016-08-04 00:19:58,3591,14.5575,17.9312
+2016-08-04 00:30:00,3590,14.5575,17.9312
+2016-08-04 00:40:03,3590,14.5614,17.9312
+2016-08-04 00:50:06,3590,14.5893,17.9312
+2016-08-04 01:00:08,3590,14.5575,17.9312
+2016-08-04 01:10:11,3589,14.5906,17.9312
+2016-08-04 01:20:14,3589,14.5893,17.9312
+2016-08-04 01:30:16,3589,14.5575,17.9296
+2016-08-04 01:40:19,3588,14.5893,17.9312
+2016-08-04 01:50:22,3588,14.5893,17.9312
+2016-08-04 02:00:25,3589,14.5575,17.9312
+2016-08-04 02:10:27,3587,14.5562,17.9312
+2016-08-04 02:20:30,3588,14.5893,17.9296
+2016-08-04 02:30:33,3588,14.5893,17.9312
+2016-08-04 02:40:35,3588,14.5893,17.9312
+2016-08-04 02:50:38,3588,14.5919,17.9312
+2016-08-04 03:00:41,3588,14.5588,17.9312
+2016-08-04 03:10:43,3588,14.5575,17.9312
+2016-08-04 03:20:46,3588,14.5575,17.9662
+2016-08-04 03:30:49,3588,14.5575,17.9312
+2016-08-04 03:40:51,3588,14.5575,17.9312
+2016-08-04 03:50:54,3587,14.5893,17.9312
+2016-08-04 04:00:57,3588,14.5575,17.9312
+2016-08-04 04:10:59,3587,14.5893,17.9312
+2016-08-04 04:21:02,3587,14.5893,17.9312
+2016-08-04 04:31:05,3587,14.5575,17.9662
+2016-08-04 04:41:07,3587,14.5893,17.9312
+2016-08-04 04:51:10,3586,14.5575,17.9312
+2016-08-04 05:01:12,3587,14.5893,17.9312
+2016-08-04 05:11:15,3587,14.5893,17.9312
+2016-08-04 05:21:18,3587,14.5575,17.9312
+2016-08-04 05:31:20,3587,14.5893,17.9312
+2016-08-04 05:41:23,3587,14.5893,17.9328
+2016-08-04 05:51:25,3587,14.5575,17.9312
+2016-08-04 06:01:28,3587,14.5575,17.9312
+2016-08-04 06:11:31,3586,14.5893,17.9312
+2016-08-04 06:21:33,3586,14.5575,17.9312
+2016-08-04 06:31:36,3586,14.5575,17.9312
+2016-08-04 06:41:38,3586,14.5575,17.9662
+2016-08-04 06:51:41,3585,14.5893,17.9662
+2016-08-04 07:01:43,3586,14.5893,17.9312
+2016-08-04 07:11:46,3586,14.5575,17.9296
+2016-08-04 07:21:49,3586,14.5575,17.9312
+2016-08-04 07:31:51,3586,14.5575,17.9312
+2016-08-04 07:41:53,3587,14.5893,17.9662
+2016-08-04 07:51:56,3586,14.5575,17.9312
+2016-08-04 08:01:58,3587,14.5893,17.9312
+2016-08-04 08:12:01,3583,14.5588,17.9662
+2016-08-04 08:22:04,3587,14.5575,17.9312
+2016-08-04 08:32:07,3588,14.5575,17.9662
+2016-08-04 08:42:09,3588,14.5575,17.9312
+2016-08-04 08:52:12,3588,14.5575,17.9312
+2016-08-04 09:02:15,3588,14.5575,17.9312
+2016-08-04 09:12:17,3590,14.5575,17.9312
+2016-08-04 09:22:20,3590,14.5893,17.9312
+2016-08-04 09:32:23,3591,14.5893,17.9312
+2016-08-04 09:42:25,3591,14.5575,17.9312
+2016-08-04 09:52:28,3591,14.5893,17.9312
+2016-08-04 10:02:31,3591,14.5575,17.9312
+2016-08-04 10:12:33,3591,14.5575,17.9312
+2016-08-04 10:22:36,3592,14.5906,17.8963
+2016-08-04 10:32:38,3591,14.5575,17.9312
+2016-08-04 10:42:41,3590,14.5575,17.9312
+2016-08-04 10:52:44,3590,14.5575,17.9312
+2016-08-04 11:02:46,3589,14.5575,17.9312
+2016-08-04 11:12:49,3590,14.5893,17.9312
+2016-08-04 11:22:52,3591,14.5906,17.9312
+2016-08-04 11:32:54,3587,14.5893,17.9312
+2016-08-04 11:42:57,3590,14.5575,17.9312
+2016-08-04 11:53:00,3592,14.5575,17.9328
+2016-08-04 12:03:02,3592,14.5575,17.9312
+2016-08-04 12:13:05,3592,14.5893,17.9312
+2016-08-04 12:23:08,3593,14.5893,17.9312
+2016-08-04 12:33:11,3593,14.4621,17.9312
+2016-08-04 12:43:13,3594,14.4939,17.8614
+2016-08-04 12:53:16,3595,14.4621,17.9312
+2016-08-04 13:03:19,3595,14.4621,17.8266
+2016-08-04 13:13:21,3595,14.4621,17.8266
+2016-08-04 13:23:24,3596,14.4939,17.8266
+2016-08-04 13:33:27,3596,14.4608,17.7902
+2016-08-04 13:43:30,3597,14.4621,17.8266
+2016-08-04 13:53:33,3597,14.4634,17.7918
+2016-08-04 14:03:35,3598,14.4621,17.7918
+2016-08-04 14:13:38,3598,14.4621,17.8266
+2016-08-04 14:23:41,3598,14.4939,17.8614
+2016-08-04 14:33:44,3598,14.4621,17.8266
+2016-08-04 14:43:47,3598,14.4621,17.8266
+2016-08-04 14:53:49,3598,14.4608,17.8266
+2016-08-04 15:03:52,3598,14.367,17.8282
+2016-08-04 15:13:55,3598,14.367,17.8266
+2016-08-04 15:23:58,3598,14.367,17.8266
+2016-08-04 15:34:01,3598,14.367,17.8266
+2016-08-04 15:44:03,3598,14.367,17.7918
+2016-08-04 15:54:06,3598,14.367,17.8266
+2016-08-04 16:04:09,3598,14.3708,17.8266
+2016-08-04 16:14:12,3598,14.367,17.8266
+2016-08-04 16:24:14,3598,14.3696,17.7918
+2016-08-04 16:34:17,3598,14.3696,17.825
+2016-08-04 16:44:19,3598,14.3696,17.8266
+2016-08-04 16:54:21,3598,14.3696,17.8266
+2016-08-04 17:04:24,3598,14.3696,17.8266
+2016-08-04 17:14:27,3599,14.3696,17.8282
+2016-08-04 17:24:30,3598,14.3696,17.8266
+2016-08-04 17:34:32,3598,14.3696,17.7918
+2016-08-04 17:44:35,3599,14.4012,17.7918
+2016-08-04 17:54:38,3598,14.3696,17.8614
+2016-08-04 18:04:41,3598,14.3734,17.8266
+2016-08-04 18:14:43,3598,14.3696,17.7902
+2016-08-04 18:24:46,3598,14.3696,17.8266
+2016-08-04 18:34:48,3598,14.3708,17.8266
+2016-08-04 18:44:51,3598,14.3734,17.8266
+2016-08-04 18:54:54,3598,14.3734,17.8266
+2016-08-04 19:04:56,3596,14.4368,17.8266
+2016-08-04 19:14:59,3598,14.3734,17.8266
+2016-08-04 19:25:02,3598,14.4051,17.8266
+2016-08-04 19:35:04,3598,14.3734,17.7918
+2016-08-04 19:45:07,3597,14.3734,17.8266
+2016-08-04 19:55:10,3596,14.4064,17.8266
+2016-08-04 20:05:12,3594,14.4368,17.8266
+2016-08-04 20:15:15,3593,14.3734,17.8266
+2016-08-04 20:25:18,3593,14.3734,17.7918
+2016-08-04 20:35:20,3590,14.5003,17.8266
+2016-08-04 20:45:23,3588,14.5003,17.825
+2016-08-04 20:55:26,3588,14.4686,17.7902
+2016-08-04 21:05:28,3588,14.5003,17.8266
+2016-08-04 21:15:31,3587,14.4686,17.8266
+2016-08-04 21:25:34,3587,14.5003,17.7918
+2016-08-04 21:35:36,3585,14.4686,17.8266
+2016-08-04 21:45:39,3585,14.4698,17.8266
+2016-08-04 21:55:41,3582,14.4686,17.8266
+2016-08-04 22:05:44,3584,14.4698,17.8614
+2016-08-04 22:15:46,3583,14.4686,17.7918
+2016-08-04 22:25:49,3585,14.4686,17.8266
+2016-08-04 22:35:52,3582,14.5003,17.7918
+2016-08-04 22:45:54,3583,14.5321,17.8614
+2016-08-04 22:55:57,3583,14.466,17.7918
+2016-08-04 23:05:59,3582,14.4965,17.8266
+2016-08-04 23:16:02,3580,14.466,17.8266
+2016-08-04 23:26:04,3581,14.466,17.8266
+2016-08-04 23:36:07,3581,14.4673,17.8266
+2016-08-04 23:46:09,3581,14.466,17.8282
+2016-08-04 23:56:12,3581,14.466,17.8266
+2016-08-05 00:06:15,3581,14.466,17.8266
+2016-08-05 00:16:17,3580,14.4647,17.8266
+2016-08-05 00:26:20,3580,14.4977,17.8266
+2016-08-05 00:36:22,3580,14.4647,17.8614
+2016-08-05 00:46:25,3579,14.4647,17.8614
+2016-08-05 00:56:27,3578,14.4621,17.8266
+2016-08-05 01:06:30,3578,14.4621,17.8266
+2016-08-05 01:16:32,3578,14.4939,17.8266
+2016-08-05 01:26:35,3578,14.4621,17.8266
+2016-08-05 01:36:37,3577,14.4939,17.8266
+2016-08-05 01:46:41,3577,14.4939,17.8266
+2016-08-05 01:56:44,3577,14.4621,17.8266
+2016-08-05 02:06:47,3577,14.4621,17.8266
+2016-08-05 02:16:50,3576,14.4621,17.8266
+2016-08-05 02:26:52,3576,14.4608,17.8266
+2016-08-05 02:36:55,3576,14.4621,17.8266
+2016-08-05 02:46:57,3576,14.5257,17.8266
+2016-08-05 02:57:00,3576,14.4621,17.7918
+2016-08-05 03:07:03,3576,14.4621,17.8266
+2016-08-05 03:17:05,3575,14.4621,17.8266
+2016-08-05 03:27:08,3576,14.4939,17.8282
+2016-08-05 03:37:10,3575,14.4621,17.8266
+2016-08-05 03:47:13,3576,14.4621,17.8266
+2016-08-05 03:57:15,3575,14.4621,17.8266
+2016-08-05 04:07:18,3575,14.4939,17.8266
+2016-08-05 04:17:20,3575,14.49,17.8266
+2016-08-05 04:27:23,3576,14.49,17.8266
+2016-08-05 04:37:25,3575,14.4583,17.8266
+2016-08-05 04:47:28,3575,14.5218,17.8266
+2016-08-05 04:57:30,3574,14.49,17.8266
+2016-08-05 05:07:33,3574,14.4583,17.8266
+2016-08-05 05:17:35,3574,14.4583,17.8614
+2016-08-05 05:27:38,3574,14.49,17.8266
+2016-08-05 05:37:40,3574,14.49,17.8266
+2016-08-05 05:47:43,3574,14.4583,17.8266
+2016-08-05 05:57:45,3574,14.5218,17.8266
+2016-08-05 06:07:47,3572,14.49,17.8266
+2016-08-05 06:17:50,3574,14.49,17.8266
+2016-08-05 06:27:52,3573,14.4583,17.8266
+2016-08-05 06:37:55,3573,14.457,17.8266
+2016-08-05 06:47:57,3573,14.4583,17.8266
+2016-08-05 06:58:00,3574,14.4583,17.7918
+2016-08-05 07:08:02,3574,14.4583,17.8266
+2016-08-05 07:18:04,3574,14.49,17.7918
+2016-08-05 07:28:07,3574,14.4583,17.8266
+2016-08-05 07:38:09,3573,14.4887,17.8266
+2016-08-05 07:48:12,3574,14.49,17.8266
+2016-08-05 07:58:14,3574,14.49,17.8266
+2016-08-05 08:08:17,3575,14.4583,17.8266
+2016-08-05 08:18:19,3575,14.4544,17.8266
+2016-08-05 08:28:22,3576,14.4544,17.8266
+2016-08-05 08:38:24,3576,14.4544,17.8282
+2016-08-05 08:48:27,3577,14.4544,17.8266
+2016-08-05 08:58:29,3577,14.4861,17.8266
+2016-08-05 09:08:32,3577,14.4544,17.8266
+2016-08-05 09:18:34,3578,14.4544,17.8266
+2016-08-05 09:28:37,3579,14.4544,17.8266
+2016-08-05 09:38:39,3580,14.4544,17.8266
+2016-08-05 09:48:42,3581,14.4544,17.8266
+2016-08-05 09:58:45,3581,14.4544,17.8266
+2016-08-05 10:08:47,3582,14.5179,17.8614
+2016-08-05 10:18:50,3583,14.4861,17.8266
+2016-08-05 10:28:52,3583,14.4544,17.8266
+2016-08-05 10:38:55,3585,14.4861,17.8266
+2016-08-05 10:48:57,3585,14.4861,17.825
+2016-08-05 10:58:59,3586,14.3593,17.8266
+2016-08-05 11:09:02,3587,14.4227,17.8266
+2016-08-05 11:19:04,3588,14.3593,17.8266
+2016-08-05 11:29:07,3588,14.3593,17.7918
+2016-08-05 11:39:10,3589,14.3593,17.8266
+2016-08-05 11:49:12,3591,14.3593,17.8266
+2016-08-05 11:59:15,3592,14.3593,17.8266
+2016-08-05 12:09:18,3592,14.3593,17.8266
+2016-08-05 12:19:21,3593,14.390999999999998,17.8266
+2016-08-05 12:29:23,3593,14.3593,17.8266
+2016-08-05 12:39:26,3593,14.3593,17.8266
+2016-08-05 12:49:29,3595,14.2961,17.825
+2016-08-05 12:59:31,3595,14.2974,17.825
+2016-08-05 13:09:34,3595,14.2646,17.8266
+2016-08-05 13:19:37,3596,14.2961,17.8266
+2016-08-05 13:29:39,3597,14.2961,17.7223
+2016-08-05 13:39:42,3598,14.2684,17.7223
+2016-08-05 13:49:45,3598,14.2987,17.7223
+2016-08-05 13:59:47,3598,14.2987,17.7223
+2016-08-05 14:09:50,3598,14.2987,17.7223
+2016-08-05 14:19:53,3598,14.2987,17.7223
+2016-08-05 14:29:56,3598,14.2987,17.7223
+2016-08-05 14:39:58,3598,14.1725,17.7223
+2016-08-05 14:50:01,3598,14.2053,17.7223
+2016-08-05 15:00:04,3598,14.1738,17.6876
+2016-08-05 15:10:06,3598,14.2053,17.7223
+2016-08-05 15:20:09,3598,14.1738,17.7223
+2016-08-05 15:30:12,3598,14.2053,17.6876
+2016-08-05 15:40:14,3598,14.1738,17.7223
+2016-08-05 15:50:17,3598,14.204,17.7223
+2016-08-05 16:00:20,3598,14.2053,17.6876
+2016-08-05 16:10:23,3598,14.2053,17.7223
+2016-08-05 16:20:25,3598,14.2053,17.7223
+2016-08-05 16:30:28,3599,14.2053,17.757
+2016-08-05 16:40:31,3599,14.204,17.7223
+2016-08-05 16:50:34,3599,14.1776,17.7223
+2016-08-05 17:00:37,3599,14.2053,17.7223
+2016-08-05 17:10:39,3599,14.1776,17.7223
+2016-08-05 17:20:42,3599,14.2078,17.7223
+2016-08-05 17:30:44,3599,14.2091,17.7223
+2016-08-05 17:40:55,3599,14.1776,17.7223
+2016-08-05 17:50:58,3599,14.1776,17.7223
+2016-08-05 18:01:01,3599,14.1763,17.6184
+2016-08-05 18:11:03,3599,14.2091,17.7223
+2016-08-05 18:21:06,3598,14.2091,17.5839
+2016-08-05 18:31:09,3599,14.2091,17.5839
+2016-08-05 18:41:12,3598,14.2091,17.5839
+2016-08-05 18:51:14,3598,14.2091,17.5839
+2016-08-05 19:01:17,3598,14.2091,17.6184
+2016-08-05 19:11:20,3598,14.2078,17.5839
+2016-08-05 19:21:23,3598,14.2091,17.5823
+2016-08-05 19:31:25,3598,14.1763,17.6184
+2016-08-05 19:41:28,3598,14.1776,17.6184
+2016-08-05 19:51:30,3598,14.1776,17.5839
+2016-08-05 20:01:32,3598,14.2078,17.5839
+2016-08-05 20:11:35,3598,14.1776,17.6184
+2016-08-05 20:21:37,3597,14.2393,17.5839
+2016-08-05 20:31:40,3597,14.2091,17.6184
+2016-08-05 20:41:43,3596,14.2091,17.6184
+2016-08-05 20:51:45,3595,14.2091,17.5839
+2016-08-05 21:01:48,3595,14.2091,17.6184
+2016-08-05 21:11:51,3594,14.2078,17.6184
+2016-08-05 21:21:53,3593,14.2091,17.6184
+2016-08-05 21:31:56,3592,14.2078,17.6184
+2016-08-05 21:41:58,3592,14.1776,17.6184
+2016-08-05 21:52:01,3591,14.2091,17.5839
+2016-08-05 22:02:04,3591,14.2091,17.6184
+2016-08-05 22:12:06,3591,14.2129,17.5839
+2016-08-05 22:22:09,3590,14.2078,17.6184
+2016-08-05 22:32:11,3588,14.1776,17.6184
+2016-08-05 22:42:14,3588,14.2091,17.6184
+2016-08-05 22:52:16,3588,14.2091,17.6184
+2016-08-05 23:02:19,3588,14.2091,17.6169
+2016-08-05 23:12:21,3587,14.2091,17.653
+2016-08-05 23:22:24,3586,14.2091,17.6184
+2016-08-05 23:32:26,3586,14.2091,17.6184
+2016-08-05 23:42:28,3586,14.2091,17.6184
+2016-08-05 23:52:31,3585,14.2091,17.653
+2016-08-06 00:02:33,3585,14.2091,17.6546
+2016-08-06 00:12:36,3582,14.2091,17.5854
+2016-08-06 00:22:38,3581,14.2091,17.5839
+2016-08-06 00:32:41,3581,14.2406,17.653
+2016-08-06 00:42:43,3581,14.2091,17.6184
+2016-08-06 00:52:46,3581,14.2091,17.653
+2016-08-06 01:02:48,3581,14.2091,17.5839
+2016-08-06 01:12:51,3581,14.2091,17.653
+2016-08-06 01:22:53,3581,14.1776,17.653
+2016-08-06 01:32:56,3581,14.2078,17.5839
+2016-08-06 01:42:58,3581,14.2091,17.6514
+2016-08-06 01:53:01,3581,14.2091,17.6184
+2016-08-06 02:03:04,3581,14.2091,17.653
+2016-08-06 02:13:06,3580,14.2091,17.6184
+2016-08-06 02:23:09,3580,14.2091,17.6546
+2016-08-06 02:33:11,3579,14.2091,17.6184
+2016-08-06 02:43:14,3579,14.2053,17.6184
+2016-08-06 02:53:16,3579,14.2053,17.5839
+2016-08-06 03:03:19,3579,14.2053,17.653
+2016-08-06 03:13:21,3578,14.204,17.6184
+2016-08-06 03:23:24,3578,14.2053,17.6184
+2016-08-06 03:33:26,3578,14.2053,17.6184
+2016-08-06 03:43:29,3578,14.2053,17.5839
+2016-08-06 03:53:31,3578,14.2053,17.5839
+2016-08-06 04:03:34,3577,14.2053,17.6184
+2016-08-06 04:13:37,3578,14.2053,17.6184
+2016-08-06 04:23:39,3577,14.2053,17.62
+2016-08-06 04:33:42,3577,14.2053,17.6184
+2016-08-06 04:43:44,3577,14.1738,17.5839
+2016-08-06 04:53:46,3577,14.2053,17.6169
+2016-08-06 05:03:50,3577,14.2053,17.62
+2016-08-06 05:13:53,3576,14.2053,17.6184
+2016-08-06 05:23:56,3576,14.1725,17.653
+2016-08-06 05:33:58,3576,14.2053,17.6184
+2016-08-06 05:44:01,3577,14.204,17.6184
+2016-08-06 05:54:03,3577,14.2053,17.6184
+2016-08-06 06:04:06,3577,14.2053,17.653
+2016-08-06 06:14:09,3577,14.2053,17.6184
+2016-08-06 06:24:11,3577,14.2053,17.6184
+2016-08-06 06:34:14,3577,14.2053,17.5839
+2016-08-06 06:44:17,3577,14.2015,17.5839
+2016-08-06 06:54:19,3577,14.2015,17.5839
+2016-08-06 07:04:22,3577,14.2015,17.6184
+2016-08-06 07:14:24,3577,14.2015,17.5839
+2016-08-06 07:24:27,3577,14.2015,17.5839
+2016-08-06 07:34:29,3578,14.233,17.6184
+2016-08-06 07:44:32,3578,14.1965,17.6184
+2016-08-06 07:54:35,3578,14.2015,17.6184
+2016-08-06 08:04:37,3578,14.2015,17.6184
+2016-08-06 08:14:40,3580,14.17,17.653
+2016-08-06 08:24:43,3580,14.2015,17.5839
+2016-08-06 08:34:45,3580,14.2002,17.5839
+2016-08-06 08:44:48,3581,14.2015,17.6184
+2016-08-06 08:54:51,3581,14.2015,17.6184
+2016-08-06 09:04:53,3581,14.1977,17.6184
+2016-08-06 09:14:56,3581,14.1965,17.6184
+2016-08-06 09:24:58,3581,14.2015,17.653
+2016-08-06 09:35:01,3581,14.1965,17.6184
+2016-08-06 09:45:03,3581,14.2015,17.6184
+2016-08-06 09:55:06,3581,14.1977,17.6184
+2016-08-06 10:05:09,3581,14.1977,17.5839
+2016-08-06 10:15:11,3582,14.1977,17.653
+2016-08-06 10:25:14,3582,14.1977,17.6184
+2016-08-06 10:35:17,3583,14.1977,17.6184
+2016-08-06 10:45:19,3582,14.1977,17.5839
+2016-08-06 10:55:22,3582,14.1977,17.653
+2016-08-06 11:05:24,3581,14.1977,17.6184
+2016-08-06 11:15:27,3582,14.1965,17.653
+2016-08-06 11:25:29,3582,14.1965,17.653
+2016-08-06 11:35:32,3583,14.1977,17.653
+2016-08-06 11:45:35,3583,14.1977,17.5839
+2016-08-06 11:55:37,3583,14.1977,17.5839
+2016-08-06 12:05:40,3585,14.1977,17.6184
+2016-08-06 12:15:43,3585,14.1977,17.6184
+2016-08-06 12:25:45,3586,14.1977,17.5839
+2016-08-06 12:35:48,3586,14.1662,17.6184
+2016-08-06 12:45:51,3586,14.1965,17.6184
+2016-08-06 12:55:53,3586,14.1977,17.5839
+2016-08-06 13:05:56,3587,14.1977,17.6184
+2016-08-06 13:15:59,3588,14.1977,17.5839
+2016-08-06 13:26:01,3588,14.2015,17.653
+2016-08-06 13:36:04,3589,14.1977,17.6184
+2016-08-06 13:46:06,3589,14.1977,17.6184
+2016-08-06 13:56:09,3591,14.165,17.6184
+2016-08-06 14:06:11,3592,14.1977,17.6184
+2016-08-06 14:16:13,3593,14.1071,17.6184
+2016-08-06 14:26:16,3593,14.0757,17.5839
+2016-08-06 14:36:19,3593,14.1071,17.5839
+2016-08-06 14:46:21,3594,14.1071,17.5494
+2016-08-06 14:56:24,3595,14.1071,17.5149
+2016-08-06 15:06:27,3594,14.1071,17.4805
+2016-08-06 15:16:29,3595,14.1071,17.5494
+2016-08-06 15:26:32,3595,14.1059,17.5149
+2016-08-06 15:36:35,3595,14.1059,17.5494
+2016-08-06 15:46:38,3595,14.1059,17.5494
+2016-08-06 15:56:40,3595,14.1071,17.5494
+2016-08-06 16:06:43,3596,14.1071,17.5149
+2016-08-06 16:16:46,3596,14.1071,17.5149
+2016-08-06 16:26:48,3596,14.1071,17.4805
+2016-08-06 16:36:51,3596,14.1386,17.5494
+2016-08-06 16:46:54,3596,14.1071,17.5494
+2016-08-06 16:56:56,3595,14.1071,17.5494
+2016-08-06 17:06:59,3595,14.1071,17.5494
+2016-08-06 17:17:02,3594,14.1071,17.5149
+2016-08-06 17:27:04,3593,14.1109,17.5149
+2016-08-06 17:37:07,3592,14.0795,17.5494
+2016-08-06 17:47:10,3591,14.0795,17.5494
+2016-08-06 17:57:12,3589,14.1411,17.5494
+2016-08-06 18:07:14,3588,14.1109,17.5478
+2016-08-06 18:17:24,3587,14.1109,17.5149
+2016-08-06 18:27:27,3586,14.1109,17.5149
+2016-08-06 18:37:29,3585,14.1097,17.5494
+2016-08-06 18:47:32,3584,14.1097,17.5494
+2016-08-06 18:57:34,3582,14.1109,17.5494
+2016-08-06 19:07:37,3582,14.1097,17.5494
+2016-08-06 19:17:39,3581,14.1109,17.5494
+2016-08-06 19:27:42,3581,14.1109,17.5494
+2016-08-06 19:37:45,3580,14.1109,17.5494
+2016-08-06 19:47:47,3578,14.1109,17.5149
+2016-08-06 19:57:50,3577,14.1071,17.5133
+2016-08-06 20:07:52,3576,14.2015,17.5149
+2016-08-06 20:17:55,3576,14.2015,17.5494
+2016-08-06 20:27:57,3575,14.2015,17.5494
+2016-08-06 20:38:00,3574,14.2015,17.5494
+2016-08-06 20:48:02,3574,14.2015,17.5149
+2016-08-06 20:58:05,3574,14.2015,17.5494
+2016-08-06 21:08:07,3574,14.2015,17.5149
+2016-08-06 21:18:10,3573,14.2015,17.5133
+2016-08-06 21:28:12,3573,14.1688,17.5494
+2016-08-06 21:38:15,3573,14.165,17.5149
+2016-08-06 21:48:17,3573,14.1977,17.5509
+2016-08-06 21:58:20,3572,14.1977,17.5494
+2016-08-06 22:08:23,3572,14.1977,17.5494
+2016-08-06 22:18:25,3571,14.1965,17.5149
+2016-08-06 22:28:28,3572,14.1977,17.5149
+2016-08-06 22:38:30,3571,14.1977,17.5149
+2016-08-06 22:48:33,3571,14.2292,17.5494
+2016-08-06 22:58:35,3571,14.1977,17.5494
+2016-08-06 23:08:38,3572,14.2242,17.5149
+2016-08-06 23:18:40,3571,14.1939,17.5494
+2016-08-06 23:28:42,3571,14.1977,17.5494
+2016-08-06 23:38:44,3571,14.1939,17.5149
+2016-08-06 23:48:47,3571,14.1939,17.5149
+2016-08-06 23:58:49,3571,14.1939,17.5494
+2016-08-07 00:08:51,3571,14.1939,17.5494
+2016-08-07 00:18:54,3571,14.1927,17.5149
+2016-08-07 00:28:56,3571,14.1927,17.5494
+2016-08-07 00:38:59,3571,14.1939,17.5149
+2016-08-07 00:49:01,3570,14.1939,17.5165
+2016-08-07 00:59:04,3570,14.1927,17.5149
+2016-08-07 01:09:06,3570,14.1927,17.5149
+2016-08-07 01:19:09,3570,14.1939,17.5494
+2016-08-07 01:29:11,3570,14.1939,17.5149
+2016-08-07 01:39:13,3569,14.1939,17.5149
+2016-08-07 01:49:16,3569,14.1927,17.5494
+2016-08-07 01:59:18,3569,14.1939,17.4805
+2016-08-07 02:09:21,3569,14.2254,17.5133
+2016-08-07 02:19:23,3569,14.1927,17.5494
+2016-08-07 02:29:26,3568,14.1939,17.5494
+2016-08-07 02:39:28,3568,14.1625,17.5494
+2016-08-07 02:49:31,3568,14.1939,17.5494
+2016-08-07 02:59:33,3568,14.1939,17.5494
+2016-08-07 03:09:35,3568,14.1939,17.5494
+2016-08-07 03:19:38,3568,14.1927,17.5494
+2016-08-07 03:29:40,3567,14.1939,17.5494
+2016-08-07 03:39:42,3567,14.1927,17.5494
+2016-08-07 03:49:45,3567,14.1927,17.5494
+2016-08-07 03:59:47,3566,14.2254,17.5494
+2016-08-07 04:09:50,3567,14.1939,17.5494
+2016-08-07 04:19:52,3567,14.1939,17.5494
+2016-08-07 04:29:55,3567,14.2216,17.5509
+2016-08-07 04:39:57,3567,14.1889,17.5494
+2016-08-07 04:50:00,3566,14.1914,17.5494
+2016-08-07 05:00:02,3566,14.1901,17.5149
+2016-08-07 05:10:05,3566,14.1901,17.5494
+2016-08-07 05:20:07,3566,14.1901,17.5494
+2016-08-07 05:30:10,3566,14.1914,17.5165
+2016-08-07 05:40:12,3565,14.1876,17.5494
+2016-08-07 05:50:15,3565,14.1901,17.5149
+2016-08-07 06:00:17,3565,14.1901,17.5494
+2016-08-07 06:10:20,3565,14.1864,17.5494
+2016-08-07 06:20:23,3565,14.2191,17.5494
+2016-08-07 06:30:25,3565,14.1864,17.5494
+2016-08-07 06:40:28,3565,14.1864,17.5494
+2016-08-07 06:50:30,3566,14.1876,17.5478
+2016-08-07 07:00:33,3566,14.1876,17.5149
+2016-08-07 07:10:35,3566,14.1826,17.5149
+2016-08-07 07:20:38,3566,14.1876,17.5494
+2016-08-07 07:30:40,3566,14.1826,17.5149
+2016-08-07 07:40:42,3566,14.2178,17.5494
+2016-08-07 07:50:45,3567,14.214,17.4805
+2016-08-07 08:00:47,3568,14.1826,17.5149
+2016-08-07 08:10:50,3569,14.1826,17.5494
+2016-08-07 08:20:54,3569,14.1826,17.5102
+2016-08-07 08:30:57,3570,14.1826,17.5447
+2016-08-07 08:41:00,3571,14.1826,17.5149
+2016-08-07 08:51:02,3572,14.1838,17.5447
+2016-08-07 09:01:05,3572,14.1826,17.5102
+2016-08-07 09:11:07,3573,14.1826,17.5462
+2016-08-07 09:21:10,3574,14.1838,17.5494
+2016-08-07 09:31:13,3574,14.1826,17.4758
+2016-08-07 09:41:15,3575,14.1826,17.5447
+2016-08-07 09:51:18,3576,14.1838,17.5102
+2016-08-07 10:01:20,3577,14.1826,17.5102
+2016-08-07 10:11:23,3578,14.1826,17.5447
+2016-08-07 10:21:26,3581,14.1826,17.5087
+2016-08-07 10:31:28,3582,14.214,17.5462
+2016-08-07 10:41:31,3585,14.1838,17.5431
+2016-08-07 10:51:34,3586,14.1838,17.5447
+2016-08-07 11:01:36,3586,14.1826,17.5447
+2016-08-07 11:11:39,3588,14.214,17.5102
+2016-08-07 11:21:42,3588,14.1826,17.4758
+2016-08-07 11:31:44,3589,14.057,17.5102
+2016-08-07 11:41:47,3591,14.0883,17.5102
+2016-08-07 11:51:50,3592,14.0883,17.5102
+2016-08-07 12:01:52,3593,14.0883,17.5447
+2016-08-07 12:11:55,3594,14.0883,17.4758
+2016-08-07 12:21:57,3594,13.9956,17.5102
+2016-08-07 12:32:00,3595,13.9631,17.5447
+2016-08-07 12:42:03,3596,13.9981,17.5102
+2016-08-07 12:52:05,3597,13.9981,17.5462
+2016-08-07 13:02:08,3598,13.9981,17.5447
+2016-08-07 13:12:11,3598,13.9981,17.5102
+2016-08-07 13:22:14,3598,13.9043,17.5447
+2016-08-07 13:32:16,3598,13.8731,17.5431
+2016-08-07 13:42:19,3599,13.8756,17.5102
+2016-08-07 13:52:22,3599,13.908,17.5447
+2016-08-07 14:02:25,3599,13.9068,17.5447
+2016-08-07 14:12:27,3599,13.9393,17.5102
+2016-08-07 14:22:30,3599,13.9068,17.5447
+2016-08-07 14:32:33,3599,13.9105,17.5071
+2016-08-07 14:42:36,3599,13.8768,17.5102
+2016-08-07 14:52:38,3599,13.8805,17.5102
+2016-08-07 15:02:41,3599,13.9118,17.4414
+2016-08-07 15:12:44,3599,13.817,17.4024
+2016-08-07 15:22:47,3599,13.817,17.4071
+2016-08-07 15:32:50,3599,13.8182,17.4055
+2016-08-07 15:42:52,3599,13.817,17.4024
+2016-08-07 15:52:55,3599,13.817,17.4414
+2016-08-07 16:02:58,3599,13.8481,17.4383
+2016-08-07 16:13:01,3599,13.8481,17.4368
+2016-08-07 16:23:04,3599,13.8182,17.404
+2016-08-07 16:33:07,3599,13.817,17.4024
+2016-08-07 16:43:09,3599,13.8494,17.4024
+2016-08-07 16:53:12,3599,13.8182,17.4024
+2016-08-07 17:03:15,3599,13.817,17.4024
+2016-08-07 17:13:18,3599,13.8207,17.404
+2016-08-07 17:23:21,3599,13.8531,17.4352
+2016-08-07 17:33:23,3599,13.8531,17.4383
+2016-08-07 17:43:26,3599,13.8219,17.4024
+2016-08-07 17:53:29,3599,13.8256,17.4368
+2016-08-07 18:03:32,3599,13.8244,17.4383
+2016-08-07 18:13:34,3599,13.7322,17.4024
+2016-08-07 18:23:37,3599,13.731,17.4071
+2016-08-07 18:33:40,3599,13.7322,17.4368
+2016-08-07 18:43:43,3599,13.7347,17.4024
+2016-08-07 18:53:46,3599,13.7322,17.4024
+2016-08-07 19:03:49,3599,13.7322,17.4383
+2016-08-07 19:13:52,3599,13.7347,17.4024
+2016-08-07 19:23:55,3599,13.7645,17.4368
+2016-08-07 19:33:58,3599,13.7347,17.4024
+2016-08-07 19:44:01,3599,13.7645,17.4383
+2016-08-07 19:54:03,3599,13.7359,17.4368
+2016-08-07 20:04:06,3599,13.8293,17.4024
+2016-08-07 20:14:09,3599,13.828,17.4368
+2016-08-07 20:24:12,3599,13.8605,17.404
+2016-08-07 20:34:14,3599,13.8605,17.4414
+2016-08-07 20:44:17,3599,13.8268,17.4024
+2016-08-07 20:54:20,3599,13.8592,17.4368
+2016-08-07 21:04:23,3599,13.828,17.4055
+2016-08-07 21:14:26,3599,13.828,17.4368
+2016-08-07 21:24:28,3598,13.8592,17.4024
+2016-08-07 21:34:31,3598,13.828,17.4071
+2016-08-07 21:44:34,3598,13.8305,17.404
+2016-08-07 21:54:37,3598,13.833,17.404
+2016-08-07 22:04:39,3598,13.833,17.4368
+2016-08-07 22:14:42,3598,13.8317,17.4414
+2016-08-07 22:24:45,3598,13.8317,17.4414
+2016-08-07 22:34:48,3598,13.8629,17.4414
+2016-08-07 22:44:50,3598,13.833,17.4071
+2016-08-07 22:54:53,3598,13.8642,17.4368
+2016-08-07 23:04:56,3598,13.833,17.4414
+2016-08-07 23:14:59,3597,13.8317,17.4071
+2016-08-07 23:25:01,3597,13.8642,17.4414
+2016-08-07 23:35:04,3597,13.833,17.4414
+2016-08-07 23:45:06,3596,13.8317,17.4055
+2016-08-07 23:55:09,3596,13.8629,17.4071
+2016-08-08 00:05:12,3596,13.8642,17.4071
+2016-08-08 00:15:15,3596,13.8317,17.4414
+2016-08-08 00:25:17,3594,13.8642,17.4399
+2016-08-08 00:35:20,3594,13.833,17.4071
+2016-08-08 00:45:23,3595,13.8317,17.4055
+2016-08-08 00:55:25,3595,13.8629,17.4086
+2016-08-08 01:05:28,3595,13.8317,17.4071
+2016-08-08 01:15:31,3595,13.8317,17.4414
+2016-08-08 01:25:34,3595,13.833,17.4071
+2016-08-08 01:35:37,3595,13.8317,17.4414
+2016-08-08 01:45:39,3595,13.8629,17.4414
+2016-08-08 01:55:42,3595,13.8642,17.4071
+2016-08-08 02:05:45,3595,13.833,17.4414
+2016-08-08 02:15:48,3582,13.8293,17.4414
+2016-08-08 02:25:51,3594,13.833,17.4414
+2016-08-08 02:35:53,3592,13.8642,17.4071
+2016-08-08 02:45:55,3596,13.8642,17.3043
+2016-08-08 02:55:58,3595,13.8317,17.3043
+2016-08-08 03:06:01,3596,13.833,17.4414
+2016-08-08 03:16:04,3595,13.833,17.4071
+2016-08-08 03:26:06,3596,13.8317,17.3043
+2016-08-08 03:36:09,3595,13.833,17.4414
+2016-08-08 03:46:12,3595,13.833,17.4071
+2016-08-08 03:56:14,3595,13.8317,17.3043
+2016-08-08 04:06:17,3595,13.8642,17.3043
+2016-08-08 04:16:20,3595,13.8317,17.4071
+2016-08-08 04:26:23,3595,13.833,17.4071
+2016-08-08 04:36:25,3595,13.8617,17.4071
+2016-08-08 04:46:28,3595,13.8317,17.3043
+2016-08-08 04:56:31,3593,13.8317,17.3089
+2016-08-08 05:06:33,3594,13.8317,17.3043
+2016-08-08 05:16:36,3594,13.8317,17.3089
+2016-08-08 05:26:38,3594,13.833,17.3043
+2016-08-08 05:36:41,3594,13.8317,17.4117
+2016-08-08 05:46:44,3594,13.833,17.3043
+2016-08-08 05:56:47,3594,13.8642,17.3074
+2016-08-08 06:06:49,3594,13.8317,17.3089
+2016-08-08 06:16:52,3594,13.833,17.3431
+2016-08-08 06:26:54,3595,13.833,17.3089
+2016-08-08 06:36:57,3594,13.833,17.3043
+2016-08-08 06:47:00,3594,13.8617,17.3089
+2016-08-08 06:57:02,3594,13.8317,17.3089
+2016-08-08 07:07:05,3594,13.8317,17.3431
+2016-08-08 07:17:07,3594,13.833,17.3074
+2016-08-08 07:27:10,3595,13.8629,17.3089
+2016-08-08 07:37:13,3595,13.9266,17.3089
+2016-08-08 07:47:15,3595,13.833,17.3089
+2016-08-08 07:57:18,3596,13.833,17.3089
+2016-08-08 08:07:20,3596,13.8317,17.3089
+2016-08-08 08:17:23,3597,13.8629,17.2747
+2016-08-08 08:27:26,3596,13.8317,17.3089
+2016-08-08 08:37:28,3598,13.833,17.3089
+2016-08-08 08:47:31,3598,13.833,17.3089
+2016-08-08 08:57:33,3598,13.8629,17.3074
+2016-08-08 09:07:36,3598,13.8317,17.3089
+2016-08-08 09:17:39,3598,13.8305,17.3104
+2016-08-08 09:27:42,3598,13.833,17.3089
+2016-08-08 09:37:44,3599,13.833,17.3089
+2016-08-08 09:47:47,3598,13.8305,17.3089
+2016-08-08 09:57:50,3599,13.833,17.3089
+2016-08-08 10:07:53,3599,13.8305,17.3089
+2016-08-08 10:17:55,3599,13.7707,17.3089
+2016-08-08 10:27:58,3599,13.7383,17.3089
+2016-08-08 10:38:01,3599,13.7396,17.3074
+2016-08-08 10:48:04,3599,13.7383,17.3089
+2016-08-08 10:58:07,3599,13.6774,17.3089
+2016-08-08 11:08:09,3599,13.6488,17.3074
+2016-08-08 11:18:12,3599,13.6799,17.3089
+2016-08-08 11:28:15,3599,13.6488,17.3089
+2016-08-08 11:38:19,3599,13.6488,17.2049
+2016-08-08 11:48:23,3599,13.6476,17.2064
+2016-08-08 11:58:26,3599,13.6488,17.2406
+2016-08-08 12:08:29,3599,13.6513,17.2064
+2016-08-08 12:18:31,3599,13.6513,17.2406
+2016-08-08 12:28:34,3599,13.6525,17.2064
+2016-08-08 12:38:37,3599,13.6525,17.2064
+2016-08-08 12:48:40,3599,13.6525,17.208
+2016-08-08 12:58:43,3599,13.5595,17.2064
+2016-08-08 13:08:46,3599,13.5595,17.2049
+2016-08-08 13:18:49,3599,13.5595,17.2064
+2016-08-08 13:28:52,3599,13.5583,17.2406
+2016-08-08 13:38:55,3599,13.5595,17.2064
+2016-08-08 13:48:58,3599,13.5595,17.2049
+2016-08-08 13:59:01,3599,13.4656,17.2064
+2016-08-08 14:09:04,3599,13.4977,17.2064
+2016-08-08 14:19:07,3599,13.5001,17.2064
+2016-08-08 14:29:10,3599,13.5013,17.2406
+2016-08-08 14:39:13,3599,13.5013,17.2049
+2016-08-08 14:49:16,3599,13.4692,17.2406
+2016-08-08 14:59:19,3599,13.5013,17.2064
+2016-08-08 15:09:22,3599,13.468,17.2406
+2016-08-08 15:19:25,3599,13.3779,17.239
+2016-08-08 15:29:29,3599,13.4076,17.2064
+2016-08-08 15:39:32,3599,13.3768,17.2064
+2016-08-08 15:49:35,3599,13.3779,17.2064
+2016-08-08 15:59:38,3599,13.4087,17.2406
+2016-08-08 16:09:41,3599,13.3756,17.2049
+2016-08-08 16:19:44,3599,13.3768,17.2049
+2016-08-08 16:29:47,3599,13.3815,17.2064
+2016-08-08 16:39:50,3599,13.3779,17.2064
+2016-08-08 16:49:53,3599,13.4123,17.2406
+2016-08-08 16:59:55,3599,13.4111,17.2064
+2016-08-08 17:09:58,3599,13.3815,17.2049
+2016-08-08 17:20:01,3599,13.4123,17.2406
+2016-08-08 17:30:04,3599,13.5049,17.2406
+2016-08-08 17:40:07,3599,13.5049,17.2406
+2016-08-08 17:50:10,3599,13.474,17.2064
+2016-08-08 18:00:12,3599,13.4728,17.2064
+2016-08-08 18:10:15,3599,13.5049,17.2064
+2016-08-08 18:20:18,3599,13.474,17.2406
+2016-08-08 18:30:21,3599,13.5049,17.2064
+2016-08-08 18:40:24,3599,13.5049,17.2064
+2016-08-08 18:50:26,3599,13.474,17.2406
+2016-08-08 19:00:29,3599,13.5049,17.2064
+2016-08-08 19:10:32,3599,13.5049,17.2064
+2016-08-08 19:20:34,3599,13.4728,17.2064
+2016-08-08 19:30:37,3598,13.474,17.2064
+2016-08-08 19:40:40,3599,13.5049,17.2064
+2016-08-08 19:50:42,3598,13.5049,17.2064
+2016-08-08 20:00:45,3598,13.474,17.2406
+2016-08-08 20:10:48,3598,13.5037,17.2064
+2016-08-08 20:20:50,3598,13.5037,17.2064
+2016-08-08 20:30:53,3598,13.5049,17.2064
+2016-08-08 20:40:55,3598,13.4432,17.2421
+2016-08-08 20:50:57,3597,13.4704,17.2406
+2016-08-08 21:01:00,3597,13.5001,17.239
+2016-08-08 21:11:02,3596,13.5013,17.2064
+2016-08-08 21:21:05,3596,13.5013,17.2064
+2016-08-08 21:31:07,3595,13.4704,17.2064
+2016-08-08 21:41:10,3595,13.5001,17.2064
+2016-08-08 21:51:13,3595,13.5001,17.2064
+2016-08-08 22:01:15,3595,13.5013,17.2064
+2016-08-08 22:11:18,3594,13.5013,17.2406
+2016-08-08 22:21:21,3593,13.5001,17.2064
+2016-08-08 22:31:24,3593,13.5013,17.2064
+2016-08-08 22:41:26,3592,13.4692,17.2406
+2016-08-08 22:51:29,3592,13.5013,17.2064
+2016-08-08 23:01:31,3592,13.5013,17.2406
+2016-08-08 23:11:34,3592,13.5013,17.2406
+2016-08-08 23:21:37,3591,13.5013,17.2064
+2016-08-08 23:31:39,3591,13.5025,17.2406
+2016-08-08 23:41:42,3589,13.5013,17.2406
+2016-08-08 23:51:44,3590,13.5013,17.2406
+2016-08-09 00:01:47,3588,13.4384,17.2406
+2016-08-09 00:11:50,3588,13.5001,17.2064
+2016-08-09 00:21:52,3588,13.5001,17.2406
+2016-08-09 00:31:55,3587,13.4977,17.2406
+2016-08-09 00:41:57,3587,13.4965,17.2406
+2016-08-09 00:52:00,3586,13.4965,17.2406
+2016-08-09 01:02:02,3586,13.4977,17.2406
+2016-08-09 01:12:04,3586,13.5583,17.2064
+2016-08-09 01:22:07,3585,13.5595,17.2064
+2016-08-09 01:32:09,3585,13.5905,17.2064
+2016-08-09 01:42:12,3584,13.6215,17.2406
+2016-08-09 01:52:14,3583,13.5905,17.2406
+2016-08-09 02:02:17,3582,13.5595,17.208
+2016-08-09 02:12:19,3580,13.5583,17.2064
+2016-08-09 02:22:22,3581,13.5595,17.2049
+2016-08-09 02:32:25,3579,13.5583,17.2064
+2016-08-09 02:42:27,3579,13.5583,17.2049
+2016-08-09 02:52:29,3578,13.5595,17.2793
+2016-08-09 03:02:32,3578,13.5559,17.2406
+2016-08-09 03:12:34,3577,13.5559,17.2064
+2016-08-09 03:22:37,3577,13.5559,17.2406
+2016-08-09 03:32:39,3577,13.5869,17.2747
+2016-08-09 03:42:41,3577,13.5869,17.2064
+2016-08-09 03:52:44,3576,13.5523,17.208
+2016-08-09 04:02:46,3575,13.5535,17.2064
+2016-08-09 04:12:49,3574,13.5844,17.2421
+2016-08-09 04:22:51,3575,13.5535,17.211
+2016-08-09 04:32:54,3574,13.5832,17.2452
+2016-08-09 04:42:56,3574,13.5511,17.2064
+2016-08-09 04:52:59,3573,13.5844,17.2452
+2016-08-09 05:03:01,3574,13.5832,17.2406
+2016-08-09 05:13:03,3573,13.5523,17.2064
+2016-08-09 05:23:06,3572,13.5844,17.211
+2016-08-09 05:33:08,3572,13.5499,17.2064
+2016-08-09 05:43:10,3572,13.5832,17.2452
+2016-08-09 05:53:12,3571,13.5487,17.2064
+2016-08-09 06:03:14,3571,13.5487,17.2406
+2016-08-09 06:13:17,3571,13.5499,17.211
+2016-08-09 06:23:19,3571,13.5796,17.2406
+2016-08-09 06:33:21,3571,13.5808,17.211
+2016-08-09 06:43:23,3571,13.5808,17.2406
+2016-08-09 06:53:26,3571,13.5808,17.2064
+2016-08-09 07:03:28,3571,13.5796,17.2064
+2016-08-09 07:13:30,3571,13.5808,17.2747
+2016-08-09 07:23:33,3571,13.5772,17.2747
+2016-08-09 07:33:35,3571,13.6082,17.2406
+2016-08-09 07:43:37,3572,13.5772,17.2064
+2016-08-09 07:53:40,3572,13.5463,17.2421
+2016-08-09 08:03:42,3572,13.5463,17.2406
+2016-08-09 08:13:44,3572,13.5463,17.2064
+2016-08-09 08:23:47,3572,13.576,17.2406
+2016-08-09 08:33:49,3573,13.5772,17.2406
+2016-08-09 08:43:52,3573,13.6069,17.2406
+2016-08-09 08:53:54,3574,13.5427,17.2747
+2016-08-09 09:03:56,3574,13.6045,17.2406
+2016-08-09 09:13:59,3574,13.5724,17.2406
+2016-08-09 09:24:01,3574,13.5415,17.2406
+2016-08-09 09:34:04,3575,13.5427,17.2064
+2016-08-09 09:44:06,3576,13.5724,17.2406
+2016-08-09 09:54:08,3577,13.5415,17.2064
+2016-08-09 10:04:11,3578,13.5724,17.2406
+2016-08-09 10:14:13,3580,13.4797,17.2064
+2016-08-09 10:24:16,3581,13.4797,17.2049
+2016-08-09 10:34:18,3583,13.4773,17.2406
+2016-08-09 10:44:21,3584,13.4453,17.1383
+2016-08-09 10:54:23,3585,13.4761,17.1383
+2016-08-09 11:04:26,3586,13.4761,17.1383
+2016-08-09 11:14:28,3587,13.4761,17.1383
+2016-08-09 11:24:31,3588,13.4773,17.1383
+2016-08-09 11:34:33,3589,13.3837,17.1383
+2016-08-09 11:44:36,3591,13.353,17.1383
+2016-08-09 11:54:38,3592,13.3837,17.1368
+2016-08-09 12:04:41,3592,13.3837,17.1043
+2016-08-09 12:14:43,3593,13.3837,17.1383
+2016-08-09 12:24:46,3593,13.3837,17.1383
+2016-08-09 12:34:49,3594,13.3837,17.1383
+2016-08-09 12:44:51,3595,13.3837,17.1399
+2016-08-09 12:54:54,3595,13.3837,17.1383
+2016-08-09 13:04:56,3595,13.3849,17.1383
+2016-08-09 13:14:59,3595,13.3837,17.1368
+2016-08-09 13:25:02,3595,13.3542,17.1383
+2016-08-09 13:35:04,3596,13.353,17.1043
+2016-08-09 13:45:07,3596,13.3825,17.1043
+2016-08-09 13:55:09,3595,13.3837,17.1383
+2016-08-09 14:05:12,3596,13.3837,17.1383
+2016-08-09 14:15:15,3596,13.3837,17.1383
+2016-08-09 14:25:17,3597,13.3849,17.1383
+2016-08-09 14:35:20,3597,13.3837,17.1383
+2016-08-09 14:45:22,3598,13.353,17.0364
+2016-08-09 14:55:26,3598,13.353,17.0349
+2016-08-09 15:05:30,3598,13.3223,17.0364
+2016-08-09 15:15:32,3597,13.353,17.0364
+2016-08-09 15:25:35,3597,13.3837,17.0364
+2016-08-09 15:35:38,3597,13.3837,17.0364
+2016-08-09 15:45:40,3597,13.3837,17.0349
+2016-08-09 15:55:43,3597,13.353,17.0364
+2016-08-09 16:05:45,3596,13.3849,17.0364
+2016-08-09 16:15:48,3597,13.353,17.0364
+2016-08-09 16:25:51,3596,13.353,17.0026
+2016-08-09 16:35:53,3595,13.3837,17.0364
+2016-08-09 16:45:56,3595,13.3837,17.0364
+2016-08-09 16:55:59,3593,13.3825,16.9687
+2016-08-09 17:06:01,3595,13.3837,17.0349
+2016-08-09 17:16:04,3595,13.3837,17.0364
+2016-08-09 17:26:07,3595,13.3837,17.0364
+2016-08-09 17:36:09,3594,13.3837,17.0364
+2016-08-09 17:46:12,3594,13.353,17.0026
+2016-08-09 17:56:14,3593,13.3837,17.0364
+2016-08-09 18:06:17,3593,13.3837,17.0704
+2016-08-09 18:16:19,3592,13.3837,17.0026
+2016-08-09 18:26:22,3591,13.3837,17.0364
+2016-08-09 18:36:24,3591,13.3837,17.0364
+2016-08-09 18:46:27,3590,13.3542,17.0349
+2016-08-09 18:56:29,3589,13.3542,17.0364
+2016-08-09 19:06:32,3588,13.3849,17.0349
+2016-08-09 19:16:35,3587,13.3837,17.0364
+2016-08-09 19:26:37,3587,13.4145,17.0364
+2016-08-09 19:36:40,3586,13.3837,17.0349
+2016-08-09 19:46:42,3586,13.3837,17.0364
+2016-08-09 19:56:45,3585,13.3837,17.0026
+2016-08-09 20:06:48,3585,13.3837,17.0026
+2016-08-09 20:16:50,3584,13.353,17.0364
+2016-08-09 20:26:53,3583,13.3837,17.0364
+2016-08-09 20:36:55,3583,13.3837,17.0704
+2016-08-09 20:46:58,3582,13.3837,17.0364
+2016-08-09 20:57:00,3582,13.3837,17.0364
+2016-08-09 21:07:03,3581,13.3837,17.0364
+2016-08-09 21:17:05,3581,13.3849,17.0364
+2016-08-09 21:27:08,3577,13.3837,17.0364
+2016-08-09 21:37:10,3578,13.3837,17.0364
+2016-08-09 21:47:13,3578,13.3837,17.0364
+2016-08-09 21:57:15,3578,13.3837,17.0364
+2016-08-09 22:07:18,3577,13.3837,17.0364
+2016-08-09 22:17:20,3577,13.3837,17.0364
+2016-08-09 22:27:22,3577,13.3837,17.0364
+2016-08-09 22:37:25,3576,13.3837,17.0364
+2016-08-09 22:47:27,3574,13.3837,17.0349
+2016-08-09 22:57:30,3575,13.3837,17.0364
+2016-08-09 23:07:32,3574,13.3837,17.0364
+2016-08-09 23:17:35,3573,13.3837,17.0349
+2016-08-09 23:27:37,3574,13.3837,17.0704
+2016-08-09 23:37:39,3573,13.3849,17.0719
+2016-08-09 23:47:42,3572,13.3837,17.038
+2016-08-09 23:57:44,3572,13.3802,17.0364
+2016-08-10 00:07:46,3571,13.3825,17.0364
+2016-08-10 00:17:48,3571,13.3802,17.0364
+2016-08-10 00:27:50,3571,13.3506,17.0364
+2016-08-10 00:37:53,3570,13.3802,17.0364
+2016-08-10 00:47:55,3574,13.3494,17.0364
+2016-08-10 00:57:58,3576,13.4109,17.0364
+2016-08-10 01:08:00,3570,13.3825,17.0364
+2016-08-10 01:18:03,3569,13.3778,17.0364
+2016-08-10 01:28:05,3568,13.3778,17.0026
+2016-08-10 01:38:07,3568,13.3778,17.0319
+2016-08-10 01:48:10,3566,13.3778,17.0364
+2016-08-10 01:58:12,3564,13.3778,17.0364
+2016-08-10 02:08:15,3562,13.3778,17.0364
+2016-08-10 02:18:17,3560,13.3778,17.0364
+2016-08-10 02:28:19,3559,13.3742,16.998
+2016-08-10 02:38:22,3561,13.3742,17.0364
+2016-08-10 02:48:24,3561,13.3742,17.0364
+2016-08-10 02:58:26,3559,13.3742,17.0319
+2016-08-10 03:08:29,3559,13.3742,17.0319
+2016-08-10 03:18:31,3559,13.3742,17.0364
+2016-08-10 03:28:33,3559,13.405,17.0319
+2016-08-10 03:38:36,3559,13.3742,17.0319
+2016-08-10 03:48:38,3559,13.405,17.0364
+2016-08-10 03:58:40,3558,13.3742,17.0364
+2016-08-10 04:08:42,3557,13.3742,17.0334
+2016-08-10 04:18:45,3557,13.3742,17.0304
+2016-08-10 04:28:47,3557,13.3742,17.0319
+2016-08-10 04:38:49,3557,13.3742,17.0319
+2016-08-10 04:48:51,3557,13.3742,17.0319
+2016-08-10 04:58:53,3556,13.3742,17.0304
+2016-08-10 05:08:56,3556,13.3754,16.998
+2016-08-10 05:18:58,3556,13.3694,17.0319
+2016-08-10 05:29:00,3555,13.3706,17.0319
+2016-08-10 05:39:03,3555,13.3706,17.0319
+2016-08-10 05:49:05,3555,13.3694,17.0658
+2016-08-10 05:59:07,3553,13.3706,17.0319
+2016-08-10 06:09:09,3553,13.3706,17.0319
+2016-08-10 06:19:12,3553,13.3671,17.0334
+2016-08-10 06:29:14,3554,13.3671,17.0319
+2016-08-10 06:39:16,3552,13.3659,17.0319
+2016-08-10 06:49:18,3553,13.3671,17.0319
+2016-08-10 06:59:21,3553,13.3671,17.0319
+2016-08-10 07:09:23,3554,13.3635,17.0334
+2016-08-10 07:19:25,3554,13.3671,17.0274
+2016-08-10 07:29:27,3554,13.3635,17.0319
+2016-08-10 07:39:29,3555,13.3635,17.0274
+2016-08-10 07:49:32,3555,13.3635,17.0319
+2016-08-10 07:59:34,3556,13.3635,17.0334
+2016-08-10 08:09:36,3556,13.3635,17.0319
+2016-08-10 08:19:38,3557,13.3647,17.0319
+2016-08-10 08:29:41,3557,13.3635,16.9935
+2016-08-10 08:39:43,3558,13.3623,17.0258
+2016-08-10 08:49:45,3559,13.3635,17.0289
+2016-08-10 08:59:47,3559,13.3635,17.0289
+2016-08-10 09:09:49,3560,13.3635,17.0289
+2016-08-10 09:19:51,3561,13.3635,17.0274
+2016-08-10 09:29:53,3561,13.3942,17.0274
+2016-08-10 09:39:55,3562,13.3635,17.0274
+2016-08-10 09:49:58,3562,13.3647,17.0258
+2016-08-10 10:00:00,3563,13.3635,17.0289
+2016-08-10 10:10:02,3563,13.3635,17.0628
+2016-08-10 10:20:04,3563,13.3635,17.0289
+2016-08-10 10:30:07,3564,13.3635,17.0228
+2016-08-10 10:40:09,3564,13.3635,17.0628
+2016-08-10 10:50:11,3564,13.3635,17.0274
+2016-08-10 11:00:14,3564,13.3635,17.0289
+2016-08-10 11:10:16,3564,13.3635,17.0243
+2016-08-10 11:20:26,3562,13.3635,17.0289
+2016-08-10 11:30:28,3564,13.3635,17.0274
+2016-08-10 11:40:30,3564,13.3942,17.0243
+2016-08-10 11:50:33,3564,13.3635,17.0228
+2016-08-10 12:00:35,3565,13.3635,17.0243
+2016-08-10 12:10:37,3565,13.3588,17.0243
+2016-08-10 12:20:40,3566,13.3611,17.0228
+2016-08-10 12:30:42,3566,13.3588,17.0228
+2016-08-10 12:40:44,3567,13.3599,17.0228
+2016-08-10 12:50:47,3568,13.3599,17.0243
+2016-08-10 13:00:49,3569,13.3599,16.9214
+2016-08-10 13:10:51,3570,13.3304,16.9229
+2016-08-10 13:20:54,3571,13.3599,16.9229
+2016-08-10 13:30:56,3572,13.3281,16.9229
+2016-08-10 13:40:58,3572,13.3599,16.9567
+2016-08-10 13:51:01,3574,13.3599,16.9214
+2016-08-10 14:01:03,3576,13.3599,16.9214
+2016-08-10 14:11:06,3577,13.268,16.9214
+2016-08-10 14:21:08,3579,13.2986,16.8891
+2016-08-10 14:31:11,3581,13.2385,16.9229
+2016-08-10 14:41:13,3581,13.2668,16.9214
+2016-08-10 14:51:16,3582,13.268,16.9214
+2016-08-10 15:01:18,3582,13.2986,16.9214
+2016-08-10 15:11:21,3583,13.2385,16.9214
+2016-08-10 15:21:24,3583,13.268,16.9229
+2016-08-10 15:31:26,3583,13.268,16.9214
+2016-08-10 15:41:29,3583,13.2668,16.9214
+2016-08-10 15:51:31,3583,13.268,16.9229
+2016-08-10 16:01:34,3583,13.268,16.9199
+2016-08-10 16:11:36,3582,13.2668,16.8876
+2016-08-10 16:21:39,3582,13.268,16.9214
+2016-08-10 16:31:41,3582,13.2668,16.8876
+2016-08-10 16:41:44,3582,13.2668,16.9214
+2016-08-10 16:51:47,3581,13.268,16.9214
+2016-08-10 17:01:49,3581,13.268,16.9214
+2016-08-10 17:11:51,3581,13.2668,16.9214
+2016-08-10 17:21:54,3581,13.2373,16.9214
+2016-08-10 17:31:56,3581,13.268,16.9214
+2016-08-10 17:41:59,3580,13.2668,16.9214
+2016-08-10 17:52:01,3581,13.268,16.9214
+2016-08-10 18:02:04,3580,13.268,16.9214
+2016-08-10 18:12:06,3580,13.268,16.9214
+2016-08-10 18:22:09,3579,13.2373,16.9214
+2016-08-10 18:32:11,3579,13.2691,16.9214
+2016-08-10 18:42:14,3578,13.268,16.9214
+2016-08-10 18:52:16,3578,13.268,16.9214
+2016-08-10 19:02:19,3577,13.2373,16.9229
+2016-08-10 19:12:21,3577,13.268,16.9214
+2016-08-10 19:22:24,3576,13.268,16.9229
+2016-08-10 19:32:26,3576,13.2691,16.9199
+2016-08-10 19:42:29,3576,13.268,16.9214
+2016-08-10 19:52:31,3574,13.2691,16.9214
+2016-08-10 20:02:33,3574,13.2373,16.9199
+2016-08-10 20:12:36,3574,13.2691,16.9536
+2016-08-10 20:22:38,3573,13.268,16.9229
+2016-08-10 20:32:41,3572,13.268,16.9229
+2016-08-10 20:42:43,3571,13.2656,16.9214
+2016-08-10 20:52:46,3570,13.2644,16.9214
+2016-08-10 21:02:48,3570,13.2644,16.9229
+2016-08-10 21:12:51,3568,13.2656,16.9199
+2016-08-10 21:22:53,3568,13.2656,16.9214
+2016-08-10 21:32:56,3566,13.2656,16.9229
+2016-08-10 21:42:58,3566,13.2656,16.8876
+2016-08-10 21:53:00,3565,13.2656,16.9214
+2016-08-10 22:03:02,3565,13.2962,16.9214
+2016-08-10 22:13:05,3564,13.2644,16.9214
+2016-08-10 22:23:07,3563,13.2644,16.9567
+2016-08-10 22:33:09,3562,13.2644,16.8831
+2016-08-10 22:43:18,3561,13.2644,16.9199
+2016-08-10 22:53:21,3560,13.2621,16.9214
+2016-08-10 23:03:23,3559,13.2656,16.9214
+2016-08-10 23:13:25,3559,13.2621,16.9506
+2016-08-10 23:23:27,3558,13.2927,16.9229
+2016-08-10 23:33:30,3557,13.2915,16.9214
+2016-08-10 23:43:32,3557,13.2609,16.9214
+2016-08-10 23:53:34,3556,13.2315,16.9551
+2016-08-11 01:03:50,3551,13.3504,16.9521
+2016-08-11 01:13:53,3550,13.2573,16.9184
+2016-08-11 01:23:55,3550,13.2268,16.9521
+2016-08-11 01:33:57,3549,13.2573,16.9169
+2016-08-11 01:43:59,3549,13.2573,16.9184
+2016-08-11 01:54:02,3548,13.2573,16.9169
+2016-08-11 02:04:04,3548,13.2585,16.9506
+2016-08-11 02:14:06,3547,13.2585,16.9521
+2016-08-11 02:24:09,3547,13.2585,16.9184
+2016-08-11 02:34:11,3546,13.2573,16.9521
+2016-08-11 02:44:13,3545,13.2585,16.9184
+2016-08-11 02:54:16,3545,13.2573,16.9169
+2016-08-11 03:04:18,3545,13.2573,16.8846
+2016-08-11 03:14:20,3545,13.3493,16.9184
+2016-08-11 03:24:22,3544,13.255,16.9169
+2016-08-11 03:34:25,3544,13.2562,16.9184
+2016-08-11 03:44:27,3543,13.255,16.9123
+2016-08-11 03:54:29,3543,13.255,16.9184
+2016-08-11 04:04:31,3543,13.3481,16.9184
+2016-08-11 04:14:34,3542,13.3469,16.9169
+2016-08-11 04:24:36,3542,13.3445,16.9123
+2016-08-11 04:34:38,3542,13.3433,16.9184
+2016-08-11 04:44:40,3541,13.3433,16.9123
+2016-08-11 04:54:43,3541,13.3421,16.9138
+2016-08-11 05:04:45,3540,13.2514,16.9138
+2016-08-11 05:14:47,3540,13.3421,16.9138
+2016-08-11 05:24:49,3540,13.2479,16.9138
+2016-08-11 05:34:51,3540,13.2479,16.9138
+2016-08-11 05:44:53,3539,13.2479,16.9123
+2016-08-11 05:54:56,3539,13.2479,16.9476
+2016-08-11 06:04:58,3539,13.3398,16.8786
+2016-08-11 06:15:00,3539,13.3398,16.9461
+2016-08-11 06:25:02,3538,13.3704,16.9123
+2016-08-11 06:35:04,3538,13.3398,16.9078
+2016-08-11 06:45:07,3538,13.3398,16.9138
+2016-08-11 06:55:09,3538,13.3398,16.9138
+2016-08-11 07:05:11,3539,13.3398,16.9138
+2016-08-11 07:15:13,3539,13.3398,16.9461
+2016-08-11 07:25:15,3540,13.3398,16.9123
+2016-08-11 07:35:18,3540,13.3398,16.9138
+2016-08-11 07:45:20,3541,13.3398,16.9123
+2016-08-11 07:55:22,3542,13.3398,16.9093
+2016-08-11 08:05:25,3543,13.3398,16.9416
+2016-08-11 08:15:27,3544,13.2761,16.9476
+2016-08-11 08:25:29,3545,13.2444,16.9078
+2016-08-11 08:35:32,3545,13.2444,16.9093
+2016-08-11 08:45:34,3547,13.2456,16.9431
+2016-08-11 08:55:36,3548,13.2444,16.9416
+2016-08-11 09:05:39,3550,13.2444,16.9078
+2016-08-11 09:15:41,3550,13.2456,16.9078
+2016-08-11 09:25:43,3553,13.2444,16.9093
+2016-08-11 09:35:46,3554,13.2408,16.9093
+2016-08-11 09:45:48,3556,13.2444,16.9078
+2016-08-11 09:55:51,3557,13.2444,16.9078
+2016-08-11 10:05:53,3559,13.2408,16.9078
+2016-08-11 10:15:56,3560,13.2408,16.9078
+2016-08-11 10:25:58,3563,13.2408,16.9078
+2016-08-11 10:36:00,3564,13.2408,16.9093
+2016-08-11 10:46:03,3565,13.2408,16.9093
+2016-08-11 10:56:05,3566,13.2444,16.9078
+2016-08-11 11:06:08,3568,13.2714,16.9048
+2016-08-11 11:16:10,3569,13.2408,16.9033
+2016-08-11 11:26:13,3570,13.2103,16.9048
+2016-08-11 11:36:15,3572,13.2103,16.9033
+2016-08-11 11:46:18,3572,13.2408,16.9033
+2016-08-11 11:56:20,3573,13.1505,16.9033
+2016-08-11 12:06:23,3574,13.1528,16.9033
+2016-08-11 12:16:25,3574,13.1493,16.9033
+2016-08-11 12:26:28,3575,13.1493,16.9048
+2016-08-11 12:36:30,3576,13.1528,16.9033
+2016-08-11 12:46:33,3577,13.1493,16.9033
+2016-08-11 12:56:35,3577,13.1528,16.8696
+2016-08-11 13:06:38,3577,13.1528,16.9033
+2016-08-11 13:16:40,3578,13.1493,16.9033
+2016-08-11 13:26:43,3578,13.1528,16.9048
+2016-08-11 13:36:46,3579,13.1528,16.9048
+2016-08-11 13:46:48,3580,13.1528,16.8711
+2016-08-11 13:56:51,3581,13.1235,16.9033
+2016-08-11 14:06:53,3581,13.154000000000002,16.9033
+2016-08-11 14:16:56,3581,13.1528,16.9048
+2016-08-11 14:26:58,3581,13.1528,16.9048
+2016-08-11 14:37:01,3582,13.1516,16.9033
+2016-08-11 14:47:04,3583,13.154000000000002,16.9033
+2016-08-11 14:57:06,3583,13.1528,16.8038
+2016-08-11 15:07:09,3583,13.1528,16.8023
+2016-08-11 15:17:11,3583,13.1528,16.8038
+2016-08-11 15:27:14,3584,13.1563,16.8038
+2016-08-11 15:37:16,3584,13.1528,16.8023
+2016-08-11 15:47:19,3585,13.154000000000002,16.8023
+2016-08-11 15:57:22,3585,13.1528,16.8023
+2016-08-11 16:07:24,3585,13.1563,16.8023
+2016-08-11 16:17:26,3585,13.1563,16.8053
+2016-08-11 16:27:29,3585,13.1563,16.8023
+2016-08-11 16:37:32,3585,13.1563,16.7687
+2016-08-11 16:47:34,3585,13.1258,16.8038
+2016-08-11 16:57:37,3585,13.1563,16.8038
+2016-08-11 17:07:39,3585,13.1563,16.8023
+2016-08-11 17:17:42,3585,13.1563,16.7687
+2016-08-11 17:27:44,3584,13.1563,16.8023
+2016-08-11 17:37:47,3584,13.1563,16.8023
+2016-08-11 17:47:49,3583,13.1258,16.8023
+2016-08-11 17:57:52,3582,13.1563,16.8023
+2016-08-11 18:07:54,3581,13.1563,16.7687
+2016-08-11 18:17:57,3581,13.1563,16.8023
+2016-08-11 18:28:00,3581,13.1551,16.7687
+2016-08-11 18:38:02,3580,13.1563,16.8038
+2016-08-11 18:48:05,3579,13.1563,16.8053
+2016-08-11 18:58:07,3578,13.1563,16.8023
+2016-08-11 19:08:10,3577,13.1563,16.8359
+2016-08-11 19:18:12,3577,13.1563,16.8038
+2016-08-11 19:28:15,3576,13.1563,16.8038
+2016-08-11 19:38:17,3575,13.1563,16.8023
+2016-08-11 19:48:20,3574,13.1563,16.7702
+2016-08-11 19:58:22,3574,13.1563,16.8023
+2016-08-11 20:08:24,3573,13.1563,16.8038
+2016-08-11 20:18:27,3572,13.1563,16.8023
+2016-08-11 20:28:29,3572,13.1563,16.8023
+2016-08-11 20:38:32,3571,13.1563,16.8023
+2016-08-11 20:48:34,3571,13.1868,16.8023
+2016-08-11 20:58:37,3570,13.1563,16.8023
+2016-08-11 21:08:39,3570,13.1563,16.8038
+2016-08-11 21:18:41,3570,13.1563,16.7687
+2016-08-11 21:28:44,3569,13.1563,16.8023
+2016-08-11 21:38:46,3569,13.1868,16.8023
+2016-08-11 21:48:48,3569,13.1563,16.7978
+2016-08-11 21:58:50,3569,13.1868,16.7993
+2016-08-11 22:08:53,3568,13.1551,16.8038
+2016-08-11 22:18:55,3568,13.1868,16.8023
+2016-08-11 22:28:57,3568,13.1563,16.7978
+2016-08-11 22:39:00,3568,13.1868,16.7993
+2016-08-11 22:49:02,3567,13.1563,16.7993
+2016-08-11 22:59:04,3568,13.1563,16.8329
+2016-08-11 23:09:06,3567,13.1563,16.7993
+2016-08-11 23:19:09,3568,13.127,16.8023
+2016-08-11 23:29:11,3567,13.1563,16.7993
+2016-08-11 23:39:13,3567,13.1563,16.7993
+2016-08-11 23:49:16,3567,13.1563,16.7993
+2016-08-11 23:59:18,3567,13.1563,16.8008
+2016-08-12 00:09:20,3567,13.1563,16.7993
+2016-08-12 00:19:22,3566,13.1563,16.7993
+2016-08-12 00:29:25,3567,13.1563,16.7993
+2016-08-12 00:39:27,3567,13.1575,16.7993
+2016-08-12 00:49:29,3567,13.1563,16.7993
+2016-08-12 00:59:31,3567,13.1563,16.8329
+2016-08-12 01:09:34,3567,13.1563,16.7993
+2016-08-12 01:19:36,3567,13.1563,16.7993
+2016-08-12 01:29:38,3567,13.1563,16.7993
+2016-08-12 01:39:40,3567,13.1563,16.7993
+2016-08-12 01:49:43,3567,13.1258,16.7993
+2016-08-12 01:59:45,3568,13.1563,16.7993
+2016-08-12 02:09:47,3568,13.1563,16.7993
+2016-08-12 02:19:50,3568,13.1563,16.7993
+2016-08-12 02:29:53,3567,13.1563,16.7993
+2016-08-12 02:39:55,3567,13.1563,16.7993
+2016-08-12 02:49:58,3568,13.1575,16.7993
+2016-08-12 03:00:00,3568,13.1258,16.7993
+2016-08-12 03:10:03,3568,13.1563,16.7993
+2016-08-12 03:20:05,3568,13.1563,16.7993
+2016-08-12 03:30:08,3568,13.1563,16.7993
+2016-08-12 03:40:10,3568,13.1563,16.7993
+2016-08-12 03:50:13,3568,13.1563,16.7993
+2016-08-12 04:00:15,3568,13.1563,16.7978
+2016-08-12 04:10:18,3568,13.1563,16.7993
+2016-08-12 04:20:20,3568,13.1563,16.7993
+2016-08-12 04:30:23,3567,13.1563,16.7948
+2016-08-12 04:40:25,3567,13.1563,16.8285
+2016-08-12 04:50:28,3567,13.1563,16.7993
+2016-08-12 05:00:30,3566,13.1563,16.7613
+2016-08-12 05:10:32,3567,13.1563,16.7948
+2016-08-12 05:20:35,3566,13.1563,16.7993
+2016-08-12 05:30:37,3566,13.1563,16.8285
+2016-08-12 05:40:40,3566,13.1563,16.7963
+2016-08-12 05:50:42,3566,13.1563,16.7948
+2016-08-12 06:00:45,3566,13.1551,16.7948
+2016-08-12 06:10:47,3566,13.1563,16.7993
+2016-08-12 06:20:50,3565,13.1868,16.7948
+2016-08-12 06:30:52,3566,13.1258,16.7948
+2016-08-12 06:40:54,3566,13.1563,16.7948
+2016-08-12 06:50:57,3565,13.1258,16.7613
+2016-08-12 07:00:59,3565,13.1563,16.7948
+2016-08-12 07:11:01,3565,13.2479,16.7948
+2016-08-12 07:21:04,3565,13.2479,16.7948
+2016-08-12 07:31:06,3565,13.2479,16.7948
+2016-08-12 07:41:08,3565,13.2479,16.7948
+2016-08-12 07:51:11,3566,13.2479,16.7948
+2016-08-12 08:01:13,3541,13.2173,16.7948
+2016-08-12 08:11:16,3566,13.2479,16.7948
+2016-08-12 08:21:18,3566,13.2479,16.7948
+2016-08-12 08:31:21,3566,13.2479,16.7948
+2016-08-12 08:41:23,3566,13.2479,16.7948
+2016-08-12 08:51:26,3567,13.2479,16.7948
+2016-08-12 09:01:28,3566,13.2479,16.7948
+2016-08-12 09:11:30,3567,13.1563,16.7948
+2016-08-12 09:21:33,3566,13.1223,16.8285
+2016-08-12 09:31:35,3568,13.2479,16.7948
+2016-08-12 09:41:38,3570,13.1563,16.7948
+2016-08-12 09:51:40,3570,13.1528,16.7613
+2016-08-12 10:01:42,3571,13.1563,16.7948
+2016-08-12 10:11:45,3572,13.1563,16.7948
+2016-08-12 10:21:47,3573,13.1528,16.7948
+2016-08-12 10:31:50,3574,13.154000000000002,16.7948
+2016-08-12 10:41:52,3574,13.1563,16.8285
+2016-08-12 10:51:55,3575,13.1563,16.7948
+2016-08-12 11:01:57,3576,13.1563,16.7948
+2016-08-12 11:12:00,3577,13.1563,16.7613
+2016-08-12 11:22:02,3577,13.1563,16.7948
+2016-08-12 11:32:05,3578,13.1563,16.7948
+2016-08-12 11:42:07,3579,13.1258,16.7948
+2016-08-12 11:52:09,3580,13.2479,16.7948
+2016-08-12 12:02:12,3581,13.2479,16.7613
+2016-08-12 12:12:14,3581,13.2479,16.7948
+2016-08-12 12:22:17,3582,13.2479,16.7948
+2016-08-12 12:32:20,3583,13.2479,16.7948
+2016-08-12 12:42:22,3584,13.2479,16.7948
+2016-08-12 12:52:25,3584,13.2467,16.7948
+2016-08-12 13:02:27,3585,13.2479,16.824
+2016-08-12 13:12:30,3585,13.2479,16.7948
+2016-08-12 13:22:32,3585,13.2479,16.7904
+2016-08-12 13:32:35,3585,13.2479,16.7904
+2016-08-12 13:42:37,3586,13.2785,16.7553
+2016-08-12 13:52:40,3586,13.2479,16.7904
+2016-08-12 14:02:42,3587,13.2173,16.7904
+2016-08-12 14:12:45,3587,13.2479,16.7904
+2016-08-12 14:22:47,3588,13.2479,16.7904
+2016-08-12 14:32:50,3588,13.2479,16.7904
+2016-08-12 14:42:52,3588,13.2479,16.7904
+2016-08-12 14:52:55,3590,13.2479,16.7904
+2016-08-12 15:02:57,3591,13.2173,16.7948
+2016-08-12 15:13:00,3592,13.2479,16.7904
+2016-08-12 15:23:03,3592,13.2503,16.7553
+2016-08-12 15:33:05,3593,13.1586,16.7904
+2016-08-12 15:43:08,3593,13.2503,16.7568
+2016-08-12 15:53:10,3593,13.2197,16.7904
+2016-08-12 16:03:13,3593,13.1586,16.7904
+2016-08-12 16:13:16,3593,13.2503,16.7948
+2016-08-12 16:23:19,3593,13.255,16.7904
+2016-08-12 16:33:22,3593,13.255,16.7904
+2016-08-12 16:43:24,3593,13.255,16.7904
+2016-08-12 16:53:27,3594,13.2256,16.7904
+2016-08-12 17:03:30,3593,13.1621,16.7568
+2016-08-12 17:13:32,3593,13.1352,16.7568
+2016-08-12 17:23:35,3594,13.2573,16.7904
+2016-08-12 17:33:37,3594,13.2573,16.7889
+2016-08-12 17:43:40,3593,13.2573,16.7904
+2016-08-12 17:53:43,3594,13.2573,16.824
+2016-08-12 18:03:45,3593,13.2573,16.7904
+2016-08-12 18:13:48,3593,13.2573,16.7904
+2016-08-12 18:23:50,3593,13.2573,16.7904
+2016-08-12 18:33:53,3593,13.2573,16.7904
+2016-08-12 18:43:56,3593,13.2268,16.7904
+2016-08-12 18:53:58,3593,13.2573,16.7904
+2016-08-12 19:04:01,3593,13.2573,16.7568
+2016-08-12 19:14:03,3593,13.2573,16.7904
+2016-08-12 19:24:06,3593,13.2573,16.7904
+2016-08-12 19:34:09,3593,13.2585,16.7904
+2016-08-12 19:44:11,3593,13.2891,16.7568
+2016-08-12 19:54:14,3592,13.2573,16.7904
+2016-08-12 20:04:16,3592,13.2573,16.7904
+2016-08-12 20:14:19,3592,13.2573,16.7904
+2016-08-12 20:24:22,3592,13.2573,16.7889
+2016-08-12 20:34:24,3592,13.2573,16.7904
+2016-08-12 20:44:27,3592,13.2268,16.7948
+2016-08-12 20:54:29,3592,13.2621,16.7904
+2016-08-12 21:04:32,3592,13.2621,16.7904
+2016-08-12 21:14:35,3592,13.2609,16.7904
+2016-08-12 21:24:37,3592,13.2609,16.7904
+2016-08-12 21:34:40,3592,13.2609,16.7904
+2016-08-12 21:44:42,3591,13.2621,16.7904
+2016-08-12 21:54:45,3591,13.2609,16.7904
+2016-08-12 22:04:47,3591,13.2644,16.7904
+2016-08-12 22:14:50,3590,13.2621,16.7904
+2016-08-12 22:24:52,3590,13.2621,16.7904
+2016-08-12 22:34:55,3590,13.2303,16.7904
+2016-08-12 22:44:57,3590,13.295,16.7948
+2016-08-12 22:55:00,3590,13.1739,16.7904
+2016-08-12 23:05:02,3589,13.2644,16.7948
+2016-08-12 23:15:05,3590,13.2656,16.7904
+2016-08-12 23:25:08,3589,13.2656,16.7904
+2016-08-12 23:35:10,3589,13.2644,16.7904
+2016-08-12 23:45:13,3589,13.2656,16.7904
+2016-08-12 23:55:15,3588,13.235,16.7904
+2016-08-13 00:05:18,3588,13.2691,16.7948
+2016-08-13 00:15:20,3588,13.2656,16.7948
+2016-08-13 00:25:23,3588,13.2656,16.7613
+2016-08-13 00:35:26,3588,13.2668,16.7904
+2016-08-13 00:45:28,3588,13.2668,16.7948
+2016-08-13 00:55:30,3588,13.2986,16.7948
+2016-08-13 01:05:33,3588,13.268,16.7934
+2016-08-13 01:15:35,3588,13.2668,16.7948
+2016-08-13 01:25:37,3588,13.268,16.7948
+2016-08-13 01:35:39,3588,13.2691,16.7948
+2016-08-13 01:45:42,3588,13.268,16.7948
+2016-08-13 01:55:44,3588,13.2668,16.8285
+2016-08-13 02:05:47,3587,13.2373,16.7613
+2016-08-13 02:15:50,3588,13.2668,16.7934
+2016-08-13 02:25:52,3587,13.2668,16.7948
+2016-08-13 02:35:54,3587,13.268,16.7934
+2016-08-13 02:45:57,3587,13.268,16.7948
+2016-08-13 02:56:00,3587,13.268,16.7948
+2016-08-13 03:06:02,3587,13.2715,16.7613
+2016-08-13 03:16:05,3587,13.268,16.7948
+2016-08-13 03:26:07,3586,13.2668,16.7948
+2016-08-13 03:36:09,3586,13.268,16.8285
+2016-08-13 03:46:12,3586,13.2715,16.7613
+2016-08-13 03:56:14,3586,13.2715,16.7948
+2016-08-13 04:06:17,3586,13.2715,16.8285
+2016-08-13 04:16:19,3585,13.2715,16.7948
+2016-08-13 04:26:22,3585,13.2715,16.7948
+2016-08-13 04:36:24,3585,13.2715,16.7948
+2016-08-13 04:46:26,3585,13.3021,16.7948
+2016-08-13 04:56:29,3585,13.2409,16.8285
+2016-08-13 05:06:31,3585,13.2703,16.7948
+2016-08-13 05:16:34,3584,13.2715,16.7993
+2016-08-13 05:26:36,3584,13.2715,16.7948
+2016-08-13 05:36:39,3584,13.2715,16.7948
+2016-08-13 05:46:41,3584,13.2715,16.7948
+2016-08-13 05:56:44,3583,13.2715,16.7934
+2016-08-13 06:06:46,3583,13.2715,16.7993
+2016-08-13 06:16:49,3583,13.2703,16.7948
+2016-08-13 06:26:51,3582,13.2715,16.7948
+2016-08-13 06:36:54,3582,13.2715,16.7948
+2016-08-13 06:46:56,3582,13.2715,16.7948
+2016-08-13 06:56:59,3582,13.2703,16.7613
+2016-08-13 07:07:01,3582,13.2703,16.7613
+2016-08-13 07:17:03,3582,13.2715,16.7657
+2016-08-13 07:27:06,3582,13.2715,16.7948
+2016-08-13 07:37:08,3582,13.2715,16.7948
+2016-08-13 07:47:11,3583,13.2715,16.7993
+2016-08-13 07:57:13,3583,13.2715,16.7993
+2016-08-13 08:07:16,3583,13.2703,16.7993
+2016-08-13 08:17:18,3584,13.2715,16.7993
+2016-08-13 08:27:21,3585,13.2397,16.7978
+2016-08-13 08:37:23,3586,13.3021,16.7948
+2016-08-13 08:47:26,3586,13.2715,16.7993
+2016-08-13 08:57:29,3587,13.2715,16.7657
+2016-08-13 09:07:31,3588,13.2715,16.7993
+2016-08-13 09:17:34,3588,13.2409,16.7993
+2016-08-13 09:27:36,3590,13.2703,16.7993
+2016-08-13 09:37:39,3592,13.2715,16.8329
+2016-08-13 09:47:42,3593,13.2715,16.7993
+2016-08-13 09:57:45,3595,13.1785,16.8329
+2016-08-13 10:07:47,3595,13.1797,16.7993
+2016-08-13 10:17:49,3597,13.1797,16.7993
+2016-08-13 10:27:52,3598,13.1492,16.7993
+2016-08-13 10:37:54,3598,13.1797,16.7993
+2016-08-13 10:47:57,3599,13.1797,16.7993
+2016-08-13 10:57:59,3599,13.1797,16.7993
+2016-08-13 11:08:02,3599,13.1797,16.7993
+2016-08-13 11:18:05,3599,13.1785,16.7993
+2016-08-13 11:28:07,3599,13.1187,16.7993
+2016-08-13 11:38:10,3599,13.1187,16.7657
+2016-08-13 11:48:13,3599,13.0882,16.7993
+2016-08-13 11:58:16,3599,13.0566,16.7657
+2016-08-13 12:08:18,3599,13.1187,16.6987
+2016-08-13 12:18:21,3599,13.087,16.6652
+2016-08-13 12:28:24,3599,13.1187,16.6652
+2016-08-13 12:38:27,3599,13.0917,16.6987
+2016-08-13 12:48:30,3599,13.0003,16.6652
+2016-08-13 12:58:32,3599,13.0003,16.6652
+2016-08-13 13:08:35,3599,13.0038,16.6987
+2016-08-13 13:18:38,3599,13.0027,16.6987
+2016-08-13 13:28:41,3599,13.0038,16.6987
+2016-08-13 13:38:44,3599,13.0027,16.6987
+2016-08-13 13:48:46,3599,13.0331,16.6987
+2016-08-13 13:58:49,3599,13.0331,16.7002
+2016-08-13 14:08:52,3599,12.9769,16.6652
+2016-08-13 14:18:55,3599,13.0061,16.6637
+2016-08-13 14:28:58,3599,13.0061,16.6696
+2016-08-13 14:39:01,3599,13.0061,16.6682
+2016-08-13 14:49:04,3599,13.0073,16.6682
+2016-08-13 14:59:07,3599,12.9757,16.7016
+2016-08-13 15:09:09,3599,13.0061,16.6637
+2016-08-13 15:19:12,3599,13.0061,16.7016
+2016-08-13 15:29:15,3599,13.0073,16.6972
+2016-08-13 15:39:18,3599,12.9757,16.6652
+2016-08-13 15:49:21,3599,13.0061,16.7016
+2016-08-13 15:59:24,3599,12.9769,16.6696
+2016-08-13 16:09:27,3599,13.0061,16.6682
+2016-08-13 16:19:30,3599,12.9769,16.6682
+2016-08-13 16:29:33,3599,13.0096,16.6682
+2016-08-13 16:39:36,3599,13.0096,16.6682
+2016-08-13 16:49:38,3599,13.0096,16.6682
+2016-08-13 16:59:41,3599,13.0131,16.6347
+2016-08-13 17:09:44,3599,13.04,16.6013
+2016-08-13 17:19:54,3599,13.0131,16.5679
+2016-08-13 17:29:57,3599,12.9815,16.5679
+2016-08-13 17:40:00,3599,12.9826,16.5694
+2016-08-13 17:50:03,3599,13.0154,16.6347
+2016-08-13 18:00:05,3599,13.0131,16.5679
+2016-08-13 18:10:08,3599,13.047,16.6013
+2016-08-13 18:20:11,3599,13.0165,16.6013
+2016-08-13 18:30:14,3599,13.0165,16.6362
+2016-08-13 18:40:17,3599,12.9861,16.6028
+2016-08-13 18:50:19,3599,12.9861,16.6362
+2016-08-13 19:00:22,3599,13.0165,16.6013
+2016-08-13 19:10:25,3599,13.0165,16.6013
+2016-08-13 19:20:28,3599,13.047,16.6362
+2016-08-13 19:30:32,3599,13.0165,16.6347
+2016-08-13 19:40:36,3599,13.0165,16.5679
+2016-08-13 19:50:38,3599,12.9861,16.5679
+2016-08-13 20:00:41,3599,13.0165,16.6013
+2016-08-13 20:10:44,3599,13.0165,16.6347
+2016-08-13 20:20:47,3599,13.0165,16.6028
+2016-08-13 20:30:50,3599,12.985,16.6013
+2016-08-13 20:40:53,3599,13.047,16.6362
+2016-08-13 20:50:55,3599,13.0165,16.6347
+2016-08-13 21:00:58,3599,13.0177,16.5679
+2016-08-13 21:11:01,3599,13.0165,16.6013
+2016-08-13 21:21:04,3599,13.047,16.5694
+2016-08-13 21:31:06,3599,13.0165,16.6013
+2016-08-13 21:41:09,3599,13.0165,16.6028
+2016-08-13 21:51:12,3599,13.0165,16.6013
+2016-08-13 22:01:14,3598,13.047,16.6347
+2016-08-13 22:11:17,3598,13.0165,16.6347
+2016-08-13 22:21:20,3598,13.0165,16.6347
+2016-08-13 22:31:22,3598,13.047,16.6362
+2016-08-13 22:41:25,3598,13.047,16.6362
+2016-08-13 22:51:28,3598,13.0165,16.6347
+2016-08-13 23:01:31,3598,13.047,16.6362
+2016-08-13 23:11:33,3598,13.0165,16.6347
+2016-08-13 23:21:36,3597,13.047,16.6362
+2016-08-13 23:31:38,3597,13.047,16.6347
+2016-08-13 23:41:41,3597,13.0165,16.6362
+2016-08-13 23:51:44,3596,13.047,16.6347
+2016-08-14 00:01:47,3596,13.0458,16.6362
+2016-08-14 00:11:49,3595,13.0165,16.6362
+2016-08-14 00:21:52,3595,13.0165,16.6028
+2016-08-14 00:31:55,3595,13.0165,16.6377
+2016-08-14 00:41:57,3595,13.0177,16.6013
+2016-08-14 00:52:00,3594,13.0165,16.6347
+2016-08-14 01:02:03,3593,13.047,16.6362
+2016-08-14 01:12:05,3593,13.047,16.6013
+2016-08-14 01:22:08,3593,13.047,16.6362
+2016-08-14 01:32:11,3593,13.0458,16.6347
+2016-08-14 01:42:13,3592,13.0165,16.6028
+2016-08-14 01:52:16,3592,13.0165,16.6043
+2016-08-14 02:02:19,3592,13.047,16.6347
+2016-08-14 02:12:21,3591,12.9861,16.6013
+2016-08-14 02:22:24,3592,13.0165,16.6347
+2016-08-14 02:32:26,3591,12.9861,16.6013
+2016-08-14 02:42:36,3591,13.047,16.6028
+2016-08-14 02:52:39,3591,13.047,16.6347
+2016-08-14 03:02:42,3590,13.0165,16.6347
+2016-08-14 03:12:44,3590,13.0165,16.6362
+2016-08-14 03:22:47,3590,12.9861,16.6392
+2016-08-14 03:32:49,3589,13.0165,16.6013
+2016-08-14 03:42:52,3589,13.0165,16.6057
+2016-08-14 03:52:54,3589,13.0458,16.6072
+2016-08-14 04:02:57,3588,13.0165,16.6072
+2016-08-14 04:13:00,3588,13.047,16.6057
+2016-08-14 04:23:02,3588,13.0119,16.6406
+2016-08-14 04:33:04,3588,13.0482,16.6406
+2016-08-14 04:43:07,3588,13.047,16.6392
+2016-08-14 04:53:09,3587,13.0131,16.6406
+2016-08-14 05:03:12,3587,13.0435,16.5724
+2016-08-14 05:13:14,3587,13.0131,16.6406
+2016-08-14 05:23:17,3587,13.0131,16.6392
+2016-08-14 05:33:19,3586,13.0435,16.6406
+2016-08-14 05:43:22,3586,13.0435,16.6072
+2016-08-14 05:53:25,3586,13.0131,16.6392
+2016-08-14 06:03:27,3586,13.0131,16.6392
+2016-08-14 06:13:30,3586,13.0131,16.6406
+2016-08-14 06:23:32,3586,13.0435,16.6072
+2016-08-14 06:33:35,3586,13.0108,16.6057
+2016-08-14 06:43:37,3586,13.0096,16.6392
+2016-08-14 06:53:40,3586,13.0412,16.6057
+2016-08-14 07:03:42,3586,13.0412,16.6057
+2016-08-14 07:13:45,3586,13.04,16.6072
+2016-08-14 07:23:47,3587,13.0108,16.6057
+2016-08-14 07:33:50,3587,12.9803,16.6436
+2016-08-14 07:43:53,3587,13.0412,16.6117
+2016-08-14 07:53:55,3587,13.0108,16.6406
+2016-08-14 08:03:58,3588,13.0096,16.6406
+2016-08-14 08:14:00,3588,13.04,16.6406
+2016-08-14 08:24:03,3588,13.0096,16.6392
+2016-08-14 08:34:06,3588,13.0108,16.6072
+2016-08-14 08:44:08,3588,13.0366,16.6392
+2016-08-14 08:54:11,3588,13.0096,16.5738
+2016-08-14 09:04:13,3589,13.0412,16.6057
+2016-08-14 09:14:16,3590,13.0108,16.6072
+2016-08-14 09:24:19,3590,12.9792,16.6406
+2016-08-14 09:34:21,3591,13.0377,16.6117
+2016-08-14 09:44:24,3592,13.0108,16.6057
+2016-08-14 09:54:26,3592,13.0377,16.6436
+2016-08-14 10:04:29,3592,13.0096,16.6436
+2016-08-14 10:14:31,3593,13.0061,16.6057
+2016-08-14 10:24:34,3594,13.0084,16.6072
+2016-08-14 10:34:37,3594,13.0412,16.6102
+2016-08-14 10:44:39,3595,13.0108,16.6451
+2016-08-14 10:54:42,3595,13.0412,16.6436
+2016-08-14 11:04:44,3595,13.0377,16.6057
+2016-08-14 11:14:47,3595,13.0073,16.6392
+2016-08-14 11:24:49,3596,13.005,16.5738
+2016-08-14 11:34:52,3597,13.0061,16.6392
+2016-08-14 11:44:54,3597,12.9792,16.6117
+2016-08-14 11:54:57,3598,13.0377,16.6406
+2016-08-14 12:05:00,3598,13.0354,16.6102
+2016-08-14 12:15:02,3598,13.0412,16.6102
+2016-08-14 12:25:05,3598,13.0061,16.6436
+2016-08-14 12:35:07,3598,13.0108,16.5768
+2016-08-14 12:45:10,3598,12.9196,16.6406
+2016-08-14 12:55:13,3598,12.9488,16.6117
+2016-08-14 13:05:15,3598,12.9196,16.6451
+2016-08-14 13:15:18,3598,12.9173,16.5116
+2016-08-14 13:25:21,3598,12.9161,16.5449
+2016-08-14 13:35:23,3598,12.9184,16.5101
+2016-08-14 13:45:25,3599,12.9184,16.5057
+2016-08-14 13:55:28,3599,12.9184,16.5434
+2016-08-14 14:05:30,3599,12.9184,16.5101
+2016-08-14 14:15:33,3599,12.9173,16.5101
+2016-08-14 14:25:36,3599,12.9184,16.5057
+2016-08-14 14:35:38,3599,12.9488,16.5101
+2016-08-14 14:45:41,3599,12.9184,16.5101
+2016-08-14 14:55:44,3599,12.9196,16.5434
+2016-08-14 15:05:46,3599,12.8275,16.5101
+2016-08-14 15:15:49,3599,12.8578,16.5116
+2016-08-14 15:25:51,3599,12.8286,16.5101
+2016-08-14 15:35:54,3599,12.8275,16.4783
+2016-08-14 15:45:57,3599,12.8309,16.4768
+2016-08-14 15:55:59,3599,12.8286,16.5101
+2016-08-14 16:06:02,3599,12.8309,16.5101
+2016-08-14 16:16:05,3599,12.8309,16.5116
+2016-08-14 16:26:07,3599,12.8612,16.5116
+2016-08-14 16:36:10,3599,12.8309,16.5101
+2016-08-14 16:46:12,3599,12.8309,16.5101
+2016-08-14 16:56:15,3599,12.8612,16.5101
+2016-08-14 17:06:18,3599,12.8309,16.5434
+2016-08-14 17:16:20,3599,12.8309,16.5101
+2016-08-14 17:26:23,3599,12.8612,16.5116
+2016-08-14 17:36:26,3599,12.8635,16.5101
+2016-08-14 17:46:28,3599,12.8309,16.5101
+2016-08-14 17:56:31,3599,12.8612,16.5101
+2016-08-14 18:06:33,3599,12.8343,16.5116
+2016-08-14 18:16:36,3599,12.8343,16.5101
+2016-08-14 18:26:39,3599,12.8343,16.5116
+2016-08-14 18:36:41,3599,12.8647,16.5116
+2016-08-14 18:46:44,3599,12.8647,16.539
+2016-08-14 18:56:46,3599,12.8343,16.5101
+2016-08-14 19:06:49,3599,12.8332,16.5101
+2016-08-14 19:16:52,3599,12.8343,16.5116
+2016-08-14 19:26:54,3599,12.8647,16.5101
+2016-08-14 19:36:57,3599,12.8343,16.5116
+2016-08-14 19:46:59,3599,12.8635,16.5101
+2016-08-14 19:57:02,3598,12.8647,16.5449
+2016-08-14 20:07:05,3598,12.8343,16.4768
+2016-08-14 20:17:07,3599,12.8647,16.5434
+2016-08-14 20:27:10,3598,12.8355,16.5101
+2016-08-14 20:37:12,3598,12.8647,16.5101
+2016-08-14 20:47:15,3598,12.8647,16.5101
+2016-08-14 20:57:17,3598,12.8647,16.5116
+2016-08-14 21:07:20,3598,12.8343,16.5116
+2016-08-14 21:17:22,3598,12.8647,16.5116
+2016-08-14 21:27:25,3597,12.8332,16.5101
+2016-08-14 21:37:27,3596,12.8343,16.5434
+2016-08-14 21:47:30,3595,12.8343,16.5101
+2016-08-14 21:57:32,3595,12.8343,16.5116
+2016-08-14 22:07:34,3595,12.8343,16.5101
+2016-08-14 22:17:37,3594,12.8343,16.5116
+2016-08-14 22:27:39,3593,12.8635,16.5116
+2016-08-14 22:37:42,3593,12.8647,16.5449
+2016-08-14 22:47:46,3593,12.8647,16.5116
+2016-08-14 22:57:49,3593,12.8343,16.5116
+2016-08-14 23:07:52,3592,12.8343,16.5116
+2016-08-14 23:17:55,3592,12.8343,16.5101
+2016-08-14 23:27:57,3592,12.8343,16.5101
+2016-08-14 23:38:00,3592,12.8343,16.5449
+2016-08-14 23:48:02,3591,12.8647,16.5101
+2016-08-14 23:58:05,3590,12.8343,16.5116
+2016-08-15 00:08:08,3590,12.8343,16.5101
+2016-08-15 00:18:10,3590,12.8647,16.5101
+2016-08-15 00:28:13,3590,12.8647,16.5116
+2016-08-15 00:38:15,3589,12.8343,16.5116
+2016-08-15 00:48:18,3589,12.8647,16.5101
+2016-08-15 00:58:20,3588,12.8309,16.5101
+2016-08-15 01:08:23,3588,12.8647,16.5101
+2016-08-15 01:18:25,3588,12.8612,16.5116
+2016-08-15 01:28:28,3588,12.8309,16.5101
+2016-08-15 01:38:30,3588,12.8309,16.5116
+2016-08-15 01:48:33,3588,12.8612,16.5101
+2016-08-15 01:58:36,3588,12.8309,16.5116
+2016-08-15 02:08:38,3588,12.8298,16.5116
+2016-08-15 02:18:41,3587,12.8612,16.516
+2016-08-15 02:28:44,3587,12.8309,16.5116
+2016-08-15 02:38:46,3587,12.8309,16.5101
+2016-08-15 02:48:49,3587,12.8309,16.5434
+2016-08-15 02:58:51,3586,12.9522,16.5116
+2016-08-15 03:08:54,3586,12.9196,16.5145
+2016-08-15 03:18:56,3585,12.9196,16.5116
+2016-08-15 03:28:59,3585,12.9219,16.5145
+2016-08-15 03:39:01,3585,12.8275,16.5493
+2016-08-15 03:49:04,3585,12.8275,16.516
+2016-08-15 03:59:06,3585,12.8589,16.5101
+2016-08-15 04:09:09,3584,12.8578,16.5145
+2016-08-15 04:19:11,3584,12.8286,16.5145
+2016-08-15 04:29:14,3583,12.8589,16.516
+2016-08-15 04:39:16,3583,12.8275,16.5479
+2016-08-15 04:49:19,3583,12.8275,16.5145
+2016-08-15 04:59:21,3582,12.8589,16.516
+2016-08-15 05:09:24,3582,12.9488,16.516
+2016-08-15 05:19:26,3582,12.8544,16.5145
+2016-08-15 05:29:29,3581,12.8241,16.5145
+2016-08-15 05:39:31,3581,12.8555,16.516
+2016-08-15 05:49:34,3581,12.8544,16.516
+2016-08-15 05:59:36,3581,12.8544,16.5493
+2016-08-15 06:09:39,3581,12.8544,16.516
+2016-08-15 06:19:41,3581,12.8555,16.5116
+2016-08-15 06:29:44,3580,12.8555,16.5434
+2016-08-15 06:39:46,3580,12.8555,16.516
+2016-08-15 06:49:49,3580,12.9465,16.5493
+2016-08-15 06:59:51,3579,12.9161,16.516
+2016-08-15 07:09:54,3580,12.915,16.516
+2016-08-15 07:19:56,3580,12.9138,16.5145
+2016-08-15 07:29:58,3580,12.915,16.5479
+2016-08-15 07:40:01,3580,12.9161,16.5479
+2016-08-15 07:50:03,3580,12.9453,16.5449
+2016-08-15 08:00:05,3581,12.9465,16.516
+2016-08-15 08:10:07,3580,12.9453,16.5145
+2016-08-15 08:20:09,3581,12.8544,16.5145
+2016-08-15 08:30:12,3581,12.8252,16.516
+2016-08-15 08:40:14,3581,12.8252,16.516
+2016-08-15 08:50:17,3582,12.8252,16.5145
+2016-08-15 09:00:19,3583,12.8241,16.5145
+2016-08-15 09:10:22,3584,12.915,16.5116
+2016-08-15 09:20:24,3584,12.8544,16.516
+2016-08-15 09:30:27,3584,12.8241,16.5116
+2016-08-15 09:40:29,3585,12.8241,16.5145
+2016-08-15 09:50:32,3586,12.8241,16.5145
+2016-08-15 10:00:34,3587,12.8544,16.5493
+2016-08-15 10:10:37,3588,12.8555,16.516
+2016-08-15 10:20:39,3588,12.8252,16.516
+2016-08-15 10:30:42,3589,12.8555,16.5145
+2016-08-15 10:40:45,3591,12.8241,16.5116
+2016-08-15 10:50:47,3593,12.8544,16.5145
+2016-08-15 11:00:50,3593,12.8532,16.5479
+2016-08-15 11:10:52,3593,12.8544,16.516
+2016-08-15 11:20:55,3594,12.8252,16.5101
+2016-08-15 11:30:57,3595,12.8241,16.5434
+2016-08-15 11:41:00,3595,12.8544,16.5116
+2016-08-15 11:51:03,3595,12.8241,16.516
+2016-08-15 12:01:05,3596,12.8241,16.5116
+2016-08-15 12:11:08,3596,12.8241,16.4783
+2016-08-15 12:21:10,3597,12.7345,16.516
+2016-08-15 12:31:13,3597,12.7345,16.5145
+2016-08-15 12:41:16,3598,12.7636,16.5116
+2016-08-15 12:51:18,3598,12.7636,16.516
+2016-08-15 13:01:21,3598,12.7323,16.5101
+2016-08-15 13:11:23,3598,12.7334,16.5116
+2016-08-15 13:21:26,3598,12.7345,16.5116
+2016-08-15 13:31:28,3598,12.7345,16.4436
+2016-08-15 13:41:31,3598,12.7345,16.4162
+2016-08-15 13:51:33,3598,12.7938,16.4119
+2016-08-15 14:01:36,3598,12.7334,16.4104
+2016-08-15 14:11:39,3598,12.7345,16.4104
+2016-08-15 14:21:41,3598,12.7345,16.4104
+2016-08-15 14:31:44,3598,12.7334,16.4104
+2016-08-15 14:41:46,3599,12.7636,16.4104
+2016-08-15 14:51:49,3598,12.7334,16.4119
+2016-08-15 15:01:52,3598,12.7334,16.4119
+2016-08-15 15:11:54,3598,12.7334,16.4436
+2016-08-15 15:21:57,3599,12.7334,16.4104
+2016-08-15 15:32:00,3599,12.7334,16.4148
+2016-08-15 15:42:02,3599,12.7334,16.4451
+2016-08-15 15:52:05,3599,12.7625,16.4104
+2016-08-15 16:02:08,3598,12.7323,16.4119
+2016-08-15 16:12:10,3599,12.7334,16.4104
+2016-08-15 16:22:13,3599,12.7636,16.4162
+2016-08-15 16:32:16,3599,12.7636,16.4104
+2016-08-15 16:42:18,3599,12.7636,16.4104
+2016-08-15 16:52:21,3599,12.7334,16.4104
+2016-08-15 17:02:23,3599,12.7345,16.4119
+2016-08-15 17:12:25,3599,12.7334,16.4119
+2016-08-15 17:22:28,3599,12.7345,16.4104
+2016-08-15 17:32:31,3599,12.7334,16.4119
+2016-08-15 17:42:33,3599,12.7647,16.4451
+2016-08-15 17:52:36,3599,12.7334,16.4119
+2016-08-15 18:02:38,3598,12.7323,16.4104
+2016-08-15 18:12:41,3598,12.7334,16.4119
+2016-08-15 18:22:43,3598,12.7938,16.4436
+2016-08-15 18:32:46,3598,12.7636,16.4451
+2016-08-15 18:42:49,3598,12.7938,16.4436
+2016-08-15 18:52:51,3598,12.7636,16.4104
+2016-08-15 19:02:54,3598,12.7334,16.4104
+2016-08-15 19:12:56,3598,12.7334,16.4119
+2016-08-15 19:22:59,3598,12.7334,16.4104
+2016-08-15 19:33:01,3598,12.7334,16.4104
+2016-08-15 19:43:04,3598,12.7334,16.4104
+2016-08-15 19:53:07,3597,12.7334,16.4104
+2016-08-15 20:03:09,3596,12.7323,16.4119
+2016-08-15 20:13:12,3596,12.7345,16.4119
+2016-08-15 20:23:14,3595,12.7636,16.4119
+2016-08-15 20:33:17,3595,12.7334,16.4119
+2016-08-15 20:43:19,3594,12.7636,16.4104
+2016-08-15 20:53:22,3593,12.7636,16.4119
+2016-08-15 21:03:24,3593,12.7334,16.4104
+2016-08-15 21:13:27,3592,12.7636,16.4451
+2016-08-15 21:23:29,3591,12.7345,16.4104
+2016-08-15 21:33:32,3590,12.7334,16.4451
+2016-08-15 21:43:34,3589,12.7636,16.4436
+2016-08-15 21:53:37,3588,12.7345,16.4119
+2016-08-15 22:03:39,3588,12.7636,16.4119
+2016-08-15 22:13:42,3587,12.7334,16.4104
+2016-08-15 22:23:44,3586,12.7647,16.4104
+2016-08-15 22:33:46,3586,12.7625,16.4104
+2016-08-15 22:43:49,3586,12.7636,16.4119
+2016-08-15 22:53:51,3585,12.7647,16.4104
+2016-08-15 23:03:54,3585,12.7636,16.4119
+2016-08-15 23:13:56,3584,12.7647,16.4119
+2016-08-15 23:23:59,3583,12.7323,16.4119
+2016-08-15 23:34:01,3583,12.7636,16.4119
+2016-08-15 23:44:04,3582,12.7636,16.4119
+2016-08-15 23:54:06,3581,12.7334,16.4104
+2016-08-16 00:04:09,3581,12.7636,16.4104
+2016-08-16 00:14:11,3581,12.7636,16.4119
+2016-08-16 00:24:13,3580,12.7636,16.4436
+2016-08-16 00:34:16,3580,12.7647,16.4119
+2016-08-16 00:44:18,3579,12.7334,16.4436
+2016-08-16 00:54:21,3579,12.7345,16.4104
+2016-08-16 01:04:23,3578,12.7636,16.4104
+2016-08-16 01:14:25,3577,12.7345,16.4451
+2016-08-16 01:24:28,3577,12.7334,16.4451
+2016-08-16 01:34:30,3577,12.7636,16.4119
+2016-08-16 01:44:32,3577,12.7625,16.4451
+2016-08-16 01:54:35,3576,12.7334,16.4119
+2016-08-16 02:04:38,3576,12.7334,16.4451
+2016-08-16 02:14:41,3576,12.7602,16.4451
+2016-08-16 02:24:44,3576,12.7311,16.4436
+2016-08-16 02:34:46,3575,12.7938,16.4119
+2016-08-16 02:44:48,3574,12.7613,16.4119
+2016-08-16 02:54:51,3574,12.7311,16.4119
+2016-08-16 03:04:53,3574,12.7602,16.4436
+2016-08-16 03:14:55,3574,12.73,16.4451
+2016-08-16 03:24:58,3574,12.7602,16.4451
+2016-08-16 03:35:00,3574,12.73,16.4119
+2016-08-16 03:45:02,3574,12.7602,16.4436
+2016-08-16 03:55:05,3573,12.7579,16.4104
+2016-08-16 04:05:07,3572,12.7266,16.4119
+2016-08-16 04:15:09,3572,12.7311,16.4104
+2016-08-16 04:25:12,3572,12.7266,16.4436
+2016-08-16 04:35:14,3572,12.7277,16.4104
+2016-08-16 04:45:17,3571,12.7568,16.4119
+2016-08-16 04:55:19,3571,12.787,16.4451
+2016-08-16 05:05:21,3570,12.7277,16.4104
+2016-08-16 05:15:24,3570,12.7545,16.4104
+2016-08-16 05:25:26,3570,12.7579,16.4451
+2016-08-16 05:35:28,3569,12.7545,16.4436
+2016-08-16 05:45:30,3569,12.7545,16.4436
+2016-08-16 05:55:33,3568,12.7534,16.4104
+2016-08-16 06:05:35,3567,12.7534,16.4451
+2016-08-16 06:15:37,3567,12.7545,16.4119
+2016-08-16 06:25:40,3567,12.7545,16.4104
+2016-08-16 06:35:42,3567,12.7243,16.4436
+2016-08-16 06:45:44,3566,12.7545,16.3125
+2016-08-16 06:55:47,3567,12.7847,16.4104
+2016-08-16 07:05:49,3566,12.7243,16.3456
+2016-08-16 07:15:51,3566,12.7232,16.3441
+2016-08-16 07:25:54,3566,12.7243,16.3125
+2016-08-16 07:35:56,3567,12.7545,16.3456
+2016-08-16 07:45:58,3567,12.7545,16.3456
+2016-08-16 07:56:01,3568,12.7545,16.3125
+2016-08-16 08:06:03,3569,12.7545,16.3787
+2016-08-16 08:16:05,3569,12.7232,16.3125
+2016-08-16 08:26:07,3570,12.7232,16.3456
+2016-08-16 08:36:10,3571,12.7545,16.3787
+2016-08-16 08:46:12,3572,12.7847,16.3441
+2016-08-16 08:56:14,3573,12.7545,16.3441
+2016-08-16 09:06:17,3574,12.720999999999998,16.3125
+2016-08-16 09:16:19,3575,12.7802,16.3456
+2016-08-16 09:26:22,3577,12.7198,16.3125
+2016-08-16 09:36:24,3577,12.7198,16.3456
+2016-08-16 09:46:27,3579,12.7198,16.3397
+2016-08-16 09:56:29,3581,12.7511,16.3412
+2016-08-16 10:06:32,3582,12.7511,16.3456
+2016-08-16 10:16:35,3585,12.75,16.3067
+2016-08-16 10:26:37,3586,12.7813,16.3067
+2016-08-16 10:36:40,3587,12.7198,16.311
+2016-08-16 10:46:42,3588,12.7523,16.3067
+2016-08-16 10:56:45,3589,12.7813,16.3067
+2016-08-16 11:06:47,3591,12.720999999999998,16.3067
+2016-08-16 11:16:49,3592,12.7198,16.3397
+2016-08-16 11:26:52,3593,12.720999999999998,16.3067
+2016-08-16 11:36:54,3593,12.75,16.3412
+2016-08-16 11:46:56,3595,12.720999999999998,16.3397
+2016-08-16 11:57:07,3595,12.720999999999998,16.3397
+2016-08-16 12:07:10,3595,12.6629,16.3412
+2016-08-16 12:17:12,3596,12.6328,16.3067
+2016-08-16 12:27:15,3597,12.6328,16.3067
+2016-08-16 12:37:18,3597,12.6328,16.3067
+2016-08-16 12:47:20,3598,12.6641,16.3067
+2016-08-16 12:57:23,3598,12.634,16.3081
+2016-08-16 13:07:26,3598,12.634,16.2076
+2016-08-16 13:17:29,3598,12.6629,16.242
+2016-08-16 13:27:31,3598,12.6629,16.2406
+2016-08-16 13:37:34,3599,12.6641,16.2406
+2016-08-16 13:47:37,3599,12.634,16.2091
+2016-08-16 13:57:39,3599,12.634,16.2406
+2016-08-16 14:07:42,3599,12.6629,16.242
+2016-08-16 14:17:45,3599,12.5727,16.242
+2016-08-16 14:27:47,3599,12.6028,16.242
+2016-08-16 14:37:50,3599,12.5727,16.242
+2016-08-16 14:47:53,3599,12.5738,16.242
+2016-08-16 14:57:55,3599,12.5727,16.2406
+2016-08-16 15:07:58,3599,12.5438,16.2406
+2016-08-16 15:18:01,3599,12.5727,16.2076
+2016-08-16 15:28:03,3599,12.5738,16.2076
+2016-08-16 15:38:06,3599,12.5727,16.2076
+2016-08-16 15:48:09,3599,12.5738,16.2033
+2016-08-16 15:58:11,3599,12.5738,16.242
+2016-08-16 16:08:14,3599,12.5727,16.2406
+2016-08-16 16:18:17,3599,12.5727,16.2406
+2016-08-16 16:28:20,3599,12.5761,16.2091
+2016-08-16 16:38:23,3599,12.5772,16.2363
+2016-08-16 16:48:25,3599,12.5761,16.2091
+2016-08-16 16:58:28,3599,12.5761,16.2377
+2016-08-16 17:08:31,3599,12.5761,16.2406
+2016-08-16 17:18:34,3599,12.5761,16.2033
+2016-08-16 17:28:37,3599,12.5761,16.1704
+2016-08-16 17:38:39,3599,12.5772,16.2377
+2016-08-16 17:48:42,3599,12.5794,16.2091
+2016-08-16 17:58:45,3599,12.5794,16.2076
+2016-08-16 18:08:48,3599,12.5794,16.2363
+2016-08-16 18:18:50,3599,12.5494,16.2377
+2016-08-16 18:28:53,3599,12.5805,16.2377
+2016-08-16 18:38:56,3599,12.5794,16.2406
+2016-08-16 18:48:58,3599,12.5794,16.2047
+2016-08-16 18:59:01,3599,12.5794,16.2062
+2016-08-16 19:09:04,3599,12.5494,16.2047
+2016-08-16 19:19:07,3599,12.5794,16.2033
+2016-08-16 19:29:09,3599,12.6129,16.2033
+2016-08-16 19:39:12,3599,12.5794,16.2363
+2016-08-16 19:49:15,3599,12.5828,16.2047
+2016-08-16 19:59:18,3599,12.5828,16.2363
+2016-08-16 20:09:20,3598,12.5828,16.2363
+2016-08-16 20:19:22,3598,12.5839,16.2033
+2016-08-16 20:29:25,3598,12.5828,16.2363
+2016-08-16 20:39:28,3598,12.5828,16.2047
+2016-08-16 20:49:30,3598,12.5828,16.2363
+2016-08-16 20:59:33,3598,12.5839,16.2377
+2016-08-16 21:09:36,3596,12.5828,16.2363
+2016-08-16 21:19:39,3595,12.6129,16.2363
+2016-08-16 21:29:41,3595,12.5839,16.2047
+2016-08-16 21:39:44,3594,12.614,16.2076
+2016-08-16 21:49:47,3593,12.5828,16.2377
+2016-08-16 21:59:49,3593,12.5817,16.2047
+2016-08-16 22:09:52,3592,12.5828,16.2033
+2016-08-16 22:19:54,3591,12.5839,16.2363
+2016-08-16 22:29:57,3590,12.5828,16.2363
+2016-08-16 22:40:00,3589,12.5839,16.242
+2016-08-16 22:50:02,3588,12.5828,16.2406
+2016-08-16 23:00:05,3588,12.5828,16.2033
+2016-08-16 23:10:07,3587,12.6129,16.242
+2016-08-16 23:20:10,3586,12.5839,16.2033
+2016-08-16 23:30:12,3585,12.5817,16.2363
+2016-08-16 23:40:15,3585,12.5794,16.2033
+2016-08-16 23:50:18,3584,12.5828,16.2363
+2016-08-17 00:10:30,3582,12.5505,16.2076
+2016-08-17 00:20:32,3581,12.5805,16.2363
+2016-08-17 00:30:35,3581,12.5794,16.242
+2016-08-17 00:40:37,3580,12.5794,16.242
+2016-08-17 00:50:40,3579,12.5805,16.2363
+2016-08-17 01:00:42,3578,12.5805,16.2091
+2016-08-17 01:10:45,3577,12.5505,16.2406
+2016-08-17 01:20:47,3577,12.5772,16.2377
+2016-08-17 01:30:50,3576,12.5772,16.2377
+2016-08-17 01:40:52,3575,12.5761,16.242
+2016-08-17 01:50:55,3574,12.5772,16.2406
+2016-08-17 02:00:57,3574,12.5772,16.2377
+2016-08-17 02:11:00,3574,12.5772,16.2047
+2016-08-17 02:21:02,3573,12.5472,16.242
+2016-08-17 02:31:05,3572,12.5727,16.2377
+2016-08-17 02:41:07,3572,12.5727,16.2406
+2016-08-17 02:51:10,3571,12.5727,16.2363
+2016-08-17 03:01:12,3571,12.6028,16.2033
+2016-08-17 03:11:15,3570,12.5727,16.2076
+2016-08-17 03:21:17,3569,12.5738,16.242
+2016-08-17 03:31:20,3569,12.5738,16.242
+2016-08-17 03:41:22,3568,12.5727,16.242
+2016-08-17 03:51:24,3567,12.5727,16.2076
+2016-08-17 04:01:27,3566,12.6028,16.2406
+2016-08-17 04:11:29,3566,12.5727,16.2377
+2016-08-17 04:21:32,3566,12.5438,16.2377
+2016-08-17 04:31:34,3565,12.6039,16.2377
+2016-08-17 04:41:37,3565,12.5738,16.2047
+2016-08-17 04:51:39,3564,12.5738,16.242
+2016-08-17 05:01:42,3564,12.5738,16.242
+2016-08-17 05:11:44,3563,12.605,16.2406
+2016-08-17 05:21:48,3563,12.5705,16.2377
+2016-08-17 05:31:51,3562,12.5738,16.242
+2016-08-17 05:41:54,3561,12.5738,16.242
+2016-08-17 05:51:56,3560,12.5405,16.242
+2016-08-17 06:01:58,3560,12.5705,16.242
+2016-08-17 06:12:01,3560,12.6016,16.242
+2016-08-17 06:22:03,3559,12.5705,16.242
+2016-08-17 06:32:06,3559,12.5671,16.2363
+2016-08-17 06:42:08,3559,12.5682,16.2377
+2016-08-17 06:52:11,3559,12.5671,16.2363
+2016-08-17 07:02:13,3559,12.5671,16.2363
+2016-08-17 07:12:16,3559,12.5671,16.2363
+2016-08-17 07:22:18,3559,12.5671,16.2377
+2016-08-17 07:32:21,3559,12.5638,16.242
+2016-08-17 07:42:23,3559,12.5649,16.2033
+2016-08-17 07:52:26,3560,12.5938,16.2363
+2016-08-17 08:02:28,3561,12.5938,16.2377
+2016-08-17 08:12:31,3561,12.5649,16.2091
+2016-08-17 08:22:33,3562,12.5638,16.2377
+2016-08-17 08:32:36,3563,12.5916,16.2693
+2016-08-17 08:42:38,3563,12.5593,16.2377
+2016-08-17 08:52:40,3564,12.5604,16.2363
+2016-08-17 09:02:50,3564,12.5604,16.2377
+2016-08-17 09:12:53,3565,12.5904,16.2033
+2016-08-17 09:22:55,3565,12.5904,16.2047
+2016-08-17 09:32:58,3566,12.5604,16.2693
+2016-08-17 09:43:01,3567,12.5604,16.2377
+2016-08-17 09:53:03,3568,12.5604,16.2363
+2016-08-17 10:03:06,3570,12.5615,16.2377
+2016-08-17 10:13:08,3571,12.5904,16.2392
+2016-08-17 10:23:18,3571,12.5604,16.2033
+2016-08-17 10:33:20,3573,12.5604,16.2377
+2016-08-17 10:43:23,3573,12.5604,16.2363
+2016-08-17 10:53:25,3574,12.5604,16.2033
+2016-08-17 11:03:28,3576,12.5604,16.2363
+2016-08-17 11:13:30,3578,12.4705,16.2377
+2016-08-17 11:23:33,3579,12.4705,16.2363
+2016-08-17 11:33:36,3581,12.4705,16.1718
+2016-08-17 11:43:38,3582,12.4705,16.2363
+2016-08-17 11:53:41,3583,12.4705,16.2392
+2016-08-17 12:03:43,3585,12.4406,16.2363
+2016-08-17 12:13:46,3585,12.4705,16.2047
+2016-08-17 12:23:48,3587,12.4705,16.2363
+2016-08-17 12:33:51,3588,12.4705,16.2363
+2016-08-17 12:43:53,3588,12.4705,16.2363
+2016-08-17 12:53:56,3590,12.4705,16.2363
+2016-08-17 13:03:58,3591,12.4705,16.2363
+2016-08-17 13:14:01,3592,12.4705,16.2033
+2016-08-17 13:24:04,3592,12.4705,16.2047
+2016-08-17 13:34:06,3593,12.4705,16.2363
+2016-08-17 13:44:09,3594,12.4716,16.1389
+2016-08-17 13:54:12,3595,12.5005,16.1389
+2016-08-17 14:04:14,3596,12.4705,16.1375
+2016-08-17 14:14:17,3597,12.4705,16.1375
+2016-08-17 14:24:20,3598,12.4705,16.1375
+2016-08-17 14:34:22,3598,12.4705,16.1375
+2016-08-17 14:44:24,3598,12.4705,16.1046
+2016-08-17 14:54:27,3598,12.4705,16.1375
+2016-08-17 15:04:29,3598,12.4705,16.0732
+2016-08-17 15:14:32,3598,12.4705,16.1389
+2016-08-17 15:24:35,3598,12.4705,16.0732
+2016-08-17 15:34:38,3598,12.4705,16.1046
+2016-08-17 15:44:40,3598,12.4705,16.1375
+2016-08-17 15:54:43,3598,12.4705,16.1046
+2016-08-17 16:04:46,3598,12.4738,16.1046
+2016-08-17 16:14:49,3598,12.4738,16.1046
+2016-08-17 16:24:51,3598,12.4749,16.1375
+2016-08-17 16:34:54,3598,12.4749,16.1046
+2016-08-17 16:44:57,3598,12.4738,16.1389
+2016-08-17 16:55:00,3598,12.4738,16.1046
+2016-08-17 17:05:02,3598,12.4738,16.1389
+2016-08-17 17:15:05,3598,12.4749,16.1389
+2016-08-17 17:25:08,3598,12.4772,16.1375
+2016-08-17 17:35:11,3598,12.4772,16.1375
+2016-08-17 17:45:13,3598,12.4472,16.1375
+2016-08-17 17:55:16,3598,12.4772,16.0718
+2016-08-17 18:05:19,3598,12.4772,16.1375
+2016-08-17 18:15:22,3598,12.4472,16.1046
+2016-08-17 18:25:25,3598,12.4772,16.1046
+2016-08-17 18:35:27,3598,12.4772,16.1046
+2016-08-17 18:45:30,3598,12.4772,16.0732
+2016-08-17 18:55:33,3598,12.4772,16.1375
+2016-08-17 19:05:36,3598,12.4772,16.1389
+2016-08-17 19:15:38,3598,12.4772,16.1046
+2016-08-17 19:25:41,3598,12.4805,16.1375
+2016-08-17 19:35:44,3597,12.4794,16.1375
+2016-08-17 19:45:46,3596,12.4805,16.1046
+2016-08-17 19:55:49,3595,12.4805,16.105999999999998
+2016-08-17 20:05:51,3595,12.4794,16.1389
+2016-08-17 20:15:54,3594,12.4794,16.1375
+2016-08-17 20:25:57,3594,12.4494,16.1375
+2016-08-17 20:35:59,3593,12.4794,16.105999999999998
+2016-08-17 20:46:02,3592,12.4494,16.1046
+2016-08-17 20:56:04,3592,12.4505,16.1389
+2016-08-17 21:06:07,3590,12.4794,16.1375
+2016-08-17 21:16:09,3589,12.4794,16.105999999999998
+2016-08-17 21:26:12,3588,12.4794,16.1375
+2016-08-17 21:36:15,3588,12.4794,16.1046
+2016-08-17 21:46:17,3587,12.4805,16.1046
+2016-08-17 21:56:20,3587,12.5094,16.1389
+2016-08-17 22:06:22,3586,12.4794,16.1389
+2016-08-17 22:16:25,3586,12.4805,16.1403
+2016-08-17 22:26:27,3585,12.4816,16.1389
+2016-08-17 22:36:37,3585,12.4794,16.1389
+2016-08-17 22:46:40,3584,12.4805,16.1375
+2016-08-17 22:56:43,3583,12.4794,16.1375
+2016-08-17 23:06:45,3582,12.4794,16.1389
+2016-08-17 23:16:48,3582,12.4794,16.1389
+2016-08-17 23:26:50,3581,12.4805,16.1046
+2016-08-17 23:36:52,3581,12.5105,16.1046
+2016-08-17 23:46:54,3581,12.4805,16.1389
+2016-08-17 23:56:57,3581,12.4772,16.1375
+2016-08-18 00:06:59,3580,12.5071,16.1375
+2016-08-18 00:17:02,3579,12.4794,16.1389
+2016-08-18 00:27:04,3578,12.4772,16.1375
+2016-08-18 00:37:06,3578,12.4772,16.1375
+2016-08-18 00:47:09,3578,12.4772,16.1375
+2016-08-18 00:57:11,3577,12.4772,16.1375
+2016-08-18 01:07:13,3577,12.4783,16.1046
+2016-08-18 01:17:16,3577,12.4772,16.1389
+2016-08-18 01:27:18,3577,12.4783,16.1389
+2016-08-18 01:37:21,3577,12.4772,16.1389
+2016-08-18 01:47:23,3576,12.4772,16.1375
+2016-08-18 01:57:26,3575,12.4783,16.1375
+2016-08-18 02:07:28,3575,12.4738,16.1389
+2016-08-18 02:17:31,3575,12.4738,16.1046
+2016-08-18 02:27:33,3574,12.4738,16.1375
+2016-08-18 02:37:35,3574,12.4738,16.1389
+2016-08-18 02:47:38,3574,12.4749,16.105999999999998
+2016-08-18 02:57:40,3574,12.4738,16.1375
+2016-08-18 03:07:43,3574,12.4738,16.1389
+2016-08-18 03:17:45,3574,12.4738,16.1075
+2016-08-18 03:27:47,3574,12.4749,16.1389
+2016-08-18 03:37:50,3573,12.4738,16.1375
+2016-08-18 03:47:52,3573,12.5005,16.1389
+2016-08-18 03:57:54,3573,12.4705,16.1403
+2016-08-18 04:07:56,3573,12.4705,16.1375
+2016-08-18 04:17:59,3573,12.4705,16.1389
+2016-08-18 04:28:01,3572,12.4705,16.1375
+2016-08-18 04:38:03,3572,12.4705,16.1375
+2016-08-18 04:48:05,3572,12.5005,16.1375
+2016-08-18 04:58:08,3572,12.4716,16.1375
+2016-08-18 05:08:10,3572,12.4716,16.1718
+2016-08-18 05:18:12,3571,12.4705,16.1075
+2016-08-18 05:28:14,3571,12.4705,16.1389
+2016-08-18 05:38:17,3571,12.4705,16.1375
+2016-08-18 05:48:19,3571,12.4705,16.1389
+2016-08-18 05:58:21,3571,12.4694,16.1389
+2016-08-18 06:08:24,3571,12.5005,16.1389
+2016-08-18 06:18:26,3571,12.4716,16.1389
+2016-08-18 06:28:28,3571,12.4705,16.1389
+2016-08-18 06:38:30,3571,12.4406,16.1389
+2016-08-18 06:48:33,3571,12.4716,16.0418
+2016-08-18 06:58:35,3571,12.4705,16.039
+2016-08-18 07:08:38,3571,12.4705,16.1389
+2016-08-18 07:18:40,3571,12.4705,16.0404
+2016-08-18 07:28:42,3571,12.5005,16.0077
+2016-08-18 07:38:45,3571,12.5005,16.0404
+2016-08-18 07:48:47,3571,12.5005,16.0077
+2016-08-18 07:58:49,3572,12.5005,16.039
+2016-08-18 08:08:52,3572,12.4705,16.039
+2016-08-18 08:18:54,3573,12.4705,16.0062
+2016-08-18 08:28:56,3574,12.4705,16.0077
+2016-08-18 08:38:59,3574,12.4705,16.0062
+2016-08-18 08:49:03,3575,12.4705,16.0062
+2016-08-18 08:59:05,3576,12.5016,16.0404
+2016-08-18 09:09:08,3577,12.4705,16.039
+2016-08-18 09:19:10,3578,12.4705,16.0404
+2016-08-18 09:29:13,3580,12.4705,16.0404
+2016-08-18 09:39:15,3581,12.4705,16.0418
+2016-08-18 09:49:17,3583,12.4683,16.0062
+2016-08-18 09:59:20,3585,12.5005,16.0361
+2016-08-18 10:09:22,3586,12.4705,16.0062
+2016-08-18 10:19:25,3588,12.4705,16.039
+2016-08-18 10:29:27,3589,12.4705,16.0361
+2016-08-18 10:39:30,3591,12.3775,16.0361
+2016-08-18 10:49:33,3593,12.3808,16.0034
+2016-08-18 10:59:35,3594,12.3808,16.0048
+2016-08-18 11:09:38,3595,12.3808,16.0034
+2016-08-18 11:19:40,3596,12.3808,16.0361
+2016-08-18 11:29:43,3597,12.3808,16.0034
+2016-08-18 11:39:46,3598,12.3819,16.0361
+2016-08-18 11:49:49,3598,12.3808,16.0034
+2016-08-18 11:59:51,3598,12.3808,16.0034
+2016-08-18 12:09:54,3598,12.3808,16.0034
+2016-08-18 12:19:57,3598,12.3211,16.0034
+2016-08-18 12:29:59,3599,12.3211,16.0034
+2016-08-18 12:40:02,3598,12.2913,16.002
+2016-08-18 12:50:05,3599,12.2913,15.9054
+2016-08-18 13:00:07,3599,12.2913,15.9366
+2016-08-18 13:10:09,3599,12.2913,15.938
+2016-08-18 13:20:20,3599,12.2913,15.938
+2016-08-18 13:30:23,3599,12.2913,15.938
+2016-08-18 13:40:26,3599,12.2913,15.938
+2016-08-18 13:50:28,3599,12.2913,15.9366
+2016-08-18 14:00:31,3599,12.2913,15.938
+2016-08-18 14:10:34,3599,12.2913,15.938
+2016-08-18 14:20:37,3599,12.2053,15.938
+2016-08-18 14:30:40,3599,12.2021,15.938
+2016-08-18 14:40:42,3599,12.2053,15.938
+2016-08-18 14:50:45,3599,12.2351,15.9054
+2016-08-18 15:00:48,3599,12.2053,15.9366
+2016-08-18 15:10:51,3599,12.2351,15.938
+2016-08-18 15:20:53,3599,12.2097,15.9039
+2016-08-18 15:30:56,3599,12.2086,15.9054
+2016-08-18 15:40:59,3599,12.2086,15.938
+2016-08-18 15:51:02,3599,12.2394,15.938
+2016-08-18 16:01:05,3599,12.2383,15.9054
+2016-08-18 16:11:08,3599,12.2086,15.938
+2016-08-18 16:21:10,3599,12.2119,15.9054
+2016-08-18 16:31:13,3599,12.1524,15.9054
+2016-08-18 16:41:16,3599,12.1217,15.9011
+2016-08-18 16:51:19,3599,12.1217,15.9054
+2016-08-18 17:01:21,3599,12.1557,15.938
+2016-08-18 17:11:24,3599,12.1217,15.938
+2016-08-18 17:21:27,3599,12.1513,15.938
+2016-08-18 17:31:30,3599,12.1249,15.9366
+2016-08-18 17:41:32,3599,12.1249,15.9054
+2016-08-18 17:51:34,3599,12.1249,15.938
+2016-08-18 18:01:37,3599,12.1557,15.9054
+2016-08-18 18:11:40,3599,12.1546,15.938
+2016-08-18 18:21:43,3599,12.1546,15.9054
+2016-08-18 18:31:45,3599,12.214,15.9338
+2016-08-18 18:41:48,3599,12.2449,15.9054
+2016-08-18 18:51:51,3599,12.2151,15.938
+2016-08-18 19:01:54,3599,12.214,15.9054
+2016-08-18 19:11:57,3599,12.2438,15.9366
+2016-08-18 19:22:00,3599,12.2438,15.938
+2016-08-18 19:32:02,3599,12.2438,15.938
+2016-08-18 19:42:05,3599,12.2438,15.9338
+2016-08-18 19:52:08,3599,12.214,15.938
+2016-08-18 20:02:11,3599,12.2151,15.9054
+2016-08-18 20:12:13,3599,12.214,15.9054
+2016-08-18 20:22:16,3599,12.1843,15.9054
+2016-08-18 20:32:19,3599,12.2438,15.938
+2016-08-18 20:42:21,3599,12.2151,15.9054
+2016-08-18 20:52:24,3598,12.2449,15.938
+2016-08-18 21:02:27,3598,12.214,15.9054
+2016-08-18 21:12:29,3598,12.214,15.9054
+2016-08-18 21:22:32,3598,12.214,15.9039
+2016-08-18 21:32:35,3598,12.214,15.9054
+2016-08-18 21:42:37,3597,12.1854,15.938
+2016-08-18 21:52:40,3596,12.2151,15.9352
+2016-08-18 22:02:43,3595,12.1854,15.938
+2016-08-18 22:12:45,3595,12.2438,15.938
+2016-08-18 22:22:48,3594,12.214,15.938
+2016-08-18 22:32:51,3593,12.2438,15.938
+2016-08-18 22:42:53,3593,12.214,15.938
+2016-08-18 22:52:56,3592,12.214,15.938
+2016-08-18 23:02:58,3592,12.2438,15.9394
+2016-08-18 23:13:01,3590,12.2449,15.9054
+2016-08-18 23:23:03,3590,12.2151,15.938
+2016-08-18 23:33:06,3588,12.214,15.938
+2016-08-18 23:43:08,3588,12.2438,15.9366
+2016-08-18 23:53:11,3588,12.2151,15.938
+2016-08-19 00:03:13,3587,12.2438,15.938
+2016-08-19 00:13:16,3587,12.2151,15.938
+2016-08-19 00:23:18,3586,12.214,15.938
+2016-08-19 00:33:21,3586,12.2449,15.9394
+2016-08-19 00:43:23,3585,12.214,15.938
+2016-08-19 00:53:26,3585,12.214,15.938
+2016-08-19 01:03:28,3584,12.2438,15.938
+2016-08-19 01:13:31,3583,12.2151,15.938
+2016-08-19 01:23:33,3583,12.1843,15.9054
+2016-08-19 01:33:36,3582,12.214,15.9082
+2016-08-19 01:43:38,3581,12.2151,15.9423
+2016-08-19 01:53:41,3581,12.2438,15.9096
+2016-08-19 02:03:43,3581,12.2449,15.938
+2016-08-19 02:13:46,3581,12.2151,15.9749
+2016-08-19 02:23:48,3580,12.2151,15.9408
+2016-08-19 02:33:50,3579,12.2449,15.938
+2016-08-19 02:43:53,3579,12.2449,15.9394
+2016-08-19 02:53:55,3578,12.2438,15.9423
+2016-08-19 03:03:57,3578,12.214,15.9437
+2016-08-19 03:13:59,3577,12.2151,15.911
+2016-08-19 03:24:02,3577,12.2438,15.9054
+2016-08-19 03:34:04,3577,12.2449,15.9423
+2016-08-19 03:44:07,3577,12.2151,15.938
+2016-08-19 03:54:09,3577,12.2438,15.9054
+2016-08-19 04:04:11,3577,12.214,15.9054
+2016-08-19 04:14:14,3576,12.2108,15.9096
+2016-08-19 04:24:16,3576,12.2405,15.9423
+2016-08-19 04:34:18,3576,12.2119,15.9082
+2016-08-19 04:44:21,3576,12.2119,15.9749
+2016-08-19 04:54:23,3575,12.2108,15.9423
+2016-08-19 05:04:26,3575,12.2108,15.9408
+2016-08-19 05:14:28,3575,12.2416,15.9423
+2016-08-19 05:24:30,3575,12.2097,15.9408
+2016-08-19 05:34:33,3574,12.181,15.938
+2016-08-19 05:44:35,3574,12.1789,15.9408
+2016-08-19 05:54:38,3574,12.2383,15.9408
+2016-08-19 06:04:40,3574,12.2383,15.9423
+2016-08-19 06:14:42,3574,12.2086,15.9423
+2016-08-19 06:24:45,3574,12.2383,15.9423
+2016-08-19 06:34:47,3574,12.2351,15.938
+2016-08-19 06:44:49,3574,12.2086,15.9408
+2016-08-19 06:54:52,3573,12.2097,15.9408
+2016-08-19 07:04:54,3574,12.2383,15.9408
+2016-08-19 07:14:57,3574,12.2053,15.9096
+2016-08-19 07:24:59,3574,12.2351,15.9423
+2016-08-19 07:35:01,3574,12.2351,15.9408
+2016-08-19 07:45:04,3574,12.2362,15.9423
+2016-08-19 07:55:06,3574,12.2351,15.9408
+2016-08-19 08:05:08,3575,12.2064,15.9423
+2016-08-19 08:15:11,3576,12.2351,15.9408
+2016-08-19 08:25:13,3577,12.2351,15.9408
+2016-08-19 08:35:15,3577,12.2351,15.9408
+2016-08-19 08:45:18,3578,12.1724,15.9082
+2016-08-19 08:55:20,3579,12.2351,15.9423
+2016-08-19 09:05:22,3581,12.2318,15.938
+2016-08-19 09:15:25,3581,12.2021,15.9423
+2016-08-19 09:25:27,3583,12.2318,15.9423
+2016-08-19 09:35:29,3585,12.1724,15.9437
+2016-08-19 09:45:32,3586,12.2318,15.9054
+2016-08-19 09:55:34,3588,12.2053,15.9408
+2016-08-19 10:05:36,3589,12.1724,15.9408
+2016-08-19 10:15:39,3591,12.2021,15.938
+2016-08-19 10:25:41,3593,12.1427,15.9423
+2016-08-19 10:35:44,3594,12.1131,15.9408
+2016-08-19 10:45:46,3595,12.1163,15.9082
+2016-08-19 10:55:49,3596,12.1427,15.9082
+2016-08-19 11:05:51,3598,12.1427,15.938
+2016-08-19 11:15:54,3598,12.1459,15.9423
+2016-08-19 11:25:56,3598,12.1459,15.843
+2016-08-19 11:35:59,3598,12.1427,15.8076
+2016-08-19 11:46:02,3599,12.1163,15.8402
+2016-08-19 11:56:05,3599,12.1163,15.8402
+2016-08-19 12:06:09,3599,12.1163,15.8105
+2016-08-19 12:16:12,3599,12.1163,15.8076
+2016-08-19 12:26:15,3599,12.1459,15.8076
+2016-08-19 12:36:18,3599,12.1195,15.8076
+2016-08-19 12:46:21,3599,12.1492,15.8076
+2016-08-19 12:56:23,3599,12.1492,15.8402
+2016-08-19 13:06:26,3599,12.1195,15.8388
+2016-08-19 13:16:29,3599,12.1195,15.8076
+2016-08-19 13:26:32,3599,12.1195,15.8076
+2016-08-19 13:36:35,3599,12.1217,15.8048
+2016-08-19 13:46:38,3599,12.1217,15.8076
+2016-08-19 13:56:41,3599,12.0328,15.8076
+2016-08-19 14:06:44,3599,12.0624,15.8076
+2016-08-19 14:16:47,3599,12.0328,15.8402
+2016-08-19 14:26:50,3599,12.0328,15.8076
+2016-08-19 14:36:52,3599,12.036,15.8076
+2016-08-19 14:46:55,3599,12.0371,15.7088
+2016-08-19 14:56:58,3599,12.036,15.7102
+2016-08-19 15:07:01,3599,12.0656,15.7102
+2016-08-19 15:17:04,3599,12.0371,15.7102
+2016-08-19 15:27:07,3599,12.036,15.7102
+2016-08-19 15:37:10,3599,12.036,15.7102
+2016-08-19 15:47:13,3599,12.036,15.7102
+2016-08-19 15:57:16,3599,12.036,15.7102
+2016-08-19 16:07:19,3599,11.9484,15.7102
+2016-08-19 16:17:22,3599,12.036,15.7427
+2016-08-19 16:27:25,3599,11.9484,15.7102
+2016-08-19 16:37:28,3599,11.9473,15.7102
+2016-08-19 16:47:31,3599,11.9515,15.7102
+2016-08-19 16:57:34,3599,11.98,15.7102
+2016-08-19 17:07:37,3599,11.98,15.7102
+2016-08-19 17:17:40,3599,11.9505,15.7427
+2016-08-19 17:27:43,3599,11.9547,15.7102
+2016-08-19 17:37:46,3599,11.9537,15.7102
+2016-08-19 17:47:49,3599,11.9537,15.7088
+2016-08-19 17:57:52,3599,11.9537,15.7102
+2016-08-19 18:07:55,3599,11.9569,15.7427
+2016-08-19 18:17:58,3599,11.9569,15.7102
+2016-08-19 18:28:01,3599,11.9558,15.7102
+2016-08-19 18:38:04,3599,11.9569,15.7102
+2016-08-19 18:48:07,3599,11.9864,15.7102
+2016-08-19 18:58:10,3599,11.9864,15.7427
+2016-08-19 19:08:13,3599,11.9579,15.7102
+2016-08-19 19:18:16,3599,11.9854,15.7102
+2016-08-19 19:28:19,3599,11.9875,15.7455
+2016-08-19 19:38:22,3599,11.9569,15.7102
+2016-08-19 19:48:25,3599,11.9864,15.7102
+2016-08-19 19:58:28,3599,11.9875,15.7469
+2016-08-19 20:08:31,3599,11.9569,15.7102
+2016-08-19 20:18:33,3599,11.9558,15.7102
+2016-08-19 20:28:36,3599,11.9558,15.7144
+2016-08-19 20:38:39,3599,12.0456,15.7088
+2016-08-19 20:48:42,3599,12.0763,15.7102
+2016-08-19 20:58:45,3599,12.0456,15.713
+2016-08-19 21:08:47,3599,12.0753,15.713
+2016-08-19 21:18:50,3599,12.0467,15.713
+2016-08-19 21:28:53,3599,12.0785,15.7144
+2016-08-19 21:38:56,3599,12.0488,15.7455
+2016-08-19 21:48:58,3599,12.0796,15.713
+2016-08-19 21:59:01,3599,12.0488,15.713
+2016-08-19 22:09:04,3599,12.0785,15.7469
+2016-08-19 22:19:07,3599,12.0796,15.7793
+2016-08-19 22:29:10,3599,12.0488,15.7144
+2016-08-19 22:39:13,3599,12.0785,15.7158
+2016-08-19 22:49:16,3599,12.0192,15.713
+2016-08-19 22:59:19,3599,12.0785,15.7779
+2016-08-19 23:09:21,3599,12.0488,15.713
+2016-08-19 23:19:24,3599,12.0488,15.713
+2016-08-19 23:29:27,3599,12.0499,15.7793
+2016-08-19 23:39:29,3599,12.0796,15.713
+2016-08-19 23:49:32,3599,12.0478,15.7144
+2016-08-19 23:59:35,3599,12.0478,15.713
+2016-08-20 00:09:38,3599,12.0488,15.713
+2016-08-20 00:19:40,3599,12.0488,15.7158
+2016-08-20 00:29:43,3599,12.0796,15.7469
+2016-08-20 00:39:46,3599,12.0774,15.7144
+2016-08-20 00:49:49,3599,12.0817,15.6806
+2016-08-20 00:59:51,3599,12.0478,15.7469
+2016-08-20 01:09:54,3599,12.0488,15.7455
+2016-08-20 01:19:57,3599,12.0499,15.713
+2016-08-20 01:30:00,3599,12.0488,15.713
+2016-08-20 01:40:02,3599,12.0488,15.7144
+2016-08-20 01:50:05,3599,12.0488,15.7144
+2016-08-20 02:00:08,3599,12.0774,15.713
+2016-08-20 02:10:10,3599,12.0488,15.7144
+2016-08-20 02:20:13,3599,12.0478,15.6482
+2016-08-20 02:30:16,3599,12.0488,15.713
+2016-08-20 02:40:18,3599,12.0478,15.6482
+2016-08-20 02:50:21,3599,12.0499,15.713
+2016-08-20 03:00:24,3599,12.0785,15.713
+2016-08-20 03:10:26,3598,12.0478,15.7455
+2016-08-20 03:20:29,3598,12.0499,15.713
+2016-08-20 03:30:32,3598,12.0488,15.7158
+2016-08-20 03:40:35,3598,12.0488,15.7144
+2016-08-20 03:50:37,3598,12.0796,15.7158
+2016-08-20 04:00:40,3598,12.0796,15.713
+2016-08-20 04:10:43,3598,12.0488,15.7455
+2016-08-20 04:20:45,3598,12.0488,15.6496
+2016-08-20 04:30:48,3598,12.0796,15.7469
+2016-08-20 04:40:51,3598,12.0478,15.7158
+2016-08-20 04:50:53,3598,12.0488,15.713
+2016-08-20 05:00:56,3597,12.0785,15.713
+2016-08-20 05:10:58,3598,12.0478,15.7807
+2016-08-20 05:21:01,3597,12.0488,15.7455
+2016-08-20 05:31:04,3597,12.0499,15.713
+2016-08-20 05:41:06,3597,12.0785,15.713
+2016-08-20 05:51:09,3596,12.0488,15.7455
+2016-08-20 06:01:11,3595,12.0785,15.713
+2016-08-20 06:11:13,3595,12.0796,15.713
+2016-08-20 06:21:16,3595,12.0742,15.713
+2016-08-20 06:31:18,3595,12.0499,15.713
+2016-08-20 06:41:21,3595,12.0488,15.7469
+2016-08-20 06:51:23,3595,12.0488,15.7469
+2016-08-20 07:01:26,3595,12.0467,15.7469
+2016-08-20 07:11:29,3595,12.0774,15.7483
+2016-08-20 07:21:31,3595,12.0456,15.713
+2016-08-20 07:31:34,3595,12.0488,15.7469
+2016-08-20 07:41:36,3595,12.0446,15.7469
+2016-08-20 07:51:39,3595,12.0753,15.7144
+2016-08-20 08:01:42,3596,12.0456,15.7455
+2016-08-20 08:11:44,3595,12.0753,15.7144
+2016-08-20 08:21:47,3596,12.0456,15.7469
+2016-08-20 08:31:50,3597,12.0785,15.7469
+2016-08-20 08:41:52,3597,12.0467,15.7469
+2016-08-20 08:51:55,3598,12.0446,15.7455
+2016-08-20 09:01:57,3598,12.0467,15.713
+2016-08-20 09:12:00,3598,12.0467,15.7469
+2016-08-20 09:22:03,3598,12.0753,15.7144
+2016-08-20 09:32:05,3598,12.0753,15.7455
+2016-08-20 09:42:08,3598,12.0488,15.7455
+2016-08-20 09:52:10,3599,12.0456,15.7144
+2016-08-20 10:02:13,3599,12.0456,15.6496
+2016-08-20 10:12:16,3599,12.0742,15.6806
+2016-08-20 10:22:18,3599,12.0753,15.6806
+2016-08-20 10:32:21,3599,12.0456,15.6806
+2016-08-20 10:42:24,3599,12.0499,15.6482
+2016-08-20 10:52:26,3599,12.0796,15.6806
+2016-08-20 11:02:29,3599,12.0785,15.6482
+2016-08-20 11:12:32,3599,12.0785,15.6482
+2016-08-20 11:22:34,3599,12.0774,15.6482
+2016-08-20 11:32:37,3599,12.0478,15.6482
+2016-08-20 11:42:40,3599,11.9611,15.6482
+2016-08-20 11:52:43,3599,12.0478,15.6496
+2016-08-20 12:02:45,3599,12.0521,15.6482
+2016-08-20 12:12:48,3599,12.0499,15.6496
+2016-08-20 12:22:51,3599,12.0521,15.6806
+2016-08-20 12:32:54,3599,12.0828,15.6806
+2016-08-20 12:42:57,3599,12.0521,15.651
+2016-08-20 12:52:59,3599,11.9928,15.6496
+2016-08-20 13:03:02,3599,11.9632,15.6482
+2016-08-20 13:13:05,3599,11.9632,15.6482
+2016-08-20 13:23:08,3599,11.9928,15.6482
+2016-08-20 13:33:10,3599,11.9664,15.6159
+2016-08-20 13:43:13,3599,11.9654,15.6496
+2016-08-20 13:53:16,3599,11.9654,15.6496
+2016-08-20 14:03:19,3599,11.9654,15.682
+2016-08-20 14:13:22,3599,11.9664,15.6496
+2016-08-20 14:23:25,3599,11.996,15.6482
+2016-08-20 14:33:28,3599,11.9664,15.6482
+2016-08-20 14:43:31,3599,11.995,15.6482
+2016-08-20 14:53:34,3599,11.9664,15.6482
+2016-08-20 15:03:37,3599,11.996,15.6482
+2016-08-20 15:13:40,3599,11.9664,15.6482
+2016-08-20 15:23:43,3599,11.9654,15.6496
+2016-08-20 15:33:55,3599,11.9664,15.6159
+2016-08-20 15:43:57,3599,11.9654,15.6524
+2016-08-20 15:54:00,3599,11.9664,15.6482
+2016-08-20 16:04:03,3599,11.9654,15.6482
+2016-08-20 16:14:06,3599,11.9664,15.6496
+2016-08-20 16:24:09,3599,11.9654,15.6482
+2016-08-20 16:34:12,3599,11.9664,15.6482
+2016-08-20 16:44:15,3599,11.995,15.6482
+2016-08-20 16:54:17,3599,11.9696,15.6496
+2016-08-20 17:04:20,3599,11.9696,15.6482
+2016-08-20 17:14:23,3599,11.9696,15.6524
+2016-08-20 17:24:26,3599,11.9992,15.6482
+2016-08-20 17:34:29,3599,11.9992,15.6848
+2016-08-20 17:44:31,3599,11.9686,15.682
+2016-08-20 17:54:34,3599,11.9686,15.6482
+2016-08-20 18:04:37,3599,11.9686,15.6524
+2016-08-20 18:14:39,3599,11.9696,15.6524
+2016-08-20 18:24:50,3599,11.9686,15.6524
+2016-08-20 18:34:52,3599,11.9696,15.6524
+2016-08-20 18:44:55,3599,11.9696,15.6538
+2016-08-20 18:54:58,3599,11.9728,15.6215
+2016-08-20 19:05:01,3599,11.9686,15.6524
+2016-08-20 19:15:03,3599,11.9686,15.6524
+2016-08-20 19:25:06,3599,11.9718,15.6524
+2016-08-20 19:35:09,3599,11.9696,15.6538
+2016-08-20 19:45:12,3599,12.0024,15.6524
+2016-08-20 19:55:15,3599,11.9728,15.6538
+2016-08-20 20:05:17,3599,11.9696,15.6524
+2016-08-20 20:15:20,3599,11.9728,15.6538
+2016-08-20 20:25:23,3599,11.9728,15.6862
+2016-08-20 20:35:25,3598,11.9728,15.6524
+2016-08-20 20:45:28,3598,11.9728,15.6524
+2016-08-20 20:55:31,3598,11.9728,15.6524
+2016-08-20 21:05:34,3598,12.0024,15.6538
+2016-08-20 21:15:36,3598,11.9728,15.6538
+2016-08-20 21:25:39,3598,11.9686,15.6538
+2016-08-20 21:35:42,3598,11.9992,15.6538
+2016-08-20 21:45:45,3598,12.0617,15.6538
+2016-08-20 21:55:47,3598,12.0617,15.6524
+2016-08-20 22:05:50,3597,12.0882,15.6242
+2016-08-20 22:15:53,3597,12.0574,15.658
+2016-08-20 22:25:56,3597,12.0882,15.6566
+2016-08-20 22:35:58,3596,12.0574,15.689
+2016-08-20 22:46:01,3595,12.0574,15.6566
+2016-08-20 22:56:04,3595,12.0585,15.658
+2016-08-20 23:06:07,3595,12.0585,15.658
+2016-08-20 23:16:09,3595,12.0585,15.658
+2016-08-20 23:26:12,3595,12.0882,15.6566
+2016-08-20 23:36:15,3595,12.0574,15.6566
+2016-08-20 23:46:18,3594,12.0871,15.6566
+2016-08-20 23:56:20,3593,12.0585,15.658
+2016-08-21 01:06:38,3591,12.0553,15.6904
+2016-08-21 01:16:41,3591,12.0553,15.658
+2016-08-21 01:26:44,3590,12.0542,15.6566
+2016-08-21 01:36:46,3590,12.0553,15.658
+2016-08-21 01:46:49,3590,12.0553,15.6566
+2016-08-21 01:56:52,3589,12.0542,15.658
+2016-08-21 02:06:55,3588,12.0553,15.6566
+2016-08-21 02:16:57,3588,12.0542,15.658
+2016-08-21 02:27:00,3588,12.0553,15.658
+2016-08-21 02:37:03,3588,12.0553,15.6904
+2016-08-21 02:47:06,3588,12.0849,15.6566
+2016-08-21 02:57:08,3587,12.0553,15.6904
+2016-08-21 03:07:11,3587,12.0553,15.658
+2016-08-21 03:17:14,3587,12.0849,15.6566
+2016-08-21 03:27:17,3586,12.0542,15.6566
+2016-08-21 03:37:19,3586,12.0542,15.6932
+2016-08-21 03:47:22,3586,12.0553,15.6904
+2016-08-21 03:57:25,3586,12.0553,15.6608
+2016-08-21 04:07:27,3585,12.0849,15.6566
+2016-08-21 04:17:30,3585,12.0849,15.6566
+2016-08-21 04:27:33,3585,12.0849,15.658
+2016-08-21 04:37:35,3585,12.0553,15.6566
+2016-08-21 04:47:38,3585,12.0849,15.6566
+2016-08-21 04:57:41,3584,12.0839,15.658
+2016-08-21 05:07:43,3584,12.0521,15.6622
+2016-08-21 05:17:46,3584,12.0521,15.6904
+2016-08-21 05:27:48,3583,12.0817,15.6608
+2016-08-21 05:37:51,3583,12.0521,15.6946
+2016-08-21 05:47:54,3583,12.0806,15.6622
+2016-08-21 05:57:56,3582,12.0817,15.6946
+2016-08-21 06:07:59,3582,12.0785,15.6904
+2016-08-21 06:18:01,3582,12.0488,15.6608
+2016-08-21 06:28:04,3581,12.0488,15.6622
+2016-08-21 06:38:07,3582,12.0488,15.6622
+2016-08-21 06:48:09,3582,12.0488,15.658
+2016-08-21 06:58:12,3581,12.0478,15.6622
+2016-08-21 07:08:14,3582,12.0796,15.6946
+2016-08-21 07:18:17,3582,12.0499,15.658
+2016-08-21 07:28:19,3582,12.0753,15.6946
+2016-08-21 07:38:22,3582,12.0753,15.6608
+2016-08-21 07:48:25,3583,12.0456,15.6608
+2016-08-21 07:58:27,3583,12.0753,15.658
+2016-08-21 08:08:30,3584,12.0467,15.6932
+2016-08-21 08:18:32,3585,12.0763,15.6622
+2016-08-21 08:28:35,3585,12.0456,15.6622
+2016-08-21 08:38:37,3585,12.0753,15.6608
+2016-08-21 08:48:40,3586,12.0467,15.6608
+2016-08-21 08:58:43,3587,12.0753,15.6566
+2016-08-21 09:08:45,3588,12.0456,15.6932
+2016-08-21 09:18:48,3588,12.0446,15.6566
+2016-08-21 09:28:50,3588,12.0456,15.5933
+2016-08-21 09:38:52,3589,12.0456,15.6622
+2016-08-21 09:48:55,3590,12.0456,15.5638
+2016-08-21 09:58:57,3591,12.0456,15.5652
+2016-08-21 10:09:00,3592,11.9569,15.5638
+2016-08-21 10:19:03,3593,11.9569,15.561
+2016-08-21 10:29:06,3594,11.9569,15.5638
+2016-08-21 10:39:09,3595,11.9569,15.5596
+2016-08-21 10:49:11,3596,11.9569,15.5596
+2016-08-21 10:59:14,3597,11.9558,15.5638
+2016-08-21 11:09:17,3598,11.9569,15.5919
+2016-08-21 11:19:20,3598,11.9854,15.5638
+2016-08-21 11:29:22,3598,11.9569,15.561
+2016-08-21 11:39:25,3598,11.9569,15.5596
+2016-08-21 11:49:27,3598,11.9579,15.561
+2016-08-21 11:59:30,3598,11.9864,15.5596
+2016-08-21 12:09:33,3598,11.9569,15.5652
+2016-08-21 12:19:36,3598,11.9864,15.561
+2016-08-21 12:29:38,3599,11.9569,15.5919
+2016-08-21 12:39:41,3599,11.9558,15.5288
+2016-08-21 12:49:44,3599,11.9558,15.5596
+2016-08-21 12:59:46,3599,11.9569,15.5596
+2016-08-21 13:09:49,3599,11.9569,15.5596
+2016-08-21 13:19:52,3599,11.9854,15.561
+2016-08-21 13:29:54,3599,11.9569,15.5933
+2016-08-21 13:39:57,3599,11.8683,15.5596
+2016-08-21 13:50:00,3599,11.8694,15.561
+2016-08-21 14:00:03,3599,11.8683,15.5596
+2016-08-21 14:10:05,3599,11.8672,15.561
+2016-08-21 14:20:08,3599,11.8683,15.561
+2016-08-21 14:30:11,3599,11.8999,15.5919
+2016-08-21 14:40:14,3599,11.8967,15.561
+2016-08-21 14:50:16,3599,11.8409,15.5933
+2016-08-21 15:00:19,3599,11.901,15.5596
+2016-08-21 15:10:22,3599,11.8715,15.5596
+2016-08-21 15:20:24,3599,11.901,15.5596
+2016-08-21 15:30:26,3599,11.8704,15.5596
+2016-08-21 15:40:37,3599,11.8715,15.5596
+2016-08-21 15:50:40,3599,11.8704,15.5596
+2016-08-21 16:00:42,3599,11.8704,15.561
+2016-08-21 16:10:45,3599,11.8715,15.561
+2016-08-21 16:20:48,3599,11.8715,15.561
+2016-08-21 16:30:50,3599,11.8715,15.5596
+2016-08-21 16:40:53,3599,11.8715,15.561
+2016-08-21 16:50:56,3599,11.8746,15.561
+2016-08-21 17:00:59,3599,11.8704,15.5596
+2016-08-21 17:11:01,3599,11.8704,15.5596
+2016-08-21 17:21:04,3599,11.8736,15.5596
+2016-08-21 17:31:07,3599,11.8704,15.5596
+2016-08-21 17:41:09,3599,11.8746,15.5933
+2016-08-21 17:51:12,3599,11.8715,15.561
+2016-08-21 18:01:15,3598,11.9042,15.5596
+2016-08-21 18:11:17,3599,11.8746,15.5596
+2016-08-21 18:21:20,3599,11.8746,15.5596
+2016-08-21 18:31:23,3599,11.8746,15.5596
+2016-08-21 18:41:27,3599,11.8746,15.5933
+2016-08-21 18:51:30,3599,11.8746,15.5933
+2016-08-21 19:01:33,3599,11.9031,15.561
+2016-08-21 19:11:36,3598,11.8746,15.561
+2016-08-21 19:21:39,3599,11.8736,15.5596
+2016-08-21 19:31:42,3598,11.8746,15.561
+2016-08-21 19:41:44,3598,11.8746,15.5596
+2016-08-21 19:51:47,3598,11.8736,15.5596
+2016-08-21 20:01:50,3598,11.8746,15.5933
+2016-08-21 20:11:53,3598,11.8746,15.5596
+2016-08-21 20:21:55,3598,11.9042,15.5919
+2016-08-21 20:31:58,3598,11.9031,15.5596
+2016-08-21 20:42:00,3598,11.8746,15.5596
+2016-08-21 20:52:03,3598,11.8746,15.561
+2016-08-21 21:02:06,3597,11.8746,15.561
+2016-08-21 21:12:08,3597,11.8746,15.5596
+2016-08-21 21:22:11,3597,11.8746,15.5933
+2016-08-21 21:32:13,3596,11.9042,15.5596
+2016-08-21 21:42:16,3596,11.8746,15.5596
+2016-08-21 21:52:19,3595,11.8746,15.5596
+2016-08-21 22:02:21,3595,11.8736,15.561
+2016-08-21 22:12:24,3595,11.8746,15.561
+2016-08-21 22:22:26,3595,11.8746,15.5933
+2016-08-21 22:32:29,3594,11.901,15.5919
+2016-08-21 22:42:32,3593,11.902,15.5919
+2016-08-21 22:52:34,3593,11.901,15.5596
+2016-08-21 23:02:44,3592,11.8715,15.561
+2016-08-21 23:12:46,3592,11.8715,15.561
+2016-08-21 23:22:49,3592,11.8715,15.5933
+2016-08-21 23:32:52,3592,11.8725,15.5933
+2016-08-21 23:42:54,3591,11.8999,15.5596
+2016-08-21 23:52:57,3590,11.8715,15.561
+2016-08-22 00:02:59,3590,11.8715,15.561
+2016-08-22 00:13:02,3589,11.8704,15.561
+2016-08-22 00:23:05,3588,11.901,15.5933
+2016-08-22 00:33:07,3588,11.8683,15.561
+2016-08-22 00:43:10,3588,11.8683,15.5638
+2016-08-22 00:53:12,3588,11.8978,15.5596
+2016-08-22 01:03:15,3588,11.8978,15.561
+2016-08-22 01:13:17,3588,11.8683,15.561
+2016-08-22 01:23:20,3587,11.8694,15.561
+2016-08-22 01:33:22,3586,11.8672,15.5596
+2016-08-22 01:43:25,3586,11.8694,15.5652
+2016-08-22 01:53:28,3586,11.8967,15.561
+2016-08-22 02:03:30,3586,11.8683,15.5638
+2016-08-22 02:13:33,3586,11.8967,15.561
+2016-08-22 02:23:36,3586,11.8683,15.561
+2016-08-22 02:33:38,3586,11.8672,15.5596
+2016-08-22 02:43:41,3585,11.8672,15.5596
+2016-08-22 02:53:43,3586,11.8694,15.5919
+2016-08-22 03:03:46,3586,11.8978,15.5638
+2016-08-22 03:13:49,3585,11.8683,15.5596
+2016-08-22 03:23:51,3585,11.8672,15.5919
+2016-08-22 03:33:54,3586,11.8683,15.5596
+2016-08-22 03:43:56,3585,11.8683,15.5933
+2016-08-22 03:53:58,3586,11.8683,15.561
+2016-08-22 04:04:00,3585,11.8978,15.5933
+2016-08-22 04:14:03,3585,11.8683,15.5919
+2016-08-22 04:24:05,3585,11.8683,15.5596
+2016-08-22 04:34:08,3585,11.8683,15.5596
+2016-08-22 04:44:10,3585,11.8989,15.561
+2016-08-22 04:54:13,3585,11.8989,15.561
+2016-08-22 05:04:15,3584,11.8694,15.5919
+2016-08-22 05:14:18,3584,11.8978,15.5933
+2016-08-22 05:24:20,3584,11.8683,15.5596
+2016-08-22 05:34:23,3584,11.8683,15.561
+2016-08-22 05:44:25,3584,11.8672,15.561
+2016-08-22 05:54:28,3584,11.8399,15.561
+2016-08-22 06:04:30,3583,11.8683,15.5919
+2016-08-22 06:14:33,3583,11.8683,15.5596
+2016-08-22 06:24:36,3583,11.8672,15.5919
+2016-08-22 06:34:38,3583,11.9273,15.5933
+2016-08-22 06:44:41,3583,11.8672,15.5596
+2016-08-22 06:54:43,3583,11.8989,15.5933
+2016-08-22 07:04:46,3583,11.8651,15.561
+2016-08-22 07:14:48,3584,11.8946,15.5596
+2016-08-22 07:24:51,3585,11.8683,15.5596
+2016-08-22 07:34:53,3585,11.8651,15.5933
+2016-08-22 07:44:56,3584,11.8662,15.5596
+2016-08-22 07:54:58,3585,11.8651,15.561
+2016-08-22 08:05:01,3586,11.8946,15.561
+2016-08-22 08:15:03,3586,11.8651,15.561
+2016-08-22 08:25:06,3586,11.8662,15.561
+2016-08-22 08:35:09,3587,11.8651,15.5975
+2016-08-22 08:45:11,3587,11.8662,15.561
+2016-08-22 08:55:14,3588,11.8651,15.5596
+2016-08-22 09:05:16,3588,11.8662,15.561
+2016-08-22 09:15:19,3589,11.8651,15.5596
+2016-08-22 09:25:21,3590,11.8946,15.5933
+2016-08-22 09:35:24,3592,11.8651,15.561
+2016-08-22 09:45:27,3593,11.8651,15.5596
+2016-08-22 09:55:30,3594,11.8662,15.5596
+2016-08-22 10:05:32,3595,11.8651,15.561
+2016-08-22 10:15:35,3597,11.8946,15.5596
+2016-08-22 10:25:38,3598,11.8946,15.561
+2016-08-22 10:35:40,3598,11.8946,15.561
+2016-08-22 10:45:43,3598,11.8662,15.5596
+2016-08-22 10:55:46,3598,11.8651,15.5933
+2016-08-22 11:05:48,3599,11.8651,15.561
+2016-08-22 11:15:51,3599,11.8651,15.5596
+2016-08-22 11:25:54,3599,11.8651,15.5288
+2016-08-22 11:35:57,3599,11.8651,15.5596
+2016-08-22 11:45:59,3599,11.8651,15.5596
+2016-08-22 11:56:02,3599,11.8651,15.5596
+2016-08-22 12:06:05,3599,11.8651,15.5596
+2016-08-22 12:16:08,3599,11.8683,15.5596
+2016-08-22 12:26:11,3599,11.8651,15.5596
+2016-08-22 12:36:13,3599,11.8672,15.561
+2016-08-22 12:46:16,3599,11.8683,15.463
+2016-08-22 13:06:28,3599,11.7789,15.4644
+2016-08-22 13:16:30,3599,11.8094,15.4308
+2016-08-22 13:26:33,3599,11.8094,15.4322
+2016-08-22 13:36:36,3599,11.7789,15.4589
+2016-08-22 13:46:38,3599,11.8094,15.4308
+2016-08-22 13:56:41,3599,11.781,15.463
+2016-08-22 14:06:44,3599,11.8094,15.463
+2016-08-22 14:16:46,3599,11.8094,15.463
+2016-08-22 14:26:49,3599,11.8094,15.4644
+2016-08-22 14:36:52,3599,11.8094,15.463
+2016-08-22 14:46:55,3599,11.8094,15.463
+2016-08-22 14:56:58,3599,11.7789,15.4308
+2016-08-22 15:07:00,3599,11.8104,15.4308
+2016-08-22 15:17:03,3599,11.8094,15.4602
+2016-08-22 15:27:06,3599,11.7789,15.463
+2016-08-22 15:37:09,3599,11.8094,15.463
+2016-08-22 15:47:11,3599,11.78,15.4589
+2016-08-22 15:57:14,3599,11.8083,15.463
+2016-08-22 16:07:17,3599,11.7789,15.463
+2016-08-22 16:17:20,3599,11.7821,15.4589
+2016-08-22 16:27:22,3599,11.8125,15.4322
+2016-08-22 16:37:25,3599,11.7547,15.4602
+2016-08-22 16:47:28,3599,11.7841,15.463
+2016-08-22 16:57:31,3599,11.8115,15.4589
+2016-08-22 17:07:33,3599,11.8115,15.4644
+2016-08-22 17:17:36,3599,11.7862,15.4589
+2016-08-22 17:27:39,3599,11.8157,15.463
+2016-08-22 17:37:42,3599,11.8157,15.463
+2016-08-22 17:47:45,3599,11.8146,15.4589
+2016-08-22 17:57:47,3599,11.7568,15.4267
+2016-08-22 18:07:50,3599,11.7862,15.4602
+2016-08-22 18:17:53,3599,11.8157,15.4322
+2016-08-22 18:27:56,3599,11.8157,15.4644
+2016-08-22 18:37:58,3599,11.7894,15.4616
+2016-08-22 18:48:01,3599,11.8178,15.463
+2016-08-22 18:58:04,3599,11.7883,15.463
+2016-08-22 19:08:07,3599,11.7883,15.463
+2016-08-22 19:18:09,3599,11.8178,15.4589
+2016-08-22 19:28:12,3599,11.8178,15.4644
+2016-08-22 19:38:15,3599,11.8178,15.463
+2016-08-22 19:48:18,3599,11.8188,15.4644
+2016-08-22 19:58:20,3599,11.8188,15.463
+2016-08-22 20:08:23,3599,11.7894,15.463
+2016-08-22 20:18:26,3599,11.7894,15.463
+2016-08-22 20:28:29,3599,11.8178,15.4644
+2016-08-22 20:38:31,3599,11.8188,15.463
+2016-08-22 20:48:34,3599,11.8188,15.4308
+2016-08-22 20:58:37,3599,11.8188,15.4308
+2016-08-22 21:08:39,3599,11.8178,15.4308
+2016-08-22 21:18:42,3599,11.7894,15.4644
+2016-08-22 21:28:45,3599,11.8188,15.463
+2016-08-22 21:38:47,3599,11.8178,15.4644
+2016-08-22 21:48:50,3599,11.8188,15.463
+2016-08-22 21:58:54,3599,11.8188,15.4644
+2016-08-22 22:08:58,3599,11.8188,15.4644
+2016-08-22 22:19:00,3599,11.7894,15.463
+2016-08-22 22:29:03,3599,11.8188,15.463
+2016-08-22 22:39:06,3598,11.7589,15.4644
+2016-08-22 22:49:09,3598,11.8188,15.463
+2016-08-22 22:59:12,3599,11.8178,15.463
+2016-08-22 23:09:14,3598,11.7894,15.463
+2016-08-22 23:19:17,3598,11.8188,15.4644
+2016-08-22 23:29:20,3598,11.8178,15.463
+2016-08-22 23:39:23,3598,11.8178,15.463
+2016-08-22 23:49:25,3598,11.8188,15.4644
+2016-08-22 23:59:28,3598,11.8178,15.463
+2016-08-23 00:09:31,3598,11.8178,15.4644
+2016-08-23 00:19:34,3598,11.8188,15.4644
+2016-08-23 00:29:36,3598,11.8188,15.4644
+2016-08-23 00:39:39,3598,11.8178,15.4644
+2016-08-23 00:49:42,3598,11.8188,15.4644
+2016-08-23 00:59:44,3598,11.7894,15.463
+2016-08-23 01:09:47,3598,11.76,15.4644
+2016-08-23 01:19:50,3598,11.7883,15.463
+2016-08-23 01:29:52,3597,11.7894,15.4685
+2016-08-23 01:39:55,3597,11.8157,15.463
+2016-08-23 01:49:58,3597,11.8157,15.4644
+2016-08-23 02:00:00,3597,11.8157,15.4644
+2016-08-23 02:10:03,3596,11.8178,15.4644
+2016-08-23 02:20:06,3596,11.8188,15.4952
+2016-08-23 02:30:08,3596,11.8188,15.4685
+2016-08-23 02:40:11,3595,11.7852,15.4671
+2016-08-23 02:50:13,3595,11.8157,15.463
+2016-08-23 03:00:16,3595,11.8157,15.4644
+2016-08-23 03:10:19,3595,11.8157,15.4671
+2016-08-23 03:20:21,3595,11.8746,15.4671
+2016-08-23 03:30:24,3595,11.8746,15.4644
+2016-08-23 03:40:26,3595,11.8746,15.4322
+2016-08-23 03:50:29,3595,11.9042,15.463
+2016-08-23 04:00:32,3595,11.8746,15.4965
+2016-08-23 04:10:34,3594,11.8746,15.463
+2016-08-23 04:20:37,3594,11.8157,15.4644
+2016-08-23 04:30:40,3594,11.7862,15.4685
+2016-08-23 04:40:42,3593,11.8157,15.4671
+2016-08-23 04:50:45,3593,11.8157,15.4685
+2016-08-23 05:00:47,3592,11.7568,15.4644
+2016-08-23 05:10:50,3592,11.8157,15.463
+2016-08-23 05:20:53,3592,11.8157,15.4685
+2016-08-23 05:30:55,3592,11.7841,15.4671
+2016-08-23 05:40:58,3592,11.7821,15.4685
+2016-08-23 05:51:01,3592,11.8125,15.463
+2016-08-23 06:01:03,3592,11.7831,15.4644
+2016-08-23 06:11:06,3592,11.8157,15.4671
+2016-08-23 06:21:08,3592,11.8136,15.4671
+2016-08-23 06:31:11,3592,11.7831,15.4671
+2016-08-23 06:41:13,3592,11.8125,15.4644
+2016-08-23 06:51:16,3592,11.8115,15.5007
+2016-08-23 07:01:18,3592,11.8136,15.4685
+2016-08-23 07:11:20,3592,11.8715,15.4685
+2016-08-23 07:21:23,3592,11.8725,15.4671
+2016-08-23 07:31:25,3593,11.8725,15.4671
+2016-08-23 07:41:28,3593,11.8715,15.4349
+2016-08-23 07:51:30,3594,11.8725,15.4685
+2016-08-23 08:01:33,3595,11.8715,15.4685
+2016-08-23 08:11:36,3595,11.8715,15.4671
+2016-08-23 08:21:38,3595,11.8683,15.463
+2016-08-23 08:31:41,3596,11.8672,15.4685
+2016-08-23 08:41:44,3596,11.8978,15.4685
+2016-08-23 08:51:47,3597,11.8704,15.3707
+2016-08-23 09:01:49,3598,11.8694,15.3707
+2016-08-23 09:11:52,3598,11.8967,15.3721
+2016-08-23 09:21:55,3598,11.901,15.3707
+2016-08-23 09:31:57,3598,11.8683,15.3707
+2016-08-23 09:42:00,3598,11.8704,15.3707
+2016-08-23 09:52:03,3598,11.8672,15.368
+2016-08-23 10:02:06,3599,11.7841,15.3707
+2016-08-23 10:12:08,3599,11.8115,15.3721
+2016-08-23 10:22:11,3599,11.8094,15.3707
+2016-08-23 10:32:14,3599,11.7821,15.4001
+2016-08-23 10:42:17,3599,11.7841,15.3666
+2016-08-23 10:52:20,3599,11.7841,15.3721
+2016-08-23 11:02:22,3599,11.7821,15.3707
+2016-08-23 11:12:25,3599,11.8125,15.3666
+2016-08-23 11:22:28,3599,11.7831,15.3707
+2016-08-23 11:32:31,3599,11.7831,15.3707
+2016-08-23 11:42:34,3599,11.8125,15.3987
+2016-08-23 11:52:37,3599,11.7831,15.3721
+2016-08-23 12:02:40,3599,11.8125,15.3707
+2016-08-23 12:12:42,3599,11.8125,15.3721
+2016-08-23 12:22:45,3599,11.8157,15.3387
+2016-08-23 12:32:48,3599,11.8157,15.3721
+2016-08-23 12:42:51,3599,11.7862,15.368
+2016-08-23 12:52:54,3599,11.8157,15.34
+2016-08-23 13:02:57,3599,11.8146,15.3707
+2016-08-23 13:13:00,3599,11.8157,15.3721
+2016-08-23 13:23:03,3599,11.7883,15.3721
+2016-08-23 13:33:06,3599,11.8188,15.368
+2016-08-23 13:43:09,3599,11.7012,15.3707
+2016-08-23 13:53:12,3599,11.7306,15.368
+2016-08-23 14:03:15,3599,11.8188,15.3666
+2016-08-23 14:13:18,3599,11.8188,15.3387
+2016-08-23 14:23:21,3599,11.7295,15.3721
+2016-08-23 14:33:24,3599,11.7306,15.3707
+2016-08-23 14:43:27,3599,11.7295,15.3707
+2016-08-23 14:53:29,3599,11.7012,15.3707
+2016-08-23 15:03:32,3599,11.7001,15.34
+2016-08-23 15:13:35,3599,11.7295,15.3387
+2016-08-23 15:23:38,3599,11.7306,15.3707
+2016-08-23 15:33:41,3599,11.7306,15.3707
+2016-08-23 15:43:44,3599,11.7306,15.3666
+2016-08-23 15:53:47,3599,11.7306,15.3066
+2016-08-23 16:03:50,3599,11.7306,15.276
+2016-08-23 16:13:52,3599,11.7295,15.2719
+2016-08-23 16:23:55,3599,11.7033,15.2427
+2016-08-23 16:33:58,3599,11.7295,15.308
+2016-08-23 16:44:01,3599,11.7337,15.2746
+2016-08-23 16:54:04,3599,11.7337,15.276
+2016-08-23 17:04:07,3599,11.7326,15.308
+2016-08-23 17:14:10,3599,11.7043,15.2746
+2016-08-23 17:24:13,3599,11.7368,15.276
+2016-08-23 17:34:16,3599,11.7358,15.3066
+2016-08-23 17:44:19,3599,11.7368,15.2746
+2016-08-23 17:54:21,3599,11.7368,15.2746
+2016-08-23 18:04:24,3599,11.7368,15.2427
+2016-08-23 18:14:27,3599,11.7389,15.3066
+2016-08-23 18:24:30,3599,11.7368,15.244000000000002
+2016-08-23 18:34:33,3599,11.7389,15.3066
+2016-08-23 18:44:35,3599,11.7389,15.2427
+2016-08-23 18:54:38,3599,11.7389,15.3066
+2016-08-23 19:04:41,3599,11.7389,15.3066
+2016-08-23 19:14:44,3599,11.7389,15.244000000000002
+2016-08-23 19:24:47,3599,11.7389,15.3066
+2016-08-23 19:34:49,3599,11.7095,15.2746
+2016-08-23 19:44:52,3599,11.7389,15.2427
+2016-08-23 19:54:55,3599,11.7095,15.276
+2016-08-23 20:04:58,3599,11.7095,15.276
+2016-08-23 20:15:01,3599,11.7389,15.2427
+2016-08-23 20:25:04,3599,11.7389,15.276
+2016-08-23 20:35:06,3599,11.7389,15.3066
+2016-08-23 20:45:09,3599,11.7431,15.276
+2016-08-23 20:55:12,3599,11.7389,15.2746
+2016-08-23 21:05:14,3599,11.7095,15.3066
+2016-08-23 21:15:17,3599,11.7137,15.2468
+2016-08-23 21:25:20,3599,11.7389,15.276
+2016-08-23 21:35:22,3599,11.742,15.2746
+2016-08-23 21:45:25,3599,11.7389,15.2746
+2016-08-23 21:55:28,3599,11.742,15.3066
+2016-08-23 22:05:30,3599,11.742,15.2746
+2016-08-23 22:15:33,3599,11.742,15.2746
+2016-08-23 22:25:35,3599,11.742,15.3094
+2016-08-23 22:35:38,3599,11.742,15.2801
+2016-08-23 22:45:41,3599,11.7126,15.2774
+2016-08-23 22:55:43,3599,11.742,15.2746
+2016-08-23 23:05:46,3599,11.7389,15.2774
+2016-08-23 23:15:49,3599,11.742,15.2427
+2016-08-23 23:25:51,3599,11.7126,15.3107
+2016-08-23 23:35:54,3599,11.7431,15.2481
+2016-08-23 23:45:57,3599,11.7715,15.3121
+2016-08-23 23:55:59,3599,11.7095,15.2801
+2016-08-24 00:06:02,3599,11.742,15.3094
+2016-08-24 00:16:04,3599,11.7126,15.3121
+2016-08-24 00:26:07,3599,11.742,15.2787
+2016-08-24 00:36:10,3599,11.742,15.3107
+2016-08-24 00:46:12,3599,11.742,15.3121
+2016-08-24 00:56:15,3599,11.7379,15.2468
+2016-08-24 01:06:17,3599,11.7389,15.2468
+2016-08-24 01:16:22,3599,11.7389,15.2787
+2016-08-24 01:26:25,3599,11.7085,15.2787
+2016-08-24 01:36:28,3599,11.7389,15.2801
+2016-08-24 01:46:30,3599,11.7389,15.2801
+2016-08-24 01:56:33,3599,11.7389,15.2481
+2016-08-24 02:06:35,3599,11.7389,15.2787
+2016-08-24 02:16:38,3598,11.7389,15.3107
+2016-08-24 02:26:41,3598,11.7095,15.2801
+2016-08-24 02:36:44,3599,11.7389,15.3121
+2016-08-24 02:46:46,3599,11.7389,15.3107
+2016-08-24 02:56:49,3598,11.7389,15.2801
+2016-08-24 03:06:52,3599,11.7389,15.3121
+2016-08-24 03:16:55,3599,11.7095,15.3107
+2016-08-24 03:26:57,3599,11.7389,15.3094
+2016-08-24 03:37:00,3599,11.7085,15.2468
+2016-08-24 03:47:03,3598,11.7095,15.3121
+2016-08-24 03:57:06,3598,11.7095,15.2787
+2016-08-24 04:07:09,3598,11.7095,15.2468
+2016-08-24 04:17:11,3599,11.7095,15.2787
+2016-08-24 04:27:14,3598,11.7095,15.2787
+2016-08-24 04:37:17,3598,11.7389,15.2468
+2016-08-24 04:47:19,3598,11.7389,15.3121
+2016-08-24 04:57:22,3598,11.7095,15.2454
+2016-08-24 05:07:25,3598,11.7389,15.2787
+2016-08-24 05:17:27,3598,11.7389,15.2801
+2016-08-24 05:27:30,3598,11.74,15.2801
+2016-08-24 05:37:33,3598,11.7389,15.3121
+2016-08-24 05:47:35,3598,11.7389,15.2774
+2016-08-24 05:57:38,3598,11.7389,15.2801
+2016-08-24 06:07:41,3598,11.7389,15.2774
+2016-08-24 06:17:43,3598,11.7389,15.2787
+2016-08-24 06:27:46,3598,11.7389,15.2774
+2016-08-24 06:37:49,3598,11.7389,15.2801
+2016-08-24 06:47:52,3598,11.7389,15.2787
+2016-08-24 06:57:54,3598,11.7389,15.2774
+2016-08-24 07:07:57,3598,11.7389,15.3121
+2016-08-24 07:18:00,3598,11.7389,15.2801
+2016-08-24 07:28:02,3598,11.7074,15.2774
+2016-08-24 07:38:05,3598,11.7389,15.3107
+2016-08-24 07:48:08,3598,11.7074,15.3107
+2016-08-24 07:58:10,3598,11.7389,15.2801
+2016-08-24 08:08:13,3598,11.7683,15.3107
+2016-08-24 08:18:16,3598,11.7389,15.2787
+2016-08-24 08:28:18,3599,11.7368,15.2774
+2016-08-24 08:38:21,3599,11.7074,15.2787
+2016-08-24 08:48:24,3599,11.7389,15.2787
+2016-08-24 08:58:26,3599,11.7368,15.3107
+2016-08-24 09:08:29,3599,11.7389,15.3107
+2016-08-24 09:18:32,3599,11.7389,15.2468
+2016-08-24 09:28:35,3599,11.7368,15.2774
+2016-08-24 09:38:37,3599,11.7095,15.2468
+2016-08-24 09:48:40,3599,11.7389,15.2468
+2016-08-24 09:58:43,3599,11.7095,15.3107
+2016-08-24 10:08:46,3599,11.7095,15.2801
+2016-08-24 10:18:48,3599,11.7389,15.2801
+2016-08-24 10:28:51,3599,11.7389,15.2787
+2016-08-24 10:38:54,3599,11.7389,15.2774
+2016-08-24 10:48:56,3599,11.7095,15.2801
+2016-08-24 10:58:59,3599,11.7389,15.2481
+2016-08-24 11:09:02,3599,11.7095,15.2801
+2016-08-24 11:19:05,3599,11.7389,15.3121
+2016-08-24 11:29:08,3599,11.6508,15.2468
+2016-08-24 11:39:11,3599,11.6508,15.3094
+2016-08-24 11:49:14,3599,11.6508,15.2801
+2016-08-24 11:59:17,3599,11.6214,15.3121
+2016-08-24 12:09:20,3599,11.6508,15.2801
+2016-08-24 12:19:23,3599,11.6508,15.2787
+2016-08-24 12:29:26,3599,11.6539,15.2787
+2016-08-24 12:39:29,3599,11.6539,15.2774
+2016-08-24 12:49:32,3599,11.6539,15.2801
+2016-08-24 12:59:35,3599,11.6549,15.2774
+2016-08-24 13:09:38,3599,11.6245,15.2454
+2016-08-24 13:19:41,3599,11.6539,15.3094
+2016-08-24 13:29:44,3599,11.6245,15.3107
+2016-08-24 13:39:47,3599,11.6245,15.2481
+2016-08-24 13:49:50,3599,11.656,15.2787
+2016-08-24 13:59:53,3599,11.6276,15.3094
+2016-08-24 14:09:56,3599,11.658,15.2481
+2016-08-24 14:20:00,3599,11.657,15.3094
+2016-08-24 14:30:03,3599,11.6266,15.3094
+2016-08-24 14:40:06,3599,11.6308,15.2787
+2016-08-24 14:50:09,3599,11.6308,15.3094
+2016-08-24 15:00:12,3599,11.6308,15.2468
+2016-08-24 15:10:16,3599,11.6601,15.2468
+2016-08-24 15:20:19,3599,11.5428,15.2481
+2016-08-24 15:30:22,3599,11.5721,15.3107
+2016-08-24 15:40:25,3599,11.5721,15.2787
+2016-08-24 15:50:29,3599,11.5731,15.2454
+2016-08-24 16:00:32,3599,11.5742,15.3121
+2016-08-24 16:10:35,3599,11.5459,15.3121
+2016-08-24 16:20:38,3599,11.5752,15.2481
+2016-08-24 16:30:42,3599,11.5742,15.2787
+2016-08-24 16:40:45,3599,11.5742,15.3107
+2016-08-24 16:50:48,3599,11.5742,15.2801
+2016-08-24 17:00:51,3599,11.5752,15.2787
+2016-08-24 17:10:55,3599,11.548,15.3107
+2016-08-24 17:20:58,3599,11.5762,15.2454
+2016-08-24 17:31:01,3599,11.548,15.2787
+2016-08-24 17:41:04,3599,11.548,15.2787
+2016-08-24 17:51:08,3599,11.548,15.2801
+2016-08-24 18:01:11,3599,11.5803,15.3121
+2016-08-24 18:11:14,3599,11.550999999999998,15.2468
+2016-08-24 18:21:17,3599,11.5803,15.2468
+2016-08-24 18:31:20,3599,11.550999999999998,15.2814
+2016-08-24 18:41:24,3599,11.5803,15.2468
+2016-08-24 18:51:27,3599,11.5803,15.1829
+2016-08-24 19:01:30,3599,11.5803,15.1843
+2016-08-24 19:11:33,3599,11.5803,15.3094
+2016-08-24 19:21:36,3599,11.5803,15.1829
+2016-08-24 19:31:39,3599,11.550999999999998,15.187
+2016-08-24 19:41:42,3599,11.5541,15.1524
+2016-08-24 19:51:45,3599,11.5834,15.1551
+2016-08-24 20:01:48,3599,11.5541,15.1883
+2016-08-24 20:11:51,3599,11.5552,15.1856
+2016-08-24 20:21:54,3599,11.5541,15.1816
+2016-08-24 20:31:57,3599,11.5541,15.2175
+2016-08-24 20:42:00,3599,11.5541,15.1829
+2016-08-24 20:52:03,3599,11.5541,15.187
+2016-08-24 21:02:06,3599,11.5541,15.1856
+2016-08-24 21:12:09,3599,11.5552,15.1537
+2016-08-24 21:22:12,3599,11.5541,15.1856
+2016-08-24 21:32:15,3599,11.5541,15.1883
+2016-08-24 21:42:18,3599,11.5541,15.187
+2016-08-24 21:52:20,3599,11.5552,15.2189
+2016-08-24 22:02:23,3599,11.5541,15.1856
+2016-08-24 22:12:26,3599,11.5541,15.187
+2016-08-24 22:22:29,3599,11.5541,15.2189
+2016-08-24 22:32:32,3599,11.5541,15.191
+2016-08-24 22:42:35,3599,11.5541,15.187
+2016-08-24 22:52:38,3599,11.5834,15.2216
+2016-08-24 23:02:41,3599,11.5834,15.1564
+2016-08-24 23:12:43,3599,11.5552,15.191
+2016-08-24 23:22:46,3599,11.6715,15.191
+2016-08-24 23:32:49,3599,11.6715,15.191
+2016-08-24 23:42:52,3599,11.6715,15.191
+2016-08-24 23:52:54,3599,11.6715,15.191
+2016-08-25 00:02:57,3599,11.6421,15.191
+2016-08-25 00:13:00,3599,11.6421,15.191
+2016-08-25 00:23:02,3599,11.6726,15.191
+2016-08-25 00:33:05,3599,11.6715,15.191
+2016-08-25 00:43:08,3599,11.6715,15.191
+2016-08-25 00:53:11,3599,11.6715,15.191
+2016-08-25 01:03:13,3599,11.6421,15.191
+2016-08-25 01:13:16,3599,11.6715,15.1591
+2016-08-25 01:23:19,3599,11.6715,15.191
+2016-08-25 01:33:22,3599,11.6715,15.191
+2016-08-25 01:43:24,3599,11.6715,15.1897
+2016-08-25 01:53:27,3599,11.6421,15.191
+2016-08-25 02:03:30,3599,11.6432,15.1951
+2016-08-25 02:13:33,3599,11.6726,15.1937
+2016-08-25 02:23:35,3599,11.6421,15.1951
+2016-08-25 02:33:38,3599,11.6432,15.1951
+2016-08-25 02:43:41,3599,11.6715,15.1951
+2016-08-25 02:53:44,3599,11.6715,15.1951
+2016-08-25 03:03:46,3599,11.639,15.227
+2016-08-25 03:13:49,3599,11.6684,15.1937
+2016-08-25 03:23:52,3599,11.6684,15.1964
+2016-08-25 03:33:55,3599,11.6684,15.1951
+2016-08-25 03:43:58,3599,11.639,15.2256
+2016-08-25 03:54:00,3599,11.6694,15.1951
+2016-08-25 04:04:03,3599,11.6684,15.1951
+2016-08-25 04:14:06,3599,11.6684,15.1937
+2016-08-25 04:24:09,3599,11.6684,15.1951
+2016-08-25 04:34:13,3599,11.639,15.1951
+2016-08-25 04:44:17,3599,11.6684,15.227
+2016-08-25 04:54:19,3599,11.639,15.1951
+2016-08-25 05:04:22,3599,11.6684,15.1951
+2016-08-25 05:14:25,3599,11.6684,15.1951
+2016-08-25 05:24:28,3599,11.6684,15.1951
+2016-08-25 05:34:30,3599,11.6684,15.2256
+2016-08-25 05:44:33,3599,11.639,15.2284
+2016-08-25 05:54:36,3599,11.6684,15.1951
+2016-08-25 06:04:39,3599,11.6684,15.1951
+2016-08-25 06:14:41,3599,11.6684,15.1632
+2016-08-25 06:24:44,3599,11.6653,15.227
+2016-08-25 06:34:46,3599,11.548,15.1937
+2016-08-25 06:44:49,3599,11.5773,15.1951
+2016-08-25 06:54:52,3599,11.5783,15.1951
+2016-08-25 07:04:55,3599,11.5773,15.1951
+2016-08-25 07:14:57,3599,11.6328,15.227
+2016-08-25 07:25:00,3599,11.6632,15.1951
+2016-08-25 07:35:03,3599,11.5742,15.1937
+2016-08-25 07:45:05,3599,11.6622,15.227
+2016-08-25 07:55:08,3599,11.5742,15.1964
+2016-08-25 08:05:11,3599,11.5742,15.1951
+2016-08-25 08:15:13,3599,11.5742,15.1951
+2016-08-25 08:25:16,3599,11.5449,15.1951
+2016-08-25 08:35:19,3599,11.5742,15.2284
+2016-08-25 08:45:22,3599,11.6622,15.1964
+2016-08-25 08:55:24,3599,11.6328,15.1951
+2016-08-25 09:05:27,3599,11.6328,15.227
+2016-08-25 09:15:30,3599,11.6328,15.1951
+2016-08-25 09:25:33,3599,11.6328,15.1951
+2016-08-25 09:35:35,3599,11.6622,15.2284
+2016-08-25 09:45:38,3599,11.5752,15.1951
+2016-08-25 09:55:41,3599,11.5752,15.1951
+2016-08-25 10:05:44,3599,11.5752,15.227
+2016-08-25 10:15:47,3599,11.5449,15.227
+2016-08-25 10:25:50,3599,11.5742,15.227
+2016-08-25 10:35:53,3599,11.5742,15.1937
+2016-08-25 10:45:56,3599,11.5742,15.227
+2016-08-25 10:55:59,3599,11.5449,15.1951
+2016-08-25 11:06:02,3599,11.5459,15.227
+2016-08-25 11:16:05,3599,11.5742,15.1937
+2016-08-25 11:26:08,3599,11.5742,15.1951
+2016-08-25 11:36:11,3599,11.5742,15.1937
+2016-08-25 11:46:14,3599,11.5752,15.1951
+2016-08-25 11:56:17,3599,11.5742,15.1618
+2016-08-25 12:06:20,3599,11.5459,15.13
+2016-08-25 12:16:24,3599,11.548,15.0981
+2016-08-25 12:26:27,3599,11.4561,15.1313
+2016-08-25 12:36:30,3599,11.4612,15.0995
+2016-08-25 12:46:33,3599,11.4894,15.1313
+2016-08-25 12:56:37,3599,11.4894,15.0995
+2016-08-25 13:06:40,3599,11.4602,15.0995
+2016-08-25 13:16:43,3599,11.4925,15.0995
+2016-08-25 13:26:46,3599,11.4633,15.0995
+2016-08-25 13:36:49,3599,11.4633,15.0995
+2016-08-25 13:46:52,3599,11.3757,15.1313
+2016-08-25 13:56:55,3599,11.4048,15.0981
+2016-08-25 14:06:59,3599,11.3757,15.0995
+2016-08-25 14:17:02,3599,11.3757,15.0995
+2016-08-25 14:27:05,3599,11.4048,15.13
+2016-08-25 14:37:09,3599,11.4048,15.1313
+2016-08-25 14:47:12,3599,11.4079,15.0995
+2016-08-25 14:57:16,3599,11.3787,15.1313
+2016-08-25 15:07:19,3599,11.3787,15.1313
+2016-08-25 15:17:23,3599,11.3787,15.1313
+2016-08-25 15:27:26,3599,11.4079,15.0995
+2016-08-25 15:37:30,3599,11.3787,15.0981
+2016-08-25 15:47:33,3599,11.4079,15.0981
+2016-08-25 15:57:36,3599,11.4069,15.0995
+2016-08-25 16:07:40,3599,11.3817,15.0995
+2016-08-25 16:17:43,3599,11.4119,15.0042
+2016-08-25 16:27:47,3599,11.4109,15.0042
+2016-08-25 16:37:50,3599,11.3817,15.0359
+2016-08-25 16:47:54,3599,11.3838,15.0042
+2016-08-25 16:57:57,3599,11.3838,15.0042
+2016-08-25 17:08:01,3599,11.3848,15.0042
+2016-08-25 17:18:04,3599,11.3848,15.0042
+2016-08-25 17:28:08,3599,11.3848,15.0029
+2016-08-25 17:38:11,3599,11.3858,15.0042
+2016-08-25 17:48:15,3599,11.414,15.0042
+2016-08-25 17:58:18,3599,11.3848,15.0042
+2016-08-25 18:08:21,3599,11.3878,15.0029
+2016-08-25 18:18:25,3599,11.3838,15.0029
+2016-08-25 18:28:28,3599,11.3878,15.0042
+2016-08-25 18:38:31,3599,11.416,15.0042
+2016-08-25 18:48:35,3599,11.3878,15.0042
+2016-08-25 18:58:38,3599,11.3868,15.0042
+2016-08-25 19:08:41,3599,11.3878,15.0042
+2016-08-25 19:18:45,3599,11.3868,15.0055
+2016-08-25 19:28:48,3599,11.416,15.0042
+2016-08-25 19:38:51,3599,11.3868,15.0042
+2016-08-25 19:48:54,3599,11.416,15.0042
+2016-08-25 19:58:58,3599,11.416,15.0029
+2016-08-25 20:09:01,3599,11.3878,15.0042
+2016-08-25 20:19:04,3599,11.3868,15.0042
+2016-08-25 20:29:07,3599,11.416,15.0042
+2016-08-25 20:39:11,3599,11.3868,15.0346
+2016-08-25 20:49:14,3599,11.3878,15.0029
+2016-08-25 20:59:17,3599,11.417,15.0042
+2016-08-25 21:09:20,3599,11.3868,15.0042
+2016-08-25 21:19:23,3599,11.3898,15.0042
+2016-08-25 21:29:26,3599,11.3868,15.0042
+2016-08-25 21:39:30,3599,11.4191,15.0082
+2016-08-25 21:49:33,3599,11.3898,15.0069
+2016-08-25 21:59:36,3599,11.3898,15.0082
+2016-08-25 22:09:39,3599,11.3898,15.0042
+2016-08-25 22:19:42,3599,11.4191,15.0413
+2016-08-25 22:29:45,3599,11.3898,15.0082
+2016-08-25 22:39:48,3599,11.3898,15.0082
+2016-08-25 22:49:51,3599,11.3898,15.0082
+2016-08-25 22:59:54,3599,11.4191,15.0082
+2016-08-25 23:09:57,3599,11.3909,15.0399
+2016-08-25 23:20:00,3599,11.3868,15.0399
+2016-08-25 23:30:03,3599,11.4191,15.0082
+2016-08-25 23:40:06,3599,11.3898,15.0082
+2016-08-25 23:50:09,3599,11.4191,15.0082
+2016-08-26 00:00:12,3599,11.4201,15.0082
+2016-08-26 00:10:15,3599,11.4191,15.0082
+2016-08-26 00:20:18,3599,11.3898,15.0082
+2016-08-26 00:30:21,3599,11.3909,15.0082
+2016-08-26 00:40:24,3599,11.4493,15.0082
+2016-08-26 00:50:27,3599,11.3878,15.044
+2016-08-26 01:00:30,3599,11.4462,15.0082
+2016-08-26 01:10:33,3599,11.3878,15.0122
+2016-08-26 01:20:36,3599,11.3878,15.0082
+2016-08-26 01:30:39,3599,11.3878,15.0122
+2016-08-26 01:40:41,3599,11.3868,15.0122
+2016-08-26 01:50:44,3599,11.4462,15.044
+2016-08-26 02:00:47,3599,11.3878,15.0399
+2016-08-26 02:10:50,3599,11.4442,15.044
+2016-08-26 02:20:53,3599,11.3868,15.0122
+2016-08-26 02:30:56,3599,11.4462,15.044
+2016-08-26 02:40:59,3599,11.3868,15.0122
+2016-08-26 02:51:02,3599,11.3878,15.0122
+2016-08-26 03:01:04,3599,11.3868,15.0122
+2016-08-26 03:11:07,3599,11.3868,15.0135
+2016-08-26 03:21:10,3599,11.416,15.0122
+2016-08-26 03:31:13,3599,11.416,15.0122
+2016-08-26 03:41:16,3599,11.3868,15.0109
+2016-08-26 03:51:19,3599,11.3868,15.044
+2016-08-26 04:01:21,3599,11.3868,15.0122
+2016-08-26 04:11:24,3599,11.3878,15.0122
+2016-08-26 04:21:27,3599,11.3868,15.044
+2016-08-26 04:31:30,3599,11.4452,15.0149
+2016-08-26 04:41:32,3599,11.416,15.044
+2016-08-26 04:51:35,3599,11.3838,15.044
+2016-08-26 05:01:38,3599,11.3838,15.0109
+2016-08-26 05:11:41,3599,11.3848,15.0149
+2016-08-26 05:21:43,3599,11.3838,15.0162
+2016-08-26 05:31:46,3599,11.413,15.0149
+2016-08-26 05:41:49,3599,11.3848,15.0149
+2016-08-26 05:51:52,3599,11.413,15.0149
+2016-08-26 06:01:54,3599,11.414,15.0466
+2016-08-26 06:11:57,3599,11.3858,15.0149
+2016-08-26 06:22:00,3599,11.413,15.0162
+2016-08-26 06:32:02,3599,11.414,15.0466
+2016-08-26 06:42:05,3599,11.3858,15.0175
+2016-08-26 06:52:08,3599,11.414,15.0162
+2016-08-26 07:02:11,3599,11.3848,15.0466
+2016-08-26 07:12:14,3599,11.414,15.0149
+2016-08-26 07:22:16,3599,11.3848,15.0149
+2016-08-26 07:32:19,3599,11.3817,15.0149
+2016-08-26 07:42:22,3599,11.3817,15.0162
+2016-08-26 07:52:24,3599,11.3828,15.0149
+2016-08-26 08:02:26,3599,11.4109,15.0162
+2016-08-26 08:12:29,3599,11.4109,15.0162
+2016-08-26 08:22:32,3599,11.4109,15.0149
+2016-08-26 08:32:34,3599,11.3817,15.0149
+2016-08-26 08:42:37,3599,11.3817,15.0149
+2016-08-26 08:52:40,3599,11.4371,15.0149
+2016-08-26 09:02:43,3599,11.4109,15.0466
+2016-08-26 09:12:46,3599,11.4079,15.0466
+2016-08-26 09:22:49,3599,11.4401,15.0149
+2016-08-26 09:32:52,3599,11.3787,15.0149
+2016-08-26 09:42:55,3599,11.4079,15.0149
+2016-08-26 09:52:58,3599,11.3787,15.048
+2016-08-26 10:03:01,3599,11.3787,15.0162
+2016-08-26 10:13:04,3599,11.3787,15.0175
+2016-08-26 10:23:07,3599,11.3787,15.0149
+2016-08-26 10:33:10,3599,11.4391,15.0466
+2016-08-26 10:43:13,3599,11.3787,15.0149
+2016-08-26 10:53:16,3599,11.4109,15.0162
+2016-08-26 11:03:19,3599,11.3828,15.048
+2016-08-26 11:13:22,3599,11.3817,15.0149
+2016-08-26 11:23:26,3599,11.3817,15.0493
+2016-08-26 11:33:29,3599,11.3817,15.048
+2016-08-26 11:43:32,3599,11.4109,15.0162
+2016-08-26 11:53:35,3599,11.3838,15.0784
+2016-08-26 12:03:39,3599,11.3848,15.0798
+2016-08-26 12:13:42,3599,11.3264,15.0162
+2016-08-26 12:23:46,3599,11.3264,15.0149
+2016-08-26 12:33:49,3599,11.3264,15.0149
+2016-08-26 12:43:52,3599,11.3264,15.0162
+2016-08-26 12:53:56,3599,11.3254,15.0162
+2016-08-26 13:03:59,3599,11.3264,15.0466
+2016-08-26 13:14:03,3599,11.2963,15.0149
+2016-08-26 13:24:06,3599,11.3264,14.9845
+2016-08-26 13:34:09,3599,11.3285,14.9832
+2016-08-26 13:44:13,3599,11.2121,15.0162
+2016-08-26 13:54:16,3599,11.2411,14.9211
+2016-08-26 14:04:20,3599,11.3285,14.9198
+2016-08-26 14:14:23,3599,11.3003,14.9198
+2016-08-26 14:24:27,3599,11.2421,14.9211
+2016-08-26 14:34:30,3599,11.2411,14.9198
+2016-08-26 14:44:34,3599,11.2441,14.9198
+2016-08-26 14:54:37,3599,11.2441,14.9198
+2016-08-26 15:04:41,3599,11.2441,14.9198
+2016-08-26 15:14:45,3599,11.2451,14.9198
+2016-08-26 15:24:48,3599,11.2481,14.9198
+2016-08-26 15:34:52,3599,11.2471,14.9211
+2016-08-26 15:44:55,3599,11.2481,14.9211
+2016-08-26 15:54:59,3599,11.2481,14.9198
+2016-08-26 16:05:03,3599,11.2471,14.9211
+2016-08-26 16:15:06,3599,11.2471,14.9198
+2016-08-26 16:25:10,3599,11.2501,14.9198
+2016-08-26 16:35:14,3599,11.222,14.9198
+2016-08-26 16:45:17,3599,11.2501,14.9198
+2016-08-26 16:55:20,3599,11.2531,14.9198
+2016-08-26 17:05:24,3599,11.2531,14.9198
+2016-08-26 17:15:27,3599,11.2531,14.9198
+2016-08-26 17:25:31,3599,11.2531,14.9198
+2016-08-26 17:35:34,3599,11.224,14.9225
+2016-08-26 17:45:38,3599,11.2531,14.9198
+2016-08-26 17:55:41,3599,11.224,14.9198
+2016-08-26 18:05:45,3599,11.2531,14.8895
+2016-08-26 18:15:48,3599,11.2531,14.9198
+2016-08-26 18:25:52,3599,11.2561,14.9198
+2016-08-26 18:35:55,3599,11.227,14.9211
+2016-08-26 18:45:58,3599,11.2571,14.8882
+2016-08-26 18:56:02,3599,11.2561,14.9198
+2016-08-26 19:06:05,3599,11.2561,14.9515
+2016-08-26 19:16:09,3599,11.2561,14.9198
+2016-08-26 19:26:12,3599,11.2561,14.9198
+2016-08-26 19:36:16,3599,11.2561,14.9198
+2016-08-26 19:46:19,3599,11.2561,14.8882
+2016-08-26 19:56:22,3599,11.2591,14.9198
+2016-08-26 20:06:26,3599,11.2551,14.9211
+2016-08-26 20:16:29,3599,11.2581,14.9198
+2016-08-26 20:26:32,3599,11.2591,14.8566
+2016-08-26 20:36:35,3599,11.2591,14.9198
+2016-08-26 20:46:38,3599,11.2581,14.8237
+2016-08-26 20:56:42,3599,11.2591,14.9211
+2016-08-26 21:06:45,3599,11.2601,14.8566
+2016-08-26 21:16:48,3599,11.2591,14.8566
+2016-08-26 21:26:51,3599,11.2591,14.825
+2016-08-26 21:36:55,3599,11.2591,14.8566
+2016-08-26 21:46:58,3599,11.2591,14.8566
+2016-08-26 22:07:11,3599,11.2581,14.8579
+2016-08-26 22:17:15,3599,11.2591,14.8592
+2016-08-26 22:27:18,3599,11.2621,14.8592
+2016-08-26 22:37:21,3599,11.2591,14.8566
+2016-08-26 22:47:24,3599,11.233,14.8566
+2016-08-26 22:57:27,3599,11.2601,14.8566
+2016-08-26 23:07:31,3599,11.2591,14.825
+2016-08-26 23:17:34,3599,11.2621,14.8869
+2016-08-26 23:27:37,3599,11.2621,14.825
+2016-08-26 23:37:40,3599,11.233,14.8316
+2016-08-26 23:47:43,3599,11.2621,14.8592
+2016-08-26 23:57:46,3599,11.2621,14.8606
+2016-08-27 00:07:49,3599,11.2621,14.829
+2016-08-27 00:17:52,3599,11.2621,14.8619
+2016-08-27 00:27:55,3599,11.2591,14.8606
+2016-08-27 00:37:58,3599,11.2581,14.8606
+2016-08-27 00:48:01,3599,11.2591,14.8606
+2016-08-27 00:58:04,3599,11.2591,14.8606
+2016-08-27 01:08:07,3599,11.2621,14.8592
+2016-08-27 01:18:10,3599,11.2591,14.8606
+2016-08-27 01:28:13,3599,11.2581,14.8606
+2016-08-27 01:38:16,3599,11.23,14.8592
+2016-08-27 01:48:19,3599,11.2591,14.8922
+2016-08-27 01:58:22,3599,11.2591,14.8619
+2016-08-27 02:08:24,3599,11.2591,14.8961
+2016-08-27 02:18:27,3599,11.2591,14.8961
+2016-08-27 02:28:30,3599,11.2591,14.829
+2016-08-27 02:38:33,3599,11.2591,14.8658
+2016-08-27 02:48:36,3599,11.2591,14.8961
+2016-08-27 02:58:39,3599,11.2591,14.8645
+2016-08-27 03:08:42,3599,11.2591,14.8645
+2016-08-27 03:18:45,3599,11.227,14.8658
+2016-08-27 03:28:48,3599,11.2561,14.8645
+2016-08-27 03:38:51,3599,11.2561,14.8329
+2016-08-27 03:48:54,3599,11.2561,14.8658
+2016-08-27 03:58:56,3599,11.2561,14.8329
+2016-08-27 04:08:59,3599,11.2561,14.8645
+2016-08-27 04:19:02,3599,11.2561,14.8658
+2016-08-27 04:29:05,3599,11.2561,14.8658
+2016-08-27 04:39:08,3599,11.2853,14.8645
+2016-08-27 04:49:11,3599,11.229,14.8645
+2016-08-27 04:59:14,3599,11.227,14.8658
+2016-08-27 05:09:16,3599,11.227,14.8658
+2016-08-27 05:19:19,3599,11.2581,14.8343
+2016-08-27 05:29:22,3599,11.2561,14.8645
+2016-08-27 05:39:25,3599,11.3144,14.8343
+2016-08-27 05:49:28,3599,11.3154,14.8658
+2016-08-27 05:59:30,3599,11.3426,14.8645
+2016-08-27 06:09:33,3599,11.3426,14.8645
+2016-08-27 06:19:36,3599,11.3446,14.8658
+2016-08-27 06:29:39,3599,11.3114,14.8658
+2016-08-27 06:39:42,3599,11.3406,14.8698
+2016-08-27 06:49:45,3599,11.3406,14.8369
+2016-08-27 06:59:47,3599,11.3698,14.8698
+2016-08-27 07:09:50,3599,11.3406,14.8698
+2016-08-27 07:19:53,3599,11.3406,14.8685
+2016-08-27 07:29:56,3599,11.3406,14.8698
+2016-08-27 07:39:59,3599,11.3406,14.8698
+2016-08-27 07:50:01,3599,11.3406,14.8685
+2016-08-27 08:00:04,3599,11.3406,14.8685
+2016-08-27 08:10:07,3599,11.3406,14.8382
+2016-08-27 08:20:10,3599,11.3416,14.8369
+2016-08-27 08:30:12,3599,11.3406,14.8698
+2016-08-27 08:40:15,3599,11.3406,14.8685
+2016-08-27 08:50:18,3599,11.3406,14.8685
+2016-08-27 09:00:21,3599,11.3385,14.8685
+2016-08-27 09:10:23,3599,11.3385,14.8685
+2016-08-27 09:20:26,3599,11.3667,14.8698
+2016-08-27 09:30:29,3599,11.3385,14.8382
+2016-08-27 09:40:32,3599,11.3385,14.8698
+2016-08-27 09:50:35,3599,11.2491,14.8685
+2016-08-27 10:00:38,3599,11.2501,14.8698
+2016-08-27 10:10:41,3599,11.2501,14.8685
+2016-08-27 10:20:44,3599,11.2792,14.8685
+2016-08-27 10:30:47,3599,11.2511,14.8369
+2016-08-27 10:40:50,3599,11.2511,14.8685
+2016-08-27 10:50:53,3599,11.2501,14.8054
+2016-08-27 11:00:56,3599,11.2501,14.8672
+2016-08-27 11:10:58,3599,11.220999999999998,14.8369
+2016-08-27 11:21:01,3599,11.222,14.8685
+2016-08-27 11:31:04,3599,11.2511,14.8698
+2016-08-27 11:41:07,3599,11.220999999999998,14.8698
+2016-08-27 11:51:10,3599,11.2501,14.8369
+2016-08-27 12:01:13,3599,11.2491,14.8685
+2016-08-27 12:11:16,3599,11.2511,14.8685
+2016-08-27 12:21:19,3599,11.2501,14.8685
+2016-08-27 12:31:22,3599,11.2511,14.8685
+2016-08-27 12:41:26,3599,11.1659,14.8698
+2016-08-27 12:51:29,3599,11.1629,14.8369
+2016-08-27 13:01:32,3599,11.1649,14.8698
+2016-08-27 13:11:35,3599,11.1659,14.8698
+2016-08-27 13:21:39,3599,11.1659,14.8369
+2016-08-27 13:31:42,3599,11.1659,14.7423
+2016-08-27 13:41:45,3599,11.1659,14.7751
+2016-08-27 13:51:48,3599,11.1659,14.7751
+2016-08-27 14:01:51,3599,11.1659,14.7751
+2016-08-27 14:11:54,3599,11.1659,14.7436
+2016-08-27 14:21:58,3599,11.1659,14.7423
+2016-08-27 14:32:01,3599,11.1659,14.7436
+2016-08-27 14:42:04,3599,11.1659,14.7423
+2016-08-27 14:52:08,3599,11.0818,14.7423
+2016-08-27 15:02:11,3599,11.0789,14.7423
+2016-08-27 15:12:14,3599,11.0828,14.7436
+2016-08-27 15:22:17,3599,11.0538,14.7423
+2016-08-27 15:32:21,3599,11.0818,14.7436
+2016-08-27 15:42:24,3599,11.0818,14.7423
+2016-08-27 15:52:27,3599,11.0818,14.7751
+2016-08-27 16:02:31,3599,11.0818,14.7423
+2016-08-27 16:12:34,3599,11.0828,14.7423
+2016-08-27 16:22:37,3599,11.0848,14.7423
+2016-08-27 16:32:41,3599,11.0848,14.7436
+2016-08-27 16:42:44,3599,11.0848,14.7423
+2016-08-27 16:52:47,3599,11.0558,14.7738
+2016-08-27 17:02:51,3599,11.0868,14.7423
+2016-08-27 17:12:54,3599,11.0587,14.7423
+2016-08-27 17:22:57,3599,11.0877,14.7423
+2016-08-27 17:33:01,3599,11.0877,14.7436
+2016-08-27 17:43:04,3599,11.0578,14.7423
+2016-08-27 17:53:07,3599,11.0877,14.7423
+2016-08-27 18:03:11,3599,11.0877,14.7423
+2016-08-27 18:13:14,3599,11.0877,14.7423
+2016-08-27 18:23:17,3599,11.0868,14.7423
+2016-08-27 18:33:21,3599,11.0877,14.7738
+2016-08-27 18:43:24,3599,11.0587,14.7423
+2016-08-27 18:53:27,3599,11.0868,14.7738
+2016-08-27 19:03:30,3599,11.0897,14.7423
+2016-08-27 19:13:34,3599,11.1778,14.7423
+2016-08-27 19:23:37,3599,11.1478,14.7423
+2016-08-27 19:33:40,3599,11.1768,14.7738
+2016-08-27 19:43:43,3599,11.1768,14.7463
+2016-08-27 19:53:46,3599,11.1778,14.7109
+2016-08-27 20:03:50,3599,11.1778,14.7738
+2016-08-27 20:13:53,3599,11.1768,14.7423
+2016-08-27 20:23:55,3599,11.1778,14.7423
+2016-08-27 20:33:58,3599,11.1768,14.7765
+2016-08-27 20:44:01,3599,11.1778,14.745
+2016-08-27 20:54:05,3599,11.1478,14.7423
+2016-08-27 21:04:08,3599,11.1778,14.7778
+2016-08-27 21:14:11,3599,11.1778,14.7423
+2016-08-27 21:24:14,3599,11.1778,14.7791
+2016-08-27 21:34:16,3599,11.1778,14.7765
+2016-08-27 21:44:19,3599,11.1778,14.7778
+2016-08-27 21:54:22,3599,11.2049,14.7751
+2016-08-27 22:04:25,3599,11.1778,14.7463
+2016-08-27 22:14:28,3599,11.1768,14.7476
+2016-08-27 22:24:31,3599,11.1778,14.7463
+2016-08-27 22:34:34,3599,11.1778,14.7463
+2016-08-27 22:44:37,3599,11.1778,14.7778
+2016-08-27 22:54:40,3599,11.1768,14.7436
+2016-08-27 23:04:43,3599,11.1778,14.7463
+2016-08-27 23:14:46,3599,11.1768,14.7476
+2016-08-27 23:24:49,3599,11.1768,14.7476
+2016-08-27 23:34:52,3599,11.1778,14.7476
+2016-08-27 23:44:55,3599,11.1778,14.7463
+2016-08-27 23:54:58,3599,11.1778,14.7476
+2016-08-28 00:05:01,3599,11.1778,14.7778
+2016-08-28 00:15:04,3599,11.2069,14.745
+2016-08-28 00:25:07,3599,11.1778,14.7476
+2016-08-28 00:35:09,3599,11.1778,14.7463
+2016-08-28 00:45:12,3599,11.1778,14.745
+2016-08-28 00:55:15,3599,11.1778,14.7791
+2016-08-28 01:05:18,3599,11.1778,14.7161
+2016-08-28 01:15:21,3599,11.1778,14.7463
+2016-08-28 01:25:24,3599,11.1778,14.7791
+2016-08-28 01:35:27,3599,11.1778,14.7476
+2016-08-28 01:45:30,3599,11.2059,14.7476
+2016-08-28 01:55:33,3599,11.1488,14.7778
+2016-08-28 02:05:36,3599,11.2039,14.7476
+2016-08-28 02:15:39,3599,11.1778,14.7463
+2016-08-28 02:25:42,3599,11.1729,14.7476
+2016-08-28 02:35:44,3599,11.1748,14.7476
+2016-08-28 02:45:47,3599,11.1748,14.8093
+2016-08-28 02:55:50,3599,11.1739,14.7476
+2016-08-28 03:05:53,3599,11.1739,14.7476
+2016-08-28 03:15:56,3599,11.1748,14.745
+2016-08-28 03:25:59,3599,11.1739,14.7778
+2016-08-28 03:36:02,3599,11.1739,14.7463
+2016-08-28 03:46:05,3599,11.1748,14.7778
+2016-08-28 03:56:07,3599,11.1748,14.7778
+2016-08-28 04:06:10,3599,11.1748,14.7476
+2016-08-28 04:16:13,3599,11.1739,14.7765
+2016-08-28 04:26:16,3599,11.1748,14.7476
+2016-08-28 04:36:19,3599,11.1748,14.7463
+2016-08-28 04:46:21,3599,11.1458,14.7476
+2016-08-28 04:56:24,3599,11.1748,14.7463
+2016-08-28 05:06:27,3599,11.2029,14.7791
+2016-08-28 05:16:30,3599,11.1748,14.7476
+2016-08-28 05:26:32,3599,11.1748,14.7463
+2016-08-28 05:36:34,3599,11.1719,14.7476
+2016-08-28 05:46:37,3599,11.1719,14.7817
+2016-08-28 05:56:40,3599,11.1729,14.7765
+2016-08-28 06:06:43,3599,11.1719,14.7476
+2016-08-28 06:16:45,3599,11.1719,14.7463
+2016-08-28 06:26:48,3599,11.1709,14.7515
+2016-08-28 06:36:51,3599,11.1438,14.783
+2016-08-28 06:46:54,3599,11.1709,14.745
+2016-08-28 06:56:56,3599,11.1709,14.7161
+2016-08-28 07:06:59,3599,11.2009,14.7804
+2016-08-28 07:17:02,3599,11.1719,14.7502
+2016-08-28 07:27:05,3599,11.1689,14.7843
+2016-08-28 07:37:07,3599,11.1689,14.7502
+2016-08-28 07:47:10,3599,11.1679,14.7515
+2016-08-28 07:57:13,3599,11.1679,14.7804
+2016-08-28 08:07:16,3599,11.1689,14.72
+2016-08-28 08:17:19,3599,11.1689,14.7804
+2016-08-28 08:27:21,3599,11.1699,14.783
+2016-08-28 08:37:24,3599,11.1679,14.7489
+2016-08-28 08:47:27,3599,11.1689,14.7502
+2016-08-28 08:57:30,3599,11.1689,14.7515
+2016-08-28 09:07:33,3599,11.0519,14.783
+2016-08-28 09:17:36,3599,11.1699,14.7515
+2016-08-28 09:27:38,3599,11.0808,14.7804
+2016-08-28 09:37:41,3599,11.0818,14.7817
+2016-08-28 09:47:44,3599,11.0818,14.7528
+2016-08-28 09:57:47,3599,11.1108,14.7502
+2016-08-28 10:07:50,3599,11.0528,14.783
+2016-08-28 10:17:53,3599,11.0818,14.7817
+2016-08-28 10:27:56,3599,11.0808,14.7528
+2016-08-28 10:37:59,3599,11.0818,14.7515
+2016-08-28 10:48:02,3599,11.1108,14.7817
+2016-08-28 10:58:05,3599,11.0808,14.7502
+2016-08-28 11:08:08,3599,11.0818,14.7791
+2016-08-28 11:18:11,3599,11.0818,14.7515
+2016-08-28 11:28:14,3599,11.0818,14.7515
+2016-08-28 11:38:17,3599,11.0808,14.7515
+2016-08-28 11:48:21,3599,11.0818,14.7502
+2016-08-28 11:58:24,3599,11.0818,14.7515
+2016-08-28 12:08:27,3599,11.0528,14.7502
+2016-08-28 12:18:30,3599,11.0848,14.7515
+2016-08-28 12:28:33,3599,11.0838,14.7502
+2016-08-28 12:38:37,3599,11.0858,14.7502
+2016-08-28 12:48:40,3599,11.0848,14.7502
+2016-08-28 12:58:43,3599,11.0848,14.7817
+2016-08-28 13:08:47,3599,11.0868,14.7502
+2016-08-28 13:18:50,3599,11.0848,14.7515
+2016-08-28 13:28:53,3599,11.0868,14.6572
+2016-08-28 13:38:57,3599,11.0587,14.7489
+2016-08-28 13:49:00,3599,11.0877,14.6559
+2016-08-28 13:59:03,3599,11.0868,14.6572
+2016-08-28 14:09:06,3599,11.0877,14.6572
+2016-08-28 14:19:10,3599,11.0877,14.6546
+2016-08-28 14:29:12,3599,11.0868,14.6886
+2016-08-28 14:39:15,3599,11.0877,14.6873
+2016-08-28 14:49:18,3599,11.0877,14.6559
+2016-08-28 14:59:22,3599,11.0868,14.6886
+2016-08-28 15:09:25,3599,11.0907,14.6572
+2016-08-28 15:19:28,3599,11.0907,14.6546
+2016-08-28 15:29:31,3599,11.0897,14.6873
+2016-08-28 15:39:34,3599,11.0907,14.6873
+2016-08-28 15:49:38,3599,11.0038,14.6559
+2016-08-28 15:59:41,3599,11.0038,14.686
+2016-08-28 16:09:44,3599,11.0038,14.6546
+2016-08-28 16:19:47,3599,11.0038,14.6585
+2016-08-28 16:29:50,3599,11.0327,14.686
+2016-08-28 16:39:54,3599,11.0057,14.6572
+2016-08-28 16:49:57,3599,11.0057,14.6572
+2016-08-28 17:00:00,3599,11.0057,14.6546
+2016-08-28 17:10:03,3599,11.0057,14.6873
+2016-08-28 17:20:07,3599,11.0057,14.6912
+2016-08-28 17:30:10,3599,11.0057,14.686
+2016-08-28 17:40:13,3599,10.9797,14.6572
+2016-08-28 17:50:16,3599,10.9797,14.686
+2016-08-28 18:00:19,3599,11.0087,14.6572
+2016-08-28 18:10:23,3599,11.0376,14.6873
+2016-08-28 18:20:26,3599,11.0666,14.6559
+2016-08-28 18:30:29,3599,11.0956,14.6598
+2016-08-28 18:40:32,3599,11.0956,14.6546
+2016-08-28 18:50:35,3599,11.0966,14.6598
+2016-08-28 19:00:38,3599,11.0956,14.6598
+2016-08-28 19:10:41,3599,11.0956,14.6886
+2016-08-28 19:20:44,3599,11.0956,14.6572
+2016-08-28 19:30:47,3599,11.0956,14.6598
+2016-08-28 19:40:50,3599,11.0956,14.6624
+2016-08-28 19:50:53,3599,11.0986,14.6886
+2016-08-28 20:00:56,3599,11.0696,14.6572
+2016-08-28 20:10:58,3599,11.0956,14.6912
+2016-08-28 20:21:01,3599,11.0956,14.6611
+2016-08-28 20:31:04,3599,11.0956,14.6912
+2016-08-28 20:41:07,3599,11.0676,14.6912
+2016-08-28 20:51:10,3599,11.0966,14.6598
+2016-08-28 21:01:13,3599,11.0956,14.6598
+2016-08-28 21:11:15,3599,11.0666,14.6598
+2016-08-28 21:21:18,3599,11.0956,14.6912
+2016-08-28 21:31:21,3599,11.0956,14.6598
+2016-08-28 21:41:23,3599,11.0946,14.6925
+2016-08-28 21:51:26,3599,11.0966,14.6912
+2016-08-28 22:01:29,3599,11.0956,14.6598
+2016-08-28 22:11:32,3599,11.0666,14.6585
+2016-08-28 22:21:34,3599,11.0966,14.6938
+2016-08-28 22:31:37,3599,11.0956,14.6912
+2016-08-28 22:41:40,3599,11.1247,14.6598
+2016-08-28 22:51:43,3599,11.0956,14.6598
+2016-08-28 23:01:45,3599,11.1247,14.6611
+2016-08-28 23:11:48,3599,11.0956,14.6912
+2016-08-28 23:21:50,3599,11.1257,14.6912
+2016-08-28 23:31:53,3599,11.0956,14.6912
+2016-08-28 23:41:56,3599,11.0956,14.6899
+2016-08-28 23:51:58,3599,11.0956,14.6912
+2016-08-29 00:02:01,3599,11.1247,14.6598
+2016-08-29 00:12:04,3599,11.0647,14.6912
+2016-08-29 00:22:07,3599,11.0656,14.6598
+2016-08-29 00:32:10,3599,11.0656,14.6598
+2016-08-29 00:42:13,3599,11.0937,14.6912
+2016-08-29 00:52:15,3599,11.0647,14.6598
+2016-08-29 01:02:18,3599,11.1217,14.6598
+2016-08-29 01:12:21,3599,11.0927,14.6598
+2016-08-29 01:22:24,3599,11.0637,14.6912
+2016-08-29 01:32:27,3599,11.0927,14.6611
+2016-08-29 01:42:29,3599,11.0946,14.6899
+2016-08-29 01:52:32,3599,11.0907,14.6585
+2016-08-29 02:02:35,3599,11.1197,14.6912
+2016-08-29 02:12:37,3599,11.0607,14.6912
+2016-08-29 02:22:40,3599,11.0627,14.6624
+2016-08-29 02:32:43,3599,11.0907,14.6637
+2016-08-29 02:42:45,3599,11.0907,14.6637
+2016-08-29 02:52:48,3599,11.0897,14.6663
+2016-08-29 03:02:50,3599,11.0907,14.6637
+2016-08-29 03:12:53,3599,11.0907,14.6624
+2016-08-29 03:22:56,3599,11.0907,14.6951
+2016-08-29 03:32:58,3599,11.0907,14.6598
+2016-08-29 03:43:01,3599,11.0907,14.6912
+2016-08-29 03:53:03,3599,11.0877,14.6951
+2016-08-29 04:03:06,3599,11.0868,14.6637
+2016-08-29 04:13:08,3599,11.0868,14.6637
+2016-08-29 04:23:11,3599,11.0877,14.6637
+2016-08-29 04:33:13,3599,11.0877,14.6951
+2016-08-29 04:43:16,3599,11.0868,14.665
+2016-08-29 04:53:19,3599,11.0877,14.6951
+2016-08-29 05:03:21,3599,11.0877,14.6637
+2016-08-29 05:13:24,3599,11.0877,14.6951
+2016-08-29 05:23:26,3599,11.0887,14.665
+2016-08-29 05:33:36,3599,11.0848,14.6637
+2016-08-29 05:43:39,3599,11.0848,14.6977
+2016-08-29 05:53:41,3599,11.0848,14.6637
+2016-08-29 06:03:44,3599,11.0848,14.6951
+2016-08-29 06:13:47,3599,11.0848,14.6951
+2016-08-29 06:23:49,3599,11.0848,14.6951
+2016-08-29 06:33:52,3599,11.0848,14.6964
+2016-08-29 06:43:54,3599,11.0818,14.6663
+2016-08-29 06:53:57,3599,11.0818,14.6951
+2016-08-29 07:03:59,3599,11.0818,14.6964
+2016-08-29 07:14:02,3599,11.0818,14.6663
+2016-08-29 07:24:05,3599,11.1108,14.6951
+2016-08-29 07:34:07,3599,11.0818,14.6964
+2016-08-29 07:44:10,3599,11.0818,14.6951
+2016-08-29 07:54:12,3599,11.1108,14.6951
+2016-08-29 08:04:15,3599,11.0789,14.6964
+2016-08-29 08:14:17,3599,11.0789,14.6951
+2016-08-29 08:24:20,3599,11.0789,14.6951
+2016-08-29 08:34:22,3599,11.0789,14.6951
+2016-08-29 08:44:25,3599,11.0789,14.6964
+2016-08-29 08:54:27,3599,11.1079,14.7266
+2016-08-29 09:04:30,3599,11.0789,14.6624
+2016-08-29 09:14:32,3599,11.0789,14.6637
+2016-08-29 09:24:35,3599,11.0789,14.6663
+2016-08-29 09:34:38,3599,11.0789,14.6637
+2016-08-29 09:44:41,3599,11.0798,14.6951
+2016-08-29 09:54:43,3599,11.0789,14.6637
+2016-08-29 10:04:46,3599,11.0789,14.6624
+2016-08-29 10:14:48,3599,11.0789,14.6637
+2016-08-29 10:24:51,3599,11.0789,14.6951
+2016-08-29 10:34:54,3599,11.0789,14.6637
+2016-08-29 10:44:57,3599,11.0789,14.6637
+2016-08-29 10:54:59,3599,11.0769,14.6637
+2016-08-29 11:05:02,3599,11.0759,14.665
+2016-08-29 11:15:05,3599,11.0759,14.6977
+2016-08-29 11:25:08,3599,11.0759,14.6951
+2016-08-29 11:35:10,3599,11.1039,14.6624
+2016-08-29 11:45:13,3599,11.0759,14.6637
+2016-08-29 11:55:16,3599,11.0759,14.6637
+2016-08-29 12:05:18,3599,11.1049,14.6637
+2016-08-29 12:15:21,3599,11.0749,14.6951
+2016-08-29 12:25:24,3599,11.0759,14.6676
+2016-08-29 12:35:27,3599,11.0769,14.665
+2016-08-29 12:45:29,3599,11.0759,14.6585
+2016-08-29 12:55:32,3599,11.1049,14.6663
+2016-08-29 13:05:35,3599,11.0769,14.6899
+2016-08-29 13:15:37,3599,11.0759,14.6624
+2016-08-29 13:25:40,3599,11.046,14.665
+2016-08-29 13:35:43,3599,11.0479,14.6938
+2016-08-29 13:45:46,3599,11.0759,14.6637
+2016-08-29 13:55:48,3599,11.0729,14.6611
+2016-08-29 14:05:51,3599,11.1039,14.6624
+2016-08-29 14:15:54,3599,11.0769,14.5957
+2016-08-29 14:25:56,3599,11.0759,14.5683
+2016-08-29 14:35:59,3599,11.0739,14.630999999999998
+2016-08-29 14:46:02,3599,11.0739,14.5657
+2016-08-29 14:56:05,3599,11.0739,14.6284
+2016-08-29 15:06:07,3599,11.1029,14.5957
+2016-08-29 15:16:10,3599,11.0739,14.5657
+2016-08-29 15:26:13,3599,11.0729,14.5657
+2016-08-29 15:36:16,3599,11.0739,14.6297
+2016-08-29 15:46:19,3599,11.0739,14.5957
+2016-08-29 15:56:22,3599,11.045,14.5983
+2016-08-29 16:06:24,3599,11.0739,14.597
+2016-08-29 16:16:27,3599,11.0739,14.5644
+2016-08-29 16:26:30,3599,11.044,14.597
+2016-08-29 16:36:33,3599,11.0739,14.5657
+2016-08-29 16:46:36,3599,11.0729,14.567
+2016-08-29 16:56:38,3599,11.0749,14.567
+2016-08-29 17:06:41,3599,11.0729,14.597
+2016-08-29 17:16:43,3599,11.0479,14.5644
+2016-08-29 17:26:54,3599,11.0749,14.5657
+2016-08-29 17:36:57,3599,11.0739,14.597
+2016-08-29 17:46:59,3599,11.1029,14.5657
+2016-08-29 17:57:02,3599,11.044,14.6297
+2016-08-29 18:07:05,3599,11.045,14.5983
+2016-08-29 18:17:08,3599,11.1029,14.6271
+2016-08-29 18:27:10,3599,11.1319,14.597
+2016-08-29 18:37:13,3599,11.1029,14.567
+2016-08-29 18:47:16,3599,11.0739,14.597
+2016-08-29 18:57:19,3599,11.0729,14.567
+2016-08-29 19:07:21,3599,11.0739,14.5657
+2016-08-29 19:17:24,3599,11.0739,14.5996
+2016-08-29 19:27:27,3599,11.0739,14.597
+2016-08-29 19:37:30,3599,11.0739,14.5644
+2016-08-29 19:47:32,3599,11.0739,14.597
+2016-08-29 19:57:35,3599,11.0739,14.5996
+2016-08-29 20:07:38,3599,11.0729,14.597
+2016-08-29 20:17:41,3599,11.0729,14.5983
+2016-08-29 20:27:43,3599,11.0729,14.5957
+2016-08-29 20:37:46,3599,11.0739,14.5657
+2016-08-29 20:47:49,3599,11.0729,14.597
+2016-08-29 20:57:51,3599,11.0739,14.597
+2016-08-29 21:07:54,3599,11.0739,14.5957
+2016-08-29 21:17:56,3599,11.0729,14.597
+2016-08-29 21:27:59,3599,11.0739,14.5983
+2016-08-29 21:38:02,3599,11.0729,14.5996
+2016-08-29 21:48:04,3599,11.0739,14.597
+2016-08-29 21:58:07,3599,11.071,14.597
+2016-08-29 22:08:09,3599,11.0729,14.5983
+2016-08-29 22:18:12,3599,11.1289,14.5996
+2016-08-29 22:28:14,3599,11.071,14.597
+2016-08-29 22:38:17,3599,11.07,14.5657
+2016-08-29 22:48:19,3599,11.07,14.597
+2016-08-29 22:58:22,3599,11.071,14.597
+2016-08-29 23:08:24,3599,11.071,14.597
+2016-08-29 23:18:27,3599,11.0999,14.597
+2016-08-29 23:28:30,3599,11.068,14.5683
+2016-08-29 23:38:32,3599,11.068,14.597
+2016-08-29 23:48:34,3599,11.067,14.597
+2016-08-29 23:58:37,3599,11.095999999999998,14.5996
+2016-08-30 00:08:39,3599,11.068,14.5657
+2016-08-30 00:18:42,3599,11.068,14.597
+2016-08-30 00:28:44,3599,11.067,14.5996
+2016-08-30 00:38:46,3598,11.097,14.597
+2016-08-30 00:48:49,3598,11.097,14.5996
+2016-08-30 00:58:51,3598,11.0391,14.597
+2016-08-30 01:08:54,3598,11.068,14.5983
+2016-08-30 01:18:56,3598,11.068,14.5657
+2016-08-30 01:28:58,3598,11.0661,14.597
+2016-08-30 01:39:01,3598,11.094,14.597
+2016-08-30 01:49:03,3597,11.0641,14.5996
+2016-08-30 01:59:05,3596,11.0651,14.597
+2016-08-30 02:09:08,3596,11.094,14.5996
+2016-08-30 02:19:10,3595,11.0651,14.597
+2016-08-30 02:29:13,3595,11.093,14.5983
+2016-08-30 02:39:15,3594,11.123,14.5657
+2016-08-30 02:49:17,3594,11.0651,14.5983
+2016-08-30 02:59:20,3593,11.0641,14.597
+2016-08-30 03:09:22,3593,11.0621,14.5657
+2016-08-30 03:19:24,3592,11.0621,14.5957
+2016-08-30 03:29:27,3591,11.0621,14.5983
+2016-08-30 03:39:29,3591,11.0911,14.597
+2016-08-30 03:49:31,3589,11.0621,14.597
+2016-08-30 03:59:34,3588,11.0881,14.5983
+2016-08-30 04:09:36,3588,11.0592,14.5983
+2016-08-30 04:19:38,3588,11.0592,14.5657
+2016-08-30 04:29:41,3587,11.0592,14.5957
+2016-08-30 04:39:43,3587,11.1171,14.597
+2016-08-30 04:49:45,3586,11.0592,14.597
+2016-08-30 04:59:47,3586,11.0592,14.597
+2016-08-30 05:09:50,3585,11.0592,14.597
+2016-08-30 05:19:52,3585,11.0881,14.5983
+2016-08-30 05:29:54,3585,11.0562,14.5957
+2016-08-30 05:39:57,3584,11.0572,14.5631
+2016-08-30 05:49:59,3584,11.0562,14.5983
+2016-08-30 06:00:01,3583,11.0572,14.5931
+2016-08-30 06:10:03,3583,11.0562,14.5944
+2016-08-30 06:20:06,3583,11.0572,14.6572
+2016-08-30 06:30:08,3582,11.0572,14.6559
+2016-08-30 06:40:11,3582,11.0562,14.5631
+2016-08-30 06:50:13,3581,11.0822,14.6873
+2016-08-30 07:00:15,3582,11.0822,14.5631
+2016-08-30 07:10:18,3581,11.0533,14.6572
+2016-08-30 07:20:20,3582,11.0533,14.6899
+2016-08-30 07:30:22,3582,11.0503,14.5631
+2016-08-30 07:40:25,3582,11.0503,14.5918
+2016-08-30 07:50:27,3583,11.0503,14.5944
+2016-08-30 08:00:29,3583,11.0792,14.6899
+2016-08-30 08:10:32,3584,11.0792,14.6899
+2016-08-30 08:20:34,3584,11.0792,14.6886
+2016-08-30 08:30:36,3585,11.0503,14.5944
+2016-08-30 08:40:39,3586,11.0503,14.5944
+2016-08-30 08:50:41,3587,11.0792,14.6258
+2016-08-30 09:00:44,3588,11.0503,14.5957
+2016-08-30 09:10:46,3588,11.0503,14.5893
+2016-08-30 09:20:48,3589,11.0513,14.5579
+2016-08-30 09:30:51,3591,11.0483,14.5918
+2016-08-30 09:40:53,3592,11.0474,14.5592
+2016-08-30 09:50:56,3593,11.0483,14.5906
+2016-08-30 10:00:59,3595,11.0483,14.5906
+2016-08-30 10:11:01,3595,11.0483,14.588
+2016-08-30 10:21:04,3596,11.0483,14.5906
+2016-08-30 10:31:06,3598,11.0483,14.5579
+2016-08-30 10:41:09,3598,11.0483,14.5918
+2016-08-30 10:51:11,3598,11.0773,14.5893
+2016-08-30 11:01:14,3598,11.0483,14.5906
+2016-08-30 11:11:16,3599,11.0483,14.5906
+2016-08-30 11:21:19,3599,11.0474,14.5893
+2016-08-30 11:31:22,3599,11.0483,14.5592
+2016-08-30 11:41:26,3599,11.0474,14.5592
+2016-08-30 11:51:30,3599,11.0483,14.5906
+2016-08-30 12:01:33,3599,11.0474,14.5906
+2016-08-30 12:11:35,3599,11.0483,14.5893
+2016-08-30 12:21:38,3599,11.0773,14.5592
+2016-08-30 12:31:40,3599,11.0483,14.588
+2016-08-30 12:41:43,3599,11.0474,14.5906
+2016-08-30 12:51:46,3599,11.0483,14.5906
+2016-08-30 13:01:48,3599,11.0483,14.5906
+2016-08-30 13:11:51,3599,11.0474,14.5906
+2016-08-30 13:21:54,3599,11.0474,14.588
+2016-08-30 13:31:56,3599,11.0483,14.5906
+2016-08-30 13:41:59,3599,11.0483,14.5906
+2016-08-30 13:52:02,3599,11.0483,14.5906
+2016-08-30 14:02:05,3599,11.0483,14.5906
+2016-08-30 14:12:07,3599,11.0483,14.5893
+2016-08-30 14:22:10,3599,11.0483,14.5605
+2016-08-30 14:32:13,3599,11.0483,14.5893
+2016-08-30 14:42:15,3599,11.0483,14.5554
+2016-08-30 14:52:18,3599,11.0474,14.5854
+2016-08-30 15:02:21,3599,11.0185,14.5867
+2016-08-30 15:12:24,3599,11.0483,14.5841
+2016-08-30 15:22:26,3599,10.9349,14.5254
+2016-08-30 15:32:29,3599,10.9319,14.4343
+2016-08-30 15:42:32,3599,10.9598,14.4603
+2016-08-30 15:52:35,3599,10.9617,14.5241
+2016-08-30 16:02:38,3599,10.9627,14.5241
+2016-08-30 16:12:41,3599,10.9637,14.4603
+2016-08-30 16:22:43,3599,10.9925,14.4928
+2016-08-30 16:32:46,3599,10.9637,14.5228
+2016-08-30 16:42:49,3599,10.9925,14.4603
+2016-08-30 16:52:52,3599,10.9925,14.4928
+2016-08-30 17:02:55,3599,11.0214,14.4915
+2016-08-30 17:12:58,3599,10.9925,14.4915
+2016-08-30 17:23:01,3599,10.9916,14.4603
+2016-08-30 17:33:03,3599,10.9637,14.5254
+2016-08-30 17:43:06,3599,10.9627,14.4616
+2016-08-30 17:53:09,3599,10.9637,14.4928
+2016-08-30 18:03:12,3599,10.9627,14.5228
+2016-08-30 18:13:15,3599,10.9637,14.4915
+2016-08-30 18:23:18,3599,11.0493,14.4928
+2016-08-30 18:33:20,3599,10.9637,14.4603
+2016-08-30 18:43:23,3599,11.0214,14.4903
+2016-08-30 18:53:26,3599,11.0503,14.4616
+2016-08-30 19:03:29,3599,11.0503,14.4915
+2016-08-30 19:13:32,3599,11.0533,14.4629
+2016-08-30 19:23:34,3599,11.0533,14.4603
+2016-08-30 19:33:37,3599,11.0533,14.4629
+2016-08-30 19:43:40,3599,11.0533,14.4915
+2016-08-30 19:53:43,3599,11.0822,14.5228
+2016-08-30 20:03:45,3599,11.0533,14.5241
+2016-08-30 20:13:48,3599,11.0503,14.4915
+2016-08-30 20:23:51,3599,11.0533,14.4915
+2016-08-30 20:33:53,3599,11.0533,14.4928
+2016-08-30 20:43:56,3599,11.0822,14.4616
+2016-08-30 20:53:58,3599,11.0822,14.4915
+2016-08-30 21:04:00,3599,11.0503,14.5241
+2016-08-30 21:14:03,3599,11.0533,14.4928
+2016-08-30 21:24:05,3599,11.0822,14.4928
+2016-08-30 21:34:08,3599,11.0533,14.5241
+2016-08-30 21:44:10,3599,11.0503,14.4928
+2016-08-30 21:54:13,3599,11.0493,14.4928
+2016-08-30 22:04:16,3599,11.0792,14.4928
+2016-08-30 22:14:18,3599,11.0523,14.4603
+2016-08-30 22:24:21,3599,11.0503,14.4928
+2016-08-30 22:34:23,3599,11.0503,14.4603
+2016-08-30 22:44:26,3599,11.0503,14.4928
+2016-08-30 22:54:28,3599,11.0503,14.5241
+2016-08-30 23:04:31,3599,11.0792,14.4941
+2016-08-30 23:14:33,3599,11.0503,14.5254
+2016-08-30 23:24:36,3599,11.0503,14.4915
+2016-08-30 23:34:38,3599,11.0513,14.4915
+2016-08-30 23:44:41,3599,11.0513,14.4928
+2016-08-30 23:54:44,3598,11.0503,14.4603
+2016-08-31 01:05:02,3598,11.0503,14.4941
+2016-08-31 01:15:04,3598,11.0792,14.4915
+2016-08-31 01:25:07,3598,11.0483,14.4915
+2016-08-31 01:35:09,3598,11.0483,14.489
+2016-08-31 01:45:12,3598,11.0483,14.4616
+2016-08-31 01:55:14,3598,11.0483,14.489
+2016-08-31 02:05:17,3598,11.0474,14.5241
+2016-08-31 02:15:19,3598,11.0763,14.4565
+2016-08-31 02:25:22,3597,11.0483,14.4877
+2016-08-31 02:35:25,3597,11.0483,14.4877
+2016-08-31 02:45:27,3597,11.0773,14.4928
+2016-08-31 02:55:30,3596,11.0773,14.5189
+2016-08-31 03:05:32,3595,11.0773,14.4877
+2016-08-31 03:15:35,3595,11.0773,14.5215
+2016-08-31 03:25:37,3595,11.0483,14.5241
+2016-08-31 03:35:40,3594,11.0483,14.489
+2016-08-31 03:45:42,3594,11.0483,14.5202
+2016-08-31 03:55:45,3593,11.0483,14.4928
+2016-08-31 04:05:47,3593,11.0773,14.4603
+2016-08-31 04:15:50,3592,11.0483,14.5189
+2016-08-31 04:25:52,3592,11.0454,14.4578
+2016-08-31 04:35:55,3592,11.0454,14.5202
+2016-08-31 04:45:57,3592,11.0454,14.489
+2016-08-31 04:56:00,3591,11.0454,14.5202
+2016-08-31 05:06:02,3591,11.0454,14.4616
+2016-08-31 05:16:05,3591,11.0743,14.489
+2016-08-31 05:26:07,3591,11.0713,14.489
+2016-08-31 05:36:10,3591,11.0464,14.4603
+2016-08-31 05:46:12,3590,11.0415,14.4903
+2016-08-31 05:56:14,3590,11.0425,14.4591
+2016-08-31 06:06:16,3590,11.0713,14.4578
+2016-08-31 06:16:18,3589,11.0425,14.489
+2016-08-31 06:26:21,3589,11.0713,14.5189
+2016-08-31 06:36:23,3589,11.0425,14.4578
+2016-08-31 06:46:25,3590,11.0713,14.4877
+2016-08-31 06:56:28,3589,11.0425,14.4877
+2016-08-31 07:06:30,3589,11.0425,14.4877
+2016-08-31 07:16:33,3589,11.0425,14.4903
+2016-08-31 07:26:35,3589,11.0713,14.489
+2016-08-31 07:36:37,3590,11.0425,14.489
+2016-08-31 07:46:40,3590,11.0425,14.5202
+2016-08-31 07:56:42,3590,11.0395,14.5189
+2016-08-31 08:06:45,3592,11.0395,14.4877
+2016-08-31 08:16:47,3592,11.0395,14.4578
+2016-08-31 08:26:49,3592,11.0684,14.5202
+2016-08-31 08:36:52,3593,11.0395,14.5202
+2016-08-31 08:46:54,3594,11.0684,14.5189
+2016-08-31 08:56:57,3595,11.0395,14.4864
+2016-08-31 09:06:59,3596,11.0385,14.4851
+2016-08-31 09:17:02,3597,11.0395,14.5215
+2016-08-31 09:27:04,3598,11.0684,14.4838
+2016-08-31 09:37:07,3598,11.0395,14.5189
+2016-08-31 09:47:09,3598,11.0395,14.4864
+2016-08-31 09:57:12,3598,11.0684,14.4851
+2016-08-31 10:07:14,3599,11.0395,14.5163
+2016-08-31 10:17:17,3599,11.0395,14.4552
+2016-08-31 10:27:19,3599,11.0395,14.4526
+2016-08-31 10:37:22,3599,10.953,14.4838
+2016-08-31 10:47:24,3599,10.9808,14.515
+2016-08-31 10:57:27,3599,10.9808,14.4838
+2016-08-31 11:07:29,3599,10.9808,14.4526
+2016-08-31 11:17:32,3599,10.9808,14.4539
+2016-08-31 11:27:35,3599,10.9808,14.4552
+2016-08-31 11:37:37,3599,10.9818,14.4838
+2016-08-31 11:47:40,3599,10.953,14.4877
+2016-08-31 11:57:43,3599,10.9808,14.4552
+2016-08-31 12:07:46,3599,10.9818,14.4864
+2016-08-31 12:17:49,3599,10.9818,14.4526
+2016-08-31 12:27:52,3599,10.9808,14.4864
+2016-08-31 12:37:54,3599,10.952,14.4851
+2016-08-31 12:47:57,3599,10.9242,14.4825
+2016-08-31 12:58:00,3599,10.8954,14.4501
+2016-08-31 13:08:03,3599,10.8954,14.4539
+2016-08-31 13:18:05,3599,10.8954,14.4851
+2016-08-31 13:28:08,3599,10.8944,14.4526
+2016-08-31 13:38:11,3599,10.8983,14.4526
+2016-08-31 13:48:14,3599,10.8686,14.48
+2016-08-31 13:58:17,3599,10.8983,14.3891
+2016-08-31 14:08:19,3599,10.8408,14.3567
+2016-08-31 14:18:22,3599,10.8686,14.3865
+2016-08-31 14:28:25,3599,10.8686,14.3891
+2016-08-31 14:38:28,3599,10.8686,14.4189
+2016-08-31 14:48:31,3599,10.8973,14.3891
+2016-08-31 14:58:34,3599,10.8973,14.4176
+2016-08-31 15:08:39,3599,10.8973,14.3865
+2016-08-31 15:18:42,3599,10.8695,14.3891
+2016-08-31 15:28:45,3599,10.8695,14.3865
+2016-08-31 15:38:48,3599,10.9002,14.358
+2016-08-31 15:48:51,3599,10.9012,14.3878
+2016-08-31 15:58:54,3599,10.8715,14.3567
+2016-08-31 16:08:57,3599,10.8724,14.3554
+2016-08-31 16:19:00,3599,10.8724,14.4176
+2016-08-31 16:29:03,3599,10.8753,14.3865
+2016-08-31 16:39:06,3599,10.8753,14.3554
+2016-08-31 16:49:09,3599,10.8734,14.3878
+2016-08-31 16:59:12,3599,10.9041,14.3865
+2016-08-31 17:09:15,3599,10.8753,14.3554
+2016-08-31 17:19:18,3599,10.8753,14.3865
+2016-08-31 17:29:21,3599,10.9031,14.3878
+2016-08-31 17:39:24,3599,10.8753,14.3891
+2016-08-31 17:49:27,3599,10.9041,14.3865
+2016-08-31 17:59:30,3599,10.9041,14.3865
+2016-08-31 18:09:33,3599,10.9031,14.3865
+2016-08-31 18:19:36,3599,10.8744,14.3865
+2016-08-31 18:29:39,3599,10.9061,14.3567
+2016-08-31 18:39:42,3599,10.8773,14.3592
+2016-08-31 18:49:45,3599,10.907,14.3554
+2016-08-31 18:59:48,3599,10.8485,14.4189
+2016-08-31 19:09:51,3599,10.8773,14.3256
+2016-08-31 19:19:53,3599,10.9051,14.3554
+2016-08-31 19:29:56,3599,10.8773,14.3891
+2016-08-31 19:39:59,3599,10.9061,14.4202
+2016-08-31 19:50:02,3599,10.8763,14.4202
+2016-08-31 20:00:05,3599,10.9051,14.3878
+2016-08-31 20:10:08,3599,10.8773,14.3567
+2016-08-31 20:20:11,3599,10.9061,14.4202
+2016-08-31 20:30:13,3599,10.8773,14.3878
+2016-08-31 20:40:16,3599,10.8773,14.3878
+2016-08-31 20:50:19,3599,10.907,14.3878
+2016-08-31 21:00:22,3599,10.8773,14.3865
+2016-08-31 21:10:25,3599,10.8773,14.3891
+2016-08-31 21:20:27,3599,10.8763,14.3865
+2016-08-31 21:30:30,3599,10.8495,14.3567
+2016-08-31 21:40:33,3599,10.9061,14.3865
+2016-08-31 21:50:36,3599,10.8782,14.3865
+2016-08-31 22:00:38,3599,10.9061,14.3852
+2016-08-31 22:10:41,3599,10.9051,14.3903
+2016-08-31 22:20:44,3599,10.9061,14.3541
+2016-08-31 22:30:46,3599,10.907,14.3865
+2016-08-31 22:40:49,3599,10.9051,14.3865
+2016-08-31 22:50:52,3599,10.9061,14.3891
+2016-08-31 23:00:54,3599,10.9061,14.3852
+2016-08-31 23:10:57,3599,10.9051,14.3891
+2016-08-31 23:20:59,3599,10.8773,14.3541
+2016-08-31 23:31:02,3599,10.9061,14.3865
+2016-08-31 23:41:04,3599,10.9061,14.3878
+2016-08-31 23:51:07,3599,10.8773,14.3865
diff --git a/sphinx-doc/ressources/data/incidentsLKG.csv b/sphinx-doc/ressources/data/incidentsLKG.csv
new file mode 100644
index 0000000000000000000000000000000000000000..77ae8be5a9819077c217ab49100ef89b4d9f193c
--- /dev/null
+++ b/sphinx-doc/ressources/data/incidentsLKG.csv
@@ -0,0 +1,381 @@
+,incidents
+2020-01-01,0
+2020-01-02,0
+2020-01-03,0
+2020-01-04,0
+2020-01-05,0
+2020-01-06,0
+2020-01-07,0
+2020-01-08,0
+2020-01-09,0
+2020-01-10,0
+2020-01-11,0
+2020-01-12,0
+2020-01-13,0
+2020-01-14,0
+2020-01-15,0
+2020-01-16,0
+2020-01-17,0
+2020-01-18,0
+2020-01-19,0
+2020-01-20,0
+2020-01-21,0
+2020-01-22,0
+2020-01-23,0
+2020-01-24,0
+2020-01-25,0
+2020-01-26,0
+2020-01-27,0
+2020-01-28,0
+2020-01-29,0
+2020-01-30,0
+2020-01-31,0
+2020-02-01,1
+2020-02-02,0
+2020-02-03,0
+2020-02-04,0
+2020-02-05,0
+2020-02-06,0
+2020-02-07,0
+2020-02-08,0
+2020-02-09,0
+2020-02-10,0
+2020-02-11,0
+2020-02-12,0
+2020-02-13,0
+2020-02-14,0
+2020-02-15,0
+2020-02-16,0
+2020-02-17,0
+2020-02-18,0
+2020-02-19,0
+2020-02-20,0
+2020-02-21,0
+2020-02-22,0
+2020-02-23,0
+2020-02-24,0
+2020-02-25,0
+2020-02-26,0
+2020-02-27,1
+2020-02-28,0
+2020-02-29,0
+2020-03-01,0
+2020-03-02,0
+2020-03-03,0
+2020-03-04,0
+2020-03-05,0
+2020-03-06,5
+2020-03-07,10
+2020-03-08,8
+2020-03-09,8
+2020-03-10,20
+2020-03-11,27
+2020-03-12,24
+2020-03-13,34
+2020-03-14,32
+2020-03-15,25
+2020-03-16,45
+2020-03-17,21
+2020-03-18,23
+2020-03-19,16
+2020-03-20,28
+2020-03-21,29
+2020-03-22,22
+2020-03-23,17
+2020-03-24,12
+2020-03-25,14
+2020-03-26,9
+2020-03-27,10
+2020-03-28,12
+2020-03-29,4
+2020-03-30,16
+2020-03-31,11
+2020-04-01,5
+2020-04-02,7
+2020-04-03,10
+2020-04-04,9
+2020-04-05,11
+2020-04-06,5
+2020-04-07,10
+2020-04-08,7
+2020-04-09,2
+2020-04-10,2
+2020-04-11,6
+2020-04-12,5
+2020-04-13,1
+2020-04-14,4
+2020-04-15,6
+2020-04-16,5
+2020-04-17,2
+2020-04-18,3
+2020-04-19,3
+2020-04-20,1
+2020-04-21,4
+2020-04-22,1
+2020-04-23,1
+2020-04-24,1
+2020-04-25,2
+2020-04-26,0
+2020-04-27,1
+2020-04-28,0
+2020-04-29,0
+2020-04-30,2
+2020-05-01,0
+2020-05-02,0
+2020-05-03,0
+2020-05-04,1
+2020-05-05,0
+2020-05-06,2
+2020-05-07,0
+2020-05-08,0
+2020-05-09,1
+2020-05-10,4
+2020-05-11,1
+2020-05-12,2
+2020-05-13,3
+2020-05-14,1
+2020-05-15,14
+2020-05-16,7
+2020-05-17,0
+2020-05-18,3
+2020-05-19,4
+2020-05-20,4
+2020-05-21,2
+2020-05-22,3
+2020-05-23,2
+2020-05-24,1
+2020-05-25,7
+2020-05-26,8
+2020-05-27,15
+2020-05-28,4
+2020-05-29,3
+2020-05-30,3
+2020-05-31,2
+2020-06-01,2
+2020-06-02,4
+2020-06-03,3
+2020-06-04,4
+2020-06-05,9
+2020-06-06,7
+2020-06-07,13
+2020-06-08,14
+2020-06-09,24
+2020-06-10,9
+2020-06-11,10
+2020-06-12,17
+2020-06-13,16
+2020-06-14,9
+2020-06-15,49
+2020-06-16,482
+2020-06-17,142
+2020-06-18,70
+2020-06-19,164
+2020-06-20,49
+2020-06-21,110
+2020-06-22,141
+2020-06-23,69
+2020-06-24,37
+2020-06-25,59
+2020-06-26,46
+2020-06-27,40
+2020-06-28,36
+2020-06-29,80
+2020-06-30,54
+2020-07-01,18
+2020-07-02,15
+2020-07-03,7
+2020-07-04,9
+2020-07-05,5
+2020-07-06,5
+2020-07-07,16
+2020-07-08,22
+2020-07-09,8
+2020-07-10,12
+2020-07-11,8
+2020-07-12,7
+2020-07-13,2
+2020-07-14,3
+2020-07-15,11
+2020-07-16,6
+2020-07-17,7
+2020-07-18,3
+2020-07-19,10
+2020-07-20,1
+2020-07-21,5
+2020-07-22,7
+2020-07-23,4
+2020-07-24,6
+2020-07-25,5
+2020-07-26,3
+2020-07-27,5
+2020-07-28,4
+2020-07-29,2
+2020-07-30,5
+2020-07-31,10
+2020-08-01,6
+2020-08-02,7
+2020-08-03,8
+2020-08-04,6
+2020-08-05,14
+2020-08-06,13
+2020-08-07,14
+2020-08-08,11
+2020-08-09,6
+2020-08-10,13
+2020-08-11,9
+2020-08-12,9
+2020-08-13,27
+2020-08-14,6
+2020-08-15,6
+2020-08-16,3
+2020-08-17,10
+2020-08-18,3
+2020-08-19,7
+2020-08-20,7
+2020-08-21,2
+2020-08-22,8
+2020-08-23,6
+2020-08-24,4
+2020-08-25,3
+2020-08-26,3
+2020-08-27,2
+2020-08-28,10
+2020-08-29,3
+2020-08-30,0
+2020-08-31,4
+2020-09-01,8
+2020-09-02,1
+2020-09-03,12
+2020-09-04,5
+2020-09-05,6
+2020-09-06,1
+2020-09-07,3
+2020-09-08,4
+2020-09-09,4
+2020-09-10,0
+2020-09-11,6
+2020-09-12,6
+2020-09-13,1
+2020-09-14,7
+2020-09-15,5
+2020-09-16,8
+2020-09-17,4
+2020-09-18,2
+2020-09-19,5
+2020-09-20,4
+2020-09-21,9
+2020-09-22,9
+2020-09-23,11
+2020-09-24,5
+2020-09-25,4
+2020-09-26,12
+2020-09-27,5
+2020-09-28,11
+2020-09-29,13
+2020-09-30,10
+2020-10-01,10
+2020-10-02,10
+2020-10-03,20
+2020-10-04,6
+2020-10-05,8
+2020-10-06,18
+2020-10-07,18
+2020-10-08,29
+2020-10-09,21
+2020-10-10,23
+2020-10-11,11
+2020-10-12,24
+2020-10-13,28
+2020-10-14,34
+2020-10-15,44
+2020-10-16,44
+2020-10-17,30
+2020-10-18,51
+2020-10-19,47
+2020-10-20,42
+2020-10-21,71
+2020-10-22,75
+2020-10-23,67
+2020-10-24,80
+2020-10-25,41
+2020-10-26,75
+2020-10-27,114
+2020-10-28,140
+2020-10-29,133
+2020-10-30,107
+2020-10-31,79
+2020-11-01,44
+2020-11-02,149
+2020-11-03,111
+2020-11-04,112
+2020-11-05,87
+2020-11-06,76
+2020-11-07,58
+2020-11-08,93
+2020-11-09,160
+2020-11-10,114
+2020-11-11,85
+2020-11-12,120
+2020-11-13,111
+2020-11-14,77
+2020-11-15,48
+2020-11-16,121
+2020-11-17,107
+2020-11-18,125
+2020-11-19,90
+2020-11-20,86
+2020-11-21,98
+2020-11-22,82
+2020-11-23,98
+2020-11-24,129
+2020-11-25,93
+2020-11-26,50
+2020-11-27,87
+2020-11-28,92
+2020-11-29,36
+2020-11-30,128
+2020-12-01,114
+2020-12-02,88
+2020-12-03,136
+2020-12-04,120
+2020-12-05,102
+2020-12-06,56
+2020-12-07,78
+2020-12-08,87
+2020-12-09,99
+2020-12-10,104
+2020-12-11,91
+2020-12-12,120
+2020-12-13,77
+2020-12-14,80
+2020-12-15,168
+2020-12-16,144
+2020-12-17,172
+2020-12-18,142
+2020-12-19,112
+2020-12-20,85
+2020-12-21,124
+2020-12-22,134
+2020-12-23,227
+2020-12-24,178
+2020-12-25,19
+2020-12-26,30
+2020-12-27,24
+2020-12-28,89
+2020-12-29,156
+2020-12-30,214
+2020-12-31,153
+2021-01-01,59
+2021-01-02,19
+2021-01-03,40
+2021-01-04,57
+2021-01-05,167
+2021-01-06,198
+2021-01-07,90
+2021-01-08,75
+2021-01-09,71
+2021-01-10,8
+2021-01-11,26
+2021-01-12,129
+2021-01-13,37
+2021-01-14,0
diff --git a/sphinx-doc/ressources/images/cbooks_incidents1.png b/sphinx-doc/ressources/images/cbooks_incidents1.png
new file mode 100644
index 0000000000000000000000000000000000000000..7953e8aaf7f74e2c730db1db3709d3c0a4560683
Binary files /dev/null and b/sphinx-doc/ressources/images/cbooks_incidents1.png differ
diff --git a/sphinx-doc/ressources/images/cbooks_incidents2.png b/sphinx-doc/ressources/images/cbooks_incidents2.png
new file mode 100644
index 0000000000000000000000000000000000000000..b7be3787adccda75e7cc39668aaafebe1a8bc6a5
Binary files /dev/null and b/sphinx-doc/ressources/images/cbooks_incidents2.png differ
diff --git a/sphinx-doc/ressources/images/example_plot_1.png b/sphinx-doc/ressources/images/example_plot_1.png
new file mode 100644
index 0000000000000000000000000000000000000000..ae83ed9aa6db17f66e9138f3966d48e3f00cf48b
Binary files /dev/null and b/sphinx-doc/ressources/images/example_plot_1.png differ
diff --git a/sphinx-doc/ressources/images/example_plot_2.png b/sphinx-doc/ressources/images/example_plot_2.png
new file mode 100644
index 0000000000000000000000000000000000000000..e8d202102120cec43ff6c263a72131704f30b970
Binary files /dev/null and b/sphinx-doc/ressources/images/example_plot_2.png differ
diff --git a/sphinx-doc/ressources/images/example_plot_31.png b/sphinx-doc/ressources/images/example_plot_31.png
new file mode 100644
index 0000000000000000000000000000000000000000..4dfe0af8ea1bd2cb0fbdf87f7eafc1a3f07fa311
Binary files /dev/null and b/sphinx-doc/ressources/images/example_plot_31.png differ
diff --git a/sphinx-doc/ressources/images/example_plot_32.png b/sphinx-doc/ressources/images/example_plot_32.png
new file mode 100644
index 0000000000000000000000000000000000000000..69cc17fa79b08d8fc6980a1ffb3a14bb7573c671
Binary files /dev/null and b/sphinx-doc/ressources/images/example_plot_32.png differ
diff --git a/sphinx-doc/ressources/images/example_plot_33.png b/sphinx-doc/ressources/images/example_plot_33.png
new file mode 100644
index 0000000000000000000000000000000000000000..a86c8e2169606729ef178ed66d29985f3bdd0820
Binary files /dev/null and b/sphinx-doc/ressources/images/example_plot_33.png differ
diff --git a/sphinx-doc/ressources/images/example_plot_4.png b/sphinx-doc/ressources/images/example_plot_4.png
new file mode 100644
index 0000000000000000000000000000000000000000..dbab213afd89cfab464425f85ab28dac37a84735
Binary files /dev/null and b/sphinx-doc/ressources/images/example_plot_4.png differ
diff --git a/sphinx-doc/ressources/images/readme_image.png b/sphinx-doc/ressources/images/readme_image.png
new file mode 100644
index 0000000000000000000000000000000000000000..3bb830f6c8bce1aa16b9c4b4021637c3cd0cfe2c
Binary files /dev/null and b/sphinx-doc/ressources/images/readme_image.png differ
diff --git a/test/common.py b/test/common.py
deleted file mode 100644
index d5867e94476b9e7744826fd6f7d40f88770a0fbd..0000000000000000000000000000000000000000
--- a/test/common.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import io
-
-import numpy as np
-import pandas as pd
-import dios
-
-from saqc.flagger import (
-    PositionalFlagger,
-    CategoricalFlagger,
-    SimpleFlagger,
-    DmpFlagger,
-)
-
-
-TESTNODATA = (np.nan, -9999)
-
-
-TESTFLAGGER = (
-    CategoricalFlagger(["NIL", "GOOD", "BAD"]),
-    SimpleFlagger(),
-    DmpFlagger(),
-)
-
-def flagAll(data, field, flagger, **kwargs):
-    # NOTE: remember to rename flag -> flag_values
-    return data, flagger.setFlags(field=field, flag=flagger.BAD)
-
-
-def initData(cols=2, start_date="2017-01-01", end_date="2017-12-31", freq=None, rows=None):
-    if rows is None:
-        freq = freq or "1h"
-
-    di = dios.DictOfSeries(itype=dios.DtItype)
-    dates = pd.date_range(start=start_date, end=end_date, freq=freq, periods=rows)
-    dummy = np.arange(len(dates))
-
-    for col in range(1, cols + 1):
-        di[f"var{col}"] = pd.Series(data=dummy * col, index=dates)
-
-    return di
-
-
-def writeIO(content):
-    f = io.StringIO()
-    f.write(content)
-    f.seek(0)
-    return f
diff --git a/test/core/test_core.py b/test/core/test_core.py
deleted file mode 100644
index 55a75a227bf880d30084ad7a0e6956d1611d9d93..0000000000000000000000000000000000000000
--- a/test/core/test_core.py
+++ /dev/null
@@ -1,118 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import logging
-
-import pytest
-import numpy as np
-import pandas as pd
-
-from saqc import SaQC, register
-from saqc.funcs import flagRange
-from saqc.lib import plotting as splot
-from test.common import initData, TESTFLAGGER, flagAll
-
-
-# no logging output needed here
-# -> can this be configured on the test runner level?
-logging.disable(logging.CRITICAL)
-
-
-OPTIONAL = [False, True]
-
-
-register(masking='field')(flagAll)
-
-
-@pytest.fixture
-def data():
-    return initData(3)
-
-
-@pytest.fixture
-def flags(flagger, data, optional):
-    if not optional:
-        return flagger.initFlags(data[data.columns[::2]])._flags
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_errorHandling(data, flagger):
-
-    @register(masking='field')
-    def raisingFunc(data, field, flagger, **kwargs):
-        raise TypeError
-
-    var1 = data.columns[0]
-
-    for policy in ["ignore", "warn"]:
-        # NOTE: should not fail, that's all we are testing here
-        SaQC(flagger, data, error_policy=policy).raisingFunc(var1).getResult()
-
-    with pytest.raises(TypeError):
-        SaQC(flagger, data, error_policy='raise').raisingFunc(var1).getResult()
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_duplicatedVariable(flagger):
-    data = initData(1)
-    var1 = data.columns[0]
-
-    pdata, pflags = SaQC(flagger, data).flagDummy(var1).flagDummy(var1).getResult()
-
-    if isinstance(pflags.columns, pd.MultiIndex):
-        cols = pflags.columns.get_level_values(0).drop_duplicates()
-        assert np.all(cols == [var1])
-    else:
-        assert (pflags.columns == [var1]).all()
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_sourceTarget(flagger):
-    """
-    test implicit assignments
-    """
-    data = initData(1)
-    var1 = data.columns[0]
-    target = "new"
-
-    pdata, pflagger = SaQC(flagger, data).flagAll(field=var1, target=target).getResult(raw=True)
-    pflags = pflagger.isFlagged()
-
-    assert (pdata[var1] == pdata[target]).all(axis=None)
-    assert (pflags[var1] == False).all(axis=None)
-    assert (pflags[target] == True).all(axis=None)
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-@pytest.mark.parametrize("optional", OPTIONAL)
-def test_dtypes(data, flagger, flags):
-    """
-    Test if the categorical dtype is preserved through the core functionality
-    """
-    flagger = flagger.initFlags(data)
-    flags = flagger.getFlags()
-    var1, var2 = data.columns[:2]
-
-    pdata, pflagger = SaQC(flagger, data, flags=flags).flagAll(var1).flagAll(var2).getResult(raw=True)
-
-    pflags = pflagger.getFlags()
-    assert dict(flags.dtypes) == dict(pflags.dtypes)
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_plotting(data, flagger):
-    """
-    Test that the plotting code runs and does not show any plot.
-
-    NOTE:
-    This test is ignored if matplotlib is not available on the test-system
-    """
-    pytest.importorskip("matplotlib", reason="requires matplotlib")
-    field, *_ = data.columns
-    flagger = flagger.initFlags(data)
-    _, flagger_range = flagRange(data, field, flagger, min=10, max=90, flag=flagger.BAD)
-    data_new, flagger_range = flagRange(data, field, flagger_range, min=40, max=60, flag=flagger.GOOD)
-    splot._interactive = False
-    splot._plotSingleVariable(data, data_new, flagger, flagger_range, sources=[], targets=[data_new.columns[0]])
-    splot._plotMultipleVariables(data, data_new, flagger, flagger_range, targets=data_new.columns)
-    splot._interactive = True
diff --git a/test/core/test_masking.py b/test/core/test_masking.py
deleted file mode 100644
index b41eebbcb2bc6ba6b3d9a1594fed5282e23ac5fc..0000000000000000000000000000000000000000
--- a/test/core/test_masking.py
+++ /dev/null
@@ -1,119 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import logging
-
-import pytest
-import pandas as pd
-
-from saqc import SaQC, register
-from test.common import initData, TESTFLAGGER
-
-
-logging.disable(logging.CRITICAL)
-
-
-@pytest.fixture
-def data():
-    return initData(3)
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_masking(data, flagger):
-    """
-    test if flagged values are excluded during subsequent tests
-    """
-    flagger = flagger.initFlags(data)
-    var1 = 'var1'
-    mn = min(data[var1])
-    mx = max(data[var1]) / 2
-
-    qc = SaQC(flagger, data)
-    qc = qc.flagRange(var1, mn, mx)
-    # min is not considered because it's the smallest possible value.
-    # if masking works, `data > max` will be masked,
-    # so the following will deliver True for values in range (data < max)
-    # and False otherwise, like an inverse range-test
-    qc = qc.procGeneric("dummy", func=lambda var1: var1 >= mn)
-
-    pdata, pflagger = qc.getResult(raw=True)
-    out_of_range = pflagger.isFlagged(var1)
-    in_range = ~out_of_range
-
-    assert (pdata.loc[out_of_range, "dummy"] == False).all()
-    assert (pdata.loc[in_range, "dummy"] == True).all()
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_masking_UnmaskingOnDataChange(data, flagger):
-    """ test if (un)masking works as expected on data-change.
-
-    If the data change in the func, unmasking should respect these changes and
-    should not reapply the original data, but instead take the new data (and flags) as is.
-    Also if flags change, the data should be taken as is.
-    """
-    FILLER = -9999
-
-    @register(masking='all')
-    def changeData(data, field, flagger, **kwargs):
-        mask = data.isna()
-        data.aloc[mask] = FILLER
-        return data, flagger
-
-    @register(masking='all')
-    def changeFlags(data, field, flagger, **kwargs):
-        mask = data.isna()
-        flagger = flagger.setFlags(field, loc=mask[field], flag=flagger.UNFLAGGED, force=True)
-        return data, flagger
-
-    var = data.columns[0]
-    var_data = data[var]
-    mn, mx = var_data.max() * .25, var_data.max() * .75
-    range_mask = (var_data < mn) | (var_data > mx)
-
-    qc = SaQC(flagger, data)
-    qc = qc.flagRange(var, mn, mx)
-    qcD = qc.changeData(var)
-    qcF = qc.changeFlags(var)
-
-    data, flagger = qcD.getResult()
-    assert (data[var][range_mask] == FILLER).all(axis=None)
-    # only the flags change, so the data should still be NaN, because
-    # unmasking was disabled, but masking did happen
-    data, flagger = qcF.getResult()
-    assert data[var][range_mask].isna().all(axis=None)
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_shapeDiffUnmasking(data, flagger):
-    """ test if (un)masking works as expected on index-change.
-
-    If the index of data (and flags) changes in the func, the unmasking
-    should not reapply the original data, but instead take the new data (and flags) as is.
-    """
-
-    FILLER = -1111
-
-    @register(masking='none')
-    def pseudoHarmo(data, field, flagger, **kwargs):
-        index = data[field].index.to_series()
-        index.iloc[-len(data[field])//2:] += pd.Timedelta("7.5Min")
-
-        data[field] = pd.Series(data=FILLER, index=index)
-
-        flags = flagger.getFlags()
-        flags[field] = pd.Series(data=flags[field].values, index=index)
-
-        flagger = flagger.initFlags(flags=flags)
-        return data, flagger
-
-    var = data.columns[0]
-    var_data = data[var]
-    mn, mx = var_data.max() * .25, var_data.max() * .75
-
-    qc = SaQC(flagger, data)
-    qc = qc.flagRange(var, mn, mx)
-    qc = qc.pseudoHarmo(var)
-
-    data, flagger = qc.getResult(raw=True)
-    assert (data[var] == FILLER).all(axis=None)
diff --git a/test/flagger/__init__.py b/test/flagger/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/test/flagger/test_dmpflagger.py b/test/flagger/test_dmpflagger.py
deleted file mode 100644
index b1a9c1b73df1d2a58866291119e5c709bfc06f90..0000000000000000000000000000000000000000
--- a/test/flagger/test_dmpflagger.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import json
-
-import numpy as np
-import pandas as pd
-import pytest
-
-from test.common import initData
-from saqc.flagger import DmpFlagger
-
-
-@pytest.fixture
-def data():
-    return initData(cols=1)
-
-
-@pytest.fixture
-def data_4cols():
-    return initData(cols=4)
-
-
-def parseComments(data):
-    return np.array([json.loads(v)["comment"] for v in data.to_df().values.flatten()])
-
-
-def test_initFlags(data):
-    flagger = DmpFlagger().initFlags(data=data)
-    assert (flagger._flags == flagger.UNFLAGGED).all(axis=None)
-    assert (flagger._causes == "").all(axis=None)
-    assert (flagger._comments == "").all(axis=None)
-
-
-def test_mergeFlaggerOuter(data):
-
-    flagger = DmpFlagger()
-
-    field = data.columns[0]
-
-    data_left = data
-
-    data_right = data.to_df()
-    dates = data_right.index.to_series()
-    dates[len(dates) // 2 :] += pd.Timedelta("1Min")
-    data_right.index = dates
-    data_right = data_right.to_dios()
-
-    left = flagger.initFlags(data=data_left).setFlags(
-        field=field, flag=flagger.BAD, cause="SaQCLeft", comment="testLeft"
-    )
-
-    right = flagger.initFlags(data=data_right).setFlags(
-        field=field, flag=flagger.GOOD, cause="SaQCRight", comment="testRight"
-    )
-
-    merged = left.merge(right, join="outer")
-
-    right_index = data_right[field].index.difference(data_left[field].index)
-    assert (merged._flags.loc[right_index] == flagger.GOOD).all(axis=None)
-    assert (merged._causes.loc[right_index] == "SaQCRight").all(axis=None)
-    assert np.all(parseComments(merged._comments.loc[right_index]) == "testRight")
-
-    left_index = data_left[field].index
-    assert (merged._flags.loc[left_index] == flagger.BAD).all(axis=None)
-    assert (merged._causes.loc[left_index] == "SaQCLeft").all(axis=None)
-    assert np.all(parseComments(merged._comments.loc[left_index]) == "testLeft")
-
-
-def test_mergeFlaggerInner(data):
-
-    flagger = DmpFlagger()
-
-    field = data.columns[0]
-
-    data_left = data
-    data_right = data.iloc[::2]
-
-    left = flagger.initFlags(data=data_left).setFlags(
-        field=field, flag=flagger.BAD, cause="SaQCLeft", comment="testLeft"
-    )
-
-    right = flagger.initFlags(data=data_right).setFlags(
-        field=field, flag=flagger.GOOD, cause="SaQCRight", comment="testRight"
-    )
-
-    merged = left.merge(right, join="inner")
-
-    assert (merged._flags[field].index == data_right[field].index).all()
-    assert (merged._causes[field].index == data_right[field].index).all()
-    assert (merged._comments[field].index == data_right[field].index).all()
-
-    assert (merged._flags[field] == flagger.BAD).all()
-    assert (merged._causes[field] == "SaQCLeft").all(axis=None)
-    assert np.all(parseComments(merged._comments) == "testLeft")
-
-
-def test_sliceFlaggerDrop(data):
-    flagger = DmpFlagger().initFlags(data)
-    with pytest.raises(TypeError):
-        flagger.getFlags(field=data.columns, drop="var")
-
-    field = data.columns[0]
-    expected = data[data.columns.drop(field)].to_df()
-
-    filtered = flagger.slice(drop=field)
-
-    assert (filtered._flags.columns == expected.columns).all(axis=None)
-    assert (filtered._comments.columns == expected.columns).all(axis=None)
-    assert (filtered._causes.columns == expected.columns).all(axis=None)
-
-    assert (filtered._flags.to_df().index == expected.index).all(axis=None)
-    assert (filtered._comments.to_df().index == expected.index).all(axis=None)
-    assert (filtered._causes.to_df().index == expected.index).all(axis=None)
-
diff --git a/test/flagger/test_flagger.py b/test/flagger/test_flagger.py
deleted file mode 100644
index 77f835b17e577ea9998b513a5bab0df0d13d28af..0000000000000000000000000000000000000000
--- a/test/flagger/test_flagger.py
+++ /dev/null
@@ -1,752 +0,0 @@
-#!/usr/bin/env python
-
-import pytest
-import numpy as np
-import pandas as pd
-from pandas.api.types import is_bool_dtype
-
-import dios
-
-from test.common import TESTFLAGGER, initData
-
-
-def _getDataset(rows, cols):
-    return initData(cols=cols, rows=rows, start_date="2011-01-01", end_date="2011-01-10")
-
-
-DATASETS = [
-    _getDataset(0, 1),
-    _getDataset(1, 1),
-    _getDataset(100, 1),
-    # _getDataset(1000, 1),
-    _getDataset(0, 4),
-    _getDataset(1, 4),
-    # _getDataset(100, 4),
-    # _getDataset(1000, 4),
-    # _getDataset(10000, 40),
-    _getDataset(20, 4),
-]
-
-
-def check_all_dios_index_length(tocheck, expected):
-    for c in tocheck:
-        if len(tocheck[c]) != len(expected[c]):
-            return False
-    return True
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_initFlags(data, flagger):
-    """
-    test before:
-    - None
-    """
-
-    newflagger = flagger.initFlags(data)
-    assert isinstance(newflagger, type(flagger))
-    assert newflagger is not flagger
-
-    flags = newflagger.getFlags()
-    assert isinstance(flags, dios.DictOfSeries)
-
-    assert len(flags.columns) >= len(data.columns)
-    assert check_all_dios_index_length(flags, data)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_initFlagsWithFlags(data, flagger):
-    flags = dios.DictOfSeries(pd.Series(data=flagger.BAD))
-    flagger = flagger.initFlags(flags=flags)
-    assert (flagger.flags == flags).all(axis=None)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_getFlags(data, flagger):
-    """
-    test before:
-    - initFlags()
-
-    we need to check:
-    - access all flags -> get a dios
-    - access some columns of flags -> get a dios
-    - access one column of flags -> get a series
-    """
-
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    # all - dios
-    flags0 = flagger.getFlags()
-    assert isinstance(flags0, dios.DictOfSeries)
-    assert (flags0.columns == data.columns).all()
-    assert check_all_dios_index_length(flags0, data)
-    for dt in flags0.dtypes:
-        assert dt == flagger.dtype
-
-    # some - dios
-    if len(data.columns) >= 2:
-        cols = data.columns[:2].to_list()
-        flags1 = flagger.getFlags(cols)
-        assert isinstance(flags1, dios.DictOfSeries)
-        assert (flags1.columns == data.columns[:2]).all()
-        assert check_all_dios_index_length(flags1, data[cols])
-        for dt in flags1.dtypes:
-            assert dt == flagger.dtype
-
-    # series
-    flags2 = flagger.getFlags(field)
-    assert isinstance(flags2, pd.Series)
-    assert flags2.dtype == flagger.dtype
-    assert flags2.shape[0] == data[field].shape[0]
-    # NOTE: needs a fix in dios, see issue #16 (has very low priority)
-    # assert flags2.name in data.columns
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_setFlags(data, flagger):
-    """
-    test before:
-    - initFlags()
-    - getFlags()
-    """
-    flagger = flagger.initFlags(data)
-    sl = slice("2011-01-02", "2011-01-05")
-    field, *_ = data.columns
-
-    base = flagger.getFlags()
-
-    flagger_good = flagger.setFlags(field, flag=flagger.GOOD, loc=sl)
-    assert isinstance(flagger_good, type(flagger))
-    assert flagger_good is not flagger
-
-    flags_good = flagger_good.getFlags()
-    assert len(flags_good[field]) <= len(base[field])
-    assert (flags_good.columns == base.columns).all()
-    assert (flags_good.loc[sl, field] == flagger.GOOD).all()
-
-    # overflag works BAD > GOOD
-    flagger_bad = flagger_good.setFlags(field, flag=flagger.BAD)
-    assert (flagger_bad.getFlags(field) == flagger.BAD).all()
-
-    # overflag doesn't work GOOD < BAD
-    flagger_still_bad = flagger_bad.setFlags(field, flag=flagger.GOOD)
-    assert (flagger_still_bad.getFlags(field) == flagger.BAD).all()
-
-    # overflag does work with force
-    flagger_forced_good = flagger_bad.setFlags(field, flag=flagger.GOOD, force=True)
-    assert (flagger_forced_good.getFlags(field) == flagger.GOOD).all()
-
-    with pytest.raises(ValueError):
-        flagger.setFlags(field=None, flag=flagger.BAD)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_sliceFlagger(data, flagger):
-    """
-    test before:
-    - initFlags()
-    - getFlags() inside slice()
-    """
-    sl = slice(None, None, 3)
-
-    flagger = flagger.initFlags(data)
-    newflagger = flagger.slice(loc=sl)
-    assert isinstance(newflagger, type(flagger))
-
-    newflags = newflagger.getFlags()
-    assert (newflags.columns == data.columns).all()
-    assert check_all_dios_index_length(newflags, data[sl])
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_sliceFlaggerDrop(data, flagger):
-    flagger = flagger.initFlags(data)
-    with pytest.raises(TypeError):
-        flagger.getFlags(field=data.columns, drop="var")
-
-    field = data.columns[0]
-    expected = data.columns.drop(field)
-
-    filtered = flagger.slice(drop=field)
-    assert (filtered.getFlags().columns == expected).all(axis=None)
-    assert (filtered.getFlags().to_df().index == data[expected].to_df().index).all(axis=None)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_mergeFlagger(data, flagger):
-    """
-    test before:
-    - initFlags()
-    - getFlags()
-    - setFlags()
-    - slice()
-    """
-    field, *_ = data.columns
-    sl = slice(None, None, 3)
-
-    this_flagger = flagger.initFlags(data)
-    other_flagger = this_flagger.slice(loc=sl).setFlags(field)
-    result_flagger = this_flagger.merge(other_flagger)
-
-    result_flags = result_flagger.getFlags()
-    other_flags = other_flagger.getFlags()
-
-    # check flags that were set
-    check = result_flags.loc[sl, field] == other_flags[field]
-    assert check.all(None)
-    # check flags that were not set
-    mask = ~result_flags[field].index.isin(other_flags[field].index)
-    check = result_flags.loc[mask, field] == result_flagger.UNFLAGGED
-    assert check.all(None)
-
-    # check unchanged columns
-    cols = data.columns.to_list()
-    cols.remove(field)
-    check = result_flags[cols] == result_flagger.UNFLAGGED
-    assert check.all(None)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_mergeFlaggerColumnsDiff(data, flagger):
-    """
-    test before:
-    - initFlags()
-    - getFlags()
-    - setFlags()
-    - slice()
-    - merge()
-    """
-    field, *_ = data.columns
-    new_field = field + "_new"
-    sl = slice(None, None, 2)
-
-    other_data = data.loc[sl]
-    other_data.columns = [new_field] + data.columns[1:].to_list()
-    other_flagger = flagger.initFlags(other_data)
-
-    this_flagger = flagger.initFlags(data).setFlags(field, flag=flagger.BAD)
-    result_flagger = this_flagger.merge(other_flagger)
-
-    result_flags = result_flagger.getFlags()
-    other_flags = other_flagger.getFlags()
-
-    # we need to check if
-    # - the new column is present
-    # - the new column is identical to the original
-    # - the other columns are unchanged
-    #   - field-column is BAD
-    #   - other columns are UNFLAGGED
-
-    assert new_field in result_flags
-
-    check = result_flags[new_field] == other_flags[new_field]
-    assert check.all(None)
-
-    check = result_flags[field] == result_flagger.BAD
-    assert check.all(None)
-
-    cols = data.columns.to_list()
-    cols.remove(field)
-    check = result_flags[cols] == result_flagger.UNFLAGGED
-    assert check.all(None)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_mergeFlaggerIndexDiff(data, flagger):
-    """
-    test before:
-    - initFlags()
-    - getFlags()
-    - setFlags()
-    - slice()
-    - merge()
-
-    we need to check:
-    - the index is the union of this and other's index
-    - indices + values that are only in this should be present
-    - indices + values that are only in other should be present
-    - indices that are in both this and other should have the values from other
-    """
-    field, *_ = data.columns
-    sl = slice(None, None, 2)
-
-    def shiftindex(s):
-        s.index = s.index + pd.Timedelta(minutes=2, seconds=25)
-        return s
-
-    # create a sliced time-shifted version of data
-    other_data = data.loc[sl].apply(shiftindex)
-    if isinstance(other_data, pd.Series):
-        pass
-
-    this_flagger = flagger.initFlags(data).setFlags(field, flag=flagger.BAD)
-    other_flagger = flagger.initFlags(other_data)
-    result_flagger = this_flagger.merge(other_flagger)
-
-    result_flags = result_flagger.getFlags()
-    this_flags = this_flagger.getFlags()
-    other_flags = other_flagger.getFlags()
-
-    for c in result_flags:
-        t, o, r = this_flags[c], other_flags[c], result_flags[c]
-        assert (r.index == t.index.union(o.index)).all()
-
-        only_this = t.index.difference(o.index)
-        only_other = o.index.difference(t.index)
-        both = t.index.intersection(o.index)
-
-        # nothing is missing
-        assert (r.index == only_this.union(only_other).union(both)).all()
-
-        assert (r[only_this] == t[only_this]).all()
-        assert (r[only_other] == o[only_other]).all()
-        assert (r[both] == o[both]).all()
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_mergeFlaggerOuter(data, flagger):
-
-    field = data.columns[0]
-
-    data_left = data
-    data_right = data.iloc[::2]
-
-    left = flagger.initFlags(data=data_left).setFlags(field=field, flag=flagger.BAD)
-
-    right = flagger.initFlags(data=data_right).setFlags(field, flag=flagger.GOOD)
-
-    merged = left.merge(right, join="outer")
-
-    loc = data_right[field].index.difference(data_left[field].index)
-    assert (merged.getFlags(field, loc=loc) == flagger.GOOD).all(axis=None)
-    assert (merged.getFlags(field, loc=data_left[field].index) == flagger.BAD).all(axis=None)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_mergeFlaggerInner(data, flagger):
-
-    field = data.columns[0]
-
-    data_left = data
-    data_right = data.iloc[::2]
-
-    left = flagger.initFlags(data=data_left).setFlags(field=field, flag=flagger.BAD)
-
-    right = flagger.initFlags(data=data_right).setFlags(field, flag=flagger.GOOD)
-
-    merged = left.merge(right, join="inner")
-
-    assert (merged.getFlags(field).index == data_right[field].index).all()
-    assert (merged.getFlags(field) == flagger.BAD).all()
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_mergeFlaggerMerge(data, flagger):
-
-    field = data.columns[0]
-    data_left = data
-    data_right = data.iloc[::2]
-
-    left = flagger.initFlags(data=data_left).setFlags(field=field, flag=flagger.BAD)
-
-    right = flagger.initFlags(data=data_right).setFlags(field, flag=flagger.GOOD)
-
-    merged = left.merge(right, join="merge")
-
-    loc = data_left[field].index.difference(data_right[field].index)
-    assert (merged.getFlags(field, loc=data_right[field].index) == flagger.GOOD).all(axis=None)
-    assert (merged.getFlags(field, loc=loc) == flagger.BAD).all(axis=None)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_isFlaggedDios(data, flagger):
-    """
-    test before:
-    - initFlags()
-    - setFlags()
-    """
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    mask = np.zeros(len(data[field]), dtype=bool)
-
-    df_tests = [
-        (flagger.isFlagged(), mask),
-        (flagger.setFlags(field).isFlagged(), ~mask),
-        (flagger.setFlags(field, flag=flagger.GOOD).isFlagged(flag=flagger.GOOD, comparator=">"), mask,),
-        (flagger.setFlags(field, flag=flagger.GOOD).isFlagged(flag=flagger.GOOD, comparator="<"), mask,),
-        (flagger.setFlags(field, flag=flagger.GOOD).isFlagged(flag=flagger.GOOD, comparator="=="), ~mask,),
-    ]
-    for flags, expected in df_tests:
-        assert np.all(flags[field] == expected)
-        assert isinstance(flags, dios.DictOfSeries)
-        assert check_all_dios_index_length(flags, data)
-        assert (flags.columns == data.columns).all()
-        for dt in flags.dtypes:
-            assert is_bool_dtype(dt)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_isFlaggedSeries(data, flagger):
-    """
-    test before:
-    - initFlags()
-    - setFlags()
-    """
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    mask = np.zeros(len(data[field]), dtype=bool)
-
-    series_tests = [
-        (flagger.isFlagged(field), mask),
-        (flagger.setFlags(field).isFlagged(field), ~mask),
-        (flagger.setFlags(field, flag=flagger.GOOD).isFlagged(field, flag=flagger.GOOD, comparator=">"), mask,),
-        (flagger.setFlags(field, flag=flagger.GOOD).isFlagged(field, flag=flagger.GOOD, comparator="<"), mask,),
-        (flagger.setFlags(field, flag=flagger.GOOD).isFlagged(field, flag=flagger.GOOD, comparator="=="), ~mask,),
-    ]
-    for flags, expected in series_tests:
-        assert np.all(flags == expected)
-        assert isinstance(flags, pd.Series)
-        assert flags.dtype == bool
-        assert flags.shape[0] == data[field].shape[0]
-        # NOTE: needs a fix in dios, see issue #16 (has very low priority)
-        # assert flags.name in data.columns
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_isFlaggedSeries_fail(data, flagger):
-    """
-    test before:
-    - initFlags()
-    """
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    fail_tests = [
-        {"flag": pd.Series(index=data[field].index, data=flagger.BAD).astype(flagger.dtype)},
-        # NOTE: allowed since the use of dios
-        # {"field": ["var1", "var2"]},
-    ]
-    for args in fail_tests:
-        with pytest.raises(TypeError):
-            flagger.isFlagged(**args)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_clearFlags(data, flagger):
-    """
-    test before:
-    - initFlags()
-    - getFlags()
-    - setFlags()
-    - isFlagged()
-    """
-    flagger = flagger.initFlags(data)
-    sl = slice("2011-01-02", "2011-01-05")
-    field, *_ = data.columns
-
-    base = flagger.getFlags(field)
-
-    flagger = flagger.setFlags(field=field, flag=flagger.BAD)
-    assert np.sum(flagger.isFlagged(field)) == len(base)
-
-    flaggernew = flagger.clearFlags(field)
-    assert isinstance(flaggernew, type(flagger))
-    assert flaggernew is not flagger
-    assert len(flagger.getFlags(field)) == len(data[field])
-
-    flagger = flagger.clearFlags(field)
-    assert np.sum(flagger.isFlagged(field)) == 0
-    assert len(flagger.getFlags(field)) == len(data[field])
-
-    flagger = flagger.setFlags(field=field, flag=flagger.BAD)
-    assert np.sum(flagger.isFlagged(field)) == len(base)
-    assert len(flagger.getFlags(field)) == len(data[field])
-
-    flagger = flagger.clearFlags(field, loc=sl)
-    assert len(flagger.getFlags(field)) == len(data[field])
-    unflagged = flagger.isFlagged(field, loc=sl)
-    assert np.sum(unflagged) == 0
-    assert np.sum(flagger.isFlagged(field)) == len(data[field]) - len(unflagged)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_dtype(data, flagger):
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    tests = (
-        flagger.getFlags(field).astype(str),
-        "TEST",
-        55,
-    )
-
-    for test in tests:
-        with pytest.raises(TypeError):
-            flagger = flagger.setFlags(field, flag=test)
-        assert flagger.getFlags(field).dtype == flagger.dtype
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER[-1:])
-def test_returnCopy(data, flagger):
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    base = flagger.getFlags()
-
-    assert flagger.getFlags() is not base
-    assert flagger.isFlagged() is not base
-    assert flagger.setFlags(field) is not flagger
-    assert flagger.clearFlags(field) is not flagger
-
-
-LOC_ILOC_FUNCS = ["isFlagged", "getFlags"]
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-@pytest.mark.parametrize("flaggerfunc", LOC_ILOC_FUNCS)
-def test_loc(data, flagger, flaggerfunc):
-    flagger = flagger.initFlags(data)
-    sl = slice("2011-01-02", "2011-01-05")
-    field, *_ = data.columns
-
-    chunk = data.loc[sl, field]
-    d = data.loc[sl]
-    if d.empty:
-        mask = []
-    else:
-        m = data[field].index.get_loc(d[field].index[0])
-        M = data[field].index.get_loc(d[field].index[-1])
-        mask = np.full(len(data[field]), False)
-        mask[m:M] = True
-
-    flagger_func = getattr(flagger, flaggerfunc)
-
-    # masked
-    mflags0 = flagger_func(field, loc=mask)
-    mflags1 = flagger_func().loc[mask, field]
-    mflags2 = flagger_func(field).loc[mask]
-    mflags3 = flagger_func(loc=mask)[field]
-    assert (mflags0 == mflags1).all()
-    assert (mflags0 == mflags2).all()
-    assert (mflags0 == mflags3).all()
-
-    # indexed
-    iflags0 = flagger_func(field, loc=chunk.index)
-    iflags1 = flagger_func().loc[chunk.index, field]
-    iflags2 = flagger_func(field).loc[chunk.index]
-    iflags3 = flagger_func(loc=chunk.index)[field]
-    assert (iflags0 == iflags1).all()
-    assert (iflags0 == iflags2).all()
-    assert (iflags0 == iflags3).all()
-
-    # sliced
-    sflags0 = flagger_func(field, loc=sl)
-    sflags1 = flagger_func().loc[sl, field]
-    sflags2 = flagger_func(field).loc[sl]
-    sflags3 = flagger_func(loc=sl)[field]
-    assert (sflags0 == sflags1).all()
-    assert (sflags0 == sflags2).all()
-    assert (sflags0 == sflags3).all()
-
-    assert (sflags0 == iflags0).all()
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_classicUseCases(data, flagger):
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    flagger = flagger.clearFlags(field)
-
-    # data-mask, same length as flags
-    d = data[field]
-    mask = d < (d.max() - d.min()) // 2
-    flagged = flagger.setFlags(field, loc=mask, flag=flagger.BAD).isFlagged(field)
-    assert (flagged == mask).all()
-
-    flagger = flagger.clearFlags(field)
-
-    indices = np.arange(0, len(data[field]))
-    mask = indices % 3 == 0
-    indices = indices[mask]
-    # we had some fun with numpy and ended up with
-    # numpy indices (positional), but of a different length.
-    # make a dt-index with iloc, then pass it to loc
-    dt_idx = data[field].iloc[indices].index
-    flagged = flagger.setFlags(field, loc=dt_idx, flag=flagger.BAD).isFlagged(field)
-    assert (flagged.iloc[indices] == flagged[flagged]).all()
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_getFlagsWithExtras(data, flagger):
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    flags, extra = flagger.getFlags(field, full=True)
-    assert isinstance(flags, pd.Series)
-    assert isinstance(extra, dict)
-    for k, v in extra.items():
-        assert isinstance(v, pd.Series)
-        assert flags.index.equals(v.index)
-
-    flags, extra = flagger.getFlags(full=True)
-    assert isinstance(flags, dios.DictOfSeries)
-    assert isinstance(extra, dict)
-    for k, v in extra.items():
-        assert isinstance(v, dios.DictOfSeries)
-        assert flags.columns.equals(v.columns)
-        for c in flags:
-            assert flags[c].index.equals(v[c].index)
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_replace_delete(data, flagger):
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-    newflagger = flagger.replaceField(field=field, flags=None)
-
-    new, newextra = newflagger.getFlags(full=True)
-    assert field not in newflagger.flags
-    for k in newextra:
-        assert field not in newextra[k]
-
-    with pytest.raises(ValueError):
-        flagger.replaceField(field="i_dont_exist", flags=None)
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_replace_insert(data, flagger):
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-    newfield = 'fooo'
-    flags, extra = flagger.getFlags(field, full=True)
-    newflagger = flagger.replaceField(field=newfield, flags=flags, **extra)
-    old, oldextra = flagger.getFlags(full=True)
-    new, newextra = newflagger.getFlags(full=True)
-    assert newfield in newflagger.flags
-    assert (newflagger._flags[newfield] == flagger._flags[field]).all()
-    assert newflagger._flags[newfield] is not flagger._flags[field]  # not a copy
-    for k in newextra:
-        assert newfield in newextra[k]
-        assert (newextra[k][newfield] == oldextra[k][field]).all()
-
-
-@pytest.mark.parametrize("data", DATASETS)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_replace_replace(data, flagger):
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-    flags, extra = flagger.getFlags(field, full=True)
-
-    # set everything to BAD
-    flags[:] = flagger.BAD
-    for k, v in extra.items():
-        v[:] = flagger.BAD
-        extra[k] = v
-
-    newflagger = flagger.replaceField(field=field, flags=flags, **extra)
-
-    old, oldextra = flagger.getFlags(full=True)
-    new, newextra = newflagger.getFlags(full=True)
-    assert old.columns.equals(new.columns)
-    assert (new[field] == flagger.BAD).all()
-
-    assert oldextra.keys() == newextra.keys()
-    for k in newextra:
-        o, n = oldextra[k], newextra[k]
-        assert n.columns.equals(o.columns)
-        assert (n[field] == flagger.BAD).all()
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_flagAfter(flagger):
-    idx = pd.date_range("2000", "2001", freq='1M')
-    s = pd.Series(0, index=idx)
-    data = dios.DictOfSeries(s, columns=['a'])
-    exp_base = pd.Series(flagger.UNFLAGGED, index=idx)
-
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    flags = flagger.setFlags(field, loc=s.index[3], flag_after=5).getFlags(field)
-    exp = exp_base.copy()
-    exp.iloc[3: 3+5+1] = flagger.BAD
-    assert (flags == exp).all()
-
-    flags = flagger.setFlags(field, loc=s.index[3], flag_after=5, win_flag=flagger.GOOD).getFlags(field)
-    exp = exp_base.copy()
-    exp.iloc[3: 3+5+1] = flagger.GOOD
-    exp[3] = flagger.BAD
-    assert (flags == exp).all()
-
-    # 3 months < 99 days < 4 months
-    flags = flagger.setFlags(field, loc=s.index[3], flag_after="99d").getFlags(field)
-    exp = exp_base.copy()
-    exp.iloc[3: 3+3+1] = flagger.BAD
-    assert (flags == exp).all()
-
-    # 3 months < 99 days < 4 months
-    flags = flagger.setFlags(field, loc=s.index[3], flag_after="99d", win_flag=flagger.GOOD).getFlags(field)
-    exp = exp_base.copy()
-    exp.iloc[3: 3+3+1] = flagger.GOOD
-    exp[3] = flagger.BAD
-    assert (flags == exp).all()
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_flagBefore(flagger):
-    idx = pd.date_range("2000", "2001", freq='1M')
-    s = pd.Series(0, index=idx)
-    data = dios.DictOfSeries(s, columns=['a'])
-    exp_base = pd.Series(flagger.UNFLAGGED, index=idx)
-
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-
-    flags = flagger.setFlags(field, loc=s.index[8], flag_before=5).getFlags(field)
-    exp = exp_base.copy()
-    exp.iloc[8-5: 8+1] = flagger.BAD
-    assert (flags == exp).all()
-
-    flags = flagger.setFlags(field, loc=s.index[8], flag_before=5, win_flag=flagger.GOOD).getFlags(field)
-    exp = exp_base.copy()
-    exp.iloc[8-5: 8+1] = flagger.GOOD
-    exp[8] = flagger.BAD
-    assert (flags == exp).all()
-
-    # 3 months < 99 days < 4 months
-    flags = flagger.setFlags(field, loc=s.index[8], flag_before="99d").getFlags(field)
-    exp = exp_base.copy()
-    exp.iloc[8-3: 8+1] = flagger.BAD
-    assert (flags == exp).all()
-
-    # 3 months < 99 days < 4 months
-    flags = flagger.setFlags(field, loc=s.index[8], flag_before="99d", win_flag=flagger.GOOD).getFlags(field)
-    exp = exp_base.copy()
-    exp.iloc[8-3: 8+1] = flagger.GOOD
-    exp[8] = flagger.BAD
-    assert (flags == exp).all()
diff --git a/test/flagger/test_positionalflagger.py b/test/flagger/test_positionalflagger.py
deleted file mode 100644
index 9875a7c74ab09aa0f120c09a802970adc870f602..0000000000000000000000000000000000000000
--- a/test/flagger/test_positionalflagger.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import pytest
-
-import numpy as np
-
-from test.common import initData
-from saqc.flagger import PositionalFlagger
-
-
-@pytest.fixture
-def data():
-    return initData(cols=2)
-
-
-def test_initFlags(data):
-    flagger = PositionalFlagger().initFlags(data=data)
-    assert (flagger.isFlagged() == False).all(axis=None)
-    assert (flagger.flags == flagger.UNFLAGGED).all(axis=None)
-
-
-def test_setFlags(data):
-    flagger = PositionalFlagger().initFlags(data=data)
-
-    field = data.columns[0]
-    mask = np.zeros(len(data[field]), dtype=bool)
-    mask[1:10:2] = True
-
-    flagger = flagger.setFlags(field=field, loc=mask, flag=flagger.SUSPICIOUS)
-    assert (flagger.flags.loc[mask, field] == "91").all(axis=None)
-    assert (flagger.flags.loc[~mask, field] == "90").all(axis=None)
-
-    flagger = flagger.setFlags(field=field, loc=~mask, flag=flagger.BAD)
-    assert (flagger.flags.loc[~mask, field] == "902").all(axis=None)
-    assert (flagger.flags.loc[mask, field] == "910").all(axis=None)
-
-    assert (flagger.flags[data.columns[1]] == "-1").all(axis=None)
-
-
-def test_isFlagged(data):
-    flagger = PositionalFlagger().initFlags(data=data)
-    field = data.columns[0]
-
-    mask_sus = np.zeros(len(data[field]), dtype=bool)
-    mask_sus[1:20:2] = True
-    flagger = flagger.setFlags(field=field, loc=mask_sus, flag=flagger.SUSPICIOUS)
-    assert (flagger.isFlagged(field=field, comparator=">=", flag=flagger.SUSPICIOUS)[mask_sus] == True).all(axis=None)
-    assert (flagger.isFlagged(field=field, comparator=">", flag=flagger.SUSPICIOUS) == False).all(axis=None)
-
-    mask_bad = np.zeros(len(data[field]), dtype=bool)
-    mask_bad[1:10:2] = True
-    flagger = flagger.setFlags(field=field, loc=mask_bad, flag=flagger.BAD)
-    assert (flagger.isFlagged(field=field, comparator=">")[mask_sus] == True).all(axis=None)
-    assert (flagger.isFlagged(field=field, comparator=">=", flag=flagger.BAD)[mask_bad] == True).all(axis=None)
-    assert (flagger.isFlagged(field=field, comparator=">", flag=flagger.BAD) == False).all(axis=None)
diff --git a/test/funcs/test_breaks_detection.py b/test/funcs/test_breaks_detection.py
deleted file mode 100644
index f07e949b2278a3d273b101fdbff9e2e6ba5b0a96..0000000000000000000000000000000000000000
--- a/test/funcs/test_breaks_detection.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import pytest
-
-from saqc.funcs.breaks_detection import breaks_flagSpektrumBased
-from test.common import TESTFLAGGER, initData
-
-
-@pytest.fixture
-def data():
-    return initData(cols=1, start_date="2011-01-01 00:00:00", end_date="2011-01-02 03:00:00", freq="5min")
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_breaks_flagSpektrumBased(data, flagger):
-    field, *_ = data.columns
-    data.iloc[5:15] += 100
-    break_positions = [5, 15]
-    flagger = flagger.initFlags(data)
-    data, flagger_result = breaks_flagSpektrumBased(data, field, flagger)
-    flag_result = flagger_result.getFlags(field)
-    test_sum = (flag_result[break_positions] == flagger.BAD).sum()
-    assert test_sum == len(break_positions)
diff --git a/test/funcs/test_constants_detection.py b/test/funcs/test_constants_detection.py
deleted file mode 100644
index 52e2f6d9e50fab7d0ecda01adea82d60ff3614ea..0000000000000000000000000000000000000000
--- a/test/funcs/test_constants_detection.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import pytest
-import numpy as np
-
-from saqc.funcs.constants_detection import constants_flagBasic, constants_flagVarianceBased
-
-from test.common import TESTFLAGGER, initData
-
-
-@pytest.fixture
-def data():
-    constants_data = initData(1, start_date="2011-01-01 00:00:00", end_date="2011-01-01 03:00:00", freq="5min")
-    constants_data.iloc[5:25] = 200
-    return constants_data
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_constants_flagBasic(data, flagger):
-    expected = np.arange(5, 22)
-    field, *_ = data.columns
-    flagger = flagger.initFlags(data)
-    data, flagger_result = constants_flagBasic(data, field, flagger, window="15Min", thresh=0.1,)
-    flags = flagger_result.getFlags(field)
-    assert np.all(flags[expected] == flagger.BAD)
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_constants_flagVarianceBased(data, flagger):
-    expected = np.arange(5, 25)
-    field, *_ = data.columns
-    flagger = flagger.initFlags(data)
-    data, flagger_result1 = constants_flagVarianceBased(data, field, flagger, window="1h")
-
-    flag_result1 = flagger_result1.getFlags(field)
-    test_sum = (flag_result1[expected] == flagger.BAD).sum()
-    assert test_sum == len(expected)
diff --git a/test/funcs/test_functions.py b/test/funcs/test_functions.py
deleted file mode 100644
index 8670e09a2e675c7fa5b9338916e4c62104948090..0000000000000000000000000000000000000000
--- a/test/funcs/test_functions.py
+++ /dev/null
@@ -1,215 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import pytest
-import numpy as np
-import pandas as pd
-import dios
-
-from saqc.funcs.functions import *
-from test.common import initData, TESTFLAGGER
-
-
-
-
-@pytest.fixture
-def data():
-    return initData(cols=1, start_date="2016-01-01", end_date="2018-12-31", freq="1D")
-
-
-@pytest.fixture
-def field(data):
-    return data.columns[0]
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_flagRange(data, field, flagger):
-    min, max = 10, 90
-    flagger = flagger.initFlags(data)
-    data, flagger = flagRange(data, field, flagger, min=min, max=max)
-    flagged = flagger.isFlagged(field)
-    expected = (data[field] < min) | (data[field] > max)
-    assert (flagged == expected).all()
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_flagSesonalRange(data, field, flagger):
-    # prepare
-    data.iloc[::2] = 0
-    data.iloc[1::2] = 50
-    nyears = len(data[field].index.year.unique())
-
-    tests = [
-        ({"min": 1, "max": 100, "startmonth": 7, "startday": 1, "endmonth": 8, "endday": 31,}, 31 * 2 * nyears // 2,),
-        ({"min": 1, "max": 100, "startmonth": 12, "startday": 16, "endmonth": 1, "endday": 15,}, 31 * nyears // 2 + 1,),
-    ]
-
-    for test, expected in tests:
-        flagger = flagger.initFlags(data)
-        data, flagger = flagSesonalRange(data, field, flagger, **test)
-        flagged = flagger.isFlagged(field)
-        assert flagged.sum() == expected
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_clearFlags(data, field, flagger):
-    flagger = flagger.initFlags(data)
-    flags_orig = flagger.getFlags()
-    flags_set = flagger.setFlags(field, flag=flagger.BAD).getFlags()
-    _, flagger = clearFlags(data, field, flagger)
-    flags_cleared = flagger.getFlags()
-    assert (flags_orig != flags_set).all(None)
-    assert (flags_orig == flags_cleared).all(None)
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_forceFlags(data, flagger):
-    flagger = flagger.initFlags(data)
-    field, *_ = data.columns
-    flags_orig = flagger.setFlags(field).getFlags(field)
-    _, flagger = forceFlags(data, field, flagger, flag=flagger.GOOD)
-    flags_forced = flagger.getFlags(field)
-    assert np.all(flags_orig != flags_forced)
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_flagIsolated(data, flagger):
-    field = data.columns[0]
-    data.iloc[1:3, 0] = np.nan
-    data.iloc[4:5, 0] = np.nan
-    data.iloc[11:13, 0] = np.nan
-    data.iloc[15:17, 0] = np.nan
-    flagger = flagger.initFlags(data)
-    s = data[field].iloc[5:6]
-    flagger = flagger.setFlags(field, loc=s)
-
-    _, flagger_result = flagIsolated(data, field, flagger, group_window="1D", gap_window="2.1D")
-
-    assert flagger_result.isFlagged(field)[slice(3, 6, 2)].all()
-
-    data, flagger_result = flagIsolated(
-        data, field, flagger_result, group_window="2D", gap_window="2.1D", continuation_range="1.1D",
-    )
-    assert flagger_result.isFlagged(field)[[3, 5, 13, 14]].all()
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-@pytest.mark.parametrize("dat", [pytest.lazy_fixture("course_2")])
-def test_flagCrossScoring(dat, flagger):
-    data1, characteristics = dat(initial_level=0, final_level=0, out_val=0)
-    data2, characteristics = dat(initial_level=0, final_level=0, out_val=10)
-    field = "dummy"
-    fields = ["data1", "data2"]
-    s1, s2 = data1.squeeze(), data2.squeeze()
-    s1 = pd.Series(data=s1.values, index=s1.index)
-    s2 = pd.Series(data=s2.values, index=s1.index)
-    data = dios.DictOfSeries([s1, s2], columns=["data1", "data2"])
-    flagger = flagger.initFlags(data)
-    _, flagger_result = flagCrossScoring(data, field, flagger, fields=fields, thresh=3, cross_stat=np.mean)
-    for field in fields:
-        isflagged = flagger_result.isFlagged(field)
-        assert isflagged[characteristics["raise"]].all()
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_flagManual(data, flagger):
-    field = data.columns[0]
-    flagger = flagger.initFlags(data)
-    args = data, field, flagger
-    dat = data[field]
-
-    mdata = pd.Series("lala", index=dat.index)
-    index_exp = mdata.iloc[[10, 33, 200, 500]].index
-    mdata.iloc[[101, 133, 220, 506]] = "b"
-    mdata.loc[index_exp] = "a"
-    shrinked = mdata.loc[index_exp.union(mdata.iloc[[1, 2, 3, 4, 600, 601]].index)]
-
-    kwargs_list = [
-        dict(mdata=mdata, mflag="a", method="plain"),
-        dict(mdata=mdata.to_list(), mflag="a", method="plain"),
-        dict(mdata=mdata, mflag="a", method="ontime"),
-        dict(mdata=shrinked, mflag="a", method="ontime"),
-    ]
-
-    for kw in kwargs_list:
-        _, fl = flagManual(*args, **kw)
-        isflagged = fl.isFlagged(field)
-        assert isflagged[isflagged].index.equals(index_exp)
-
-    # a flag value that does not exist in mdata
-    _, fl = flagManual(*args, mdata=mdata, mflag="i do not exist", method="ontime")
-    isflagged = fl.isFlagged(field)
-    assert isflagged[isflagged].index.equals(pd.DatetimeIndex([]))
-
-    # check right-open / ffill
-    index = pd.date_range(start="2016-01-01", end="2018-12-31", periods=11)
-    mdata = pd.Series(0, index=index)
-    mdata.loc[index[[1, 5, 6, 7, 9, 10]]] = 1
-    # >>> mdata
-    # 2016-01-01 00:00:00    0
-    # 2016-04-19 12:00:00    1
-    # 2016-08-07 00:00:00    0
-    # 2016-11-24 12:00:00    0
-    # 2017-03-14 00:00:00    0
-    # 2017-07-01 12:00:00    1
-    # 2017-10-19 00:00:00    1
-    # 2018-02-05 12:00:00    1
-    # 2018-05-26 00:00:00    0
-    # 2018-09-12 12:00:00    1
-    # 2018-12-31 00:00:00    1
-    # dtype: int64
-
-    # add first and last index from data
-    expected = mdata.copy()
-    expected.loc[dat.index[0]] = 0
-    expected.loc[dat.index[-1]] = 1
-    expected = expected.astype(bool)
-
-    _, fl = flagManual(*args, mdata=mdata, mflag=1, method="right-open")
-    isflagged = fl.isFlagged(field)
-    last = expected.index[0]
-    for curr in expected.index[1:]:
-        expected_value = mdata[last]
-        # datetime slicing is inclusive !
-        i = isflagged[last:curr].index[:-1]
-        chunk = isflagged.loc[i]
-        assert (chunk == expected_value).all()
-        last = curr
-    # check last value
-    assert isflagged[curr] == expected[curr]
-
-    # check left-open / bfill
-    expected.loc[dat.index[-1]] = 0  # this time the last is False
-    _, fl = flagManual(*args, mdata=mdata, mflag=1, method="left-open")
-    isflagged = fl.isFlagged(field)
-    last = expected.index[0]
-    assert isflagged[last] == expected[last]
-    for curr in expected.index[1:]:
-        expected_value = mdata[curr]
-        # datetime slicing is inclusive !
-        i = isflagged[last:curr].index[1:]
-        chunk = isflagged.loc[i]
-        assert (chunk == expected_value).all()
-        last = curr
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-@pytest.mark.parametrize("dat", [pytest.lazy_fixture("course_1")])
-def test_flagDriftFromNormal(dat, flagger):
-    data = dat(periods=200, peak_level=5, name='d1')[0]
-    data['d2'] = dat(periods=200, peak_level=10, name='d2')[0]['d2']
-    data['d3'] = dat(periods=200, peak_level=100, name='d3')[0]['d3']
-    data['d4'] = 3 + 4 * data['d1']
-    data['d5'] = 3 + 4 * data['d1']
-
-    flagger = flagger.initFlags(data)
-    data_norm, flagger_norm = flagDriftFromNorm(data, 'dummy', flagger, ['d1', 'd2', 'd3'], segment_freq="200min",
-                                      norm_spread=5)
-
-    data_ref, flagger_ref = flagDriftFromReference(data, 'd1', flagger, ['d1', 'd2', 'd3'], segment_freq="3D",
-                                      thresh=20)
-
-    data_scale, flagger_scale = flagDriftScale(data, 'dummy', flagger, ['d1', 'd3'], ['d4', 'd5'], segment_freq="3D",
-                                                   thresh=20,  norm_spread=5)
-    assert flagger_norm.isFlagged()['d3'].all()
-    assert flagger_ref.isFlagged()['d3'].all()
-    assert flagger_scale.isFlagged()['d3'].all()
diff --git a/test/funcs/test_generic_api_functions.py b/test/funcs/test_generic_api_functions.py
deleted file mode 100644
index c800178cae77684a29c1e11f2b0ddf8f7e32001b..0000000000000000000000000000000000000000
--- a/test/funcs/test_generic_api_functions.py
+++ /dev/null
@@ -1,61 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-import ast
-
-import pytest
-import numpy as np
-import pandas as pd
-
-from dios import DictOfSeries
-
-from test.common import TESTFLAGGER, TESTNODATA, initData, writeIO, flagAll
-from saqc.core.visitor import ConfigFunctionParser
-from saqc.core.config import Fields as F
-from saqc.core.register import register
-from saqc import SaQC, SimpleFlagger
-from saqc.funcs.functions import _execGeneric
-
-
-register(masking='field')(flagAll)
-
-
-@pytest.fixture
-def data():
-    return initData()
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_addFieldFlagGeneric(data, flagger):
-    saqc = SaQC(data=data, flagger=flagger)
-
-    data, flags = saqc.flagGeneric(
-        "tmp1",
-        func=lambda var1: pd.Series(False, index=data[var1.name].index)
-    ).getResult()
-    assert "tmp1" in flags.columns and "tmp1" not in data
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_addFieldProcGeneric(data, flagger):
-    saqc = SaQC(data=data, flagger=flagger)
-
-    data, flagger = saqc.procGeneric("tmp1", func=lambda: pd.Series([])).getResult(raw=True)
-    assert "tmp1" in data.columns and data["tmp1"].empty
-
-    data, flagger = saqc.procGeneric("tmp2", func=lambda var1, var2: var1 + var2).getResult()
-    assert "tmp2" in data.columns and (data["tmp2"] == data["var1"] + data["var2"]).all(axis=None)
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_mask(data, flagger):
-
-    saqc = SaQC(data=data, flagger=flagger)
-    data_org = data.copy(deep=True)
-    mean = data["var1"] / 2
-
-    data, _ = saqc.procGeneric("var1", lambda var1: mask(var1 < mean)).getResult()
-    assert ((data["var1"].isna()) == (data_org["var1"] < 10) & data_org["var1"].isna()).all(axis=None)
-
-    data, flags = saqc.procGeneric("tmp", lambda var1: mask(var1 < mean)).getResult()
-    assert ("tmp" in data.columns) and ("tmp" in flags.columns)
-    assert ((data["tmp"].isna()) == (data_org["var1"] < 10) & data_org["var1"].isna()).all(axis=None)
diff --git a/test/funcs/test_generic_config_functions.py b/test/funcs/test_generic_config_functions.py
deleted file mode 100644
index b761fece3127517fc6dbdca65ed539191bcf2c9a..0000000000000000000000000000000000000000
--- a/test/funcs/test_generic_config_functions.py
+++ /dev/null
@@ -1,327 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import ast
-
-import pytest
-import numpy as np
-import pandas as pd
-
-from dios import DictOfSeries
-
-from test.common import TESTFLAGGER, TESTNODATA, initData, writeIO
-from saqc.core.visitor import ConfigFunctionParser
-from saqc.core.config import Fields as F
-from saqc.core.register import register
-from saqc import SaQC, SimpleFlagger
-from saqc.funcs.functions import _execGeneric
-
-
-@pytest.fixture
-def data():
-    return initData()
-
-
-@pytest.fixture
-def data_diff():
-    data = initData(cols=3)
-    col0 = data[data.columns[0]]
-    col1 = data[data.columns[1]]
-    mid = len(col0) // 2
-    offset = len(col0) // 8
-    return DictOfSeries(data={col0.name: col0.iloc[: mid + offset], col1.name: col1.iloc[mid - offset :],})
-
-
-def _compileGeneric(expr, flagger):
-    tree = ast.parse(expr, mode="eval")
-    _, kwargs = ConfigFunctionParser(flagger).parse(tree.body)
-    return kwargs["func"]
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_missingIdentifier(data, flagger):
-
-    # NOTE:
-    # - the error is only raised at runtime; raising it during parsing would be better
-    tests = [
-        "fff(var2) < 5",
-        "var3 != NODATA",
-    ]
-
-    for test in tests:
-        func = _compileGeneric(f"flagGeneric(func={test})", flagger)
-        with pytest.raises(NameError):
-            _execGeneric(flagger, data, func, field="", nodata=np.nan)
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_syntaxError(flagger):
-
-    tests = [
-        "range(x=5",
-        "rangex=5)",
-        "range[x=5]" "range{x=5}" "int->float(x=4)" "int*float(x=4)",
-    ]
-
-    for test in tests:
-        with pytest.raises(SyntaxError):
-            _compileGeneric(f"flagGeneric(func={test})", flagger)
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_typeError(flagger):
-
-    """
-    test that forbidden constructs actually throw an error
-    TODO: find a few more cases or get rid of the test
-    """
-
-    # TODO: think about cases that should be forbidden
-    tests = ("lambda x: x * 2",)
-
-    for test in tests:
-        with pytest.raises(TypeError):
-            _compileGeneric(f"flagGeneric(func={test})", flagger)
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_comparisonOperators(data, flagger):
-    flagger = flagger.initFlags(data)
-    var1, var2, *_ = data.columns
-    this = var1
-
-    tests = [
-        ("this > 100", data[this] > 100),
-        (f"10 >= {var2}", 10 >= data[var2]),
-        (f"{var2} < 100", data[var2] < 100),
-        (f"this <= {var2}", data[this] <= data[var2]),
-        (f"{var1} == {var2}", data[this] == data[var2]),
-        (f"{var1} != {var2}", data[this] != data[var2]),
-    ]
-
-    for test, expected in tests:
-        func = _compileGeneric(f"flagGeneric(func={test})", flagger)
-        result = _execGeneric(flagger, data, func, field=var1, nodata=np.nan)
-        assert np.all(result == expected)
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_arithmeticOperators(data, flagger):
-    flagger = flagger.initFlags(data)
-    var1, *_ = data.columns
-    this = data[var1]
-
-    tests = [
-        ("var1 + 100 > 110", this + 100 > 110),
-        ("var1 - 100 > 0", this - 100 > 0),
-        ("var1 * 100 > 200", this * 100 > 200),
-        ("var1 / 100 > .1", this / 100 > 0.1),
-        ("var1 % 2 == 1", this % 2 == 1),
-        ("var1 ** 2 == 0", this ** 2 == 0),
-    ]
-
-    for test, expected in tests:
-        func = _compileGeneric(f"procGeneric(func={test})", flagger)
-        result = _execGeneric(flagger, data, func, field=var1, nodata=np.nan)
-        assert np.all(result == expected)
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_nonReduncingBuiltins(data, flagger):
-    flagger = flagger.initFlags(data)
-    var1, *_ = data.columns
-    this = var1
-    mean = data[var1].mean()
-
-    tests = [
-        (f"abs({this})", np.abs(data[this])),
-        (f"log({this})", np.log(data[this])),
-        (f"exp({this})", np.exp(data[this])),
-        (f"ismissing(mask({this} < {mean}))", data.mask(data[this] < mean).isna()),
-    ]
-
-    for test, expected in tests:
-        func = _compileGeneric(f"procGeneric(func={test})", flagger)
-        result = _execGeneric(flagger, data, func, field=this, nodata=np.nan)
-        assert (result == expected).all()
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-@pytest.mark.parametrize("nodata", TESTNODATA)
-def test_reduncingBuiltins(data, flagger, nodata):
-
-    data.loc[::4] = nodata
-    flagger = flagger.initFlags(data)
-    var1 = data.columns[0]
-    this = data.iloc[:, 0]
-
-    tests = [
-        ("min(this)", np.nanmin(this)),
-        (f"max({var1})", np.nanmax(this)),
-        (f"sum({var1})", np.nansum(this)),
-        ("mean(this)", np.nanmean(this)),
-        (f"std({this.name})", np.std(this)),
-        (f"len({this.name})", len(this)),
-    ]
-
-    for test, expected in tests:
-        func = _compileGeneric(f"procGeneric(func={test})", flagger)
-        result = _execGeneric(flagger, data, func, field=this.name, nodata=nodata)
-        assert result == expected
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-@pytest.mark.parametrize("nodata", TESTNODATA)
-def test_ismissing(data, flagger, nodata):
-
-    data.iloc[: len(data) // 2, 0] = np.nan
-    data.iloc[(len(data) // 2) + 1 :, 0] = -9999
-    this = data.iloc[:, 0]
-
-    tests = [
-        (f"ismissing({this.name})", (pd.isnull(this) | (this == nodata))),
-        (f"~ismissing({this.name})", (pd.notnull(this) & (this != nodata))),
-    ]
-
-    for test, expected in tests:
-        func = _compileGeneric(f"flagGeneric(func={test})", flagger)
-        result = _execGeneric(flagger, data, func, this.name, nodata)
-        assert np.all(result == expected)
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-@pytest.mark.parametrize("nodata", TESTNODATA)
-def test_bitOps(data, flagger, nodata):
-    var1, var2, *_ = data.columns
-    this = var1
-
-    flagger = flagger.initFlags(data)
-
-    tests = [
-        ("~(this > mean(this))", ~(data[this] > np.nanmean(data[this]))),
-        (f"(this <= 0) | (0 < {var1})", (data[this] <= 0) | (0 < data[var1])),
-        (f"({var2} >= 0) & (0 > this)", (data[var2] >= 0) & (0 > data[this])),
-    ]
-
-    for test, expected in tests:
-        func = _compileGeneric(f"flagGeneric(func={test})", flagger)
-        result = _execGeneric(flagger, data, func, this, nodata)
-        assert np.all(result == expected)
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_isflagged(data, flagger):
-
-    var1, var2, *_ = data.columns
-
-    flagger = flagger.initFlags(data).setFlags(var1, loc=data[var1].index[::2], flag=flagger.BAD)
-
-    tests = [
-        (f"isflagged({var1})", flagger.isFlagged(var1)),
-        (f"isflagged({var1}, flag=BAD)", flagger.isFlagged(var1, flag=flagger.BAD, comparator=">=")),
-        (f"isflagged({var1}, UNFLAGGED, '==')", flagger.isFlagged(var1, flag=flagger.UNFLAGGED, comparator="==")),
-        (f"~isflagged({var2})", ~flagger.isFlagged(var2)),
-        (f"~({var2}>999) & (~isflagged({var2}))", ~(data[var2] > 999) & (~flagger.isFlagged(var2))),
-    ]
-
-    for test, expected in tests:
-        func = _compileGeneric(f"flagGeneric(func={test}, flag=BAD)", flagger)
-        result = _execGeneric(flagger, data, func, field=None, nodata=np.nan)
-        assert np.all(result == expected)
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_variableAssignments(data, flagger):
-    var1, var2, *_ = data.columns
-
-    config = f"""
-    {F.VARNAME}  ; {F.TEST}
-    dummy1       ; procGeneric(func=var1 + var2)
-    dummy2       ; flagGeneric(func=var1 + var2 > 0)
-    """
-
-    fobj = writeIO(config)
-    saqc = SaQC(flagger, data).readConfig(fobj)
-    result_data, result_flagger = saqc.getResult(raw=True)
-
-    assert set(result_data.columns) == set(data.columns) | {
-        "dummy1",
-    }
-    assert set(result_flagger.getFlags().columns) == set(data.columns) | {"dummy1", "dummy2"}
-
-
-@pytest.mark.xfail(strict=True)
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_procGenericMultiple(data_diff, flagger):
-    var1, var2, *_ = data_diff.columns
-
-    config = f"""
-    {F.VARNAME} ; {F.TEST}
-    dummy       ; procGeneric(func=var1 + 1)
-    dummy       ; procGeneric(func=var2 - 1)
-    """
-
-    fobj = writeIO(config)
-    saqc = SaQC(flagger, data_diff).readConfig(fobj)
-    result_data, result_flagger = saqc.getResult()
-    assert len(result_data["dummy"]) == len(result_flagger.getFlags("dummy"))
-
-
-def test_callableArgumentsUnary(data):
-
-    window = 5
-
-    @register(masking='field')
-    def testFuncUnary(data, field, flagger, func, **kwargs):
-        data[field] = data[field].rolling(window=window).apply(func)
-        return data, flagger.initFlags(data=data)
-
-    flagger = SimpleFlagger()
-    var = data.columns[0]
-
-    config = f"""
-    {F.VARNAME} ; {F.TEST}
-    {var}       ; testFuncUnary(func={{0}})
-    """
-
-    tests = [
-        ("sum", np.sum),
-        ("std(exp(x))", lambda x: np.std(np.exp(x))),
-    ]
-
-    for (name, func) in tests:
-        fobj = writeIO(config.format(name))
-        result_config, _ = SaQC(flagger, data).readConfig(fobj).getResult()
-        result_api, _ = SaQC(flagger, data).testFuncUnary(var, func=func).getResult()
-        expected = data[var].rolling(window=window).apply(func)
-        assert (result_config[var].dropna() == expected.dropna()).all(axis=None)
-        assert (result_api[var].dropna() == expected.dropna()).all(axis=None)
-
-
-def test_callableArgumentsBinary(data):
-
-    flagger = SimpleFlagger()
-    var1, var2 = data.columns[:2]
-
-    @register(masking='field')
-    def testFuncBinary(data, field, flagger, func, **kwargs):
-        data[field] = func(data[var1], data[var2])
-        return data, flagger.initFlags(data=data)
-
-    config = f"""
-    {F.VARNAME} ; {F.TEST}
-    {var1}      ; testFuncBinary(func={{0}})
-    """
-
-    tests = [
-        ("x + y", lambda x, y: x + y),
-        ("y - (x * 2)", lambda y, x: y - (x * 2)),
-    ]
-
-    for (name, func) in tests:
-        fobj = writeIO(config.format(name))
-        result_config, _ = SaQC(flagger, data).readConfig(fobj).getResult()
-        result_api, _ = SaQC(flagger, data).testFuncBinary(var1, func=func).getResult()
-        expected = func(data[var1], data[var2])
-        assert (result_config[var1].dropna() == expected.dropna()).all(axis=None)
-        assert (result_api[var1].dropna() == expected.dropna()).all(axis=None)
diff --git a/test/funcs/test_harm_funcs.py b/test/funcs/test_harm_funcs.py
deleted file mode 100644
index d8825f9689c4c7108b53e9dc22772d203449ab04..0000000000000000000000000000000000000000
--- a/test/funcs/test_harm_funcs.py
+++ /dev/null
@@ -1,223 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-
-# see test/funcs/conftest.py for global fixtures "course_..."
-import pytest
-
-import numpy as np
-import pandas as pd
-import dios
-
-from test.common import TESTFLAGGER
-
-from saqc.funcs.harm_functions import (
-    harm_linear2Grid,
-    harm_interpolate2Grid,
-    harm_shift2Grid,
-    harm_aggregate2Grid,
-    harm_deharmonize,
-)
-
-RESHAPERS = ["nshift", "fshift", "bshift", "nagg", "bagg", "fagg", "interpolation"]
-
-INTERPOLATIONS = ["time", "polynomial"]
-
-
-@pytest.fixture
-def data():
-    index = pd.date_range(start="1.1.2011 00:00:00", end="1.1.2011 01:00:00", freq="15min")
-    index = index.insert(2, pd.Timestamp(2011, 1, 1, 0, 29, 0))
-    index = index.insert(2, pd.Timestamp(2011, 1, 1, 0, 28, 0))
-    index = index.insert(5, pd.Timestamp(2011, 1, 1, 0, 32, 0))
-    index = index.insert(5, pd.Timestamp(2011, 1, 1, 0, 31, 0))
-    index = index.insert(0, pd.Timestamp(2010, 12, 31, 23, 57, 0))
-    index = index.drop(pd.Timestamp("2011-01-01 00:30:00"))
-    dat = pd.Series(np.linspace(-50, 50, index.size), index=index, name="data")
-    # good to have some nan
-    dat[-3] = np.nan
-    data = dios.DictOfSeries(dat)
-    return data
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-@pytest.mark.parametrize("reshaper", RESHAPERS)
-def test_harmSingleVarIntermediateFlagging(data, flagger, reshaper):
-    flagger = flagger.initFlags(data)
-    # make pre harm copies:
-    pre_data = data.copy()
-    pre_flags = flagger.getFlags()
-    freq = "15min"
-    assert len(data.columns) == 1
-    field = data.columns[0]
-    data, flagger = harm_linear2Grid(data, "data", flagger, freq)
-    # flag something bad
-    flagger = flagger.setFlags("data", loc=data[field].index[3:4])
-    data, flagger = harm_deharmonize(data, "data", flagger, method="inverse_" + reshaper)
-    d = data[field]
-    if reshaper == "nagg":
-        assert flagger.isFlagged(loc=d.index[3:7]).squeeze().all()
-        assert (~flagger.isFlagged(loc=d.index[0:3]).squeeze()).all()
-        assert (~flagger.isFlagged(loc=d.index[7:]).squeeze()).all()
-    if reshaper == "nshift":
-        assert (flagger.isFlagged().squeeze() == [False, False, False, False, True, False, False, False, False]).all()
-    if reshaper == "bagg":
-        assert flagger.isFlagged(loc=d.index[5:7]).squeeze().all()
-        assert (~flagger.isFlagged(loc=d.index[0:5]).squeeze()).all()
-        assert (~flagger.isFlagged(loc=d.index[7:]).squeeze()).all()
-    if reshaper == "bshift":
-        assert (flagger.isFlagged().squeeze() == [False, False, False, False, False, True, False, False, False]).all()
-    if reshaper == "fagg":
-        assert flagger.isFlagged(loc=d.index[3:5]).squeeze().all()
-        assert (~flagger.isFlagged(loc=d.index[0:3]).squeeze()).all()
-        assert (~flagger.isFlagged(loc=d.index[5:]).squeeze()).all()
-    if reshaper == "fshift":
-        assert (flagger.isFlagged().squeeze() == [False, False, False, False, True, False, False, False, False]).all()
-
-    flags = flagger.getFlags()
-    assert pre_data[field].equals(data[field])
-    assert len(data[field]) == len(flags[field])
-    assert (pre_flags[field].index == flags[field].index).all()
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_harmSingleVarInterpolations(data, flagger):
-    flagger = flagger.initFlags(data)
-    field = data.columns[0]
-    pre_data = data[field]
-    pre_flags = flagger.getFlags(field)
-    tests = [
-        (
-            "nagg",
-            "15Min",
-            pd.Series(
-                data=[-87.5, -25.0, 0.0, 37.5, 50.0],
-                index=pd.date_range("2011-01-01 00:00:00", "2011-01-01 01:00:00", freq="15min"),
-            ),
-        ),
-        (
-            "nagg",
-            "30Min",
-            pd.Series(
-                data=[-87.5, -25.0, 87.5],
-                index=pd.date_range("2011-01-01 00:00:00", "2011-01-01 01:00:00", freq="30min"),
-            ),
-        ),
-        (
-            "bagg",
-            "15Min",
-            pd.Series(
-                data=[-50.0, -37.5, -37.5, 12.5, 37.5, 50.0],
-                index=pd.date_range("2010-12-31 23:45:00", "2011-01-01 01:00:00", freq="15min"),
-            ),
-        ),
-        (
-            "bagg",
-            "30Min",
-            pd.Series(
-                data=[-50.0, -75.0, 50.0, 50.0],
-                index=pd.date_range("2010-12-31 23:30:00", "2011-01-01 01:00:00", freq="30min"),
-            ),
-        ),
-    ]
-
-    for interpolation, freq, expected in tests:
-        data_harm, flagger_harm = harm_aggregate2Grid(
-            data, field, flagger, freq, value_func=np.sum, method=interpolation
-        )
-        assert data_harm[field].equals(expected)
-        data_deharm, flagger_deharm = harm_deharmonize(
-            data_harm, "data", flagger_harm, method="inverse_" + interpolation
-        )
-        assert data_deharm[field].equals(pre_data)
-        assert flagger_deharm.getFlags([field]).squeeze().equals(pre_flags)
-
-    tests = [
-        (
-            "fshift",
-            "15Min",
-            pd.Series(
-                data=[np.nan, -37.5, -25.0, 0.0, 37.5, 50.0],
-                index=pd.date_range("2010-12-31 23:45:00", "2011-01-01 01:00:00", freq="15Min"),
-            ),
-        ),
-        (
-            "fshift",
-            "30Min",
-            pd.Series(
-                data=[np.nan, -37.5, 0.0, 50.0],
-                index=pd.date_range("2010-12-31 23:30:00", "2011-01-01 01:00:00", freq="30Min"),
-            ),
-        ),
-        (
-            "bshift",
-            "15Min",
-            pd.Series(
-                data=[-50.0, -37.5, -25.0, 12.5, 37.5, 50.0],
-                index=pd.date_range("2010-12-31 23:45:00", "2011-01-01 01:00:00", freq="15Min"),
-            ),
-        ),
-        (
-            "bshift",
-            "30Min",
-            pd.Series(
-                data=[-50.0, -37.5, 12.5, 50.0],
-                index=pd.date_range("2010-12-31 23:30:00", "2011-01-01 01:00:00", freq="30Min"),
-            ),
-        ),
-        (
-            "nshift",
-            "15min",
-            pd.Series(
-                data=[np.nan, -37.5, -25.0, 12.5, 37.5, 50.0],
-                index=pd.date_range("2010-12-31 23:45:00", "2011-01-01 01:00:00", freq="15Min"),
-            ),
-        ),
-        (
-            "nshift",
-            "30min",
-            pd.Series(
-                data=[np.nan, -37.5, 12.5, 50.0],
-                index=pd.date_range("2010-12-31 23:30:00", "2011-01-01 01:00:00", freq="30Min"),
-            ),
-        ),
-    ]
-
-    for interpolation, freq, expected in tests:
-        data_harm, flagger_harm = harm_shift2Grid(data, field, flagger, freq, method=interpolation)
-        assert data_harm[field].equals(expected)
-        data_deharm, flagger_deharm = harm_deharmonize(
-            data_harm, "data", flagger_harm, method="inverse_" + interpolation
-        )
-        assert data_deharm[field].equals(pre_data)
-        assert flagger_deharm.getFlags([field]).squeeze().equals(pre_flags)
-
-
-@pytest.mark.parametrize("method", INTERPOLATIONS)
-def test_gridInterpolation(data, method):
-    freq = "15min"
-    data = data.squeeze()
-    field = data.name
-    data = (data * np.sin(data)).append(data.shift(1, "2h")).shift(1, "3s")
-    data = dios.DictOfSeries(data)
-    flagger = TESTFLAGGER[0].initFlags(data)
-
-    # we are just testing if the interpolation gets passed to the series without causing an error:
-
-    harm_interpolate2Grid(data, field, flagger, freq, method=method, downcast_interpolation=True)
-    if method == "polynomial":
-        harm_interpolate2Grid(data, field, flagger, freq, order=2, method=method, downcast_interpolation=True)
-        harm_interpolate2Grid(data, field, flagger, freq, order=10, method=method, downcast_interpolation=True)
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_wrapper(data, flagger):
-    # we are only testing whether the wrappers pass processing without errors:
-    field = data.columns[0]
-    freq = "15min"
-    flagger = flagger.initFlags(data)
-
-    harm_linear2Grid(data, field, flagger, freq, to_drop=None)
-    harm_aggregate2Grid(data, field, flagger, freq, value_func=np.nansum, method="nagg", to_drop=None)
-    harm_shift2Grid(data, field, flagger, freq, method="nshift", to_drop=None)
-    harm_interpolate2Grid(data, field, flagger, freq, method="spline")
diff --git a/test/funcs/test_modelling.py b/test/funcs/test_modelling.py
deleted file mode 100644
index f221944f1c6c2fcfd1c23acba4dd13f552b9063f..0000000000000000000000000000000000000000
--- a/test/funcs/test_modelling.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-
-# see test/funcs/conftest.py for global fixtures "course_..."
-
-import pytest
-
-import numpy as np
-import pandas as pd
-import dios
-
-from test.common import TESTFLAGGER
-
-from saqc.funcs.modelling import modelling_polyFit, modelling_rollingMean, modelling_mask
-
-TF = TESTFLAGGER[:1]
-
-
-@pytest.mark.parametrize("flagger", TF)
-@pytest.mark.parametrize("dat", [pytest.lazy_fixture("course_2")])
-def test_modelling_polyFit_forRegular(dat, flagger):
-    data, _ = dat(freq="10min", periods=30, initial_level=0, final_level=100, out_val=-100)
-    # add some nice sine distortion
-    data = data + 10 * np.sin(np.arange(0, len(data.indexes[0])))
-    data = dios.DictOfSeries(data)
-    flagger = flagger.initFlags(data)
-    result1, _ = modelling_polyFit(data, "data", flagger, 11, 2, numba=False)
-    result2, _ = modelling_polyFit(data, "data", flagger, 11, 2, numba=True)
-    assert (result1["data"] - result2["data"]).abs().max() < 10 ** -10
-    result3, _ = modelling_polyFit(data, "data", flagger, "110min", 2, numba=False)
-    assert result3["data"].equals(result1["data"])
-    result4, _ = modelling_polyFit(data, "data", flagger, 11, 2, numba=True, min_periods=11)
-    assert (result4["data"] - result2["data"]).abs().max() < 10 ** -10
-    data.iloc[13:16] = np.nan
-    result5, _ = modelling_polyFit(data, "data", flagger, 11, 2, numba=True, min_periods=9)
-    assert result5["data"].iloc[10:19].isna().all()
-
-
-@pytest.mark.parametrize("flagger", TF)
-@pytest.mark.parametrize("dat", [pytest.lazy_fixture("course_2")])
-def test_modelling_rollingMean_forRegular(dat, flagger):
-    data, _ = dat(freq="10min", periods=30, initial_level=0, final_level=100, out_val=-100)
-    data = dios.DictOfSeries(data)
-    flagger = flagger.initFlags(data)
-    modelling_rollingMean(data, "data", flagger, 5, eval_flags=True, min_periods=0, center=True)
-    modelling_rollingMean(data, "data", flagger, 5, eval_flags=True, min_periods=0, center=False)
-
-@pytest.mark.parametrize("flagger", TF)
-@pytest.mark.parametrize("dat", [pytest.lazy_fixture("course_1")])
-def test_modelling_mask(dat, flagger):
-    data, _ = dat()
-    data = dios.DictOfSeries(data)
-    flagger = flagger.initFlags(data)
-    data_seasonal, flagger_seasonal = modelling_mask(data, "data", flagger, mode='seasonal', season_start="20:00",
-                                                     season_end="40:00", include_bounds=False)
-    flaggs = flagger_seasonal._flags["data"]
-    assert flaggs[np.logical_and(20 <= flaggs.index.minute, 40 >= flaggs.index.minute)].isna().all()
-    data_seasonal, flagger_seasonal = modelling_mask(data, "data", flagger, mode='seasonal', season_start="15:00:00",
-                                                     season_end="02:00:00")
-    flaggs = flagger_seasonal._flags["data"]
-    assert flaggs[np.logical_and(15 <= flaggs.index.hour, 2 >= flaggs.index.hour)].isna().all()
-    data_seasonal, flagger_seasonal = modelling_mask(data, "data", flagger, mode='seasonal', season_start="03T00:00:00",
-                                                     season_end="10T00:00:00")
-    flaggs = flagger_seasonal._flags["data"]
-    assert flaggs[np.logical_and(3 <= flaggs.index.hour, 10 >= flaggs.index.hour)].isna().all()
-
-    mask_ser = pd.Series(False, index=data["data"].index)
-    mask_ser[::5] = True
-    data["mask_ser"] = mask_ser
-    flagger = flagger.initFlags(data)
-    data_masked, flagger_masked = modelling_mask(data, "data", flagger, mode='mask_var', mask_var="mask_ser")
-    flaggs = flagger_masked._flags["data"]
-    assert flaggs[data_masked['mask_ser']].isna().all()
\ No newline at end of file
diff --git a/test/funcs/test_pattern_rec.py b/test/funcs/test_pattern_rec.py
deleted file mode 100644
index 66ebcbfd1fdf13f5cb30cb5bd34a5a457a31dc3d..0000000000000000000000000000000000000000
--- a/test/funcs/test_pattern_rec.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import pytest
-from dios import dios
-
-from saqc.funcs.pattern_rec import *
-from test.common import initData, TESTFLAGGER
-
-
-@pytest.fixture
-def data():
-    return initData(cols=1, start_date="2016-01-01", end_date="2018-12-31", freq="1D")
-
-
-@pytest.fixture
-def field(data):
-    return data.columns[0]
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_flagPattern_wavelet(flagger):
-
-    data = pd.Series(0, index=pd.date_range(start="2000", end='2001', freq='1d'))
-    data.iloc[2:4] = 7
-    pattern = data.iloc[1:6]
-
-    data = dios.DictOfSeries(dict(data=data, pattern_data=pattern))
-
-    flagger = flagger.initFlags(data)
-    data, flagger = flagPattern_wavelet(data, "data", flagger, ref_field="pattern_data")
-    assert (flagger.isFlagged("data")[1:6]).all()
-    assert (flagger.isFlagged("data")[:1]).any()
-    assert (flagger.isFlagged("data")[7:]).any()
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_flagPattern_dtw(flagger):
-
-    data = pd.Series(0, index=pd.date_range(start="2000", end='2001', freq='1d'))
-    data.iloc[2:4] = 7
-    pattern = data.iloc[1:6]
-
-    data = dios.DictOfSeries(dict(data=data, pattern_data=pattern))
-
-    flagger = flagger.initFlags(data)
-    data, flagger = flagPattern_dtw(data, "data", flagger, ref_field="pattern_data")
-    assert (flagger.isFlagged("data")[1:6]).all()
-    assert (flagger.isFlagged("data")[:1]).any()
-    assert (flagger.isFlagged("data")[7:]).any()
diff --git a/test/funcs/test_proc_functions.py b/test/funcs/test_proc_functions.py
deleted file mode 100644
index 457c56f06b0da92dbe372a71ed9b570aa351dbd1..0000000000000000000000000000000000000000
--- a/test/funcs/test_proc_functions.py
+++ /dev/null
@@ -1,109 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-
-# see test/funcs/conftest.py for global fixtures "course_..."
-
-import pytest
-import numpy as np
-import pandas as pd
-import dios
-
-from saqc.funcs.proc_functions import (
-    proc_interpolateMissing,
-    proc_resample,
-    proc_transform,
-    proc_rollingInterpolateMissing,
-    proc_interpolateGrid,
-    proc_offsetCorrecture
-)
-from saqc.lib.ts_operators import linearInterpolation, polynomialInterpolation
-
-from test.common import TESTFLAGGER
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_rollingInterpolateMissing(course_5, flagger):
-    data, characteristics = course_5(periods=10, nan_slice=[5, 6])
-    field = data.columns[0]
-    data = dios.DictOfSeries(data)
-    flagger = flagger.initFlags(data)
-    dataInt, *_ = proc_rollingInterpolateMissing(
-        data, field, flagger, 3, func=np.median, center=True, min_periods=0, interpol_flag="UNFLAGGED"
-    )
-    assert dataInt[field][characteristics["missing"]].notna().all()
-    dataInt, *_ = proc_rollingInterpolateMissing(
-        data, field, flagger, 3, func=np.nanmean, center=False, min_periods=3, interpol_flag="UNFLAGGED"
-    )
-    assert dataInt[field][characteristics["missing"]].isna().all()
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_interpolateMissing(course_5, flagger):
-    data, characteristics = course_5(periods=10, nan_slice=[5])
-    field = data.columns[0]
-    data = dios.DictOfSeries(data)
-    flagger = flagger.initFlags(data)
-    dataLin, *_ = proc_interpolateMissing(data, field, flagger, method="linear")
-    dataPoly, *_ = proc_interpolateMissing(data, field, flagger, method="polynomial")
-    assert dataLin[field][characteristics["missing"]].notna().all()
-    assert dataPoly[field][characteristics["missing"]].notna().all()
-    data, characteristics = course_5(periods=10, nan_slice=[5, 6, 7])
-    dataLin1, *_ = proc_interpolateMissing(data, field, flagger, method="linear", inter_limit=2)
-    dataLin2, *_ = proc_interpolateMissing(data, field, flagger, method="linear", inter_limit=3)
-    dataLin3, *_ = proc_interpolateMissing(data, field, flagger, method="linear", inter_limit=4)
-    assert dataLin1[field][characteristics["missing"]].isna().all()
-    assert dataLin2[field][characteristics["missing"]].isna().all()
-    assert dataLin3[field][characteristics["missing"]].notna().all()
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_transform(course_5, flagger):
-    data, characteristics = course_5(periods=10, nan_slice=[5, 6])
-    field = data.columns[0]
-    data = dios.DictOfSeries(data)
-    flagger = flagger.initFlags(data)
-    data1, *_ = proc_transform(data, field, flagger, func=linearInterpolation)
-    assert data1[field][characteristics["missing"]].isna().all()
-    data1, *_ = proc_transform(data, field, flagger, func=lambda x: linearInterpolation(x, inter_limit=3))
-    assert data1[field][characteristics["missing"]].notna().all()
-    data1, *_ = proc_transform(
-        data, field, flagger, func=lambda x: polynomialInterpolation(x, inter_limit=3, inter_order=3)
-    )
-    assert data1[field][characteristics["missing"]].notna().all()
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_resample(course_5, flagger):
-    data, characteristics = course_5(freq="1min", periods=30, nan_slice=[1, 11, 12, 22, 24, 26])
-    field = data.columns[0]
-    data = dios.DictOfSeries(data)
-    flagger = flagger.initFlags(data)
-    data1, *_ = proc_resample(data, field, flagger, "10min", np.mean, max_invalid_total_d=2, max_invalid_consec_d=1)
-    assert ~np.isnan(data1[field].iloc[0])
-    assert np.isnan(data1[field].iloc[1])
-    assert np.isnan(data1[field].iloc[2])
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_interpolateGrid(course_5, course_3, flagger):
-    data, _ = course_5()
-    data_grid, characteristics = course_3()
-    data['grid'] = data_grid.to_df()
-    # data = dios.DictOfSeries(data)
-    flagger = flagger.initFlags(data)
-    dataInt, *_ = proc_interpolateGrid(data, 'data', flagger, '1h', 'time', grid_field='grid', inter_limit=10)
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_offsetCorrecture(flagger):
-    data = pd.Series(0, index=pd.date_range('2000', freq='1d', periods=100), name='dat')
-    data.iloc[30:40] = -100
-    data.iloc[70:80] = 100
-    data = dios.DictOfSeries(data)
-    flagger = flagger.initFlags(data)
-    data, flagger = proc_offsetCorrecture(data, 'dat', flagger, 40, 20, '3d', 1)
-    assert (data == 0).all()[0]
-
diff --git a/test/funcs/test_soil_moisture_tests.py b/test/funcs/test_soil_moisture_tests.py
deleted file mode 100644
index d4eb78f788c94a2999a6093c20528954e7e20394..0000000000000000000000000000000000000000
--- a/test/funcs/test_soil_moisture_tests.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import pytest
-import numpy as np
-import pandas as pd
-import dios
-
-from saqc.funcs.soil_moisture_tests import sm_flagFrost, sm_flagPrecipitation, sm_flagConstants, sm_flagRandomForest
-
-from test.common import TESTFLAGGER, initData
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_sm_flagFrost(flagger):
-    index = pd.date_range(start="2011-01-01 00:00:00", end="2011-01-01 03:00:00", freq="5min")
-
-    sm = pd.Series(data=np.linspace(0, +1, index.size), index=index)
-    st = pd.Series(data=np.linspace(1, -1, index.size), index=index)
-    data = dios.DictOfSeries([sm, st], columns=["soil_moisture", "soil_temperature"])
-
-    flagger = flagger.initFlags(data)
-    data, flagger_result = sm_flagFrost(data, "soil_moisture", flagger, "soil_temperature")
-    flag_assertion = np.arange(19, 37)
-    flag_result = flagger_result.getFlags("soil_moisture")
-    assert (flag_result[flag_assertion] == flagger.BAD).all()
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_flagSoilMoisturePrecipitationEvents(flagger):
-    index = pd.date_range(start="2011-01-01 00:00:00", end="2011-01-04 00:00:00", freq="15min")
-
-    sm = pd.Series(data=np.linspace(0, 1, index.size), index=index)
-    pr = pd.Series(data=np.linspace(1, 1, index.size), index=index)
-    data = dios.DictOfSeries([sm, pr], columns=["soil_moisture", "precipitation"])
-
-    data.loc["2011-01-03", "precipitation"] = 0
-    data.loc["2011-01-04", "precipitation"] = 0
-
-    flagger = flagger.initFlags(data)
-    data, flag_result = sm_flagPrecipitation(data, "soil_moisture", flagger, "precipitation")
-
-    flag_assertion = [288, 287]
-    flag_result = flag_result.getFlags("soil_moisture")
-    test_sum = (flag_result[flag_assertion] == flagger.BAD).sum()
-    assert test_sum == len(flag_assertion)
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_sm_flagConstantss(flagger):
-    data = initData(1, start_date="2011-01-01 00:00:00", end_date="2011-01-02 00:00:00", freq="5min")
-    data.iloc[5:25] = 0
-    data.iloc[100:120] = data.apply(max)[0]
-    field = data.columns[0]
-    flagger = flagger.initFlags(data)
-    data, flagger = sm_flagConstants(data, field, flagger, window="1h", precipitation_window="1h")
-
-    assert ~(flagger.isFlagged()[5:25]).all()[0]
-    assert (flagger.isFlagged()[100:120]).all()[0]
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_sm_flagRandomForest(flagger):
-    ### CREATE MWE DATA
-    data = pd.read_feather("ressources/machine_learning/data/soil_moisture_mwe.feather")
-    data = data.set_index(pd.DatetimeIndex(data.Time))
-    flags_raw = data[["SM1_Flag", "SM2_Flag", "SM3_Flag"]]
-    flags_raw.columns = ["SM1", "SM2", "SM3"]
-
-    # masks for flag preparation
-    mask_bad = flags_raw.isin(["Auto:BattV", "Auto:Range", "Auto:Spike"])
-    mask_unflagged = flags_raw.isin(["Manual"])
-    mask_good = flags_raw.isin(["OK"])
-
-    field = "SM2"
-
-    # prepare flagsframe
-    data = dios.to_dios(data)
-    flagger = flagger.initFlags(data)
-    flagger = flagger.setFlags(field, loc=mask_bad[field])
-    flagger = flagger.setFlags(field, loc=mask_unflagged[field], flag=flagger.UNFLAGGED)
-    flagger = flagger.setFlags(field, loc=mask_good[field], flag=flagger.GOOD)
-
-    references = ["Temp2", "BattV"]
-    window_values = 20
-    window_flags = 20
-    groupvar = 0.2
-    modelname = "testmodel"
-    path = f"ressources/machine_learning/models/{modelname}_{groupvar}.pkl"
-
-    outdat, outflagger = sm_flagRandomForest(data, field, flagger, references, window_values, window_flags, path)
-
-    # compare
-    # assert resulting no of bad flags
-    badflags = outflagger.isFlagged(field)
-    assert badflags.sum() == 10447
-
-    # Have the right values been flagged?
-    checkdates = pd.DatetimeIndex(
-        [
-            "2014-08-05 23:03:59",
-            "2014-08-06 01:35:44",
-            "2014-08-06 01:50:54",
-            "2014-08-06 02:06:05",
-            "2014-08-06 02:21:15",
-            "2014-08-06 04:22:38",
-            "2014-08-06 04:37:49",
-            "2014-08-06 04:52:59",
-        ]
-    )
-    assert badflags[checkdates].all()
diff --git a/test/funcs/test_spikes_detection.py b/test/funcs/test_spikes_detection.py
deleted file mode 100644
index cfdeb79b0a6a5f612f3b2c5a88cdd1e8fdaa61c6..0000000000000000000000000000000000000000
--- a/test/funcs/test_spikes_detection.py
+++ /dev/null
@@ -1,140 +0,0 @@
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-# see test/funcs/conftest.py for global fixtures "course_..."
-import pytest
-import numpy as np
-import pandas as pd
-import dios
-
-from saqc.funcs.spikes_detection import (
-    spikes_flagSpektrumBased,
-    spikes_flagMad,
-    spikes_flagSlidingZscore,
-    spikes_flagBasic,
-    spikes_flagRaise,
-    spikes_flagMultivarScores,
-    spikes_flagGrubbs,
-)
-
-from test.common import TESTFLAGGER
-
-
-@pytest.fixture(scope="module")
-def spiky_data():
-    index = pd.date_range(start="2011-01-01", end="2011-01-05", freq="5min")
-    s = pd.Series(np.linspace(1, 2, index.size), index=index, name="spiky_data")
-    s.iloc[100] = 100
-    s.iloc[1000] = -100
-    flag_assertion = [100, 1000]
-    return dios.DictOfSeries(s), flag_assertion
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_flagSpikesSpektrumBased(spiky_data, flagger):
-    data = spiky_data[0]
-    field, *_ = data.columns
-    flagger = flagger.initFlags(data)
-    data, flagger_result = spikes_flagSpektrumBased(data, field, flagger)
-    flag_result = flagger_result.getFlags(field)
-    test_sum = (flag_result[spiky_data[1]] == flagger.BAD).sum()
-    assert test_sum == len(spiky_data[1])
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_flagMad(spiky_data, flagger):
-    data = spiky_data[0]
-    field, *_ = data.columns
-    flagger = flagger.initFlags(data)
-    data, flagger_result = spikes_flagMad(data, field, flagger, "1H")
-    flag_result = flagger_result.getFlags(field)
-    test_sum = (flag_result[spiky_data[1]] == flagger.BAD).sum()
-    assert test_sum == len(spiky_data[1])
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-@pytest.mark.parametrize("method", ["modZ", "zscore"])
-def test_slidingOutlier(spiky_data, flagger, method):
-    # test for numeric input
-    data = spiky_data[0]
-    field, *_ = data.columns
-    flagger = flagger.initFlags(data)
-
-    tests = [
-        spikes_flagSlidingZscore(data, field, flagger, window=300, offset=50, method=method),
-        spikes_flagSlidingZscore(data, field, flagger, window="1500min", offset="250min", method=method),
-    ]
-
-    for _, flagger_result in tests:
-        flag_result = flagger_result.getFlags(field)
-        test_sum = (flag_result.iloc[spiky_data[1]] == flagger.BAD).sum()
-        assert int(test_sum) == len(spiky_data[1])
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-def test_flagSpikesBasic(spiky_data, flagger):
-    data = spiky_data[0]
-    field, *_ = data.columns
-    flagger = flagger.initFlags(data)
-    data, flagger_result = spikes_flagBasic(data, field, flagger, thresh=60, tolerance=10, window="20min")
-    flag_result = flagger_result.getFlags(field)
-    test_sum = (flag_result[spiky_data[1]] == flagger.BAD).sum()
-    assert test_sum == len(spiky_data[1])
-
-
-# see test/funcs/conftest.py for the 'course_N'
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-@pytest.mark.parametrize(
-    "dat",
-    [
-        pytest.lazy_fixture("course_1"),
-        pytest.lazy_fixture("course_2"),
-        pytest.lazy_fixture("course_3"),
-        pytest.lazy_fixture("course_4"),
-    ],
-)
-def test_flagSpikesLimitRaise(dat, flagger):
-    data, characteristics = dat()
-    field, *_ = data.columns
-    flagger = flagger.initFlags(data)
-    _, flagger_result = spikes_flagRaise(
-        data, field, flagger, thresh=2, intended_freq="10min", raise_window="20min", numba_boost=False
-    )
-    assert flagger_result.isFlagged(field)[characteristics["raise"]].all()
-    assert not flagger_result.isFlagged(field)[characteristics["return"]].any()
-    assert not flagger_result.isFlagged(field)[characteristics["drop"]].any()
-
-
-# see test/funcs/conftest.py for the 'course_N'
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-@pytest.mark.parametrize("dat", [pytest.lazy_fixture("course_3")])
-def test_flagMultivarScores(dat, flagger):
-    data1, characteristics = dat(periods=1000, initial_level=5, final_level=15, out_val=50)
-    data2, characteristics = dat(periods=1000, initial_level=20, final_level=1, out_val=30)
-    field = "dummy"
-    fields = ["data1", "data2"]
-    s1, s2 = data1.squeeze(), data2.squeeze()
-    s1 = pd.Series(data=s1.values, index=s1.index)
-    s2 = pd.Series(data=s2.values, index=s1.index)
-    data = dios.DictOfSeries([s1, s2], columns=["data1", "data2"])
-    flagger = flagger.initFlags(data)
-    _, flagger_result = spikes_flagMultivarScores(
-        data, field, flagger, fields=fields, binning=50, trafo=np.log, iter_start=0.95, n_neighbors=10
-    )
-    for field in fields:
-        isflagged = flagger_result.isFlagged(field)
-        assert isflagged[characteristics["raise"]].all()
-        assert not isflagged[characteristics["return"]].any()
-        assert not isflagged[characteristics["drop"]].any()
-
-
-@pytest.mark.parametrize("flagger", TESTFLAGGER)
-@pytest.mark.parametrize("dat", [pytest.lazy_fixture("course_3")])
-def test_grubbs(dat, flagger):
-    data, char_dict = dat(
-        freq="10min", periods=45, initial_level=0, final_level=0, crowd_size=1, crowd_spacing=3, out_val=-10
-    )
-    flagger = flagger.initFlags(data)
-    data, result_flagger = spikes_flagGrubbs(data, "data", flagger, winsz=20, min_periods=15)
-    assert result_flagger.isFlagged("data")[char_dict["drop"]].all()
-
diff --git a/test/lib/test_rolling.py b/test/lib/test_rolling.py
deleted file mode 100644
index b7245b3b0c7564c8c8ba50a8f643597cd84487d0..0000000000000000000000000000000000000000
--- a/test/lib/test_rolling.py
+++ /dev/null
@@ -1,220 +0,0 @@
-import pytest
-
-from saqc.lib.rolling import customRoller, Rolling
-import pandas as pd
-import numpy as np
-
-FUNCTS = ['count', 'sum', 'mean', 'median', 'var', 'std', 'min', 'max', 'corr', 'cov', 'skew', 'kurt', ]
-
-OTHA = ['apply',
-        'aggregate',  # needs param func eg. func='min'
-        'quantile',  # needs param quantile=0.5 (0<=q<=1)
-        ]
-
-
-@pytest.fixture
-def data():
-    return data_()
-
-
-def data_():
-    s1 = pd.Series(1., index=pd.date_range("1999/12", periods=12, freq='1M') + pd.Timedelta('1d'))
-    s2 = pd.Series(1., index=pd.date_range('2000/05/15', periods=8, freq='1d'))
-    s = pd.concat([s1, s2]).sort_index()
-    s.name = 's'
-    s[15] = np.nan
-    return s
-
-
-len_s = len(data_())
-
-
-def make_num_kws():
-    l = []
-    n = list(range(len_s))
-    for window in n:
-        mp = list(range(window))
-        for min_periods in [None] + mp:
-            if min_periods is not None and min_periods > window:
-                continue
-            for center in [False, True]:
-                l.append(dict(window=window, min_periods=min_periods, center=center))
-    return l
-
-
-def make_dt_kws():
-    l = []
-    n = [0, 1, 2, 10, 32, 70, 120]
-    mp = list(range(len_s))
-    for closed in ['right', 'both', 'neither', 'left']:
-        for window in n:
-            for min_periods in [None] + mp:
-                l.append(dict(window=f'{window}d', min_periods=min_periods, closed=closed))
-    return l
-
-
-def check_series(result, expected):
-    if not (result.isna() == expected.isna()).all():
-        return False
-    result = result.dropna()
-    expected = expected.dropna()
-    if not (result == expected).all():
-        return False
-    return True
-
-
-def print_diff(s, result, expected):
-    df = pd.DataFrame()
-    df['s'] = s
-    df['exp'] = expected
-    df['res'] = result
-    print(df)
-
-
-def call_rolling_function(roller, func):
-    if isinstance(func, str):
-        return getattr(roller, func)()
-    else:
-        return getattr(roller, 'apply')(func)
-
-
-@pytest.mark.parametrize("kws", make_dt_kws(), ids=lambda x: str(x))
-@pytest.mark.parametrize("func", FUNCTS)
-def test_pandas_conform_dt(data, kws, func):
-    s = data
-    try:
-        expR = s.rolling(**kws)
-        expected = call_rolling_function(expR, func)
-    except Exception as e0:
-        # pandas failed, so we should also fail
-        try:
-            resR = customRoller(s, **kws)
-            result = call_rolling_function(resR, func)
-        except Exception as e1:
-            assert type(e0) == type(e1)
-            return
-        assert False, 'pandas failed, but we succeeded'
-
-    resR = customRoller(s, **kws)
-    result = call_rolling_function(resR, func)
-    success = check_series(result, expected)
-    if success:
-        return
-    print_diff(s, result, expected)
-    assert False
-
-
-@pytest.mark.parametrize("kws", make_num_kws(), ids=lambda x: str(x))
-@pytest.mark.parametrize("func", FUNCTS)
-def test_pandas_conform_num(data, kws, func):
-    s = data
-    try:
-        expR = s.rolling(**kws)
-        expected = call_rolling_function(expR, func)
-    except Exception as e0:
-        # pandas failed, so we should also fail
-        try:
-            resR = customRoller(s, **kws)
-            result = call_rolling_function(resR, func)
-        except Exception as e1:
-            assert type(e0) == type(e1)
-            return
-        assert False, 'pandas failed, but we succeeded'
-
-    resR = customRoller(s, **kws)
-    result = call_rolling_function(resR, func)
-    success = check_series(result, expected)
-    if success:
-        return
-    print_diff(s, result, expected)
-    assert False
-
-
-@pytest.mark.parametrize("kws", make_dt_kws(), ids=lambda x: str(x))
-@pytest.mark.parametrize("func", FUNCTS)
-def test_forward_dt(data, kws, func):
-    s = data
-    try:
-        expR = pd.Series(reversed(s), reversed(s.index)).rolling(**kws)
-        expected = call_rolling_function(expR, func)[::-1]
-    except Exception as e0:
-        # pandas failed, so we should also fail
-        try:
-            resR = customRoller(s, forward=True, **kws)
-            result = call_rolling_function(resR, func)
-        except Exception as e1:
-            assert type(e0) == type(e1)
-            return
-        assert False, 'pandas failed, but we succeeded'
-
-    resR = customRoller(s, forward=True, **kws)
-    result = call_rolling_function(resR, func)
-    success = check_series(result, expected)
-    if success:
-        return
-    print_diff(s, result, expected)
-    assert False
-
-
-@pytest.mark.parametrize("kws", make_num_kws(), ids=lambda x: str(x))
-@pytest.mark.parametrize("func", FUNCTS)
-def test_forward_num(data, kws, func):
-    s = data
-    try:
-        expR = pd.Series(reversed(s), reversed(s.index)).rolling(**kws)
-        expected = call_rolling_function(expR, func)[::-1]
-    except Exception as e0:
-        # pandas failed, so we should also fail
-        try:
-            resR = customRoller(s, forward=True, **kws)
-            result = call_rolling_function(resR, func)
-        except Exception as e1:
-            assert type(e0) == type(e1)
-            return
-        assert False, 'pandas failed, but we succeeded'
-
-    resR = customRoller(s, forward=True, **kws)
-    result = call_rolling_function(resR, func)
-    success = check_series(result, expected)
-    if success:
-        return
-    print_diff(s, result, expected)
-    assert False
-
-
-def dt_center_kws():
-    l = []
-    for window in range(2, 10, 2):
-        for min_periods in range(1, window + 1):
-            l.append(dict(window=window, min_periods=min_periods))
-    return l
-
-
-@pytest.mark.parametrize("kws", dt_center_kws(), ids=lambda x: str(x))
-def test_centering_w_dtindex(kws):
-    print(kws)
-    s = pd.Series(0., index=pd.date_range("2000", periods=10, freq='1H'))
-    s[4:7] = 1
-
-    w = kws.pop('window')
-    mp = kws.pop('min_periods')
-
-    pd_kw = dict(window=w, center=True, min_periods=mp)
-    our_kw = dict(window=f'{w}h', center=True, closed='both', min_periods=mp)
-    expected = s.rolling(**pd_kw).sum()
-    result = customRoller(s, **our_kw).sum()
-    success = check_series(result, expected)
-    if not success:
-        print_diff(s, result, expected)
-        assert False
-
-    w -= 1
-    mp -= 1
-    pd_kw = dict(window=w, center=True, min_periods=mp)
-    our_kw = dict(window=f'{w}h', center=True, closed='neither', min_periods=mp)
-    expected = s.rolling(**pd_kw).sum()
-    result = customRoller(s, **our_kw).sum()
-    success = check_series(result, expected)
-    if not success:
-        print_diff(s, result, expected)
-        assert False
diff --git a/test/run_pytest.py b/test/run_pytest.py
deleted file mode 100644
index 861cb7cc8beb38a2abc79b9f4e07c329fc157be8..0000000000000000000000000000000000000000
--- a/test/run_pytest.py
+++ /dev/null
@@ -1,4 +0,0 @@
-import pytest
-
-if __name__ == "__main__":
-    pytest.main()
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..4265cc3e6c16c09774190fa55d609cd9fe0808e4
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1 @@
+#!/usr/bin/env python
diff --git a/tests/common.py b/tests/common.py
new file mode 100644
index 0000000000000000000000000000000000000000..6ec9788b038076fcca3681a5697a72c7aebffd9f
--- /dev/null
+++ b/tests/common.py
@@ -0,0 +1,97 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import io
+import numpy as np
+import pandas as pd
+import dios
+
+from saqc.constants import *
+from saqc.core import Flags
+from saqc.core.history import History, createHistoryFromData
+
+
+def flagAll(data, field, flags, **kwargs):
+    # NOTE: remember to rename flag -> flag_values
+    flags.copy()
+    flags[:, field] = BAD
+    return data, flags
+
+
+def initData(
+    cols=2, start_date="2017-01-01", end_date="2017-12-31", freq=None, rows=None
+):
+    if rows is None:
+        freq = freq or "1h"
+
+    di = dios.DictOfSeries(itype=dios.DtItype)
+    dates = pd.date_range(start=start_date, end=end_date, freq=freq, periods=rows)
+    dummy = np.arange(len(dates))
+
+    for col in range(1, cols + 1):
+        di[f"var{col}"] = pd.Series(data=dummy * col, index=dates)
+
+    return di
+
+
+def dummyHistory(hist: pd.DataFrame = None, meta: list = None):
+    if hist is None:
+        return History()
+
+    if meta is None:
+        meta = [{}] * len(hist.columns)
+
+    return createHistoryFromData(hist, meta, copy=True)
+
+
+def writeIO(content):
+    f = io.StringIO()
+    f.write(content)
+    f.seek(0)
+    return f
+
+
+def checkDataFlagsInvariants(data, flags, field, identical=True):
+    """
+    Check all invariants that must hold at any point for
+        * field
+        * data
+        * flags
+        * data[field]
+        * flags[field]
+        * data[field].index
+        * flags[field].index
+        * between data and flags
+        * between data[field] and flags[field]
+
+    Parameters
+    ----------
+    data : dios.DictOfSeries
+        data container
+    flags : Flags
+        flags container
+    field : str
+        the field in question
+    identical : bool, default True
+        whether to check the indexes of data and flags for
+        identity (True, default) or just for equality.
+    """
+    assert isinstance(data, dios.DictOfSeries)
+    assert isinstance(flags, Flags)
+
+    # all columns in data are in flags
+    assert data.columns.difference(flags.columns).empty
+
+    # ------------------------------------------------------------------------
+    # below here, we just check on and with field
+    # ------------------------------------------------------------------------
+    assert field in data
+    assert field in flags
+
+    assert flags[field].dtype == float
+
+    # `pd.Index.identical` also checks index attributes like `freq`
+    if identical:
+        assert data[field].index.identical(flags[field].index)
+    else:
+        assert data[field].index.equals(flags[field].index)
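+
+
+# The sketch below is a minimal, illustrative usage of `checkDataFlagsInvariants`
+# (an assumption about how the helper combines with the functional interface used
+# in tests/funcs; it is not referenced by the test suite itself).
+def _exampleInvariantsUsage():  # pragma: no cover
+    from saqc.core import initFlagsLike
+    from saqc.funcs.outliers import flagRange
+
+    data = initData(3)
+    flags = initFlagsLike(data)
+    field = data.columns[0]
+    # run any flagging function, then assert the container invariants
+    data, flags = flagRange(data, field, flags, min=0, max=10, flag=BAD)
+    checkDataFlagsInvariants(data, flags, field, identical=True)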
diff --git a/test/core/__init__.py b/tests/core/__init__.py
similarity index 100%
rename from test/core/__init__.py
rename to tests/core/__init__.py
diff --git a/tests/core/test_core.py b/tests/core/test_core.py
new file mode 100644
index 0000000000000000000000000000000000000000..e64f6b9a5f0317d136f16e9328b2e921f339227d
--- /dev/null
+++ b/tests/core/test_core.py
@@ -0,0 +1,86 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import pytest
+import numpy as np
+import pandas as pd
+
+
+from saqc.constants import *
+from saqc.core import initFlagsLike
+from saqc import SaQC, flagging
+
+from tests.common import initData, flagAll
+
+
+OPTIONAL = [False, True]
+
+
+flagging(masking="field")(flagAll)
+
+
+@pytest.fixture
+def data():
+    return initData(3)
+
+
+@pytest.fixture
+def flags(data, optional):
+    if not optional:
+        return initFlagsLike(data[data.columns[::2]]).toDios()
+
+
+def test_errorHandling(data):
+    @flagging(masking="field")
+    def raisingFunc(data, field, flags, **kwargs):
+        raise TypeError
+
+    var1 = data.columns[0]
+
+    with pytest.raises(TypeError):
+        SaQC(data).raisingFunc(var1).getResult()
+
+
+def test_duplicatedVariable():
+    data = initData(1)
+    var1 = data.columns[0]
+
+    pdata, pflags = SaQC(data).flagDummy(var1).getResult()
+
+    if isinstance(pflags.columns, pd.MultiIndex):
+        cols = pflags.columns.get_level_values(0).drop_duplicates()
+        assert np.all(cols == [var1])
+    else:
+        assert (pflags.columns == [var1]).all()
+
+
+def test_sourceTarget():
+    """
+    test implicit assignments
+    """
+    data = initData(1)
+    var1 = data.columns[0]
+    target = "new"
+
+    pdata, pflags = SaQC(data).flagAll(field=var1, target=target).getResult(raw=True)
+
+    assert (pdata[var1] == pdata[target]).all(axis=None)
+    assert all(pflags[var1] == UNFLAGGED)
+    assert all(pflags[target] > UNFLAGGED)
+
+
+@pytest.mark.parametrize("optional", OPTIONAL)
+def test_dtypes(data, flags):
+    """
+    Test if the categorical dtype is preserved through the core functionality
+    """
+    flags = initFlagsLike(data)
+    flags_raw = flags.toDios()
+    var1, var2 = data.columns[:2]
+
+    pdata, pflags = (
+        SaQC(data, flags=flags_raw).flagAll(var1).flagAll(var2).getResult(raw=True)
+    )
+
+    for c in pflags.columns:
+        assert pflags[c].dtype == flags[c].dtype
diff --git a/tests/core/test_creation.py b/tests/core/test_creation.py
new file mode 100644
index 0000000000000000000000000000000000000000..9d2badf6df87fc51a340688514e714203db87f86
--- /dev/null
+++ b/tests/core/test_creation.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+import pandas as pd
+import numpy as np
+import dios
+
+
+def test_init():
+    from saqc import SaQC, Flags
+
+    arr = np.array(
+        [
+            [0, 1, 2],
+            [0, 1, 3],
+        ]
+    )
+    data = pd.DataFrame(arr, columns=list("abc"))
+    qc = SaQC(data)
+
+    assert isinstance(qc, SaQC)
+    assert isinstance(qc._flags, Flags)
+    assert isinstance(qc._data, dios.DictOfSeries)
diff --git a/tests/core/test_flags.py b/tests/core/test_flags.py
new file mode 100644
index 0000000000000000000000000000000000000000..7bc08c00740d3577b8bd4f3f1ed90948b4cb3f5a
--- /dev/null
+++ b/tests/core/test_flags.py
@@ -0,0 +1,263 @@
+#!/usr/bin/env python
+from typing import Dict, Union
+import dios
+import pytest
+import numpy as np
+import pandas as pd
+
+from saqc.constants import *
+from saqc.core.flags import Flags
+
+from tests.core.test_history import (
+    History,
+    is_equal as hist_equal,
+)
+
+_data = [
+    np.array([[]]),
+    np.zeros((1, 1)),
+    np.zeros((3, 4)),
+    np.ones((3, 4)),
+    np.ones((3, 4)) * np.nan,
+    np.array(
+        [
+            [0, 0, 0, 0],
+            [0, 1, 2, 3],
+            [0, 1, 2, 3],
+        ]
+    ),
+    np.array(
+        [
+            [0, 0, 0, 0],
+            [0, 1, np.nan, 3],
+            [0, 1, 2, 3],
+        ]
+    ),
+]
+
+data = []
+for d in _data:
+    columns = list("abcdefgh")[: d.shape[1]]
+    df = pd.DataFrame(d, dtype=float, columns=columns)
+    dis = dios.DictOfSeries(df)
+    di = {}
+    di.update(df.items())
+    data.append(df)
+    data.append(di)
+    data.append(dis)
+
+
+@pytest.mark.parametrize("data", data)
+def test_init(data: Union[pd.DataFrame, dios.DictOfSeries, Dict[str, pd.Series]]):
+    flags = Flags(data)
+    assert isinstance(flags, Flags)
+    assert len(data.keys()) == len(flags)
+
+
+def is_equal(f1, f2):
+    assert f1.columns.equals(f2.columns)
+    for c in f1.columns:
+        assert hist_equal(f1.history[c], f2.history[c])
+
+
+@pytest.mark.parametrize("data", data)
+def test_copy(data: Union[pd.DataFrame, dios.DictOfSeries, Dict[str, pd.Series]]):
+    flags = Flags(data)
+    shallow = flags.copy(deep=False)
+    deep = flags.copy(deep=True)
+
+    # checks
+
+    for copy in [deep, shallow]:
+        assert isinstance(copy, Flags)
+        assert copy is not flags
+        assert copy._data is not flags._data
+        is_equal(copy, flags)
+
+    assert deep is not shallow
+    is_equal(deep, shallow)
+
+    for c in shallow.columns:
+        assert shallow._data[c] is flags._data[c]
+
+    for c in deep.columns:
+        assert deep._data[c] is not flags._data[c]
+
+
+@pytest.mark.parametrize("data", data)
+def test_flags_history(
+    data: Union[pd.DataFrame, dios.DictOfSeries, Dict[str, pd.Series]]
+):
+    flags = Flags(data)
+
+    # get
+    for c in flags.columns:
+        hist = flags.history[c]
+        assert isinstance(hist, History)
+        assert len(hist) > 0
+
+    # set
+    for c in flags.columns:
+        hist = flags.history[c]
+        hlen = len(hist)
+        hist.append(pd.Series(888.0, index=hist.index, dtype=float))
+        flags.history[c] = hist
+        assert isinstance(hist, History)
+        assert len(hist) == hlen + 1
+
+
+@pytest.mark.parametrize("data", data)
+def test_get_flags(data: Union[pd.DataFrame, dios.DictOfSeries, Dict[str, pd.Series]]):
+    flags = Flags(data)
+
+    for c in flags.columns:
+        # check obvious
+        var = flags[c]
+        assert isinstance(var, pd.Series)
+        assert not var.empty
+        assert var.equals(flags._data[c].max())
+
+        # always a copy
+        assert var is not flags[c]
+
+        # in particular, a deep copy
+        var[:] = 9999.0
+        assert all(flags[c] != var)
+
+
+@pytest.mark.parametrize("data", data)
+def test_set_flags(data: Union[pd.DataFrame, dios.DictOfSeries, Dict[str, pd.Series]]):
+    flags = Flags(data)
+
+    for c in flags.columns:
+        var = flags[c]
+        hlen = len(flags.history[c])
+        new = pd.Series(9999.0, index=var.index, dtype=float)
+
+        flags[c] = new
+        assert len(flags.history[c]) == hlen + 1
+        assert all(flags.history[c].max() == 9999.0)
+        assert all(flags.history[c].max() == flags[c])
+
+        # check if deep-copied correctly
+        new[:] = 8888.0
+        assert all(flags.history[c].max() == 9999.0)
+
+        # flags always overwrite former
+        flags[c] = new
+        assert len(flags.history[c]) == hlen + 2
+        assert all(flags.history[c].max() == 8888.0)
+        assert all(flags.history[c].max() == flags[c])
+
+        # check if deep-copied correctly
+        new[:] = 7777.0
+        assert all(flags.history[c].max() == 8888.0)
+
+
+@pytest.mark.parametrize("data", data)
+def test_set_flags_with_mask(
+    data: Union[pd.DataFrame, dios.DictOfSeries, Dict[str, pd.Series]]
+):
+    flags = Flags(data)
+
+    for c in flags.columns:
+        var = flags[c]
+        mask = var == UNFLAGGED
+
+        scalar = 222.0
+        flags[mask, c] = scalar
+        assert all(flags[c].loc[mask] == 222.0)
+        assert all(flags[c].loc[~mask] != 222.0)
+
+        # a scalar without a mask is not allowed, because
+        # it holds too much potential to set the whole
+        # column unintentionally.
+        with pytest.raises(ValueError):
+            flags[c] = 888.0
+
+        vector = var.copy()
+        vector[:] = 333.0
+        flags[mask, c] = vector
+        assert all(flags[c].loc[mask] == 333.0)
+        assert all(flags[c].loc[~mask] != 333.0)
+
+        # works with anything pandas accepts, e.g. a numpy array
+        vector[:] = 444.0
+        vector = vector.to_numpy()
+        flags[mask, c] = vector
+        assert all(flags[c].loc[mask] == 444.0)
+        assert all(flags[c].loc[~mask] != 444.0)
+
+        # test length mismatch (mask)
+        if len(mask) > 1:
+            wrong_len = mask[:-1]
+            with pytest.raises(ValueError):
+                flags[wrong_len, c] = vector
+
+        # test length mismatch (value)
+        if len(vector) > 1:
+            wrong_len = vector[:-1]
+            with pytest.raises(ValueError):
+                flags[mask, c] = wrong_len
+
+
+@pytest.mark.parametrize("data", data)
+def test_set_flags_with_index(
+    data: Union[pd.DataFrame, dios.DictOfSeries, Dict[str, pd.Series]]
+):
+    flags = Flags(data)
+
+    for c in flags.columns:
+        var = flags[c]
+        mask = var == UNFLAGGED
+        index = mask[mask].index
+
+        scalar = 222.0
+        flags[index, c] = scalar
+        assert all(flags[c].loc[mask] == 222.0)
+        assert all(flags[c].loc[~mask] != 222.0)
+
+        vector = var.copy()
+        vector[:] = 333.0
+        flags[index, c] = vector
+        assert all(flags[c].loc[mask] == 333.0)
+        assert all(flags[c].loc[~mask] != 333.0)
+
+        # works with anything pandas accepts, e.g. a numpy array
+        vector[:] = 444.0
+        vector = vector.to_numpy()
+        flags[index, c] = vector
+        assert all(flags[c].loc[mask] == 444.0)
+        assert all(flags[c].loc[~mask] != 444.0)
+
+        # test length mismatch (value)
+        if len(vector) > 1:
+            wrong_len = vector[:-1]
+            with pytest.raises(ValueError):
+                flags[index, c] = wrong_len
+
+
+def _validate_flags_equals_frame(flags, df):
+    assert df.columns.equals(flags.columns)
+
+    for c in flags.columns:
+        assert df[c].index.equals(flags[c].index)
+        assert df[c].equals(flags[c])  # respects NaNs
+
+
+@pytest.mark.parametrize("data", data)
+def test_to_dios(data: Union[pd.DataFrame, dios.DictOfSeries, Dict[str, pd.Series]]):
+    flags = Flags(data)
+    df = flags.toDios()
+
+    assert isinstance(df, dios.DictOfSeries)
+    _validate_flags_equals_frame(flags, df)
+
+
+@pytest.mark.parametrize("data", data)
+def test_to_frame(data: Union[pd.DataFrame, dios.DictOfSeries, Dict[str, pd.Series]]):
+    flags = Flags(data)
+    df = flags.toFrame()
+
+    assert isinstance(df, pd.DataFrame)
+    _validate_flags_equals_frame(flags, df)
diff --git a/tests/core/test_history.py b/tests/core/test_history.py
new file mode 100644
index 0000000000000000000000000000000000000000..6ced94eb4f24040fcea733acf45bd1f03f5148a4
--- /dev/null
+++ b/tests/core/test_history.py
@@ -0,0 +1,225 @@
+#!/usr/bin/env python
+
+import pytest
+import numpy as np
+import pandas as pd
+
+from saqc.core.history import History, createHistoryFromData
+from tests.common import dummyHistory
+
+# see #GH143 combined backtrack
+# (adjusted to current implementation)
+example1 = (
+    # flags
+    np.array(
+        [
+            [0, np.nan, 50, 99, np.nan],
+            [0, np.nan, 50, np.nan, 25],
+            [0, 99, 99, 99, 25],
+            [0, 99, np.nan, np.nan, 25],
+        ]
+    ),
+    # expected from max()
+    np.array([99, 25, 25, 25]),
+)
+
+# see #GH143
+example2 = (
+    # flags
+    np.array(
+        [
+            [0, 99, np.nan, 0],
+            [0, np.nan, 99, np.nan],
+            [0, np.nan, np.nan, np.nan],
+            [0, np.nan, np.nan, 0],
+        ]
+    ),
+    # expected from max()
+    np.array([0, 99, 0, 0]),
+)
+
+data = [
+    np.array([[]]),
+    np.zeros((1, 1)),
+    np.zeros((3, 4)),
+    np.ones((3, 4)),
+    np.ones((3, 4)) * np.nan,
+    np.array(
+        [
+            [0, 0, 0, 0],
+            [0, 1, 2, 3],
+            [0, 1, 2, 3],
+        ]
+    ),
+    np.array(
+        [
+            [0, 0, 0, 0],
+            [0, 1, np.nan, 3],
+            [0, 1, 2, 3],
+        ]
+    ),
+]
+
+
+def check_invariants(hist):
+    """
+    This can be called for **any** FH.
+    The assertions must hold in any case.
+    """
+    # basics
+    assert isinstance(hist, History)
+    assert isinstance(hist.hist, pd.DataFrame)
+    assert isinstance(hist.meta, list)
+    assert all(
+        [isinstance(dtype, (float, pd.CategoricalDtype)) for dtype in hist.hist.dtypes]
+    )
+    assert all([isinstance(e, dict) for e in hist.meta])
+    assert hist.columns is hist.hist.columns
+    assert hist.index is hist.hist.index
+    assert len(hist) == len(hist.columns) == len(hist.meta)
+
+    # advanced
+    assert hist.columns.equals(pd.Index(range(len(hist))))
+    assert isinstance(hist.max(), pd.Series)
+
+
+def is_equal(hist1: History, hist2: History):
+    """
+    Check if two FH are (considered) equal, namely have equal 'hist'
+    """
+    return hist1.hist.equals(hist2.hist)
+
+
+@pytest.mark.parametrize("data", data + [None])
+def test_init(data: np.array):
+    # init
+    df = pd.DataFrame(data, dtype=float)
+    hist = History(df.index)
+    check_invariants(hist)
+
+
+@pytest.mark.parametrize("data", data + [None])
+def test_createHistory(data: np.array):
+    # init
+    df = pd.DataFrame(data, dtype=float)
+    meta = [{}] * len(df.columns)
+    hist = createHistoryFromData(df, meta)
+
+    check_invariants(hist)
+
+    # the shape checks would fail for data=None
+    if data is not None:
+        assert len(hist.index) == data.shape[0]
+        assert len(hist.columns) == data.shape[1]
+
+
+@pytest.mark.parametrize("data", data + [None])
+def test_copy(data):
+    # init
+    df = pd.DataFrame(data, dtype=float)
+    hist = History(df.index)
+    for _, s in df.items():
+        hist.append(s)
+    shallow = hist.copy(deep=False)
+    deep = hist.copy(deep=True)
+
+    # checks
+
+    for copy in [deep, shallow]:
+        check_invariants(copy)
+        assert copy is not hist
+        assert is_equal(copy, hist)
+
+    assert deep is not shallow
+    assert is_equal(deep, shallow)
+
+    assert deep.hist is not hist.hist
+    assert deep.meta is not hist.meta
+
+    assert shallow.hist is hist.hist
+    assert shallow.meta is hist.meta
+
+
+@pytest.mark.parametrize("data", data + [None])
+def test_reindex_trivial_cases(data):
+    df = pd.DataFrame(data, dtype=float)
+    orig = dummyHistory(hist=df)
+
+    # checks
+    for index in [df.index, pd.Index([])]:
+        hist = orig.copy()
+        ref = hist.reindex(index)
+        assert ref is hist  # check if working inplace
+        check_invariants(hist)
+
+
+@pytest.mark.parametrize("data", data + [None])
+def test_reindex_missing_indices(data):
+    df = pd.DataFrame(data, dtype=float)
+    hist = dummyHistory(hist=df)
+    index = df.index[1:-1]
+    # checks
+    ref = hist.reindex(index)
+    assert ref is hist  # check if working inplace
+    check_invariants(hist)
+
+
+@pytest.mark.parametrize("data", data + [None])
+def test_reindex_extra_indices(data):
+    df = pd.DataFrame(data, dtype=float)
+    hist = dummyHistory(hist=df)
+    index = df.index.append(pd.Index(range(len(df.index), len(df.index) + 5)))
+    # checks
+    ref = hist.reindex(index)
+    assert ref is hist  # check if working inplace
+    check_invariants(hist)
+
+
+@pytest.mark.parametrize(
+    "s, meta",
+    [
+        (pd.Series(0, index=range(6), dtype=float), None),
+        (pd.Series(0, index=range(6), dtype=float), {}),
+        (pd.Series(1, index=range(6), dtype=float), {"foo": "bar"}),
+    ],
+)
+def test_append_with_meta(s, meta):
+    hist = History(s.index)
+    hist.append(s, meta=meta)
+    check_invariants(hist)
+
+    if meta is None:
+        meta = {}
+
+    assert hist.meta[0] is not meta
+    assert hist.meta == [meta]
+
+    hist.append(s, meta=meta)
+    check_invariants(hist)
+    assert hist.meta == [meta, meta]
+
+
+@pytest.fixture(scope="module")
+def __hist():
+    # this module-scoped FH is filled successively by the
+    # parametrized cases of test_append_force below
+    return History(index=pd.Index(range(6)))
+
+
+# each parametrized case appends another row to the
+# module-scoped FH fixture above
+@pytest.mark.parametrize(
+    "s, max_val",
+    [
+        (pd.Series(0, index=range(6), dtype=float), 0),
+        (pd.Series(1, index=range(6), dtype=float), 1),
+        (pd.Series(np.nan, index=range(6), dtype=float), 1),
+        (pd.Series(0, index=range(6), dtype=float), 0),
+    ],
+)
+def test_append_force(__hist, s, max_val):
+    hist = __hist
+    hist.append(s)
+    check_invariants(hist)
+    assert all(hist.max() == max_val)
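+
+
+# A minimal, illustrative sketch of the `max()` semantics that example1/example2
+# above encode (illustration only, not part of the suite): per row, the most
+# recent non-NaN entry wins; it is not an arithmetic maximum.
+def _exampleMaxSemantics():  # pragma: no cover
+    idx = pd.Index(range(3))
+    hist = History(index=idx)
+    hist.append(pd.Series([50.0, 50.0, 50.0], index=idx, dtype=float))
+    hist.append(pd.Series([99.0, np.nan, 25.0], index=idx, dtype=float))
+    # the NaN does not shadow the older flag of the middle row,
+    # while the lower value 25.0 does override the older 50.0
+    assert (hist.max() == pd.Series([99.0, 50.0, 25.0], index=idx)).all()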
diff --git a/test/core/test_reader.py b/tests/core/test_reader.py
similarity index 61%
rename from test/core/test_reader.py
rename to tests/core/test_reader.py
index d3733a64ca4420dc378bb5c63e272d18171df7b7..e13f2976f97876e957c8640ac4e5680543b857e7 100644
--- a/test/core/test_reader.py
+++ b/tests/core/test_reader.py
@@ -1,19 +1,17 @@
 #! /usr/bin/env python
 # -*- coding: utf-8 -*-
 
-from pathlib import Path
-
 import pytest
 import numpy as np
 import pandas as pd
 import dios
+from pathlib import Path
 
-from saqc.core.config import Fields as F
-from test.common import initData, writeIO
+from saqc.core.reader import fromConfig, readFile
+from saqc.core.register import FUNC_MAP, flagging
+from saqc.constants import UNTOUCHED
 
-from saqc.core.core import SaQC
-from saqc.flagger import SimpleFlagger
-from saqc.core.register import FUNC_MAP, register
+from tests.common import initData, writeIO
 
 
 @pytest.fixture
@@ -28,14 +26,18 @@ def test_packagedConfig():
     config_path = path / "config_ci.csv"
     data_path = path / "data.csv"
 
-    data = pd.read_csv(data_path, index_col=0, parse_dates=True,)
-    saqc = SaQC(SimpleFlagger(), dios.DictOfSeries(data)).readConfig(config_path)
-    data, flagger = saqc.getResult()
+    data = pd.read_csv(
+        data_path,
+        index_col=0,
+        parse_dates=True,
+    )
+    saqc = fromConfig(config_path, data)
+    saqc.getResult()
 
 
 def test_variableRegex(data):
 
-    header = f"{F.VARNAME};{F.TEST};{F.PLOT}"
+    header = f"varname;test"
     tests = [
         ("'.*'", data.columns),
         ("'var(1|2)'", [c for c in data.columns if c[-1] in ("1", "2")]),
@@ -46,9 +48,8 @@ def test_variableRegex(data):
 
     for regex, expected in tests:
         fobj = writeIO(header + "\n" + f"{regex} ; flagDummy()")
-        saqc = SaQC(SimpleFlagger(), data).readConfig(fobj)
-        expansion = saqc._expandFields(saqc._to_call[0], data.columns)
-        result = [f.field for f in expansion]
+        saqc = fromConfig(fobj, data=data)
+        result = [field for field, _ in saqc.called]
         assert np.all(result == expected)
 
 
@@ -57,18 +58,18 @@ def test_inlineComments(data):
     adresses issue #3
     """
     config = f"""
-    {F.VARNAME} ; {F.TEST}       ; {F.PLOT}
-    pre2        ; flagDummy() # test ; False # test
+    varname ; test
+    pre2        ; flagDummy() # test
     """
-    saqc = SaQC(SimpleFlagger(), data).readConfig(writeIO(config))
-    func_dump = saqc._to_call[0]
-    assert func_dump.ctrl.plot is False
-    assert func_dump.func == FUNC_MAP["flagDummy"]["func"]
+
+    saqc = fromConfig(writeIO(config), data)
+    _, func = saqc.called[0]
+    assert func[0] == FUNC_MAP["flagDummy"]
 
 
 def test_configReaderLineNumbers(data):
     config = f"""
-    {F.VARNAME} ; {F.TEST}
+    varname ; test
     #temp1      ; flagDummy()
     pre1        ; flagDummy()
     pre2        ; flagDummy()
@@ -78,10 +79,9 @@ def test_configReaderLineNumbers(data):
 
     SM1         ; flagDummy()
     """
-    saqc = SaQC(SimpleFlagger(), data).readConfig(writeIO(config))
-    result = [f.ctrl.lineno for f in saqc._to_call]
-    expected = [3, 4, 5, 9]
-    assert result == expected
+    planned = readFile(writeIO(config))
+    expected = [4, 5, 6, 10]
+    assert (planned.index == expected).all()
 
 
 def test_configFile(data):
@@ -89,7 +89,7 @@ def test_configFile(data):
     # check that the reader accepts different whitespace patterns
 
     config = f"""
-    {F.VARNAME} ; {F.TEST}
+    varname ; test
 
     #temp1      ; flagDummy()
     pre1; flagDummy()
@@ -100,18 +100,19 @@ def test_configFile(data):
 
     SM1;flagDummy()
     """
-    SaQC(SimpleFlagger(), data).readConfig(writeIO(config))
+    fromConfig(writeIO(config), data)
 
 
 def test_configChecks(data):
 
     var1, _, var3, *_ = data.columns
 
-    @register(masking="none")
-    def flagFunc(data, field, flagger, arg, opt_arg=None, **kwargs):
-        return data, flagger
+    @flagging(masking="none")
+    def flagFunc(data, field, flags, arg, opt_arg=None, **kwargs):
+        flags[:, field] = UNTOUCHED
+        return data, flags
 
-    header = f"{F.VARNAME};{F.TEST}"
+    header = f"varname;test"
     tests = [
         (f"{var1};flagFunc(mn=0)", TypeError),  # bad argument name
         (f"{var1};flagFunc()", TypeError),  # not enough arguments
@@ -122,7 +123,7 @@ def test_configChecks(data):
     for test, expected in tests:
         fobj = writeIO(header + "\n" + test)
         with pytest.raises(expected):
-            SaQC(SimpleFlagger(), data).readConfig(fobj).getResult()
+            fromConfig(fobj, data=data).evaluate()
 
 
 def test_supportedArguments(data):
@@ -132,13 +133,14 @@ def test_supportedArguments(data):
 
     # TODO: necessary?
 
-    @register(masking='field')
-    def func(data, field, flagger, kwarg, **kwargs):
-        return data, flagger
+    @flagging(masking="field")
+    def func(data, field, flags, kwarg, **kwargs):
+        flags[:, field] = UNTOUCHED
+        return data, flags
 
     var1 = data.columns[0]
 
-    header = f"{F.VARNAME};{F.TEST}"
+    header = f"varname;test"
     tests = [
         f"{var1};func(kwarg=NAN)",
         f"{var1};func(kwarg='str')",
@@ -151,4 +153,4 @@ def test_supportedArguments(data):
 
     for test in tests:
         fobj = writeIO(header + "\n" + test)
-        SaQC(SimpleFlagger(), data).readConfig(fobj)
+        fromConfig(fobj, data)
diff --git a/tests/core/test_translator.py b/tests/core/test_translator.py
new file mode 100644
index 0000000000000000000000000000000000000000..3e06597e5a676dbc567e6a20470f6cda56968ad3
--- /dev/null
+++ b/tests/core/test_translator.py
@@ -0,0 +1,290 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import json
+from typing import Dict, Union, Sequence
+
+import numpy as np
+import pandas as pd
+
+import pytest
+
+from dios import DictOfSeries
+
+from saqc.constants import UNFLAGGED, BAD, DOUBTFUL
+from saqc.core.translator import (
+    PositionalTranslator,
+    Translator,
+    DmpTranslator,
+)
+from saqc.core.flags import Flags
+from saqc.core.core import SaQC
+
+from tests.common import initData
+
+
+def _genTranslators():
+    for dtype in (str, float, int):
+        flags = {
+            dtype(-2): UNFLAGGED,
+            dtype(-1): BAD,
+            **{dtype(f * 10): float(f) for f in range(10)},
+        }
+        translator = Translator(flags, {v: k for k, v in flags.items()})
+        yield flags, translator
+
+
+def _genFlags(data: Dict[str, Union[Sequence, pd.Series]]) -> Flags:
+
+    flags = Flags()
+    for k, v in data.items():
+        if not isinstance(v, pd.Series):
+            v = pd.Series(
+                v, index=pd.date_range("2012-01-01", freq="1D", periods=len(v))
+            )
+        flags[k] = v
+
+    return flags
+
+
+def test_forwardTranslation():
+    for flags, translator in _genTranslators():
+        for k, expected in flags.items():
+            got = translator(k)
+            assert expected == got or np.isnan([got, expected]).all()
+
+        for k in ["bad", 3.14, max]:
+            with pytest.raises(ValueError):
+                translator(k)
+
+
+def test_backwardTranslation():
+    field = "var1"
+    for _, translator in _genTranslators():
+        keys = tuple(translator._backward.keys())
+        flags = _genFlags({field: np.array(keys)})
+        translated = translator.backward(flags)
+        expected = set(translator._backward.values())
+        assert not (set(translated[field]) - expected)
+
+
+def test_backwardTranslationFail():
+    field = "var1"
+    for _, translator in _genTranslators():
+        keys = tuple(translator._backward.keys())
+        # add a scheme-invalid value to the flags
+        flags = _genFlags({field: np.array(keys + (max(keys) + 1,))})
+        with pytest.raises(ValueError):
+            translator.backward(flags)
+
+
+def test_dmpTranslator():
+
+    translator = DmpTranslator()
+    # generate a bunch of dummy flags
+    keys = np.array(tuple(translator._backward.keys()) * 50)
+    flags = _genFlags({"var1": keys, "var2": keys, "var3": keys})
+    flags[:, "var1"] = BAD
+    flags[:, "var1"] = DOUBTFUL
+    flags[:, "var2"] = BAD
+
+    history1 = flags.history["var1"]
+    history1.meta[1].update({"func": "flagFoo", "keywords": {"cause": "AUTOFLAGGED"}})
+    history1.meta[2].update({"func": "flagBar", "keywords": {"comment": "I did it"}})
+    flags.history["var1"] = history1
+
+    history2 = flags.history["var2"]
+    history2.meta[-1].update(
+        {"func": "flagFoo", "keywords": {"cause": "BELOW_OR_ABOVE_MIN_MAX"}}
+    )
+    flags.history["var2"] = history2
+
+    tflags = translator.backward(flags)
+
+    assert set(tflags.columns.get_level_values(1)) == {
+        "quality_flag",
+        "quality_comment",
+        "quality_cause",
+    }
+
+    assert (tflags.loc[:, ("var1", "quality_flag")] == "DOUBTFUL").all(axis=None)
+    assert (
+        tflags.loc[:, ("var1", "quality_comment")]
+        == '{"test": "flagBar", "comment": "I did it"}'
+    ).all(axis=None)
+
+    assert (tflags.loc[:, ("var1", "quality_cause")] == "OTHER").all(axis=None)
+
+    assert (tflags.loc[:, ("var2", "quality_flag")] == "BAD").all(axis=None)
+    assert (
+        tflags.loc[:, ("var2", "quality_comment")]
+        == '{"test": "flagFoo", "comment": ""}'
+    ).all(axis=None)
+    assert (tflags.loc[:, ("var2", "quality_cause")] == "BELOW_OR_ABOVE_MIN_MAX").all(
+        axis=None
+    )
+
+    assert (
+        tflags.loc[flags["var3"] == BAD, ("var3", "quality_comment")]
+        == '{"test": "unknown", "comment": ""}'
+    ).all(axis=None)
+    assert (tflags.loc[flags["var3"] == BAD, ("var3", "quality_cause")] == "OTHER").all(
+        axis=None
+    )
+    mask = flags["var3"] == UNFLAGGED
+    assert (tflags.loc[mask, ("var3", "quality_cause")] == "").all(axis=None)
+
+
+def test_positionalTranslator():
+    translator = PositionalTranslator()
+    flags = _genFlags({"var1": np.zeros(100), "var2": np.zeros(50)})
+    flags[1::3, "var1"] = BAD
+    flags[1::3, "var1"] = DOUBTFUL
+    flags[2::3, "var1"] = BAD
+
+    tflags = translator.backward(flags)
+    assert (tflags["var2"].replace(-9999, np.nan).dropna() == 90).all(axis=None)
+    assert (tflags["var1"].iloc[1::3] == 90210).all(axis=None)
+    assert (tflags["var1"].iloc[2::3] == 90002).all(axis=None)
+
+
+def test_positionalTranslatorIntegration():
+
+    data = initData(3)
+    col: str = data.columns[0]
+
+    translator = PositionalTranslator()
+    saqc = SaQC(data=data, scheme=translator)
+    saqc = saqc.flagMissing(col).flagRange(col, min=3, max=10, flag=DOUBTFUL)
+    data, flags = saqc.getResult()
+
+    for field in flags.columns:
+        assert flags[field].astype(str).str.match("^9[012]*$").all()
+
+    round_trip = translator.backward(translator.forward(flags))
+
+    assert (flags.values == round_trip.values).all()
+    assert (flags.index == round_trip.index).all()
+    assert (flags.columns == round_trip.columns).all()
+
+
+def test_dmpTranslatorIntegration():
+
+    data = initData(1)
+    col = data.columns[0]
+
+    translator = DmpTranslator()
+    saqc = SaQC(data=data, scheme=translator)
+    saqc = saqc.flagMissing(col).flagRange(col, min=3, max=10)
+    data, flags = saqc.getResult()
+
+    qflags = flags.xs("quality_flag", axis="columns", level=1)
+    qfunc = flags.xs("quality_comment", axis="columns", level=1).applymap(
+        lambda v: json.loads(v)["test"] if v else ""
+    )
+    qcause = flags.xs("quality_cause", axis="columns", level=1)
+
+    assert qflags.isin(translator._forward.keys()).all(axis=None)
+    assert qfunc.isin({"", "flagMissing", "flagRange"}).all(axis=None)
+    assert (qcause[qflags[col] == "BAD"] == "OTHER").all(axis=None)
+
+    round_trip = translator.backward(translator.forward(flags))
+
+    assert round_trip.xs("quality_flag", axis="columns", level=1).equals(qflags)
+
+    assert round_trip.xs("quality_comment", axis="columns", level=1).equals(
+        flags.xs("quality_comment", axis="columns", level=1)
+    )
+
+    assert round_trip.xs("quality_cause", axis="columns", level=1).equals(
+        flags.xs("quality_cause", axis="columns", level=1)
+    )
+
+
+def test_dmpValidCombinations():
+    data = initData(1)
+    col = data.columns[0]
+
+    translator = DmpTranslator()
+    saqc = SaQC(data=data, scheme=translator)
+
+    with pytest.raises(ValueError):
+        saqc.flagRange(col, min=3, max=10, cause="SOMETHING_STUPID").getResult()
+
+    with pytest.raises(ValueError):
+        saqc.flagRange(col, min=3, max=10, cause="").getResult()
+
+
+def _buildupSaQCObjects():
+
+    """
+    Return two evaluated SaQC objects calling the same functions,
+    where the flags from the evaluation of the first object are
+    used as the input flags of the second.
+    """
+    data = initData(3)
+    col = data.columns[0]
+    flags = None
+
+    out = []
+    for _ in range(2):
+        saqc = SaQC(data=data, flags=flags)
+        saqc = saqc.flagRange(field=col, min=5, max=6, to_mask=False).flagRange(
+            col, min=3, max=10, to_mask=False
+        )
+        flags = saqc._flags
+        out.append(saqc)
+    return out
+
+
+def test_translationPreservesFlags():
+
+    saqc1, saqc2 = _buildupSaQCObjects()
+    _, flags1 = saqc1.getResult(raw=True)
+    _, flags2 = saqc2.getResult(raw=True)
+
+    for k in flags2.columns:
+        got = flags2.history[k].hist
+
+        f1hist = flags1.history[k].hist
+        expected = pd.concat([f1hist, f1hist], axis="columns")
+        expected.columns = got.columns
+
+        assert expected.equals(got)
+
+
+def test_multicallsPreserveHistory():
+    saqc1, saqc2 = _buildupSaQCObjects()
+    _, flags1 = saqc1.getResult(raw=True)
+    _, flags2 = saqc2.getResult(raw=True)
+
+    # check that the `History` is duplicated
+    for col in flags2.columns:
+        hist1 = flags1.history[col].hist
+        hist2 = flags2.history[col].hist
+
+        hist21 = hist2.iloc[:, : len(hist1.columns)]
+        hist22 = hist2.iloc[:, len(hist1.columns) :]
+
+        hist21.columns = hist1.columns
+        hist22.columns = hist1.columns
+
+        assert hist1.equals(hist21)
+        assert hist1.equals(hist22)
+        assert hist21.equals(hist22)
+
+
+def test_positionalMulticallsPreserveState():
+
+    saqc1, saqc2 = _buildupSaQCObjects()
+
+    translator = PositionalTranslator()
+    _, flags1 = saqc1.getResult(raw=True)
+    _, flags2 = saqc2.getResult(raw=True)
+    tflags1 = translator.backward(flags1).astype(str)
+    tflags2 = translator.backward(flags2).astype(str)
+
+    for k in flags2.columns:
+        expected = tflags1[k].str.slice(start=1) * 2
+        got = tflags2[k].str.slice(start=1)
+        assert expected.equals(got)
diff --git a/test/funcs/conftest.py b/tests/fixtures.py
similarity index 65%
rename from test/funcs/conftest.py
rename to tests/fixtures.py
index 1fd4685e6c0aca0015b8f2cbcb4cf67be9a4ec75..a14d60f75525f967278ef17549168ed04e1c6790 100644
--- a/test/funcs/conftest.py
+++ b/tests/fixtures.py
@@ -5,6 +5,11 @@ import pandas as pd
 from dios import DictOfSeries
 
 
+# TODO: this is odd
+#  Why not use simple fixtures with descriptive names
+#  that also take parameters, if needed?
+
+
 @pytest.fixture
 def char_dict():
     return {
@@ -16,13 +21,16 @@ def char_dict():
     }
 
 
-
 @pytest.fixture
 def course_1(char_dict):
-    # MONOTONOUSLY ASCENDING/DESCENDING
-    # values , that monotonously ascend towards a peak level, and thereafter do monotonously decrease
-    # the resulting drop/raise per value equals:  (peak_level - initial_level) / (0.5*(periods-2))
-    # periods number better be even!
+    """
+    MONOTONICALLY ASCENDING/DESCENDING
+
+    Values that ascend monotonically towards a peak level and thereafter decrease monotonically.
+    The resulting rise/drop per value equals: (peak_level - initial_level) / (0.5 * (periods - 2)).
+    The number of periods should be even.
+    """
+
     def fix_funk(
         freq="10min",
         periods=10,
@@ -30,9 +38,8 @@ def course_1(char_dict):
         peak_level=10,
         initial_index=pd.Timestamp(2000, 1, 1, 0, 0, 0),
         char_dict=char_dict,
-        name='data'
+        name="data",
     ):
-
         t_index = pd.date_range(initial_index, freq=freq, periods=periods)
         left = np.linspace(initial_level, peak_level, int(np.floor(len(t_index) / 2)))
         right = np.linspace(peak_level, initial_level, int(np.ceil(len(t_index) / 2)))
@@ -40,7 +47,9 @@ def course_1(char_dict):
 
         char_dict["raise"] = s.index[1 : int(np.floor(len(t_index) / 2))]
         char_dict["drop"] = s.index[int(np.floor(len(t_index) / 2) + 1) :]
-        char_dict["peak"] = s.index[int(np.floor(len(t_index) / 2)) - 1 : int(np.floor(len(t_index) / 2)) + 1]
+        char_dict["peak"] = s.index[
+            int(np.floor(len(t_index) / 2)) - 1 : int(np.floor(len(t_index) / 2)) + 1
+        ]
 
         data = DictOfSeries(data=s, columns=[name])
         return data, char_dict
@@ -50,10 +59,14 @@ def course_1(char_dict):
 
 @pytest.fixture
 def course_2(char_dict):
+    """
+    SINGLE_SPIKE
+
+    Values that develop linearly over the whole timeseries, from "initial_level" to "final_level",
+    exhibiting one "anomalous" or "outlierish" value of magnitude "out_val" at position "periods/2".
+    The number of periods should be even.
+    """
     # SINGLE_SPIKE
-    # values , that linearly  develop over the whole timeseries, from "initial_level" to "final_level", exhibiting
-    # one "anomalous" or "outlierish" value of magnitude "out_val" at position "periods/2"
-    # number of periods better be even!
     def fix_funk(
         freq="10min",
         periods=10,
@@ -85,33 +98,43 @@ def course_2(char_dict):
 
 @pytest.fixture
 def course_test(char_dict):
-    # Test function for pattern detection - same as test pattern for first three values, than constant function
-    def fix_funk(freq='1 D',
-                 initial_index=pd.Timestamp(2000, 1, 1, 0, 0, 0), out_val=5, char_dict=char_dict):
+    """
+    Test function for pattern detection
+
+    Same as the test pattern for the first three values, then a constant function.
+    """
 
+    def fix_funk(
+        freq="1 D",
+        initial_index=pd.Timestamp(2000, 1, 1, 0, 0, 0),
+        out_val=5,
+        char_dict=char_dict,
+    ):
         t_index = pd.date_range(initial_index, freq=freq, periods=100)
 
         data = pd.Series(data=0, index=t_index)
         data.iloc[2] = out_val
         data.iloc[3] = out_val
 
-
-        data = DictOfSeries(data=data, columns=['data'])
+        data = DictOfSeries(data=data, columns=["data"])
         return data, char_dict
 
     return fix_funk
 
 
-
 @pytest.fixture
 def course_3(char_dict):
-    # CROWD IN A PIT/CROWD ON A SUMMIT
-    # values , that linearly  develop over the whole timeseries, from "initial_level" to "final_level", exhibiting
-    # a "crowd" of "anomalous" or "outlierish" values of magnitude "out_val".
-    # The "crowd/group" of anomalous values starts at position "periods/2" and continues with an additional amount
-    # of "crowd_size" values, that are each spaced "crowd_spacing" minutes from there predecessors.
-    # number of periods better be even!
-    # chrowd_size * crowd_spacing better be less then freq[minutes].
+    """
+    CROWD IN A PIT/CROWD ON A SUMMIT
+
+    Values that develop linearly over the whole timeseries, from "initial_level" to "final_level",
+    exhibiting a "crowd" of "anomalous" or "outlierish" values of magnitude "out_val".
+    The "crowd/group" of anomalous values starts at position "periods/2" and continues with an additional
+    amount of "crowd_size" values, each spaced "crowd_spacing" minutes from its predecessor.
+    The number of periods should be even.
+    crowd_size * crowd_spacing should be less than freq (in minutes).
+    """
+
     def fix_funk(
         freq="10min",
         periods=10,
@@ -129,13 +152,18 @@ def course_3(char_dict):
         data = pd.Series(data=data, index=t_index)
 
         ind1 = data.index[int(np.floor(periods / 2))]
-        dates = [ind1 + crowd_spacing * pd.Timedelta(f"{k}min") for k in range(1, crowd_size + 1)]
+        dates = [
+            ind1 + crowd_spacing * pd.Timedelta(f"{k}min")
+            for k in range(1, crowd_size + 1)
+        ]
         insertion_index = pd.DatetimeIndex(dates)
 
         data.iloc[int(np.floor(periods / 2))] = out_val
         data = data.append(pd.Series(data=out_val, index=insertion_index))
         data.sort_index(inplace=True)
-        anomaly_index = insertion_index.insert(0, data.index[int(np.floor(periods / 2))])
+        anomaly_index = insertion_index.insert(
+            0, data.index[int(np.floor(periods / 2))]
+        )
 
         if out_val > data.iloc[int(np.floor(periods / 2) - 1)]:
             kind = "raise"
@@ -153,9 +181,13 @@ def course_3(char_dict):
 
 @pytest.fixture
 def course_4(char_dict):
-    # TEETH (ROW OF SPIKES) values , that remain on value level "base_level" and than begin exposing an outlierish or
-    # spikey value of magnitude "out_val" every second timestep, starting at periods/2, with the first spike. number
-    # of periods better be even!
+    """
+    TEETH (ROW OF SPIKES)
+
+    Values that remain on value level "base_level" and then begin exposing an outlierish or
+    spikey value of magnitude "out_val" every second timestep, starting at periods/2 with the first spike.
+    The number of periods should be even.
+    """
 
     def fix_funk(
         freq="10min",
@@ -165,7 +197,6 @@ def course_4(char_dict):
         initial_index=pd.Timestamp(2000, 1, 1, 0, 0, 0),
         char_dict=char_dict,
     ):
-
         t_index = pd.date_range(initial_index, freq=freq, periods=periods)
         data = pd.Series(data=base_level, index=t_index)
         data[int(len(t_index) / 2) :: 2] = out_val
@@ -180,11 +211,14 @@ def course_4(char_dict):
 
 @pytest.fixture
 def course_5(char_dict):
-    # NAN_holes
-    # values , that ascend from initial_level to final_level linearly and have missing data(=nan)
-    # at posiiotns "nan_slice", (=a slice or a list, for iloc indexing)
-    # periods better be even!
-    # periods better be greater 5
+    """
+    NAN_holes
+
+    Values that ascend linearly from initial_level to final_level and have missing data (=nan)
+    at positions "nan_slice" (=a slice or a list, for iloc indexing).
+    The number of periods should be even and greater than 5.
+    """
 
     def fix_funk(
         freq="10min",
@@ -205,5 +239,3 @@ def course_5(char_dict):
         return data, char_dict
 
     return fix_funk
-
-
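+
+
+# Illustrative usage sketch (mirrors tests/funcs/test_functions.py; the names
+# below are examples only): each course fixture returns a factory that yields a
+# DictOfSeries plus a dict of characteristic index positions, e.g.
+#
+#     @pytest.mark.parametrize("dat", [pytest.lazy_fixture("course_1")])
+#     def test_something(dat):
+#         data, characteristics = dat(periods=20, peak_level=5)
+#         assert not characteristics["peak"].empty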
diff --git a/test/funcs/__init__.py b/tests/funcs/__init__.py
similarity index 100%
rename from test/funcs/__init__.py
rename to tests/funcs/__init__.py
diff --git a/tests/funcs/test_constants_detection.py b/tests/funcs/test_constants_detection.py
new file mode 100644
index 0000000000000000000000000000000000000000..302672611e88854a465630512ab7b5aa77e0d76b
--- /dev/null
+++ b/tests/funcs/test_constants_detection.py
@@ -0,0 +1,44 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import pytest
+import numpy as np
+
+from saqc.constants import *
+from saqc.funcs.constants import flagConstants, flagByVariance
+from saqc.core import initFlagsLike, Flags
+
+from tests.common import initData
+
+
+@pytest.fixture
+def data():
+    constants_data = initData(
+        1, start_date="2011-01-01 00:00:00", end_date="2011-01-01 03:00:00", freq="5min"
+    )
+    constants_data.iloc[5:25] = 200
+    return constants_data
+
+
+def test_constants_flagBasic(data):
+    field, *_ = data.columns
+    flags = initFlagsLike(data)
+    data, flags_result = flagConstants(
+        data, field, flags, window="15Min", thresh=0.1, flag=BAD
+    )
+    flagscol = flags_result[field]
+    assert np.all(flagscol[5:25] == BAD)
+    assert np.all(flagscol[:5] == UNFLAGGED)
+    assert np.all(flagscol[25 + 1 :] == UNFLAGGED)
+
+
+def test_constants_flagVarianceBased(data):
+    expected = np.arange(5, 25)
+    field, *_ = data.columns
+    flags = initFlagsLike(data)
+    data, flags_result1 = flagByVariance(data, field, flags, window="1h", flag=BAD)
+
+    flagscol = flags_result1[field]
+    assert np.all(flagscol[5:25] == BAD)
+    assert np.all(flagscol[:5] == UNFLAGGED)
+    assert np.all(flagscol[25 + 1 :] == UNFLAGGED)
diff --git a/tests/funcs/test_functions.py b/tests/funcs/test_functions.py
new file mode 100644
index 0000000000000000000000000000000000000000..7b8885d7434998f7e6b37e87a05f43d3f80ec129
--- /dev/null
+++ b/tests/funcs/test_functions.py
@@ -0,0 +1,319 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import dios
+import pandas as pd
+import numpy as np
+
+from saqc.funcs.noise import flagByStatLowPass
+from saqc.constants import *
+from saqc.core import initFlagsLike
+from saqc.funcs.drift import (
+    flagDriftFromNorm,
+    flagDriftFromReference,
+    flagDriftFromScaledNorm,
+)
+from saqc.funcs.outliers import flagCrossStatistic, flagRange
+from saqc.funcs.flagtools import flagManual, forceFlags, clearFlags
+from saqc.funcs.tools import dropField, copyField, maskTime
+from saqc.funcs.resampling import reindexFlags
+from saqc.funcs.breaks import flagIsolated
+
+from tests.fixtures import *
+from tests.common import initData
+
+
+@pytest.fixture
+def data():
+    return initData(cols=1, start_date="2016-01-01", end_date="2018-12-31", freq="1D")
+
+
+@pytest.fixture
+def field(data):
+    return data.columns[0]
+
+
+def test_statPass():
+    data = pd.Series(0, index=pd.date_range("2000", "2001", freq="1D"), name="data")
+    noise = [-1, 1] * 10
+    data[100:120] = noise
+    data[200:210] = noise[:10]
+    data = dios.DictOfSeries(data)
+    flags = initFlagsLike(data)
+    data, flags = flagByStatLowPass(
+        data, "data", flags, np.std, "20D", 0.999, "5D", 0.999, 0, flag=BAD
+    )
+    assert (flags["data"].iloc[:100] == UNFLAGGED).all()
+    assert (flags["data"].iloc[100:120] == BAD).all()
+    assert (flags["data"].iloc[121:] == UNFLAGGED).all()
+
+
+def test_flagRange(data, field):
+    min, max = 10, 90
+    flags = initFlagsLike(data)
+    data, flags = flagRange(data, field, flags, min=min, max=max, flag=BAD)
+    flagged = flags[field] > UNFLAGGED
+    expected = (data[field] < min) | (data[field] > max)
+    assert all(flagged == expected)
+
+
+def test_flagSeasonalRange(data, field):
+    data.iloc[::2] = 0
+    data.iloc[1::2] = 50
+    nyears = len(data[field].index.year.unique())
+
+    tests = [
+        (
+            {
+                "min": 1,
+                "max": 100,
+                "startmonth": 7,
+                "startday": 1,
+                "endmonth": 8,
+                "endday": 31,
+            },
+            31 * 2 * nyears // 2,
+        ),
+        (
+            {
+                "min": 1,
+                "max": 100,
+                "startmonth": 12,
+                "startday": 16,
+                "endmonth": 1,
+                "endday": 15,
+            },
+            31 * nyears // 2 + 1,
+        ),
+    ]
+
+    for test, expected in tests:
+        flags = initFlagsLike(data)
+        newfield = f"{field}_masked"
+        start = f"{test['startmonth']:02}-{test['startday']:02}T00:00:00"
+        end = f"{test['endmonth']:02}-{test['endday']:02}T00:00:00"
+
+        data, flags = copyField(data, field, flags, field + "_masked")
+        data, flags = maskTime(
+            data,
+            newfield,
+            flags,
+            mode="periodic",
+            start=start,
+            end=end,
+            closed=True,
+            flag=BAD,
+        )
+        data, flags = flagRange(
+            data, newfield, flags, min=test["min"], max=test["max"], flag=BAD
+        )
+        data, flags = reindexFlags(
+            data, field, flags, method="match", source=newfield, flag=BAD
+        )
+        data, flags = dropField(data, newfield, flags)
+        flagged = flags[field] > UNFLAGGED
+        assert flagged.sum() == expected
+
+
+def test_clearFlags(data, field):
+    flags = initFlagsLike(data)
+    flags[:, field] = BAD
+    assert all(flags[field] == BAD)
+
+    _, flags = clearFlags(data, field, flags)
+    assert all(flags[field] == UNFLAGGED)
+
+
+def test_forceFlags(data, field):
+    flags = initFlagsLike(data)
+    flags[:, field] = BAD
+    assert all(flags[field] == BAD)
+
+    _, flags = forceFlags(data, field, flags, flag=DOUBT)
+    assert all(flags[field] == DOUBT)
+
+
+def test_flagIsolated(data, field):
+    flags = initFlagsLike(data)
+
+    data.iloc[1:3, 0] = np.nan
+    data.iloc[4:5, 0] = np.nan
+    flags[data[field].index[5:6], field] = BAD
+    data.iloc[11:13, 0] = np.nan
+    data.iloc[15:17, 0] = np.nan
+
+    #              data  flags
+    # 2016-01-01   0.0   -inf
+    # 2016-01-02   NaN   -inf
+    # 2016-01-03   NaN   -inf
+    # 2016-01-04   3.0   -inf
+    # 2016-01-05   NaN   -inf
+    # 2016-01-06   5.0  255.0
+    # 2016-01-07   6.0   -inf
+    # 2016-01-08   7.0   -inf
+    #         ..    ..     ..
+
+    _, flags_result = flagIsolated(
+        data, field, flags, group_window="1D", gap_window="2.1D", flag=BAD
+    )
+
+    assert flags_result[field].iloc[[3, 5]].all()
+
+    data, flags_result = flagIsolated(
+        data,
+        field,
+        flags_result,
+        group_window="2D",
+        gap_window="2.1D",
+        continuation_range="1.1D",
+        flag=BAD,
+    )
+    assert flags_result[field].iloc[[3, 5, 13, 14]].all()
+
+
+@pytest.mark.parametrize("dat", [pytest.lazy_fixture("course_2")])
+def test_flagCrossScoring(dat):
+    data1, characteristics = dat(initial_level=0, final_level=0, out_val=0)
+    data2, characteristics = dat(initial_level=0, final_level=0, out_val=10)
+    field = "dummy"
+    fields = ["data1", "data2"]
+    s1, s2 = data1.squeeze(), data2.squeeze()
+    s1 = pd.Series(data=s1.values, index=s1.index)
+    s2 = pd.Series(data=s2.values, index=s1.index)
+    data = dios.DictOfSeries([s1, s2], columns=["data1", "data2"])
+    flags = initFlagsLike(data)
+    _, flags_result = flagCrossStatistic(
+        data, field, flags, fields=fields, thresh=3, method=np.mean, flag=BAD
+    )
+    for field in fields:
+        isflagged = flags_result[field] > UNFLAGGED
+        assert isflagged[characteristics["raise"]].all()
+
+
+def test_flagManual(data, field):
+    flags = initFlagsLike(data)
+    args = data, field, flags
+    dat = data[field]
+
+    mdata = pd.Series("lala", index=dat.index)
+    index_exp = mdata.iloc[[10, 33, 200, 500]].index
+    mdata.iloc[[101, 133, 220, 506]] = "b"
+    mdata.loc[index_exp] = "a"
+    shrinked = mdata.loc[index_exp.union(mdata.iloc[[1, 2, 3, 4, 600, 601]].index)]
+
+    kwargs_list = [
+        dict(mdata=mdata, mflag="a", method="plain", flag=BAD),
+        dict(mdata=mdata.to_list(), mflag="a", method="plain", flag=BAD),
+        dict(mdata=mdata, mflag="a", method="ontime", flag=BAD),
+        dict(mdata=shrinked, mflag="a", method="ontime", flag=BAD),
+    ]
+
+    for kw in kwargs_list:
+        _, fl = flagManual(*args, **kw)
+        isflagged = fl[field] > UNFLAGGED
+        assert isflagged[isflagged].index.equals(index_exp)
+
+    # flag does not exist in mdata
+    _, fl = flagManual(
+        *args, mdata=mdata, mflag="i do not exist", method="ontime", flag=BAD
+    )
+    isflagged = fl[field] > UNFLAGGED
+    assert isflagged[isflagged].index.equals(pd.DatetimeIndex([]))
+
+    # check right-open / ffill
+    index = pd.date_range(start="2016-01-01", end="2018-12-31", periods=11)
+    mdata = pd.Series(0, index=index)
+    mdata.loc[index[[1, 5, 6, 7, 9, 10]]] = 1
+    # >>> mdata
+    # 2016-01-01 00:00:00    0
+    # 2016-04-19 12:00:00    1
+    # 2016-08-07 00:00:00    0
+    # 2016-11-24 12:00:00    0
+    # 2017-03-14 00:00:00    0
+    # 2017-07-01 12:00:00    1
+    # 2017-10-19 00:00:00    1
+    # 2018-02-05 12:00:00    1
+    # 2018-05-26 00:00:00    0
+    # 2018-09-12 12:00:00    1
+    # 2018-12-31 00:00:00    1
+    # dtype: int64
+
+    # add first and last index from data
+    expected = mdata.copy()
+    expected.loc[dat.index[0]] = 0
+    expected.loc[dat.index[-1]] = 1
+    expected = expected.astype(bool)
+
+    _, fl = flagManual(*args, mdata=mdata, mflag=1, method="right-open", flag=BAD)
+    isflagged = fl[field] > UNFLAGGED
+    last = expected.index[0]
+
+    for curr in expected.index[1:]:
+        expected_value = mdata[last]
+        # datetime slicing is inclusive !
+        i = isflagged[last:curr].index[:-1]
+        chunk = isflagged.loc[i]
+        assert (chunk == expected_value).all()
+        last = curr
+    # check last value
+    assert isflagged[curr] == expected[curr]
+
+    # check left-open / bfill
+    expected.loc[dat.index[-1]] = 0  # this time the last is False
+    _, fl = flagManual(*args, mdata=mdata, mflag=1, method="left-open", flag=BAD)
+    isflagged = fl[field] > UNFLAGGED
+    last = expected.index[0]
+    assert isflagged[last] == expected[last]
+
+    for curr in expected.index[1:]:
+        expected_value = mdata[curr]
+        # datetime slicing is inclusive !
+        i = isflagged[last:curr].index[1:]
+        chunk = isflagged.loc[i]
+        assert (chunk == expected_value).all()
+        last = curr
+
+
+@pytest.mark.parametrize("dat", [pytest.lazy_fixture("course_1")])
+def test_flagDriftFromNormal(dat):
+    data = dat(periods=200, peak_level=5, name="d1")[0]
+    data["d2"] = dat(periods=200, peak_level=10, name="d2")[0]["d2"]
+    data["d3"] = dat(periods=200, peak_level=100, name="d3")[0]["d3"]
+    data["d4"] = 3 + 4 * data["d1"]
+    data["d5"] = 3 + 4 * data["d1"]
+
+    flags = initFlagsLike(data)
+    data_norm, flags_norm = flagDriftFromNorm(
+        data,
+        "dummy",
+        flags,
+        ["d1", "d2", "d3"],
+        freq="200min",
+        spread=5,
+        flag=BAD,
+    )
+
+    data_ref, flags_ref = flagDriftFromReference(
+        data,
+        "d1",
+        flags,
+        ["d1", "d2", "d3"],
+        freq="3D",
+        thresh=20,
+        flag=BAD,
+    )
+
+    data_scale, flags_scale = flagDriftFromScaledNorm(
+        data,
+        "dummy",
+        flags,
+        ["d1", "d3"],
+        ["d4", "d5"],
+        freq="3D",
+        thresh=20,
+        spread=5,
+        flag=BAD,
+    )
+    assert all(flags_norm["d3"] > UNFLAGGED)
+    assert all(flags_ref["d3"] > UNFLAGGED)
+    assert all(flags_scale["d3"] > UNFLAGGED)
diff --git a/tests/funcs/test_generic_api_functions.py b/tests/funcs/test_generic_api_functions.py
new file mode 100644
index 0000000000000000000000000000000000000000..360418cbb4254c206784746577a4a23550b218db
--- /dev/null
+++ b/tests/funcs/test_generic_api_functions.py
@@ -0,0 +1,42 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import pytest
+import pandas as pd
+
+from saqc.constants import *
+from saqc.core.register import flagging
+from saqc.funcs.tools import maskTime
+from saqc import SaQC
+
+from tests.common import initData, flagAll
+
+
+flagging(masking="field")(flagAll)
+
+
+@pytest.fixture
+def data():
+    return initData()
+
+
+def test_addFieldFlagGeneric(data):
+    saqc = SaQC(data=data)
+
+    func = lambda var1: pd.Series(False, index=data[var1.name].index)
+    data, flags = saqc.genericFlag("tmp1", func, flag=BAD).getResult()
+    assert "tmp1" in flags.columns and "tmp1" not in data
+
+
+def test_addFieldProcGeneric(data):
+    saqc = SaQC(data=data)
+
+    func = lambda: pd.Series([])
+    data, flags = saqc.genericProcess("tmp1", func, flag=BAD).getResult(raw=True)
+    assert "tmp1" in data.columns and data["tmp1"].empty
+
+    func = lambda var1, var2: var1 + var2
+    data, flags = saqc.genericProcess("tmp2", func, flag=BAD).getResult()
+    assert "tmp2" in data.columns and (data["tmp2"] == data["var1"] + data["var2"]).all(
+        axis=None
+    )
diff --git a/tests/funcs/test_generic_config_functions.py b/tests/funcs/test_generic_config_functions.py
new file mode 100644
index 0000000000000000000000000000000000000000..2046ff8e9b02114e1390eb658323068a78ebcba8
--- /dev/null
+++ b/tests/funcs/test_generic_config_functions.py
@@ -0,0 +1,326 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import ast
+import pytest
+import numpy as np
+import pandas as pd
+import dios
+
+from saqc.constants import *
+from saqc.core import initFlagsLike, Flags
+from saqc.core.visitor import ConfigFunctionParser
+from saqc.core.register import flagging
+from saqc.funcs.generic import _execGeneric
+from saqc.core.reader import fromConfig
+from saqc import SaQC
+
+from tests.common import initData, writeIO
+
+
+@pytest.fixture
+def data():
+    return initData()
+
+
+@pytest.fixture
+def data_diff():
+    data = initData(cols=3)
+    col0 = data[data.columns[0]]
+    col1 = data[data.columns[1]]
+    mid = len(col0) // 2
+    offset = len(col0) // 8
+    return dios.DictOfSeries(
+        data={
+            col0.name: col0.iloc[: mid + offset],
+            col1.name: col1.iloc[mid - offset :],
+        }
+    )
+
+
+def _compileGeneric(expr, flags):
+    tree = ast.parse(expr, mode="eval")
+    _, kwargs = ConfigFunctionParser().parse(tree.body)
+    return kwargs["func"]
+
+
+def test_missingIdentifier(data):
+    flags = Flags()
+
+    # NOTE:
+    # - the error is only raised at runtime; raising it already during parsing would be better
+    tests = [
+        "fff(var2) < 5",
+        "var3 != 42",
+    ]
+
+    for test in tests:
+        func = _compileGeneric(f"genericFlag(func={test})", flags)
+        with pytest.raises(NameError):
+            _execGeneric(flags, data, func, field="")
+
+
+def test_syntaxError():
+    flags = Flags()
+    tests = [
+        "range(x=5",
+        "rangex=5)",
+        "range[x=5]" "range{x=5}" "int->float(x=4)" "int*float(x=4)",
+    ]
+
+    for test in tests:
+        with pytest.raises(SyntaxError):
+            _compileGeneric(f"flag(func={test})", flags)
+
+
+def test_typeError():
+    """
+    test that forbidden constructs actually throw an error
+    TODO: find a few more cases or get rid of the test
+    """
+    flags = Flags()
+
+    # TODO: think about cases that should be forbidden
+    tests = ("lambda x: x * 2",)
+
+    for test in tests:
+        with pytest.raises(TypeError):
+            _compileGeneric(f"genericFlag(func={test})", flags)
+
+
+def test_comparisonOperators(data):
+    flags = initFlagsLike(data)
+    var1, var2, *_ = data.columns
+    this = var1
+
+    tests = [
+        ("this > 100", data[this] > 100),
+        (f"10 >= {var2}", 10 >= data[var2]),
+        (f"{var2} < 100", data[var2] < 100),
+        (f"this <= {var2}", data[this] <= data[var2]),
+        (f"{var1} == {var2}", data[this] == data[var2]),
+        (f"{var1} != {var2}", data[this] != data[var2]),
+    ]
+
+    for test, expected in tests:
+        func = _compileGeneric(f"genericFlag(func={test})", flags)
+        result = _execGeneric(flags, data, func, field=var1)
+        assert np.all(result == expected)
+
+
+def test_arithmeticOperators(data):
+    flags = initFlagsLike(data)
+    var1, *_ = data.columns
+    this = data[var1]
+
+    tests = [
+        ("var1 + 100 > 110", this + 100 > 110),
+        ("var1 - 100 > 0", this - 100 > 0),
+        ("var1 * 100 > 200", this * 100 > 200),
+        ("var1 / 100 > .1", this / 100 > 0.1),
+        ("var1 % 2 == 1", this % 2 == 1),
+        ("var1 ** 2 == 0", this ** 2 == 0),
+    ]
+
+    for test, expected in tests:
+        func = _compileGeneric(f"genericProcess(func={test})", flags)
+        result = _execGeneric(flags, data, func, field=var1)
+        assert np.all(result == expected)
+
+
+def test_nonReducingBuiltins(data):
+    flags = initFlagsLike(data)
+    var1, *_ = data.columns
+    this = var1
+
+    tests = [
+        (f"abs({this})", np.abs(data[this])),
+        (f"log({this})", np.log(data[this])),
+        (f"exp({this})", np.exp(data[this])),
+    ]
+
+    for test, expected in tests:
+        func = _compileGeneric(f"genericProcess(func={test})", flags)
+        result = _execGeneric(flags, data, func, field=this)
+        assert (result == expected).all()
+
+
+def test_reducingBuiltins(data):
+    data.loc[::4] = np.nan
+    flags = initFlagsLike(data)
+    var1 = data.columns[0]
+    this = data.iloc[:, 0]
+
+    tests = [
+        ("min(this)", np.nanmin(this)),
+        (f"max({var1})", np.nanmax(this)),
+        (f"sum({var1})", np.nansum(this)),
+        ("mean(this)", np.nanmean(this)),
+        (f"std({this.name})", np.std(this)),
+        (f"len({this.name})", len(this)),
+    ]
+
+    for test, expected in tests:
+        func = _compileGeneric(f"genericProcess(func={test})", flags)
+        result = _execGeneric(flags, data, func, field=this.name)
+        assert result == expected
+
+
+def test_ismissing(data):
+
+    flags = initFlagsLike(data)
+    data.iloc[: len(data) // 2, 0] = np.nan
+    data.iloc[(len(data) // 2) + 1 :, 0] = -9999
+    this = data.iloc[:, 0]
+
+    tests = [
+        (f"ismissing({this.name})", pd.isnull(this)),
+        (f"~ismissing({this.name})", pd.notnull(this)),
+    ]
+
+    for test, expected in tests:
+        func = _compileGeneric(f"genericFlag(func={test})", flags)
+        result = _execGeneric(flags, data, func, this.name)
+        assert np.all(result == expected)
+
+
+def test_bitOps(data):
+    var1, var2, *_ = data.columns
+    this = var1
+
+    flags = initFlagsLike(data)
+
+    tests = [
+        ("~(this > mean(this))", ~(data[this] > np.nanmean(data[this]))),
+        (f"(this <= 0) | (0 < {var1})", (data[this] <= 0) | (0 < data[var1])),
+        (f"({var2} >= 0) & (0 > this)", (data[var2] >= 0) & (0 > data[this])),
+    ]
+
+    for test, expected in tests:
+        func = _compileGeneric(f"genericFlag(func={test})", flags)
+        result = _execGeneric(flags, data, func, this)
+        assert np.all(result == expected)
+
+
+def test_isflagged(data):
+
+    var1, var2, *_ = data.columns
+    flags = initFlagsLike(data)
+    flags[data[var1].index[::2], var1] = BAD
+
+    tests = [
+        (f"isflagged({var1})", flags[var1] > UNFLAGGED),
+        (f"isflagged({var1}, flag=BAD)", flags[var1] >= BAD),
+        (f"isflagged({var1}, UNFLAGGED, '==')", flags[var1] == UNFLAGGED),
+        (f"~isflagged({var2})", flags[var2] == UNFLAGGED),
+        (
+            f"~({var2}>999) & (~isflagged({var2}))",
+            ~(data[var2] > 999) & (flags[var2] == UNFLAGGED),
+        ),
+    ]
+
+    for i, (test, expected) in enumerate(tests):
+        try:
+            func = _compileGeneric(f"genericFlag(func={test}, flag=BAD)", flags)
+            result = _execGeneric(flags, data, func, field=None)
+            assert np.all(result == expected)
+        except Exception:
+            print(i, test)
+            raise
+
+    # test bad combination
+    for comp in [">", ">=", "==", "!=", "<", "<="]:
+        fails = f"isflagged({var1}, comparator='{comp}')"
+
+        func = _compileGeneric(f"genericFlag(func={fails}, flag=BAD)", flags)
+        with pytest.raises(ValueError):
+            _execGeneric(flags, data, func, field=None)
+
+
+def test_variableAssignments(data):
+    var1, var2, *_ = data.columns
+
+    config = f"""
+    varname ; test
+    dummy1  ; genericProcess(func=var1 + var2)
+    dummy2  ; genericFlag(func=var1 + var2 > 0)
+    """
+
+    fobj = writeIO(config)
+    saqc = fromConfig(fobj, data)
+    result_data, result_flags = saqc.getResult(raw=True)
+
+    assert set(result_data.columns) == set(data.columns) | {
+        "dummy1",
+    }
+    assert set(result_flags.columns) == set(data.columns) | {"dummy1", "dummy2"}
+
+
+def test_processMultiple(data_diff):
+    config = f"""
+    varname ; test
+    dummy   ; genericProcess(func=var1 + 1)
+    dummy   ; genericProcess(func=var2 - 1)
+    """
+
+    fobj = writeIO(config)
+    saqc = fromConfig(fobj, data_diff)
+    result_data, result_flags = saqc.getResult()
+    assert len(result_data["dummy"]) == len(result_flags["dummy"])
+
+
+def test_callableArgumentsUnary(data):
+
+    window = 5
+
+    @flagging(masking="field")
+    def testFuncUnary(data, field, flags, func, **kwargs):
+        data[field] = data[field].rolling(window=window).apply(func)
+        return data, initFlagsLike(data)
+
+    var = data.columns[0]
+
+    config = f"""
+    varname ; test
+    {var}   ; testFuncUnary(func={{0}})
+    """
+
+    tests = [
+        ("sum", np.nansum),
+        ("std(exp(x))", lambda x: np.std(np.exp(x))),
+    ]
+
+    for (name, func) in tests:
+        fobj = writeIO(config.format(name))
+        result_config, _ = fromConfig(fobj, data).getResult()
+        result_api, _ = SaQC(data).testFuncUnary(var, func=func).getResult()
+        expected = data[var].rolling(window=window).apply(func)
+        assert (result_config[var].dropna() == expected.dropna()).all(axis=None)
+        assert (result_api[var].dropna() == expected.dropna()).all(axis=None)
+
+
+def test_callableArgumentsBinary(data):
+    var1, var2 = data.columns[:2]
+
+    @flagging(masking="field")
+    def testFuncBinary(data, field, flags, func, **kwargs):
+        data[field] = func(data[var1], data[var2])
+        return data, initFlagsLike(data)
+
+    config = f"""
+    varname ; test
+    {var1}  ; testFuncBinary(func={{0}})
+    """
+
+    tests = [
+        ("x + y", lambda x, y: x + y),
+        ("y - (x * 2)", lambda y, x: y - (x * 2)),
+    ]
+
+    for (name, func) in tests:
+        fobj = writeIO(config.format(name))
+        result_config, _ = fromConfig(fobj, data).getResult()
+        result_api, _ = SaQC(data).testFuncBinary(var1, func=func).getResult()
+        expected = func(data[var1], data[var2])
+        assert (result_config[var1].dropna() == expected.dropna()).all(axis=None)
+        assert (result_api[var1].dropna() == expected.dropna()).all(axis=None)
diff --git a/tests/funcs/test_harm_funcs.py b/tests/funcs/test_harm_funcs.py
new file mode 100644
index 0000000000000000000000000000000000000000..d645e1bd9478321921afc4ab58ba7660510137f4
--- /dev/null
+++ b/tests/funcs/test_harm_funcs.py
@@ -0,0 +1,334 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import pytest
+import numpy as np
+import pandas as pd
+import dios
+
+from saqc.core import initFlagsLike, Flags
+from saqc.constants import BAD, UNFLAGGED
+from saqc.funcs.resampling import linear, interpolate, shift, reindexFlags, resample
+from saqc.funcs.tools import copyField, dropField
+from tests.common import checkDataFlagsInvariants
+
+
+@pytest.fixture
+def data():
+    index = pd.date_range(
+        start="1.1.2011 00:00:00", end="1.1.2011 01:00:00", freq="15min"
+    )
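+    # distort the regular 15min grid: insert off-grid timestamps and drop one grid point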
+    index = index.insert(2, pd.Timestamp(2011, 1, 1, 0, 29, 0))
+    index = index.insert(2, pd.Timestamp(2011, 1, 1, 0, 28, 0))
+    index = index.insert(5, pd.Timestamp(2011, 1, 1, 0, 32, 0))
+    index = index.insert(5, pd.Timestamp(2011, 1, 1, 0, 31, 0))
+    index = index.insert(0, pd.Timestamp(2010, 12, 31, 23, 57, 0))
+    index = index.drop(pd.Timestamp("2011-01-01 00:30:00"))
+    dat = pd.Series(np.linspace(-50, 50, index.size), index=index, name="data")
+    # good to have some nan
+    dat[-3] = np.nan
+    data = dios.DictOfSeries(dat)
+    return data
+
+
+@pytest.mark.parametrize(
+    "func, kws",
+    [
+        ("linear", dict()),
+        ("shift", dict(method="nshift")),
+        ("interpolate", dict(method="spline")),
+        ("resample", dict(agg_func=np.nansum, method="nagg")),
+    ],
+)
+def test_wrapper(data, func, kws):
+    field = "data"
+    freq = "15T"
+    flags = initFlagsLike(data)
+
+    import saqc
+
+    func = getattr(saqc.funcs, func)
+    data, flags = func(data, field, flags, freq, **kws)
+
+    # check minimal requirements
+    checkDataFlagsInvariants(data, flags, field)
+    assert data[field].index.inferred_freq == freq
+
+
+@pytest.mark.parametrize("method", ["time", "polynomial"])
+def test_gridInterpolation(data, method):
+    freq = "15T"
+    field = "data"
+    data = data[field]
+    data = (data * np.sin(data)).append(data.shift(1, "2h")).shift(1, "3s")
+    data = dios.DictOfSeries(data)
+    flags = initFlagsLike(data)
+
+    # we are just testing if the interpolation gets passed to the series without
+    # causing an error:
+    res = interpolate(
+        data, field, flags, freq, method=method, downcast_interpolation=True
+    )
+
+    if method == "polynomial":
+        res = interpolate(
+            data,
+            field,
+            flags,
+            freq,
+            order=2,
+            method=method,
+            downcast_interpolation=True,
+        )
+        res = interpolate(
+            data,
+            field,
+            flags,
+            freq,
+            order=10,
+            method=method,
+            downcast_interpolation=True,
+        )
+
+    # check minimal requirements
+    rdata, rflags = res
+    checkDataFlagsInvariants(rdata, rflags, field, identical=False)
+    assert rdata[field].index.inferred_freq == freq
+
+
+@pytest.mark.parametrize(
+    "func, kws",
+    [
+        ("linear", dict()),
+        ("shift", dict(method="nshift")),
+        ("interpolate", dict(method="spline")),
+        ("aggregate", dict(value_func=np.nansum, method="nagg")),
+    ],
+)
+def test_flagsSurviveReshaping(func, kws):
+    """
+    flagging -> reshaping -> test (flags were also reshaped correctly)
+    """
+    pass
+
+
+def test_flagsSurviveInverseReshaping():
+    """
+    inverse reshaping -> flagging -> test (flags were also reshaped correctly)"""
+    pass
+
+
+def test_flagsSurviveBackprojection():
+    """
+    flagging -> reshaping -> inverse reshaping -> test (flags == original-flags)
+    """
+    pass
+
+
+@pytest.mark.parametrize(
+    "reshaper", ["nshift", "fshift", "bshift", "nagg", "bagg", "fagg", "interpolation"]
+)
+def test_harmSingleVarIntermediateFlagging(data, reshaper):
+    flags = initFlagsLike(data)
+    field = "data"
+    freq = "15T"
+
+    pre_data = data.copy()
+    pre_flags = flags.copy()
+    data, flags = copyField(data, field, flags, field + "_interpolated")
+    data, flags = linear(data, field + "_interpolated", flags, freq=freq)
+    checkDataFlagsInvariants(data, flags, field + "_interpolated", identical=True)
+    assert data[field + "_interpolated"].index.inferred_freq == freq
+
+    # flag something bad
+    flags[data[field + "_interpolated"].index[3:4], field + "_interpolated"] = BAD
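+    # project the flag from the harmonized series back onto the original (irregular) index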
+    data, flags = reindexFlags(
+        data, field, flags, method="inverse_" + reshaper, source=field + "_interpolated"
+    )
+    data, flags = dropField(data, field + "_interpolated", flags)
+
+    assert len(data[field]) == len(flags[field])
+    assert data[field].equals(pre_data[field])
+    assert flags[field].index.equals(pre_flags[field].index)
+
+    if "agg" in reshaper:
+        if reshaper == "nagg":
+            start, end = 3, 7
+        elif reshaper == "fagg":
+            start, end = 3, 5
+        elif reshaper == "bagg":
+            start, end = 5, 7
+        else:
+            raise NotImplementedError("untested test case")
+
+        assert all(flags[field].iloc[start:end] > UNFLAGGED)
+        assert all(flags[field].iloc[:start] == UNFLAGGED)
+        assert all(flags[field].iloc[end:] == UNFLAGGED)
+
+    elif "shift" in reshaper:
+        if reshaper == "nshift":
+            exp = [False, False, False, False, True, False, False, False, False]
+        elif reshaper == "fshift":
+            exp = [False, False, False, False, True, False, False, False, False]
+        elif reshaper == "bshift":
+            exp = [False, False, False, False, False, True, False, False, False]
+        else:
+            raise NotImplementedError("untested test case")
+
+        flagged = flags[field] > UNFLAGGED
+        assert all(flagged == exp)
+
+    elif reshaper == "interpolation":
+        pytest.skip("no testcase for interpolation")
+
+    else:
+        raise NotImplementedError("untested test case")
+
+
+@pytest.mark.parametrize(
+    "params, expected",
+    [
+        (
+            ("nagg", "15Min"),
+            pd.Series(
+                data=[-87.5, -25.0, 0.0, 37.5, 50.0],
+                index=pd.date_range(
+                    "2011-01-01 00:00:00", "2011-01-01 01:00:00", freq="15min"
+                ),
+            ),
+        ),
+        (
+            ("nagg", "30Min"),
+            pd.Series(
+                data=[-87.5, -25.0, 87.5],
+                index=pd.date_range(
+                    "2011-01-01 00:00:00", "2011-01-01 01:00:00", freq="30min"
+                ),
+            ),
+        ),
+        (
+            ("bagg", "15Min"),
+            pd.Series(
+                data=[-50.0, -37.5, -37.5, 12.5, 37.5, 50.0],
+                index=pd.date_range(
+                    "2010-12-31 23:45:00", "2011-01-01 01:00:00", freq="15min"
+                ),
+            ),
+        ),
+        (
+            ("bagg", "30Min"),
+            pd.Series(
+                data=[-50.0, -75.0, 50.0, 50.0],
+                index=pd.date_range(
+                    "2010-12-31 23:30:00", "2011-01-01 01:00:00", freq="30min"
+                ),
+            ),
+        ),
+    ],
+)
+def test_harmSingleVarInterpolationAgg(data, params, expected):
+    flags = initFlagsLike(data)
+    field = "data"
+    h_field = "data_harm"
+
+    pre_data = data.copy()
+    pre_flags = flags.copy()
+    method, freq = params
+
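+    # harmonize a copy of the data, then deharmonize and check that the roundtrip restores the original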
+    data_harm, flags_harm = copyField(data, "data", flags, "data_harm")
+    data_harm, flags_harm = resample(
+        data_harm, h_field, flags_harm, freq, func=np.sum, method=method
+    )
+    checkDataFlagsInvariants(data_harm, flags_harm, h_field, identical=True)
+    assert data_harm[h_field].index.freq == pd.Timedelta(freq)
+    assert data_harm[h_field].equals(expected)
+
+    data_deharm, flags_deharm = reindexFlags(
+        data_harm, field, flags_harm, source=h_field, method="inverse_" + method
+    )
+    data_deharm, flags_deharm = dropField(data_deharm, h_field, flags_deharm)
+    checkDataFlagsInvariants(data_deharm, flags_deharm, field, identical=True)
+    assert data_deharm[field].equals(pre_data[field])
+    assert flags_deharm[field].equals(pre_flags[field])
+
+
+@pytest.mark.parametrize(
+    "params, expected",
+    [
+        (
+            ("bshift", "15Min"),
+            pd.Series(
+                data=[-50.0, -37.5, -25.0, 12.5, 37.5, 50.0],
+                index=pd.date_range(
+                    "2010-12-31 23:45:00", "2011-01-01 01:00:00", freq="15Min"
+                ),
+            ),
+        ),
+        (
+            ("fshift", "15Min"),
+            pd.Series(
+                data=[np.nan, -37.5, -25.0, 0.0, 37.5, 50.0],
+                index=pd.date_range(
+                    "2010-12-31 23:45:00", "2011-01-01 01:00:00", freq="15Min"
+                ),
+            ),
+        ),
+        (
+            ("nshift", "15min"),
+            pd.Series(
+                data=[np.nan, -37.5, -25.0, 12.5, 37.5, 50.0],
+                index=pd.date_range(
+                    "2010-12-31 23:45:00", "2011-01-01 01:00:00", freq="15Min"
+                ),
+            ),
+        ),
+        (
+            ("bshift", "30Min"),
+            pd.Series(
+                data=[-50.0, -37.5, 12.5, 50.0],
+                index=pd.date_range(
+                    "2010-12-31 23:30:00", "2011-01-01 01:00:00", freq="30Min"
+                ),
+            ),
+        ),
+        (
+            ("fshift", "30Min"),
+            pd.Series(
+                data=[np.nan, -37.5, 0.0, 50.0],
+                index=pd.date_range(
+                    "2010-12-31 23:30:00", "2011-01-01 01:00:00", freq="30Min"
+                ),
+            ),
+        ),
+        (
+            ("nshift", "30min"),
+            pd.Series(
+                data=[np.nan, -37.5, 12.5, 50.0],
+                index=pd.date_range(
+                    "2010-12-31 23:30:00", "2011-01-01 01:00:00", freq="30Min"
+                ),
+            ),
+        ),
+    ],
+)
+def test_harmSingleVarInterpolationShift(data, params, expected):
+    flags = initFlagsLike(data)
+    field = "data"
+    h_field = "data_harm"
+    pre_data = data.copy()
+    pre_flags = flags.copy()
+    method, freq = params
+
+    data_harm, flags_harm = copyField(data, "data", flags, "data_harm")
+    data_harm, flags_harm = shift(data_harm, h_field, flags_harm, freq, method=method)
+    assert data_harm[h_field].equals(expected)
+    checkDataFlagsInvariants(data_harm, flags_harm, field, identical=True)
+
+    data_deharm, flags_deharm = reindexFlags(
+        data_harm, field, flags_harm, source=h_field, method="inverse_" + method
+    )
+    checkDataFlagsInvariants(data_deharm, flags_deharm, field, identical=True)
+
+    data_deharm, flags_deharm = dropField(data_deharm, h_field, flags_deharm)
+    assert data_deharm[field].equals(pre_data[field])
+    assert flags_deharm[field].equals(pre_flags[field])
diff --git a/tests/funcs/test_modelling.py b/tests/funcs/test_modelling.py
new file mode 100644
index 0000000000000000000000000000000000000000..ff244da97d922915fba48ff905152809825b5b7c
--- /dev/null
+++ b/tests/funcs/test_modelling.py
@@ -0,0 +1,115 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+
+# see tests/fixtures.py for the global fixtures "course_..."
+
+import dios
+
+from saqc import BAD, UNFLAGGED
+from saqc.core import initFlagsLike
+from saqc.funcs.tools import maskTime
+from saqc.funcs.residues import calculatePolynomialResidues, calculateRollingResidues
+
+from tests.fixtures import *
+
+
+@pytest.mark.parametrize("dat", [pytest.lazy_fixture("course_2")])
+def test_modelling_polyFit_forRegular(dat):
+    data, _ = dat(
+        freq="10min", periods=30, initial_level=0, final_level=100, out_val=-100
+    )
+    # add some nice sine distortion
+    data = data + 10 * np.sin(np.arange(0, len(data.indexes[0])))
+    data = dios.DictOfSeries(data)
+    flags = initFlagsLike(data)
+    result1, _ = calculatePolynomialResidues(data, "data", flags, 11, 2, numba=False)
+    result2, _ = calculatePolynomialResidues(data, "data", flags, 11, 2, numba=True)
+    assert (result1["data"] - result2["data"]).abs().max() < 10 ** -10
+    result3, _ = calculatePolynomialResidues(
+        data, "data", flags, "110min", 2, numba=False
+    )
+    assert result3["data"].equals(result1["data"])
+    result4, _ = calculatePolynomialResidues(
+        data, "data", flags, 11, 2, numba=True, min_periods=11
+    )
+    assert (result4["data"] - result2["data"]).abs().max() < 10 ** -10
+    data.iloc[13:16] = np.nan
+    result5, _ = calculatePolynomialResidues(
+        data, "data", flags, 11, 2, numba=True, min_periods=9
+    )
+    assert result5["data"].iloc[10:19].isna().all()
+
+
+@pytest.mark.parametrize("dat", [pytest.lazy_fixture("course_2")])
+def test_modelling_rollingMean_forRegular(dat):
+    data, _ = dat(
+        freq="10min", periods=30, initial_level=0, final_level=100, out_val=-100
+    )
+    data = dios.DictOfSeries(data)
+    flags = initFlagsLike(data)
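+    # smoke test: the centered and the non-centered variant should run without raising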
+    calculateRollingResidues(
+        data,
+        "data",
+        flags,
+        5,
+        func=np.mean,
+        set_flags=True,
+        min_periods=0,
+        center=True,
+    )
+    calculateRollingResidues(
+        data,
+        "data",
+        flags,
+        5,
+        func=np.mean,
+        set_flags=True,
+        min_periods=0,
+        center=False,
+    )
+
+
+@pytest.mark.parametrize("dat", [pytest.lazy_fixture("course_1")])
+def test_modelling_mask(dat):
+    data, _ = dat()
+    data = dios.DictOfSeries(data)
+    flags = initFlagsLike(data)
+    field = "data"
+
+    # set flags everywhere to test unflagging
+    flags[:, field] = BAD
+
+    common = dict(data=data, field=field, flags=flags, mode="periodic")
+    data_seasonal, flags_seasonal = maskTime(
+        **common, start="20:00", end="40:00", closed=False
+    )
+    flagscol = flags_seasonal[field]
+    m = (20 <= flagscol.index.minute) & (flagscol.index.minute <= 40)
+    assert all(flags_seasonal[field][m] == UNFLAGGED)
+    assert all(data_seasonal[field][m].isna())
+
+    data_seasonal, flags_seasonal = maskTime(**common, start="15:00:00", end="02:00:00")
+    flagscol = flags_seasonal[field]
+    m = (15 <= flagscol.index.hour) & (flagscol.index.hour <= 2)
+    assert all(flags_seasonal[field][m] == UNFLAGGED)
+    assert all(data_seasonal[field][m].isna())
+
+    data_seasonal, flags_seasonal = maskTime(
+        **common, start="03T00:00:00", end="10T00:00:00"
+    )
+    flagscol = flags_seasonal[field]
+    m = (3 <= flagscol.index.hour) & (flagscol.index.hour <= 10)
+    assert all(flags_seasonal[field][m] == UNFLAGGED)
+    assert all(data_seasonal[field][m].isna())
+
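+    # mode="mask_field": mask data wherever an explicit boolean series is True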
+    mask_ser = pd.Series(False, index=data["data"].index)
+    mask_ser[::5] = True
+    data["mask_ser"] = mask_ser
+    flags = initFlagsLike(data)
+    data_masked, flags_masked = maskTime(
+        data, "data", flags, mode="mask_field", mask_field="mask_ser"
+    )
+    m = mask_ser
+    assert all(flags_masked[field][m] == UNFLAGGED)
+    assert all(data_masked[field][m].isna())
diff --git a/tests/funcs/test_pattern_rec.py b/tests/funcs/test_pattern_rec.py
new file mode 100644
index 0000000000000000000000000000000000000000..2c35dda3addbb2a20af0d1a3e6ea797a6a836507
--- /dev/null
+++ b/tests/funcs/test_pattern_rec.py
@@ -0,0 +1,58 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import pytest
+import pandas as pd
+import dios
+
+from saqc.constants import *
+from saqc.core import initFlagsLike
+from saqc.funcs.pattern import *
+from tests.common import initData
+
+
+@pytest.fixture
+def data():
+    return initData(cols=1, start_date="2016-01-01", end_date="2018-12-31", freq="1D")
+
+
+@pytest.fixture
+def field(data):
+    return data.columns[0]
+
+
+@pytest.mark.skip(reason="faulty implementation - wait for #GL216")
+def test_flagPattern_wavelet():
+    data = pd.Series(0, index=pd.date_range(start="2000", end="2001", freq="1d"))
+    data.iloc[10:18] = [0, 5, 6, 7, 6, 8, 5, 0]
+    pattern = data.iloc[10:18]
+
+    data = dios.DictOfSeries(dict(data=data, pattern_data=pattern))
+    flags = initFlagsLike(data, name="data")
+    data, flags = flagPatternByWavelet(
+        data, "data", flags, reference="pattern_data", flag=BAD
+    )
+
+    assert all(flags["data"].iloc[10:18] == BAD)
+    assert all(flags["data"].iloc[:9] == UNFLAGGED)
+    assert all(flags["data"].iloc[18:] == UNFLAGGED)
+
+
+def test_flagPattern_dtw():
+    data = pd.Series(0, index=pd.date_range(start="2000", end="2001", freq="1d"))
+    data.iloc[10:18] = [0, 5, 6, 7, 6, 8, 5, 0]
+    pattern = data.iloc[10:18]
+
+    data = dios.DictOfSeries(dict(data=data, pattern_data=pattern))
+    flags = initFlagsLike(data, name="data")
+    data, flags = flagPatternByDTW(
+        data, "data", flags, reference="pattern_data", flag=BAD
+    )
+
+    assert all(flags["data"].iloc[10:18] == BAD)
+    assert all(flags["data"].iloc[:9] == UNFLAGGED)
+    assert all(flags["data"].iloc[18:] == UNFLAGGED)
+
+    # visualize:
+    # data['data'].plot()
+    # ((flags['data']>0) *5.).plot()
diff --git a/tests/funcs/test_proc_functions.py b/tests/funcs/test_proc_functions.py
new file mode 100644
index 0000000000000000000000000000000000000000..62a33cf313b77252f4f2a27152b35948e39ad781
--- /dev/null
+++ b/tests/funcs/test_proc_functions.py
@@ -0,0 +1,136 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+
+# see tests/fixtures.py for the global fixtures "course_..."
+
+import dios
+
+from saqc.constants import *
+from saqc.core import initFlagsLike
+from saqc.funcs.transformation import transform
+from saqc.funcs.drift import correctOffset
+from saqc.funcs.interpolation import (
+    interpolateByRolling,
+    interpolateInvalid,
+    interpolateIndex,
+)
+from saqc.funcs.resampling import resample
+from saqc.lib.ts_operators import linearInterpolation, polynomialInterpolation
+
+from tests.fixtures import *
+
+
+def test_rollingInterpolateMissing(course_5):
+    data, characteristics = course_5(periods=10, nan_slice=[5, 6])
+    field = data.columns[0]
+    data = dios.DictOfSeries(data)
+    flags = initFlagsLike(data)
+    dataInt, *_ = interpolateByRolling(
+        data,
+        field,
+        flags,
+        3,
+        func=np.median,
+        center=True,
+        min_periods=0,
+        interpol_flag=UNFLAGGED,
+    )
+    assert dataInt[field][characteristics["missing"]].notna().all()
+    dataInt, *_ = interpolateByRolling(
+        data,
+        field,
+        flags,
+        3,
+        func=np.nanmean,
+        center=False,
+        min_periods=3,
+        interpol_flag=UNFLAGGED,
+    )
+    assert dataInt[field][characteristics["missing"]].isna().all()
+
+
+def test_interpolateMissing(course_5):
+    data, characteristics = course_5(periods=10, nan_slice=[5])
+    field = data.columns[0]
+    data = dios.DictOfSeries(data)
+    flags = initFlagsLike(data)
+    dataLin, *_ = interpolateInvalid(data, field, flags, method="linear")
+    dataPoly, *_ = interpolateInvalid(data, field, flags, method="polynomial")
+    assert dataLin[field][characteristics["missing"]].notna().all()
+    assert dataPoly[field][characteristics["missing"]].notna().all()
+    data, characteristics = course_5(periods=10, nan_slice=[5, 6, 7])
+    dataLin1, *_ = interpolateInvalid(
+        data.copy(), field, flags, method="linear", limit=2
+    )
+    dataLin2, *_ = interpolateInvalid(
+        data.copy(), field, flags, method="linear", limit=3
+    )
+    dataLin3, *_ = interpolateInvalid(
+        data.copy(), field, flags, method="linear", limit=4
+    )
+    assert dataLin1[field][characteristics["missing"]].isna().all()
+    assert dataLin2[field][characteristics["missing"]].isna().all()
+    assert dataLin3[field][characteristics["missing"]].notna().all()
+
+
+def test_transform(course_5):
+    data, characteristics = course_5(periods=10, nan_slice=[5, 6])
+    field = data.columns[0]
+    data = dios.DictOfSeries(data)
+    flags = initFlagsLike(data)
+    data1, *_ = transform(data, field, flags, func=linearInterpolation)
+    assert data1[field][characteristics["missing"]].isna().all()
+    data1, *_ = transform(
+        data, field, flags, func=lambda x: linearInterpolation(x, inter_limit=3)
+    )
+    assert data1[field][characteristics["missing"]].notna().all()
+    data1, *_ = transform(
+        data,
+        field,
+        flags,
+        func=lambda x: polynomialInterpolation(x, inter_limit=3, inter_order=3),
+    )
+    assert data1[field][characteristics["missing"]].notna().all()
+
+
+def test_resample(course_5):
+    data, characteristics = course_5(
+        freq="1min", periods=30, nan_slice=[1, 11, 12, 22, 24, 26]
+    )
+    field = data.columns[0]
+    data = dios.DictOfSeries(data)
+    flags = initFlagsLike(data)
+    data1, *_ = resample(
+        data,
+        field,
+        flags,
+        "10min",
+        np.mean,
+        maxna=2,
+        maxna_group=1,
+    )
+    assert ~np.isnan(data1[field].iloc[0])
+    assert np.isnan(data1[field].iloc[1])
+    assert np.isnan(data1[field].iloc[2])
+
+
+def test_interpolateGrid(course_5, course_3):
+    data, _ = course_5()
+    data_grid, characteristics = course_3()
+    data["grid"] = data_grid.to_df()
+    # data = dios.DictOfSeries(data)
+    flags = initFlagsLike(data)
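+    # smoke test: interpolating onto a foreign grid should run without errors (no assertions)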
+    dataInt, *_ = interpolateIndex(
+        data, "data", flags, "1h", "time", grid_field="grid", limit=10
+    )
+
+
+def test_offsetCorrection():
+    data = pd.Series(0, index=pd.date_range("2000", freq="1d", periods=100), name="dat")
+    data.iloc[30:40] = -100
+    data.iloc[70:80] = 100
+    data = dios.DictOfSeries(data)
+    flags = initFlagsLike(data)
+    data, _ = correctOffset(data, "dat", flags, 40, 20, "3d", 1)
+    assert (data == 0).all()[0]
diff --git a/tests/funcs/test_spikes_detection.py b/tests/funcs/test_spikes_detection.py
new file mode 100644
index 0000000000000000000000000000000000000000..f5cb61bdfab078c8aabb7dc30d1ce62b9abcc7a5
--- /dev/null
+++ b/tests/funcs/test_spikes_detection.py
@@ -0,0 +1,128 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# see tests/fixtures.py for the global fixtures "course_..."
+import dios
+from tests.fixtures import *
+
+from saqc.funcs.outliers import (
+    flagMAD,
+    flagOffset,
+    flagRaise,
+    flagMVScores,
+    flagByGrubbs,
+)
+from saqc.constants import *
+from saqc.core import initFlagsLike
+
+
+@pytest.fixture(scope="module")
+def spiky_data():
+    index = pd.date_range(start="2011-01-01", end="2011-01-05", freq="5min")
+    s = pd.Series(np.linspace(1, 2, index.size), index=index, name="spiky_data")
+    s.iloc[100] = 100
+    s.iloc[1000] = -100
+    flag_assertion = [100, 1000]
+    return dios.DictOfSeries(s), flag_assertion
+
+
+def test_flagMad(spiky_data):
+    data = spiky_data[0]
+    field, *_ = data.columns
+    flags = initFlagsLike(data)
+    data, flags_result = flagMAD(data, field, flags, "1H", flag=BAD)
+    flag_result = flags_result[field]
+    test_sum = (flag_result[spiky_data[1]] == BAD).sum()
+    assert test_sum == len(spiky_data[1])
+
+
+def test_flagSpikesBasic(spiky_data):
+    data = spiky_data[0]
+    field, *_ = data.columns
+    flags = initFlagsLike(data)
+    data, flags_result = flagOffset(
+        data, field, flags, thresh=60, tolerance=10, window="20min", flag=BAD
+    )
+    flag_result = flags_result[field]
+    test_sum = (flag_result[spiky_data[1]] == BAD).sum()
+    assert test_sum == len(spiky_data[1])
+
+
+# see tests/fixtures.py for the 'course_N' fixtures
+@pytest.mark.parametrize(
+    "dat",
+    [
+        pytest.lazy_fixture("course_1"),
+        pytest.lazy_fixture("course_2"),
+        pytest.lazy_fixture("course_3"),
+        pytest.lazy_fixture("course_4"),
+    ],
+)
+def test_flagSpikesLimitRaise(dat):
+    data, characteristics = dat()
+    field, *_ = data.columns
+    flags = initFlagsLike(data)
+    _, flags_result = flagRaise(
+        data,
+        field,
+        flags,
+        thresh=2,
+        freq="10min",
+        raise_window="20min",
+        numba_boost=False,
+        flag=BAD,
+    )
+    assert np.all(flags_result[field][characteristics["raise"]] > UNFLAGGED)
+    assert not np.any(flags_result[field][characteristics["return"]] > UNFLAGGED)
+    assert not np.any(flags_result[field][characteristics["drop"]] > UNFLAGGED)
+
+
+# see tests/fixtures.py for the 'course_N' fixtures
+@pytest.mark.parametrize("dat", [pytest.lazy_fixture("course_3")])
+def test_flagMultivarScores(dat):
+    data1, characteristics = dat(
+        periods=1000, initial_level=5, final_level=15, out_val=50
+    )
+    data2, characteristics = dat(
+        periods=1000, initial_level=20, final_level=1, out_val=30
+    )
+    field = "dummy"
+    fields = ["data1", "data2"]
+    s1, s2 = data1.squeeze(), data2.squeeze()
+    s1 = pd.Series(data=s1.values, index=s1.index)
+    s2 = pd.Series(data=s2.values, index=s1.index)
+    data = dios.DictOfSeries([s1, s2], columns=["data1", "data2"])
+    flags = initFlagsLike(data)
+    _, flags_result = flagMVScores(
+        data,
+        field,
+        flags,
+        fields=fields,
+        trafo=np.log,
+        iter_start=0.95,
+        n=10,
+        flag=BAD,
+    )
+    for field in fields:
+        isflagged = flags_result[field] > UNFLAGGED
+        assert isflagged[characteristics["raise"]].all()
+        assert not isflagged[characteristics["return"]].any()
+        assert not isflagged[characteristics["drop"]].any()
+
+
+@pytest.mark.parametrize("dat", [pytest.lazy_fixture("course_3")])
+def test_grubbs(dat):
+    data, char_dict = dat(
+        freq="10min",
+        periods=45,
+        initial_level=0,
+        final_level=0,
+        crowd_size=1,
+        crowd_spacing=3,
+        out_val=-10,
+    )
+    flags = initFlagsLike(data)
+    data, result_flags = flagByGrubbs(
+        data, "data", flags, window=20, min_periods=15, flag=BAD
+    )
+    assert np.all(result_flags["data"][char_dict["drop"]] > UNFLAGGED)
diff --git a/tests/funcs/test_tools.py b/tests/funcs/test_tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..c181277cc01129b2921ef6e494014a8ceeacb748
--- /dev/null
+++ b/tests/funcs/test_tools.py
@@ -0,0 +1,36 @@
+import pytest
+
+from saqc.lib.plotting import makeFig
+import pandas as pd
+import numpy as np
+import saqc
+import dios
+
+
+def test_makeFig():
+    # just testing that no errors occur...
+    data = dios.DictOfSeries(
+        pd.Series(
+            np.linspace(0, 1000, 1000),
+            pd.date_range("2000", "2001", periods=1000),
+            name="data",
+        )
+    )
+    d_saqc = saqc.SaQC(data)
+    d_saqc = (
+        d_saqc.flagRange("data", max=500)
+        .flagRange("data", max=400)
+        .flagRange("data", max=300)
+    )
+
+    # not interactive, no storing
+    dummy_path = ""
+
+    d_saqc = d_saqc.plot(field="data", path="")
+    d_saqc = d_saqc.plot(
+        field="data", path=dummy_path, plot_kwargs={"history": "valid"}, stats=True
+    )
+    d_saqc = d_saqc.plot(field="data", path=dummy_path, plot_kwargs={"history": "all"})
+    d_saqc = d_saqc.plot(
+        field="data", path=dummy_path, plot_kwargs={"slice": "2000-10"}, stats=True
+    )
diff --git a/tests/fuzzy/__init__.py b/tests/fuzzy/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..4265cc3e6c16c09774190fa55d609cd9fe0808e4
--- /dev/null
+++ b/tests/fuzzy/__init__.py
@@ -0,0 +1 @@
+#!/usr/bin/env python
diff --git a/tests/fuzzy/lib.py b/tests/fuzzy/lib.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b77a8b06e411c26bff8a2d1948b1e3228a19010
--- /dev/null
+++ b/tests/fuzzy/lib.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env python
+
+
+import numbers
+import dios
+import numpy as np
+import pandas as pd
+from typing import get_type_hints
+from contextlib import contextmanager
+
+from hypothesis.strategies import (
+    lists,
+    sampled_from,
+    composite,
+    from_regex,
+    datetimes,
+    integers,
+    register_type_strategy,
+    from_type,
+)
+from hypothesis.extra.numpy import arrays, from_dtype
+from hypothesis.strategies._internal.types import _global_type_lookup
+
+from saqc.constants import *
+from saqc.core.register import FUNC_MAP
+from saqc.lib.types import ColumnName, FreqString, IntegerWindow
+from saqc.core import initFlagsLike
+
+MAX_EXAMPLES = 50
+# MAX_EXAMPLES = 100000
+
+
+@composite
+def dioses(draw, min_cols=1):
+    """
+    initialize data according to the current restrictions
+    """
+    # NOTE:
+    # The following restrictions showed up and should be enforced during init:
+    # - Column names need to satisfy the following regex: [A-Za-z0-9_-]+
+    # - DatetimeIndex needs to be sorted
+    # - Integer values larger than 2**53 lead to numerical instabilities during
+    #   the integer->float->integer type conversion in _maskData/_unmaskData.
+
+    cols = draw(lists(columnNames(), unique=True, min_size=min_cols))
+    columns = {c: draw(dataSeries(min_size=3)) for c in cols}
+    return dios.DictOfSeries(columns)
+
+
+@composite
+def dataSeries(
+    draw, min_size=0, max_size=100, dtypes=("float32", "float64", "int32", "int64")
+):
+    if np.isscalar(dtypes):
+        dtypes = (dtypes,)
+
+    dtype = np.dtype(draw(sampled_from(dtypes)))
+    if issubclass(dtype.type, numbers.Integral):
+        info = np.iinfo(dtype)
+    elif issubclass(dtype.type, numbers.Real):
+        info = np.finfo(dtype)
+    else:
+        raise ValueError("only numerical dtypes are supported")
+    # we don't want to fail just because of overflows
+    elements = from_dtype(dtype, min_value=info.min + 1, max_value=info.max - 1)
+
+    index = draw(daterangeIndexes(min_size=min_size, max_size=max_size))
+    values = draw(arrays(dtype=dtype, elements=elements, shape=len(index)))
+    return pd.Series(data=values, index=index)
+
+
+@composite
+def columnNames(draw):
+    return draw(from_regex(r"[A-Za-z0-9_-]+", fullmatch=True))
+
+
+@composite
+def flagses(draw, data):
+    """
+    initialize a Flags object and set some flags
+    """
+    flags = initFlagsLike(data)
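+    # flag a random (strict) subset of each column's index BAD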
+    for col, srs in data.items():
+        loc_st = lists(
+            sampled_from(sorted(srs.index)), unique=True, max_size=len(srs) - 1
+        )
+        flags[draw(loc_st), col] = BAD
+    return flags
+
+
+@composite
+def functions(draw, module: str = None):
+    samples = tuple(FUNC_MAP.values())
+    if module:
+        samples = tuple(f for f in samples if f.name.startswith(module))
+    # samples = [FUNC_MAP["drift.correctExponentialDrift"]]
+    return draw(sampled_from(samples))
+
+
+@composite
+def daterangeIndexes(draw, min_size=0, max_size=100):
+    min_date = pd.Timestamp("1900-01-01").to_pydatetime()
+    max_date = pd.Timestamp("2099-12-31").to_pydatetime()
+    start = draw(datetimes(min_value=min_date, max_value=max_date))
+    periods = draw(integers(min_value=min_size, max_value=max_size))
+    freq = draw(sampled_from(["D", "H", "T", "min", "S", "L", "ms", "U", "us", "N"]))
+    return pd.date_range(start, periods=periods, freq=freq)
+
+
+@composite
+def frequencyStrings(draw, _):
+    freq = draw(sampled_from(["D", "H", "T", "min", "S", "L", "ms", "U", "us", "N"]))
+    mult = draw(integers(min_value=1, max_value=10))
+    value = f"{mult}{freq}"
+    return value
+
+
+@composite
+def dataFieldFlags(draw):
+    data = draw(dioses())
+    field = draw(sampled_from(sorted(data.columns)))
+    flags = draw(flagses(data))
+    return data, field, flags
+
+
+@composite
+def functionCalls(draw, module: str = None):
+    func = draw(functions(module))
+    kwargs = draw(functionKwargs(func))
+    return func, kwargs
+
+
+@contextmanager
+def applyStrategies(strategies: dict):
+
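+    # register the strategies for the duration of the context and remove them
+    # from hypothesis' global type lookup afterwards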
+    for dtype, strategy in strategies.items():
+        register_type_strategy(dtype, strategy)
+
+    yield
+
+    for dtype in strategies.keys():
+        del _global_type_lookup[dtype]
+
+
+@composite
+def functionKwargs(draw, func):
+    data = draw(dioses())
+    field = draw(sampled_from(sorted(data.columns)))
+
+    kwargs = {"data": data, "field": field, "flags": draw(flagses(data))}
+
+    i64 = np.iinfo("int64")
+
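+    # map saqc's annotated parameter types to custom strategies; all remaining
+    # type hints are drawn via hypothesis' from_type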
+    strategies = {
+        FreqString: frequencyStrings,
+        ColumnName: lambda _: sampled_from(
+            sorted(c for c in data.columns if c != field)
+        ),
+        IntegerWindow: lambda _: integers(min_value=1, max_value=len(data[field]) - 1),
+        int: lambda _: integers(min_value=i64.min + 1, max_value=i64.max - 1),
+    }
+
+    with applyStrategies(strategies):
+        for k, v in get_type_hints(func).items():
+            if k not in {"data", "field", "flags", "return"}:
+                value = draw(from_type(v))
+                kwargs[k] = value
+
+    return kwargs
diff --git a/tests/fuzzy/test_masking.py b/tests/fuzzy/test_masking.py
new file mode 100644
index 0000000000000000000000000000000000000000..15051ff0604f890c3f867bd49e8a548125c55f26
--- /dev/null
+++ b/tests/fuzzy/test_masking.py
@@ -0,0 +1,191 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import pandas as pd
+
+from hypothesis import given, settings
+
+from saqc.constants import UNFLAGGED, BAD
+from saqc.core.register import _maskData, _unmaskData, CallState
+
+from tests.fuzzy.lib import dataFieldFlags, MAX_EXAMPLES
+
+
+@settings(max_examples=MAX_EXAMPLES, deadline=None)
+@given(data_field_flags=dataFieldFlags())
+def test_maskingMasksData(data_field_flags):
+    """
+    test if flagged values are replaced by np.nan
+    """
+    data_in, field, flags = data_field_flags
+    data_masked, mask = _maskData(
+        data_in, flags, columns=[field], thresh=UNFLAGGED
+    )  # thresh UNFLAGGED | np.inf
+    assert data_masked[field].iloc[mask[field].index].isna().all()
+    assert (flags[field].iloc[mask[field].index] > UNFLAGGED).all()
+
+
+@settings(max_examples=MAX_EXAMPLES, deadline=None)
+@given(data_field_flags=dataFieldFlags())
+def test_dataMutationPreventsUnmasking(data_field_flags):
+    """test if (un)masking works as expected on data-changes.
+
+    if `data` is mutated after `_maskData`, `_unmaskData` should be a no-op
+    """
+    filler = -9999
+
+    data_in, field, flags = data_field_flags
+    data_masked, mask = _maskData(data_in, flags, columns=[field], thresh=UNFLAGGED)
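+    # rebuild the call state the register machinery would hand to _unmaskData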
+    state = CallState(
+        func=lambda x: x,
+        func_name="",
+        flags=flags,
+        field=field,
+        args=(),
+        kwargs={},
+        masking="field",
+        mthresh=UNFLAGGED,
+        mask=mask,
+    )
+
+    data_masked[field] = filler
+    data_out = _unmaskData(data_masked, state)
+    assert (data_out[field] == filler).all(axis=None)
+
+
+@settings(max_examples=MAX_EXAMPLES, deadline=None)
+@given(data_field_flags=dataFieldFlags())
+def test_flagsMutationPreventsUnmasking(data_field_flags):
+    """test if (un)masking works as expected on flags-changes.
+
+    if `flags` is mutated after `_maskData`, `_unmaskData` should be a no-op
+    """
+    data_in, field, flags = data_field_flags
+    data_masked, mask = _maskData(data_in, flags, columns=[field], thresh=UNFLAGGED)
+    state = CallState(
+        func=lambda x: x,
+        func_name="",
+        flags=flags,
+        field=field,
+        args=(),
+        kwargs={},
+        masking="field",
+        mthresh=UNFLAGGED,
+        mask=mask,
+    )
+    flags[:, field] = UNFLAGGED
+    data_out = _unmaskData(data_masked, state)
+    assert (data_out.loc[flags[field] == BAD, field].isna()).all(axis=None)
+
+
+@settings(max_examples=MAX_EXAMPLES, deadline=None)
+@given(data_field_flags=dataFieldFlags())
+def test_reshapingPreventsUnmasking(data_field_flags):
+    """test if (un)masking works as expected on index-changes.
+
+    If the index of data (and flags) changes within the function, unmasking
+    should not reapply the original data, but take the new data (and flags) as is.
+    """
+
+    filler = -1111
+
+    data_in, field, flags = data_field_flags
+    data_masked, mask = _maskData(data_in, flags, columns=[field], thresh=UNFLAGGED)
+    state = CallState(
+        func=lambda x: x,
+        func_name="",
+        flags=flags,
+        field=field,
+        args=(),
+        kwargs={},
+        masking="field",
+        mthresh=UNFLAGGED,
+        mask=mask,
+    )
+    # mutate indexes of `data` and `flags`
+    index = data_masked[field].index.to_series()
+    index.iloc[-len(data_masked[field]) // 2 :] += pd.Timedelta("7.5Min")
+    data_masked[field] = pd.Series(data=filler, index=index)
+
+    fflags = flags[field]
+    flags.drop(field)
+    flags[field] = pd.Series(data=fflags.values, index=index)
+
+    data_out = _unmaskData(data_masked, state)
+    assert (data_out[field] == filler).all(axis=None)
+
+
+@settings(max_examples=MAX_EXAMPLES, deadline=None)
+@given(data_field_flags=dataFieldFlags())
+def test_unmaskingInvertsMasking(data_field_flags):
+    """
+    unmasking data should invert the masking
+    """
+    data_in, field, flags = data_field_flags
+    data_masked, mask = _maskData(data_in, flags, columns=[field], thresh=UNFLAGGED)
+    state = CallState(
+        func=lambda x: x,
+        func_name="",
+        flags=flags,
+        field=field,
+        args=(),
+        kwargs={},
+        masking="field",
+        mthresh=UNFLAGGED,
+        mask=mask,
+    )
+    data_out = _unmaskData(data_masked, state)
+    assert pd.DataFrame.equals(
+        data_out.to_df().astype(float), data_in.to_df().astype(float)
+    )
+
+
+# @settings(max_examples=MAX_EXAMPLES, deadline=None)
+# @given(data_field_flags=dataFieldFlags(), func_kwargs=flagFuncsKwargs())
+# def test_maskingPreservesData(data_field_flags, func_kwargs):
+#     """
+#     no mutations on pre-flagged data
+
+#     calling a function on pre-flagged data should yield the same
+#     behavior as calling this function on data where the flagged values
+#     are removed
+#     """
+
+#     data_in, field, flags = data_field_flags
+
+#     data_masked, mask = _maskData(data_in, flags, columns=[field], to_mask=flags.BAD)
+#     func, kwargs = func_kwargs
+#     data_masked, _ = func(data_masked, field, flags, **kwargs)
+#     data_out = _unmaskData(data_in, mask, data_masked, flags, to_mask=flags.BAD)
+
+#     flags_in = flags.isFlagged(flag=flags.BAD)
+#     assert data_in.aloc[flags_in].equals(data_out.aloc[flags_in])
+
+
+# @settings(max_examples=MAX_EXAMPLES, deadline=None)
+# @given(data_field_flags=dataFieldFlags(), func_kwargs=flagFuncsKwargs())
+# def test_maskingEqualsRemoval(data_field_flags, func_kwargs):
+#     """
+#     calling a function on pre-flagged data should yield the same
+#     results as calling this function on data where the flagged values
+#     are removed
+#     """
+#     func, kwargs = func_kwargs
+
+#     data, field, flags = data_field_flags
+#     flagged_in = flags.isFlagged(flag=flags.BAD, comparator=">=")
+
+#     # mask and call
+#     data_left, _ = _maskData(data, flags, columns=[field], to_mask=flags.BAD)
+#     data_left, _ = func(data_left, field, flags, **kwargs)
+
+#     # remove and call
+#     data_right = data.aloc[~flagged_in]
+#     flags_right = flags.initFlags(flags.getFlags().aloc[~flagged_in])
+#     data_right, _ = func(data_right, field, flags_right, **kwargs)
+
+#     # NOTE: we need to handle the implicit type conversion in `_maskData`
+#     data_left_compare = data_left.aloc[~flagged_in]
+#     data_left_compare[field] = data_left_compare[field].astype(data[field].dtype)
+
+#     assert data_right.equals(data_left_compare)
diff --git a/test/__init__.py b/tests/integration/__init__.py
similarity index 100%
rename from test/__init__.py
rename to tests/integration/__init__.py
diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py
new file mode 100644
index 0000000000000000000000000000000000000000..634e2352167cf0ca24df77d5ccf5d687343fab37
--- /dev/null
+++ b/tests/integration/test_integration.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+from click.testing import CliRunner
+import os
+
+
+def test__main__py():
+    import saqc.__main__
+
+    # resolve paths relative to the installed package, so the test also works when not run from the project root
+    projpath = os.path.dirname(saqc.__file__) + "/../"
+    args = [
+        "--config",
+        projpath + "ressources/data/config_ci.csv",
+        "--data",
+        projpath + "ressources/data/data.csv",
+        "--outfile",
+        "/tmp/test.csv",  # the filesystem temp dir
+    ]
+    runner = CliRunner()
+
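+    # run the CLI once per flag translation scheme and expect a clean exit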
+    for scheme in ["float", "positional", "dmp", "simple"]:
+        result = runner.invoke(saqc.__main__.main, args + ["--scheme", scheme])
+        assert result.exit_code == 0, result.output
diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..4265cc3e6c16c09774190fa55d609cd9fe0808e4
--- /dev/null
+++ b/tests/lib/__init__.py
@@ -0,0 +1 @@
+#!/usr/bin/env python
diff --git a/tests/lib/test_rolling.py b/tests/lib/test_rolling.py
new file mode 100644
index 0000000000000000000000000000000000000000..ef94c987210dddeeee32f82ab859dc093ac089d0
--- /dev/null
+++ b/tests/lib/test_rolling.py
@@ -0,0 +1,211 @@
+import pytest
+
+import pandas as pd
+import numpy as np
+from saqc.lib.rolling import customRoller
+
+n = np.nan
+
+
+def test_rolling_existence_of_attrs():
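+    # customRoller should expose every public attribute a plain pandas Rolling object has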
+    r = pd.DataFrame().rolling(0).validate()
+    c = customRoller(pd.DataFrame(), 0)
+    expected = [attr for attr in dir(r) if not attr.startswith("_")]
+    result = [attr for attr in dir(c) if not attr.startswith("_")]
+    diff = [attr for attr in expected if attr not in result]
+    print(diff)
+    assert len(diff) == 0
+
+
+@pytest.fixture()
+def data():
+    # a series with a symmetrical but not regular index
+    left = pd.date_range("2000", freq="1min", periods=3)
+    middle = pd.date_range(left[-1], freq="1H", periods=4)
+    right = pd.date_range(middle[-1], freq="1min", periods=3)
+    s = pd.Series(1, index=pd.Index([*left, *middle[1:-1], *right]))
+    return s
+
+
+@pytest.mark.parametrize(
+    "kws, expected",
+    [
+        (dict(window="0h", closed="neither"), [n, n, n, n, n, n, n, n]),
+        (dict(window="1h", closed="neither"), [n, n, n, n, n, n, 1, 2]),
+        (dict(window="2h", closed="neither"), [n, n, n, n, 1, 1, 2, 3]),
+        (dict(window="3h", closed="neither"), [n, n, n, n, n, 2, 3, 4]),
+        (dict(window="4h", closed="neither"), [n, n, n, n, n, n, n, n]),
+        # at least `window` hours of NaNs at the beginning (removed by expand=False)
+        (dict(window="0h", closed="left"), [n, n, n, n, n, n, n, n]),
+        (dict(window="1h", closed="left"), [n, n, n, 1, 1, 1, 1, 2]),
+        (dict(window="2h", closed="left"), [n, n, n, n, 2, 2, 2, 3]),
+        (dict(window="3h", closed="left"), [n, n, n, n, n, 3, 3, 4]),
+        (dict(window="4h", closed="left"), [n, n, n, n, n, n, n, n]),
+        # at least `window` hours of NaNs at the beginning (removed by expand=False)
+        (dict(window="0h", closed="right"), [1, 1, 1, 1, 1, 1, 1, 1]),
+        (dict(window="1h", closed="right"), [n, n, n, 1, 1, 1, 2, 3]),
+        (dict(window="2h", closed="right"), [n, n, n, n, 2, 2, 3, 4]),
+        (dict(window="3h", closed="right"), [n, n, n, n, n, 3, 4, 5]),
+        (dict(window="4h", closed="right"), [n, n, n, n, n, n, n, n]),
+        # at least `window` hours of NaNs at the beginning (removed by expand=False)
+        (dict(window="0h", closed="both"), [1, 1, 1, 1, 1, 1, 1, 1]),
+        (dict(window="1h", closed="both"), [n, n, n, 2, 2, 2, 2, 3]),
+        (dict(window="2h", closed="both"), [n, n, n, n, 3, 3, 3, 4]),
+        (dict(window="3h", closed="both"), [n, n, n, n, n, 4, 4, 5]),
+        (dict(window="4h", closed="both"), [n, n, n, n, n, n, n, n]),
+    ],
+    ids=lambda x: str(x if not isinstance(x, list) else ""),
+)
+def test_rolling_expand(data, kws, expected):
+    expected = np.array(expected)
+    result = customRoller(data, **kws, expand=False).sum()
+    result = result.to_numpy()
+
+    print()
+    print(
+        pd.DataFrame(
+            dict(
+                orig=data,
+                exp=expected,
+                res=result,
+            ),
+            index=data.index,
+        )
+    )
+    assert np.allclose(result, expected, rtol=0, atol=0, equal_nan=True)
+
+
+# left and right results are swapped
+# the expected result is checked inverted, aka x[::-1]
+@pytest.mark.parametrize(
+    "kws, expected",
+    [
+        (dict(window="0h", closed="neither"), [n, n, n, n, n, n, n, n]),
+        (dict(window="1h", closed="neither"), [n, n, n, n, n, n, 1, 2]),
+        (dict(window="2h", closed="neither"), [n, n, n, n, 1, 1, 2, 3]),
+        (dict(window="3h", closed="neither"), [n, n, n, n, n, 2, 3, 4]),
+        (dict(window="4h", closed="neither"), [n, n, n, n, n, n, n, n]),
+        # at least `window` hours of NaNs at the beginning (removed by expand=False)
+        (dict(window="0h", closed="right"), [n, n, n, n, n, n, n, n]),
+        (dict(window="1h", closed="right"), [n, n, n, 1, 1, 1, 1, 2]),
+        (dict(window="2h", closed="right"), [n, n, n, n, 2, 2, 2, 3]),
+        (dict(window="3h", closed="right"), [n, n, n, n, n, 3, 3, 4]),
+        (dict(window="4h", closed="right"), [n, n, n, n, n, n, n, n]),
+        # at least `window` hours of NaNs at the beginning (removed by expand=False)
+        (dict(window="0h", closed="left"), [1, 1, 1, 1, 1, 1, 1, 1]),
+        (dict(window="1h", closed="left"), [n, n, n, 1, 1, 1, 2, 3]),
+        (dict(window="2h", closed="left"), [n, n, n, n, 2, 2, 3, 4]),
+        (dict(window="3h", closed="left"), [n, n, n, n, n, 3, 4, 5]),
+        (dict(window="4h", closed="left"), [n, n, n, n, n, n, n, n]),
+        # at least `window` hours of NaNs at the beginning (removed by expand=False)
+        (dict(window="0h", closed="both"), [1, 1, 1, 1, 1, 1, 1, 1]),
+        (dict(window="1h", closed="both"), [n, n, n, 2, 2, 2, 2, 3]),
+        (dict(window="2h", closed="both"), [n, n, n, n, 3, 3, 3, 4]),
+        (dict(window="3h", closed="both"), [n, n, n, n, n, 4, 4, 5]),
+        (dict(window="4h", closed="both"), [n, n, n, n, n, n, n, n]),
+    ],
+    ids=lambda x: str(x if not isinstance(x, list) else ""),
+)
+def test_rolling_expand_forward(data, kws, expected):
+    expected = np.array(expected)[::-1]  # inverted
+    result = customRoller(data, **kws, expand=False, forward=True).sum()
+    result = result.to_numpy()
+
+    print()
+    print(
+        pd.DataFrame(
+            dict(
+                orig=data,
+                exp=expected,
+                res=result,
+            ),
+            index=data.index,
+        )
+    )
+    assert np.allclose(result, expected, rtol=0, atol=0, equal_nan=True)
+
+
+@pytest.mark.parametrize("window", ["0H", "1H", "2H", "3H", "4H"])
+@pytest.mark.parametrize("closed", ["both", "neither", "left", "right"])
+@pytest.mark.parametrize("center", [False, True], ids=lambda x: f" center={x} ")
+@pytest.mark.parametrize("forward", [False, True], ids=lambda x: f" forward={x} ")
+@pytest.mark.parametrize(
+    "func",
+    [
+        "sum",
+        "count",
+        "mean",
+        "median",
+        "min",
+        "max",
+        "skew",
+        "kurt",
+        "cov",
+        "corr",
+        "sem",
+        "var",
+        "std",
+    ],
+)
+def test_dtindexer(data, center, closed, window, forward, func):
+    print()
+    print("forward", forward)
+    print("center", center)
+    print("closed", closed)
+    print("window", window)
+
+    data: pd.Series
+
+    d = data
+    cl = closed
+    if forward:
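+        # emulate a forward-looking window with plain pandas: reverse the series
+        # and mirror 'closed'; the result is re-reversed further down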
+        d = data[::-1]
+        cl = "right" if closed == "left" else "left" if closed == "right" else closed
+    roller = d.rolling(
+        window=window,
+        closed=cl,
+        center=center,
+    )
+
+    expected = getattr(roller, func)()
+    if forward:
+        expected = expected[::-1]
+
+    roller = customRoller(
+        obj=data,
+        window=window,
+        closed=closed,
+        center=center,
+        forward=forward,
+        expand=True,
+    )
+    result = getattr(roller, func)()
+
+    print()
+    print(
+        pd.DataFrame(
+            dict(
+                orig=data,
+                exp=expected,
+                res=result,
+            ),
+            index=data.index,
+        )
+    )
+
+    # pandas bug
+    if pd.__version__ < "1.4" and forward:
+        result = result[:-1]
+        expected = expected[:-1]
+
+    # pandas bug
+    # pandas inserts a NaN where a valid value should be
+    if (
+        pd.__version__ < "1.4"
+        and forward
+        and func in ["sem", "var", "std"]
+        and int(window[:-1]) <= 1
+    ):
+        pytest.skip("fails for pandas < 1.4")
+
+    assert np.allclose(result, expected, rtol=0, atol=0, equal_nan=True)
diff --git a/tests/lib/test_ts_operators.py b/tests/lib/test_ts_operators.py
new file mode 100644
index 0000000000000000000000000000000000000000..510fce2e4f5d84b9c86684098b196345f841b753
--- /dev/null
+++ b/tests/lib/test_ts_operators.py
@@ -0,0 +1,10 @@
+import pytest
+
+from saqc.lib.ts_operators import butterFilter
+import pandas as pd
+
+
+def test_butterFilter():
+    assert (
+        butterFilter(pd.Series([1, -1] * 100), cutoff=0.1) - pd.Series([1, -1] * 100)
+    ).mean() < 0.5