
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (871)
Showing with 1988 additions and 22 deletions
node: $Format:%H$
node-date: $Format:%cI$
describe-name: $Format:%(describe:tags=true,match=*[0-9]*)$
ref-names: $Format:%D$
.git_archival.txt export-subst
@@ -49,6 +49,8 @@ nosetests.xml
coverage.xml
*.cover
.hypothesis/
/bench/
/prof/
# Translations
*.mo
@@ -67,6 +69,7 @@ instance/
# Sphinx documentation
docs/_build/
docs/source/api/generated
docs/output.txt
# PyBuilder
image: python
image: python:3.11
stages:
- test
- build
- docs
- deploy
- release
check:
stage: test
before_script:
- pip3 install black 'pylint<3' 'isort[colors]<6'
- pip3 install 'black>=23,<24' 'pylint>=3' 'isort[colors]<6'
script:
- pip3 install --editable .
- black --check --diff --color .
@@ -19,20 +21,55 @@ test:
stage: test
script:
- pip3 install --editable .[test]
- python -m pytest --cov finam --cov-report term-missing --cov-report xml:cov.xml -v tests/
- python -m pytest --cov finam --cov-report term-missing --cov-report html:cov --cov-report xml:cov.xml -v tests/
coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
artifacts:
reports:
coverage_report:
coverage_format: cobertura
path: cov.xml
paths:
- cov
benchmark:
stage: test
script:
- pip3 install --editable .[test]
- python -m pytest -v benchmarks/ --benchmark-histogram bench/bench --benchmark-sort name
artifacts:
paths:
- bench
profile:
stage: test
before_script:
- apt-get update -y
- apt-get install -y graphviz
- pip3 install graphviz gprof2dot
script:
- pip3 install --editable .[test]
- ./benchmarks/run_profiling.sh
artifacts:
paths:
- prof
doctest:
stage: test
script:
- pip3 install --editable .[doc]
# doctest does not detect tests in code files during the first run.
# add a dummy build to generate .rst files before the actual tests
- sphinx-build -b dummy docs/source docs/build
- sphinx-build -b doctest docs/source docs/build
documentation:
stage: build
stage: docs
variables:
GIT_STRATEGY: clone
GIT_DEPTH: 0 # to have all tags
script:
- pip3 install --editable .[doc]
- sphinx-apidoc --separate -o docs src/finam
- sphinx-build -b html docs docs/build
- sphinx-build docs/source docs/build
- mv docs/build public/
artifacts:
paths:
@@ -46,3 +83,42 @@ pages:
- public
only:
- main
build:
stage: build
variables:
GIT_STRATEGY: clone
GIT_DEPTH: 0 # to have all tags
script:
- pip install build
- python -m build
artifacts:
paths:
- dist
pypi_test_release:
stage: release
dependencies:
- build
script:
- pip install twine
- python -m twine upload --verbose --skip-existing -r testpypi -u __token__ -p ${TEST_PYPI_TOKEN} dist/*
artifacts:
paths:
- dist
only:
- main
- tags
pypi_release:
stage: release
dependencies:
- build
script:
- pip install twine
- python -m twine upload --verbose -u __token__ -p ${PYPI_TOKEN} dist/*
artifacts:
paths:
- dist
only:
- tags
{
"contributors": [
{
"orcid": "0000-0002-8853-7202",
"affiliation": "Helmholtz Centre for Environmental Research - UFZ",
"type": "ProjectMember",
"name": "Martin Lange"
},
{
"orcid": "0000-0001-9060-4008",
"affiliation": "Helmholtz Centre for Environmental Research - UFZ",
"type": "ProjectMember",
"name": "Sebastian M\u00fcller"
},
{
"orcid": "0000-0002-1071-3464",
"affiliation": "Helmholtz Centre for Environmental Research - UFZ",
"type": "ProjectMember",
"name": "Thomas Fischer"
},
{
"orcid": "0000-0001-5385-0234",
"affiliation": "Helmholtz Centre for Environmental Research - UFZ",
"type": "ProjectMember",
"name": "Sara K\u00f6nig"
},
{
"orcid": "0000-0003-1785-8866",
"affiliation": "Helmholtz Centre for Environmental Research - UFZ",
"type": "ProjectMember",
"name": "Jeisson Javier Leal Rojas"
},
{
"orcid": "0000-0001-9303-6712",
"affiliation": "Helmholtz Centre for Environmental Research - UFZ",
"type": "ProjectMember",
"name": "Matthias Kelbling"
},
{
"orcid": "0000-0003-3939-1523",
"affiliation": "Helmholtz Centre for Environmental Research - UFZ",
"type": "Supervisor",
"name": "Stephan Thober"
},
{
"orcid": "0000-0002-7798-7080",
"affiliation": "Helmholtz Centre for Environmental Research - UFZ",
"type": "Supervisor",
"name": "Sabine Attinger"
}
],
"license": "LGPL-3.0+",
"title": "FINAM is not a model",
"language": "eng",
"keywords": [
"coupler",
"framwork",
"modelling"
],
"creators": [
{
"affiliation": "Helmholtz Centre for Environmental Research - UFZ",
"name": "FINAM Developers"
}
]
}
# FINAM developers
The FINAM project was created by the following people.
## Core developers
- Martin Lange (E-mail: <martin.lange@ufz.de>)
- Sebastian Müller (E-mail: <sebastian.mueller@ufz.de>)
## Working Group members
- Thomas Fischer (E-mail: <thomas.fischer@ufz.de>)
- Sara König (E-mail: <sara.koenig@ufz.de>)
- Jeisson Javier Leal Rojas (E-mail: <jeisson-javier.leal-rojas@ufz.de>)
- Matthias Kelbling (E-mail: <matthias.kelbling@ufz.de>)
## Supervisors
- Stephan Thober (E-mail: <stephan.thober@ufz.de>)
- Sabine Attinger (E-mail: <sabine.attinger@ufz.de>)
# Changelog
# Release notes
## [unpublished]
## [v1.0.0]
None
### Breaking changes
* submodule `modules` renamed to `components` for consistency (!289)
* argument `modules` renamed to `components` in `Composition` for consistency (!289)
* Components now implement `_next_time` instead of the property `next_time` for consistency (!283)
* All fields of `Composition` are now private (!273)
* `Input.source` is private, `Input.get_source()` becomes property `Input.source`, `Input.set_source` becomes a setter (!273)
* `Output.targets` is private, `Output.get_targets()` becomes property `Output.targets` (!273)
* Composition metadata was restructured to hold components and adapters in separate sub-dictionaries (!274)
* Time components implement method `_next_time` instead of property `next_time` (!283)
* `Info` now has properties for `grid`, `time` and `mask` (!286)
* all init-args of `Info` are now optional (!286)
* `Info.accepts` has changed signature: renamed `ignore_none` to `incoming_donwstream` (!286)
* `Info.accepts` now only checks: `grid`, `mask` and `units` (other meta data can differ) (!286)
* `Grid.to_/from_canonical` now allows additional dimensions (!286)
* `data_shape` now a property of `GridBase` (!286)
* `NoGrid` can be initialized with `dim` or `data_shape` now
* `NoGrid.data_shape` can have `-1` entries for variable size dimensions
* if only `dim` is given to `NoGrid`, all entries in `data_shape` will be `-1` (see the sketch below)
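A minimal sketch of the reworked `Info` and `NoGrid` API from the list above. It assumes finam >= 1.0 with `Mask` and `NoGrid` re-exported at the package top level (as `Info` and `UniformGrid` are elsewhere in this diff); the keyword names follow the entries above.

```python
import datetime as dt

import finam as fm

# All Info init-args are optional now; grid, time and mask are properties.
info = fm.Info(
    time=dt.datetime(2000, 1, 1),
    grid=fm.UniformGrid((32, 16)),
    units="m",
    mask=fm.Mask.FLEX,  # flexible masking
)
print(info.grid, info.time, info.mask)

# NoGrid can take `dim` or `data_shape`; -1 marks a variable-size dimension.
no_grid_a = fm.NoGrid(dim=2)               # data_shape becomes (-1, -1)
no_grid_b = fm.NoGrid(data_shape=(3, -1))  # fixed first axis, variable second
```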
### Features
* Components and adapters automatically provide default metadata that can be extended by implementations (!274, !276)
* Grid classes now have attributes providing connectivity information for the contained cells (!275)
* `cells_connectivity`: connectivity array as used by ESMF and VTK
* `cells_definition`: cell definition as used by PyVista and legacy VTK
* `cells_offset`: location of the start of each cell in `cells_connectivity`
* added convenience functions and constants to `grid_tools` (!275)
* `get_cells_matrix`: convert `cells_connectivity` or `cells_definition` back to the default cells matrix used in the Grid class (can be used to convert VTK-grids into FINAM-grids)
* `INV_VTK_TYPE_MAP`: inverse mapping to `VTK_TYPE_MAP` - FINAM cell type to VTK cell type
* `VTK_CELL_DIM`: parametric dimension for each VTK cell type
* Grid classes are now reusable with different data locations, and grid type casting was improved (!278)
* added `copy` method to grids with optional argument `deep` (`False` by default) to create a copy of a grid
* added setter for `data_location` in order to set a new data location (e.g. after copying a grid)
* added class attribute `valid_locations` in order to check the set data location (esri-grid only supports cells, unstructured-points only support points)
* added missing casting methods to convert esri to uniform and uniform to rectilinear (when you want to use point data on an esri-grid, you can cast it to uniform first)
* added `axes_attributes` also to unstructured grids
* Grid method `compatible_with` now has a `check_location` argument to optionally check data location (!280)
* added `Mask` enum with two options: (!286)
* `Mask.FLEX` for flexible masking
* `Mask.NONE` to explicitly use plain numpy arrays
* added `mask` attribute and init-arg to `Info`: can be a `Mask` value or a valid mask for `numpy.ma.MaskedArray` (!286)
* `data.tools.prepare` now applies masks to data if set in the `Info` object (!286); see the sketch after this list
* `ARegridding` now has an `out_mask` arg (!286)
* `RegridNearest` and `RegridLinear` now support explicitly masked data (input doesn't have `Mask.FLEX`) (!286)
* adapters now have an `in_info` property (!286)
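A minimal sketch of the masking support described above, assuming `Mask` is available at the package top level and that `prepare` applies an explicit mask set on the `Info` object, as the entry above states; the exact return type (a `pint.Quantity` wrapping a masked array) is an assumption.

```python
import datetime as dt

import numpy as np

import finam as fm

time = dt.datetime(2000, 1, 1)
grid = fm.UniformGrid((4, 3))

# an explicit mask (True = masked), used directly as the Info mask
mask = np.zeros(grid.data_shape, dtype=bool)
mask[0, 0] = True

info = fm.Info(time=time, grid=grid, units="m", mask=mask)

# the mask from the Info object is expected to be applied by prepare()
data = fm.data.prepare(fm.data.full(0.0, info), info)
print(np.ma.is_masked(data.magnitude))

# Mask.NONE opts out of masking and enforces plain numpy arrays
info_plain = fm.Info(time=time, grid=grid, units="m", mask=fm.Mask.NONE)
```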
### Bug fixes
* cells for structured grids in 3D are now created correctly (no negative volume in VTK/ESMF) (!286)
* cf_units.py was broken for pint>=0.24 (!282)
### Documentation
* Minor fixes in documentation examples and links (!272)
* Adds a book section on composition, component and adapter metadata (!274)
## [v0.5.1]
### Bug fixes
* Fix unquantified masked arrays losing their mask in `fm.data.prepare()` (#115, !270)
## [v0.5.0]
### Features
* Components and adapters can provide a dictionary of metadata (!259)
* Class `Composition` has a property `metadata` that collects and returns the metadata from all components and adapters (!259)
* Automatic conversion between compatible grids (!255)
* Adds methods `to_canonical`, `from_canonical` and `get_transform_to` to grid classes (!255)
* Adds support for masked grids using `numpy.ma.MaskedArray` (!258, !260)
* Adds convenience functions for dealing with masked arrays in `data.tools` (!260):
`is_masked_array`, `has_masked_values`, `filled`, `to_compressed`, `from_compressed`, `check_data_covers_domain` (see the sketch after this list)
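A small sketch of the masked-array helpers listed above. It assumes they are importable from `finam.data.tools` (like the other data tools imported in the benchmark files below) and that `filled` mirrors `numpy.ma.filled(data, fill_value)`.

```python
import numpy as np

from finam.data.tools import filled, has_masked_values, is_masked_array

data = np.ma.masked_array([1.0, 2.0, 3.0], mask=[False, True, False])

print(is_masked_array(data))    # True
print(has_masked_values(data))  # True

# replace masked entries with a fill value, like numpy.ma.filled
print(filled(data, -999.0))
```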
### Documentation
* Adds a book chapter on wrapping existing models for FINAM (!256)
* Adds a book section on masked data (!262)
### Bug fixes
* No more logging of expected `FinamNoDataError` in inputs during the connect phase (!257)
### Other
* FINAM is now available on Conda via conda-forge
## [v0.4.0]
### New scheduling algorithm
* FINAM uses a new scheduling algorithm that allows components to use future data instead of only past/current (!157, !159)
* New adapters to resolve circular coupling through the use of delayed data (!187)
* It is now possible to set up static couplings that run only once and have no explicit time or stepping (!166)
* FINAM can handle different starting times of components by pushing initial data twice (!206):
Once for the common starting time, and once for the actual component time
* Components are no longer required to push all outputs on every step (!208)
### Data and metadata rework
* Outputs check compatibility between metadata of inputs if there is more than one target input (!104)
* Add data tools function `compatible_units` to check for convertibility (!105)
* Components can exchange their starting time through the `Info` object (!111)
* Info exchange is automated by the `ConnectHelper` by specifying transfer rules at initialization (!154)
* `Info` now requires time in constructor (can be `None`) (!111)
* Scheduler checks for dead links that don't work in terms of push/pull combination (!112)
* `IInput`, `IOutput` and `IAdapter` have new internally used properties `needs_push` and `needs_pull` (!112)
* `to_xarray` now checks the data shape if the data is not flat (!130)
* Outputs can be flagged `static` for data that is only used during initialization, or that is constant (!166)
* Inputs can be flagged `static` for constant data (!171)
* Outputs accept and convert compatible units, not only exactly equal units (!215)
* Outputs check that subsequent data pushes don't share memory (!217)
* Exchanged `xarray` data has no time coordinate anymore, only a dimension without values (for performance and usability) (!223)
* Remove the `xarray` wrapping completely. Use numpy arrays in pint `Quantity` (!235)
* Outputs and adapters can have a `memory_limit` and write data to disk if the limit is exceeded (!238)
### Components
* Add `modules.WeightedSum` for aggregation of multiple inputs (!105)
* Add `modules.SimplexNoise` for generating spatio-temporal noise (!131)
* Add `modules.TimeTrigger` to forward data from pull-based to push-based components (!131)
* Add `modules.ScheduleLogger` to visualize scheduling/module updates through ASCII charts (!160)
* Add `modules.DebugPushConsumer` as a push-based variant of the debug consumer (!165)
* Add `modules.UserControl` that lets users control FINAM runs from the terminal (!184)
* `modules.DebugConsumer` and `modules.DebugPushConsumer` can use optional callbacks for better debugging (!176)
* Components can be renamed using the method `with_name()` (!243)
### Adapters
* Add `adapters.Histogram` to extract a histogram from grid values (!182)
* Add `adapters.DelayFixed`, `adapters.DelayToPull` and `adapters.DelayToPush` to resolve circular coupling through the use of delayed data (!187)
* Add `adapters.StepTime` for step-wise interpolation (!194)
* Restructuring of time integration adapters (!194)
* `adapters.IntegrateTime` renamed to `adapters.AvgOverTime`
* Add `adapters.SumOverTime` for sum / area-under-curve integration
* Adapters have a method `finalize()` for cleanup (!226).
* Adapters can be renamed using the method `with_name()` (!243)
### Other
* Remove module `core`, subpackages now under `finam` (!106)
* Rename `IOutput.source_changed()` to `source_updated` (!107)
* Rename `LogError` to `ErrorLogger` (!107)
* Rename abstract SDK classes: (!107)
* `AAdapter` is now `Adapter`
* `AComponent` is now `Component`
* `ATimeComponent` is now `TimeComponent`
* Changed arguments for `create_connector()` (!111)
* Removed `required_out_infos`
* Renamed `required_in_data` to `pull_data`
* Added arguments to specify info exchange rules
* All error types are in module `errors` now, and re-exported at top level (!116)
* Overwriting `_validate()` and `_finalize()` in components is now mandatory (!156)
* Input and output slots can be accessed from components directly, e.g. `comp["A"]` instead of `comp.inputs["A"]` (!147); see the sketch after this list
* Inputs and outputs can be marked as `static` for constant data without time information (!166, !171)
* New helper function `tools.inspect()` to inspect components, adapters and I/O slots (!197)
* Publish on PyPI, starting with the next release (!198, !200, !201)
* Added benchmarks for the most important FINAM functions
(see the [benchmarks README](https://git.ufz.de/FINAM/finam/-/blob/main/benchmarks/README.md))
* Added profiling for full runs to the CI (!221)
* Optimization of data tool functions, with approx. 20-fold speedup of basic push+pull
(!222, !223, !224, !228, !229, !237).
* Add two more log levels: `TRACE` (most verbose) and `PROFILE` (between `DEBUG` and `INFO`) (!240)
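A minimal coupling sketch for the slot access and renaming entries above. Component and composition usage follows the profiling scripts further down in this diff; the chained call to `with_name()` assumes the method returns the component, which is an assumption not confirmed by this diff.

```python
import datetime as dt

import finam as fm

start = dt.datetime(2000, 1, 1)
info = fm.Info(time=None, grid=fm.UniformGrid((8, 4)), units="m")

source = fm.components.CallbackGenerator(
    callbacks={"Out": (lambda t: fm.data.full(0.0, info), info.copy())},
    start=start,
    step=dt.timedelta(days=1),
).with_name("Generator")  # assumed to return the component for chaining

sink = fm.components.DebugConsumer(
    inputs={"In": info.copy()},
    start=start,
    step=dt.timedelta(days=1),
)

composition = fm.Composition([source, sink])

# slots can be accessed on the component directly instead of via .inputs/.outputs
source["Out"] >> sink["In"]

composition.run(end_time=dt.datetime(2000, 2, 1))
```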
## [v0.4.0-rc.2]
@@ -19,7 +182,7 @@ None
* Connect phase of scheduler can be called separately from run (!99)
* No need to set component status in constructor anymore (!100)
### Other
* Components are allowed to be in state VALIDATED at the end of a run (i.e. not updated) (!97)
* Component connector checks that inputs and outputs referenced in arguments actually exist (!101)
@@ -28,12 +191,12 @@ None
### Data and metadata rework
* Grid specifications for structured and unstructured grids (!74):
`RectilinearGrid`, `UniformGrid`, `EsriGrid`, `UnstructuredGrid` and `UnstructuredPoints`
* Use of `xarray.DataArray` for all exchanged data (!74)
* All exchanged data must have `pint` units (can be "dimensionless") (!74)
* Metadata about grid specification, units and other metadata is exchanged before the first data exchange (!77)
* Metadata exchange is iterative and bi-directional (!77)
Components can depend on metadata from source or target components
* Inputs check compatibility of incoming metadata with own requirements (!77)
* Inputs and outputs check compatibility of incoming data with metadata (!77)
@@ -92,3 +255,19 @@ None
* Uses Python's `datetime` and `timedelta` for all time-related parameters
* Removed temporal sum integration adapter
## [v0.1.0]
* initial release of FINAM
[unpublished]: https://git.ufz.de/FINAM/finam/-/compare/v1.0.0...main
[v1.0.0]: https://git.ufz.de/FINAM/finam/-/compare/v0.5.1...v1.0.0
[v0.5.1]: https://git.ufz.de/FINAM/finam/-/compare/v0.5.0...v0.5.1
[v0.5.0]: https://git.ufz.de/FINAM/finam/-/compare/v0.4.0...v0.5.0
[v0.4.0]: https://git.ufz.de/FINAM/finam/-/compare/v0.4.0-rc.2...v0.4.0
[v0.4.0-rc.2]: https://git.ufz.de/FINAM/finam/-/compare/v0.4.0-rc.1...v0.4.0-rc.2
[v0.4.0-rc.1]: https://git.ufz.de/FINAM/finam/-/compare/v0.3.0...v0.4.0-rc.1
[v0.3.0]: https://git.ufz.de/FINAM/finam/-/compare/v0.2.0...v0.3.0
[v0.2.0]: https://git.ufz.de/FINAM/finam/-/compare/v0.1.0...v0.2.0
[v0.1.0]: https://git.ufz.de/FINAM/finam/-/commits/v0.1.0
File moved
# SOFTWARE LICENCE
FINAM is an open-source component-based model coupling framework for environmental models.
## Copyright Notice
Copyright © 2021-2025, the FINAM developers from Helmholtz-Zentrum für Umweltforschung GmbH - UFZ. All rights reserved.
***The code is a property of:***
> Helmholtz-Zentrum für Umweltforschung GmbH - UFZ<br/>
> Registered Office: Leipzig<br/>
> Registration Office: Amtsgericht Leipzig<br/>
> Trade Register Nr. B 4703<br/>
The list of FINAM developers is provided in the AUTHORS.md file.
***Contact:***
- FINAM Admins (E-mail: <finam@ufz.de>)
- Martin Lange (E-mail: <martin.lange@ufz.de>)
- Sebastian Müller (E-mail: <sebastian.mueller@ufz.de>)
- Stephan Thober (E-mail: <stephan.thober@ufz.de>)
- Sabine Attinger (E-mail: <sabine.attinger@ufz.de>)
> Department Computational Hydrosystems (CHS)<br/>
> Helmholtz Centre for Environmental Research - UFZ<br/>
> Permoserstr. 15<br/>
> 04318 Leipzig, Germany
This program is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License along with this program.
It can be found in the files `COPYING` and `COPYING.LESSER` provided with this software.
The complete GNU license text can also be found at <https://www.gnu.org/licenses/>.
## Redistribution
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, the list of conditions for redistribution and modification as well as the following GNU Lesser General Public License.
- Redistributions in binary form must reproduce the above copyright notice, this list of conditions, the following GNU Lesser General Public License and the modification conditions in the documentation and/or other materials provided with the distribution.
- Neither the name of Helmholtz-Zentrum für Umweltforschung GmbH - UFZ, nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
## Modification
If software is modified to produce derivative works, such modified software should be clearly marked, so as not to confuse it with the version available from Helmholtz-Zentrum für Umweltforschung GmbH - UFZ.
prune *
graft src
graft tests
include LICENSE README.md pyproject.toml setup.cfg
global-exclude __pycache__ *.py[cod] .*
@@ -4,15 +4,19 @@ FINAM is an open-source component-based model coupling framework for environment
It aims to enable bi-directional online coupling of models for different compartments such as the geo-, hydro-, pedo- and biosphere.
<a href="https://finam.pages.ufz.de" title="FINAM homepage" target="_blank">
<img width="300" src="https://git.ufz.de/FINAM/finam-book/-/raw/main/src/images/logo_large.svg" />
<img width="300" src="https://git.ufz.de/FINAM/finam/-/raw/main/docs/source/_static/logo_large.svg" />
</a>
The framework is built in Python, with well-defined interfaces for data exchange.
This approach allows for coupling of models irrespective of their internal structure, architecture or programming language.
### Resources
## Resources
* FINAM [homepage](https://finam.pages.ufz.de)
* FINAM [user and developer guide](https://finam.pages.ufz.de/finam-book/)
* FINAM [source code](https://git.ufz.de/FINAM/finam) and [API docs](https://finam.pages.ufz.de/finam/)
* FINAM [documentation](https://finam.pages.ufz.de/finam/)
* FINAM [source code](https://git.ufz.de/FINAM/finam)
* FINAM [GitLab group](https://git.ufz.de/FINAM), containing further related projects
## License
LGPLv3, Copyright © 2021-2025, the FINAM developers from Helmholtz-Zentrum für Umweltforschung GmbH - UFZ. All rights reserved.
# FINAM benchmarks
Micro-benchmarks for important FINAM functions, and profiling for full runs.
Note that plot panels have different units!
`ms` is milliseconds (1/1,000 second), `us` is microseconds (1/1,000,000 second).
Open images in a separate browser tab for tooltips showing exact values.
## Full runs
**Profiling data** for full runs can be found in the latest [job artifacts](https://git.ufz.de/FINAM/finam/-/jobs/artifacts/main/browse/prof?job=profile).
### Simple link, 365 steps
A simple run over one year with two coupled components using a daily time step.
Groups left to right:
* Using numpy arrays, no data copy, no units conversion
* Using numpy arrays, with data copy, no units conversion
* Using numpy arrays, no data copy, with units conversion
![tools](https://git.ufz.de/FINAM/finam/-/jobs/artifacts/main/raw/bench/bench-run-sim.svg?job=benchmark)
## SDK
### Push & pull
Push & pull using numpy arrays, with and without units conversion.
![sdk-io](https://git.ufz.de/FINAM/finam/-/jobs/artifacts/main/raw/bench/bench-sdk-io.svg?job=benchmark)
Push & pull with a memory limit of zero, i.e. everything is written to disk and re-read from file (see the sketch below the figure).
![sdk-io-mem](https://git.ufz.de/FINAM/finam/-/jobs/artifacts/main/raw/bench/bench-sdk-io-mem.svg?job=benchmark)
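One way to reproduce this zero-memory-limit setup is the composition-wide `slot_memory_limit` argument used by the profiling script elsewhere in this diff; a minimal sketch with the limit set to zero (whether the benchmark itself uses this argument or a per-output limit is not shown here):

```python
import datetime as dt

import finam as fm

start = dt.datetime(2000, 1, 1)
info = fm.Info(time=None, grid=fm.UniformGrid((64, 32)), units="m")

source = fm.components.CallbackGenerator(
    callbacks={"Out": (lambda t: fm.data.full(0.0, info), info.copy())},
    start=start,
    step=dt.timedelta(days=1),
)
sink = fm.components.DebugConsumer(
    inputs={"In": info.copy()}, start=start, step=dt.timedelta(days=1)
)

# a limit of zero means every pushed array is written to disk and re-read on pull
composition = fm.Composition([source, sink], slot_memory_limit=0)
source["Out"] >> sink["In"]
composition.run(end_time=dt.datetime(2000, 1, 10))
```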
## Data
### Tools
Functions in `data/tools`
![tools](https://git.ufz.de/FINAM/finam/-/jobs/artifacts/main/raw/bench/bench-data-tools.svg?job=benchmark)
Functions in `data/tools` with longer run time
![tools-slow](https://git.ufz.de/FINAM/finam/-/jobs/artifacts/main/raw/bench/bench-data-tools-slow.svg?job=benchmark)
### Grids
Grid creation
![create-grids](https://git.ufz.de/FINAM/finam/-/jobs/artifacts/main/raw/bench/bench-data-create-grids.svg?job=benchmark)
Grid functions
![grid-functions](https://git.ufz.de/FINAM/finam/-/jobs/artifacts/main/raw/bench/bench-data-grid-functions.svg?job=benchmark)
Grid functions with longer run time
![grid-functions-slow](https://git.ufz.de/FINAM/finam/-/jobs/artifacts/main/raw/bench/bench-data-grid-functions-slow.svg?job=benchmark)
## Adapters
### Regridding
Regridding adapters, dependent on grid size.
Regridding from a uniform grid to another uniform grid of the same size, with slightly offset origin.
For more performant regridding, see the
[ESMPy](https://earthsystemmodeling.org/esmpy/)-based regridding adapter in
[`finam-regrid`](https://git.ufz.de/FINAM/finam-regrid/)
([benchmarks](https://git.ufz.de/FINAM/finam-regrid/-/tree/main/benchmarks))
![adapters-regrid](https://git.ufz.de/FINAM/finam/-/jobs/artifacts/main/raw/bench/bench-adapters-regrid.svg?job=benchmark)
import datetime as dt
import unittest
import pytest
import finam as fm
class TestRegrid(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
def setup_adapter(self, grid1, grid2, adapter):
time = dt.datetime(2000, 1, 1)
self.data = fm.data.full(1.0, fm.Info(time=time, grid=grid1))
self.source = fm.components.CallbackGenerator(
callbacks={"Step": (lambda t: self.data, fm.Info(None, grid=grid1))},
start=time,
step=dt.timedelta(1.0),
)
self.adapter = adapter
self.source.initialize()
self.source.outputs["Step"] >> self.adapter
self.adapter.get_info(fm.Info(None, grid=grid2))
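        # the connect phase is iterative, so connect() is called twice
        # to complete the info and initial data exchange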
self.source.connect(time)
self.source.connect(time)
self.source.validate()
@pytest.mark.benchmark(group="adapters-regrid")
def test_regrid_nearest_01_32x16(self):
grid1 = fm.UniformGrid((32, 16))
grid2 = fm.UniformGrid((32, 16), origin=(0.25, 0.25))
self.setup_adapter(grid1, grid2, fm.adapters.RegridNearest())
_result = self.benchmark(
self.adapter.get_data, time=dt.datetime(2000, 1, 1), target=None
)
@pytest.mark.benchmark(group="adapters-regrid")
def test_regrid_nearest_02_512x256(self):
grid1 = fm.UniformGrid((512, 256))
grid2 = fm.UniformGrid((512, 256), origin=(0.25, 0.25))
self.setup_adapter(grid1, grid2, fm.adapters.RegridNearest())
_result = self.benchmark(
self.adapter.get_data, time=dt.datetime(2000, 1, 1), target=None
)
@pytest.mark.benchmark(group="adapters-regrid")
def test_regrid_nearest_03_1024x512(self):
grid1 = fm.UniformGrid((1024, 512))
grid2 = fm.UniformGrid((1024, 512), origin=(0.25, 0.25))
self.setup_adapter(grid1, grid2, fm.adapters.RegridNearest())
_result = self.benchmark(
self.adapter.get_data, time=dt.datetime(2000, 1, 1), target=None
)
@pytest.mark.benchmark(group="adapters-regrid")
def test_regrid_nearest_04_2048x1024(self):
grid1 = fm.UniformGrid((2048, 1024))
grid2 = fm.UniformGrid((2048, 1024), origin=(0.25, 0.25))
self.setup_adapter(grid1, grid2, fm.adapters.RegridNearest())
_result = self.benchmark(
self.adapter.get_data, time=dt.datetime(2000, 1, 1), target=None
)
@pytest.mark.benchmark(group="adapters-regrid")
def test_regrid_linear_01_32x16(self):
grid1 = fm.UniformGrid((32, 16))
grid2 = fm.UniformGrid((32, 16), origin=(0.25, 0.25))
self.setup_adapter(grid1, grid2, fm.adapters.RegridLinear())
_result = self.benchmark(
self.adapter.get_data, time=dt.datetime(2000, 1, 1), target=None
)
@pytest.mark.benchmark(group="adapters-regrid")
def test_regrid_linear_02_512x256(self):
grid1 = fm.UniformGrid((512, 256))
grid2 = fm.UniformGrid((512, 256), origin=(0.25, 0.25))
self.setup_adapter(grid1, grid2, fm.adapters.RegridLinear())
_result = self.benchmark(
self.adapter.get_data, time=dt.datetime(2000, 1, 1), target=None
)
@pytest.mark.benchmark(group="adapters-regrid")
def test_regrid_linear_03_1024x512(self):
grid1 = fm.UniformGrid((1024, 512))
grid2 = fm.UniformGrid((1024, 512), origin=(0.25, 0.25))
self.setup_adapter(grid1, grid2, fm.adapters.RegridLinear())
_result = self.benchmark(
self.adapter.get_data, time=dt.datetime(2000, 1, 1), target=None
)
@pytest.mark.benchmark(group="adapters-regrid")
def test_regrid_linear_04_2048x1024(self):
grid1 = fm.UniformGrid((2048, 1024))
grid2 = fm.UniformGrid((2048, 1024), origin=(0.25, 0.25))
self.setup_adapter(grid1, grid2, fm.adapters.RegridLinear())
_result = self.benchmark(
self.adapter.get_data, time=dt.datetime(2000, 1, 1), target=None
)
import unittest
import numpy as np
import pytest
import finam as fm
class TestCreateUniform(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
def create_grid(self, size):
return fm.UniformGrid(size)
@pytest.mark.benchmark(group="data-create-grids")
def test_create_uniform_01_2x1(self):
_result = self.benchmark(self.create_grid, size=(2, 1))
@pytest.mark.benchmark(group="data-create-grids")
def test_create_uniform_02_512x256(self):
_result = self.benchmark(self.create_grid, size=(512, 256))
@pytest.mark.benchmark(group="data-create-grids")
def test_create_uniform_03_2048x1024(self):
_result = self.benchmark(self.create_grid, size=(2048, 1024))
class TestCreateRectilinear(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
def create_grid(self, axes):
return fm.RectilinearGrid(axes)
@pytest.mark.benchmark(group="data-create-grids")
def test_create_rectilinear_01_2x1(self):
axes = [np.asarray(range(2)), np.asarray(range(1))]
_result = self.benchmark(self.create_grid, axes=axes)
@pytest.mark.benchmark(group="data-create-grids")
def test_create_rectilinear_02_512x256(self):
axes = [np.asarray(range(512)), np.asarray(range(256))]
_result = self.benchmark(self.create_grid, axes=axes)
@pytest.mark.benchmark(group="data-create-grids")
def test_create_rectilinear_03_2048x1024(self):
axes = [np.asarray(range(2048)), np.asarray(range(1024))]
_result = self.benchmark(self.create_grid, axes=axes)
class TestGridFunctionsSimple(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
def get_cell_axes(self, grid):
return grid.cell_axes
@pytest.mark.benchmark(group="data-grid-functions")
def test_cell_axes_01_2x1(self):
grid = fm.UniformGrid((2, 1))
_result = self.benchmark(self.get_cell_axes, grid=grid)
@pytest.mark.benchmark(group="data-grid-functions")
def test_cell_axes_02_512x256(self):
grid = fm.UniformGrid((512, 256))
_result = self.benchmark(self.get_cell_axes, grid=grid)
@pytest.mark.benchmark(group="data-grid-functions")
def test_cell_axes_03_2048x1024(self):
grid = fm.UniformGrid((2048, 1024))
_result = self.benchmark(self.get_cell_axes, grid=grid)
class TestGridFunctionsSlow(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
def get_points(self, grid):
return grid.points
@pytest.mark.benchmark(group="data-grid-functions-slow")
def test_points_01_2x1(self):
grid = fm.UniformGrid((2, 1))
_result = self.benchmark(self.get_points, grid=grid)
@pytest.mark.benchmark(group="data-grid-functions-slow")
def test_points_02_512x256(self):
grid = fm.UniformGrid((512, 256))
_result = self.benchmark(self.get_points, grid=grid)
@pytest.mark.benchmark(group="data-grid-functions-slow")
def test_points_03_2048x1024(self):
grid = fm.UniformGrid((2048, 1024))
_result = self.benchmark(self.get_points, grid=grid)
def get_cell_centers(self, grid):
return grid.cell_centers
@pytest.mark.benchmark(group="data-grid-functions-slow")
def test_cell_centers_01_2x1(self):
grid = fm.UniformGrid((2, 1))
_result = self.benchmark(self.get_cell_centers, grid=grid)
@pytest.mark.benchmark(group="data-grid-functions-slow")
def test_cell_centers_02_512x256(self):
grid = fm.UniformGrid((512, 256))
_result = self.benchmark(self.get_cell_centers, grid=grid)
@pytest.mark.benchmark(group="data-grid-functions-slow")
def test_cell_centers_03_2048x1024(self):
grid = fm.UniformGrid((2048, 1024))
_result = self.benchmark(self.get_cell_centers, grid=grid)
def get_cells(self, grid):
return grid.cells
@pytest.mark.benchmark(group="data-grid-functions-slow")
def test_cells_01_2x1(self):
grid = fm.UniformGrid((2, 1))
_result = self.benchmark(self.get_cells, grid=grid)
@pytest.mark.benchmark(group="data-grid-functions-slow")
def test_cells_02_512x256(self):
grid = fm.UniformGrid((512, 256))
_result = self.benchmark(self.get_cells, grid=grid)
@pytest.mark.benchmark(group="data-grid-functions-slow")
def test_cells_03_2048x1024(self):
grid = fm.UniformGrid((2048, 1024))
_result = self.benchmark(self.get_cells, grid=grid)
import datetime as dt
import unittest
import numpy as np
import pytest
import finam as fm
from finam.data.tools import (
check,
compatible_units,
equivalent_units,
full,
full_like,
get_magnitude,
get_units,
is_quantified,
prepare,
strip_time,
to_units,
)
class TestCheck(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
@pytest.mark.benchmark(group="data-tools")
def test_check_01_2x1(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2, 1)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(check, xdata=xdata, info=info)
@pytest.mark.benchmark(group="data-tools")
def test_check_02_512x256(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((512, 256)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(check, xdata=xdata, info=info)
class TestPrepare(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
def copy_numpy_prepare(self, data, info):
return prepare(np.copy(data), info=info)
@pytest.mark.benchmark(group="data-tools")
def test_prepare_np_01_2x1(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2, 1)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(prepare, data=xdata, info=info)
@pytest.mark.benchmark(group="data-tools")
def test_prepare_np_02_512x256(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((512, 256)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(prepare, data=xdata, info=info)
@pytest.mark.benchmark(group="data-tools")
def test_prepare_np_03_2048x1024(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2048, 1024)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(prepare, data=xdata, info=info)
@pytest.mark.benchmark(group="data-tools")
def test_prepare_np_units_01_2x1(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2, 1)), units="m")
xdata = full(0.0, info).magnitude
_result = self.benchmark(prepare, data=xdata, info=info)
@pytest.mark.benchmark(group="data-tools")
def test_prepare_np_units_02_512x256(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((512, 256)), units="m")
xdata = full(0.0, info).magnitude
_result = self.benchmark(prepare, data=xdata, info=info)
@pytest.mark.benchmark(group="data-tools")
def test_prepare_np_units_03_2048x1024(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2048, 1024)), units="m")
xdata = full(0.0, info).magnitude
_result = self.benchmark(prepare, data=xdata, info=info)
@pytest.mark.benchmark(group="data-tools-slow")
def test_cp_prepare_np_01_2x1(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2, 1)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(self.copy_numpy_prepare, data=xdata, info=info)
@pytest.mark.benchmark(group="data-tools-slow")
def test_cp_prepare_np_02_512x256(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((512, 256)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(self.copy_numpy_prepare, data=xdata, info=info)
@pytest.mark.benchmark(group="data-tools-slow")
def test_cp_prepare_np_03_1024x512(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((1024, 512)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(self.copy_numpy_prepare, data=xdata, info=info)
@pytest.mark.benchmark(group="data-tools-slow")
def test_cp_prepare_np_04_2048x1024(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2048, 1024)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(self.copy_numpy_prepare, data=xdata, info=info)
class TestFull(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
@pytest.mark.benchmark(group="data-tools-slow")
def test_full_01_2x1(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2, 1)), units="m")
_result = self.benchmark(full, value=0.0, info=info)
@pytest.mark.benchmark(group="data-tools-slow")
def test_full_02_512x256(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((512, 256)), units="m")
_result = self.benchmark(full, value=0.0, info=info)
@pytest.mark.benchmark(group="data-tools-slow")
def test_full_03_2048x1024(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2048, 1024)), units="m")
_result = self.benchmark(full, value=0.0, info=info)
class TestFullLike(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
@pytest.mark.benchmark(group="data-tools-slow")
def test_full_like_01_2x1(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2, 1)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(full_like, xdata=xdata, value=0.0)
@pytest.mark.benchmark(group="data-tools-slow")
def test_full_like_02_512x256(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((512, 256)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(full_like, xdata=xdata, value=0.0)
@pytest.mark.benchmark(group="data-tools-slow")
def test_full_like_03_2048x1024(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2048, 1024)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(full_like, xdata=xdata, value=0.0)
class TestTimeTools(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
@pytest.mark.benchmark(group="data-tools")
def test_strip_time(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2, 1)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(strip_time, xdata=xdata, grid=info.grid)
class TestUnitsTools(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
@pytest.mark.benchmark(group="data-tools")
def test_get_units(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2, 1)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(get_units, xdata=xdata)
@pytest.mark.benchmark(group="data-tools")
def test_is_quantified(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2, 1)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(is_quantified, xdata=xdata)
@pytest.mark.benchmark(group="data-tools")
def test_equivalent_units_true(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2, 1)), units="mm")
xdata = full(0.0, info)
_result = equivalent_units(xdata, fm.UNITS.Unit("L/m^2"))
result = self.benchmark(
equivalent_units, unit1=xdata, unit2=fm.UNITS.Unit("L/m^2")
)
self.assertTrue(result)
@pytest.mark.benchmark(group="data-tools")
def test_equivalent_units_false(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2, 1)), units="mm")
xdata = full(0.0, info)
_result = equivalent_units(xdata, fm.UNITS.meter)
result = self.benchmark(equivalent_units, unit1=xdata, unit2=fm.UNITS.meter)
self.assertFalse(result)
@pytest.mark.benchmark(group="data-tools")
def test_compatible_units(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2, 1)), units="mm")
xdata = full(0.0, info)
_result = compatible_units(xdata, fm.UNITS.kilometer)
_result = self.benchmark(
compatible_units, unit1=xdata, unit2=fm.UNITS.kilometer
)
@pytest.mark.benchmark(group="data-tools-slow")
def test_to_units_01_2x1(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2, 1)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(to_units, xdata=xdata, units="in")
@pytest.mark.benchmark(group="data-tools-slow")
def test_to_units_02_512x256(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((512, 256)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(to_units, xdata=xdata, units="in")
@pytest.mark.benchmark(group="data-tools-slow")
def test_to_units_03_2048x1024(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2048, 1024)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(to_units, xdata=xdata, units="in")
@pytest.mark.benchmark(group="data-tools")
def test_to_units_noop_01_2x1(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2, 1)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(to_units, xdata=xdata, units="m")
@pytest.mark.benchmark(group="data-tools")
def test_to_units_noop_02_512x256(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((512, 256)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(to_units, xdata=xdata, units="m")
@pytest.mark.benchmark(group="data-tools")
def test_to_units_noop_03_2048x1024(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2048, 1024)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(to_units, xdata=xdata, units="m")
@pytest.mark.benchmark(group="data-tools")
def test_get_magnitude_01_2x1(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2, 1)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(get_magnitude, xdata=xdata)
@pytest.mark.benchmark(group="data-tools")
def test_get_magnitude_02_512x256(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((512, 256)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(get_magnitude, xdata=xdata)
@pytest.mark.benchmark(group="data-tools")
def test_get_magnitude_03_2048x1024(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2048, 1024)), units="m")
xdata = full(0.0, info)
_result = self.benchmark(get_magnitude, xdata=xdata)
@pytest.mark.benchmark(group="data-tools-slow")
def test_set_units_mul_01_512x256(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((512, 256)), units="m")
xdata = full(0.0, info).magnitude
_result = self.benchmark(set_units_mul, data=xdata, units=fm.UNITS.Unit("m"))
@pytest.mark.benchmark(group="data-tools-slow")
def test_set_units_mul_02_1024x512(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((1024, 512)), units="m")
xdata = full(0.0, info).magnitude
_result = self.benchmark(set_units_mul, data=xdata, units=fm.UNITS.Unit("m"))
@pytest.mark.benchmark(group="data-tools-slow")
def test_set_units_mul_03_2048x1024(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2048, 1024)), units="m")
xdata = full(0.0, info).magnitude
_result = self.benchmark(set_units_mul, data=xdata, units=fm.UNITS.Unit("m"))
@pytest.mark.benchmark(group="data-tools-slow")
def test_set_units_quant_01_512x256(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((512, 256)), units="m")
xdata = full(0.0, info).magnitude
_result = self.benchmark(set_units_qua, data=xdata, units=fm.UNITS.Unit("m"))
@pytest.mark.benchmark(group="data-tools-slow")
def test_set_units_quant_02_1024x512(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((1024, 512)), units="m")
xdata = full(0.0, info).magnitude
_result = self.benchmark(set_units_qua, data=xdata, units=fm.UNITS.Unit("m"))
@pytest.mark.benchmark(group="data-tools-slow")
def test_set_units_quant_03_2048x1024(self):
time = dt.datetime(2000, 1, 1)
info = fm.Info(time=time, grid=fm.UniformGrid((2048, 1024)), units="m")
xdata = full(0.0, info).magnitude
_result = self.benchmark(set_units_qua, data=xdata, units=fm.UNITS.Unit("m"))
def set_units_mul(data, units):
return units * data
def set_units_qua(data, units):
return fm.UNITS.Quantity(data, units)
import os.path
import tempfile
import unittest
import numpy as np
import pytest
import finam as fm
class TestSaveLoad(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
@pytest.mark.benchmark(group="np-save-load")
def test_save_01_64x32(self):
xdata = np.full((1, 64, 32), 1.0, dtype=np.dtype(np.float64))
with tempfile.TemporaryDirectory() as d:
fp = os.path.join(d, "temp.npy")
_result = self.benchmark(np.save, file=fp, arr=xdata)
@pytest.mark.benchmark(group="np-save-load")
def test_save_02_512x256(self):
xdata = np.full((1, 512, 256), 1.0, dtype=np.dtype(np.float64))
with tempfile.TemporaryDirectory() as d:
fp = os.path.join(d, "temp.npy")
_result = self.benchmark(np.save, file=fp, arr=xdata)
@pytest.mark.benchmark(group="np-save-load")
def test_save_03_1024x512(self):
xdata = np.full((1, 1024, 512), 1.0, dtype=np.dtype(np.float64))
with tempfile.TemporaryDirectory() as d:
fp = os.path.join(d, "temp.npy")
_result = self.benchmark(np.save, file=fp, arr=xdata)
@pytest.mark.benchmark(group="np-save-load")
def test_save_04_2048x1024(self):
xdata = np.full((1, 2048, 1024), 1.0, dtype=np.dtype(np.float64))
with tempfile.TemporaryDirectory() as d:
fp = os.path.join(d, "temp.npy")
_result = self.benchmark(np.save, file=fp, arr=xdata)
@pytest.mark.benchmark(group="np-save-load")
def test_load_01_64x32(self):
xdata = np.full((1, 64, 32), 1.0, dtype=np.dtype(np.float64))
with tempfile.TemporaryDirectory() as d:
fp = os.path.join(d, "temp.npy")
np.save(fp, xdata)
_result = self.benchmark(np.load, file=fp)
@pytest.mark.benchmark(group="np-save-load")
def test_load_02_512x256(self):
xdata = np.full((1, 512, 256), 1.0, dtype=np.dtype(np.float64))
with tempfile.TemporaryDirectory() as d:
fp = os.path.join(d, "temp.npy")
np.save(fp, xdata)
_result = self.benchmark(np.load, file=fp)
@pytest.mark.benchmark(group="np-save-load")
def test_load_03_1024x512(self):
xdata = np.full((1, 1024, 512), 1.0, dtype=np.dtype(np.float64))
with tempfile.TemporaryDirectory() as d:
fp = os.path.join(d, "temp.npy")
np.save(fp, xdata)
_result = self.benchmark(np.load, file=fp)
@pytest.mark.benchmark(group="np-save-load")
def test_load_04_2048x1024(self):
xdata = np.full((1, 2048, 1024), 1.0, dtype=np.dtype(np.float64))
with tempfile.TemporaryDirectory() as d:
fp = os.path.join(d, "temp.npy")
np.save(fp, xdata)
_result = self.benchmark(np.load, file=fp)
import cProfile
import datetime as dt
import io
import pstats
import sys
import time
import numpy as np
import finam as fm
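# Profiling setup: two components coupled via a single link, exchanging a
# 1024x1024 uniform grid. The source pushes daily, the sink pulls yearly,
# the run covers two years, and slots are limited to 500 MiB of memory.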
def run_model():
t = time.time()
start_time = dt.datetime(2000, 1, 1)
end_time = dt.datetime(2002, 1, 1)
size = (1024, 1024)
info1 = fm.Info(time=None, grid=fm.UniformGrid(size), units="m")
data = fm.data.prepare(fm.data.full(0.0, info1), info1)
def gen_data(t):
return np.copy(data)
source = fm.components.CallbackGenerator(
callbacks={"Out": (gen_data, info1.copy())},
start=start_time,
step=dt.timedelta(days=1),
)
sink = fm.components.DebugConsumer(
inputs={
"In": info1.copy(),
},
start=start_time,
step=dt.timedelta(days=365),
)
composition = fm.Composition([source, sink], slot_memory_limit=500 * 2**20)
source["Out"] >> sink["In"]
composition.run(end_time=end_time)
print("Total time:", time.time() - t)
if __name__ == "__main__":
pr = cProfile.Profile()
pr.enable()
run_model()
pr.disable()
s = io.StringIO()
ps = pstats.Stats(pr, stream=s).sort_stats(pstats.SortKey.CUMULATIVE)
ps.dump_stats(sys.argv[1])
"""Simple coupling setup for profiling, using numpy arrays.
Two components, coupled via a single link.
Simulation runs for 1 year with a daily step in both components.
Components exchange a 128x64 uniform grid.
"""
import cProfile
import datetime as dt
import io
import pstats
import sys
import finam as fm
def run_model():
start_time = dt.datetime(2000, 1, 1)
end_time = dt.datetime(2000, 12, 31)
counter = 0
size = (128, 64)
info1 = fm.Info(time=None, grid=fm.UniformGrid(size), units="m")
info2 = fm.Info(time=None, grid=fm.UniformGrid(size), units="m")
data = [
fm.data.full(0.0, info1),
fm.data.full(0.0, info1),
]
def gen_data(t):
nonlocal counter
d = data[counter % 2]
counter += 1
return d
source = fm.components.CallbackGenerator(
callbacks={"Out": (gen_data, info1.copy())},
start=start_time,
step=dt.timedelta(days=1),
)
sink = fm.components.DebugConsumer(
inputs={
"In": info2.copy(),
},
start=start_time,
step=dt.timedelta(days=1),
)
composition = fm.Composition([source, sink])
source["Out"] >> sink["In"]
composition.run(end_time=end_time)
def run_model_multi(n):
for _ in range(n):
run_model()
if __name__ == "__main__":
pr = cProfile.Profile()
pr.enable()
run_model_multi(10)
pr.disable()
s = io.StringIO()
ps = pstats.Stats(pr, stream=s).sort_stats(pstats.SortKey.CUMULATIVE)
ps.dump_stats(sys.argv[1])
import io
import os
import pstats
def _convert_to_csv(in_path, out_path):
result = io.StringIO()
pstats.Stats(in_path, stream=result).print_stats()
result = result.getvalue()
result = "ncalls" + result.split("ncalls")[-1]
result = "\n".join(
[",".join(line.rstrip().split(None, 5)) for line in result.split("\n")]
)
with open(out_path, "w+") as f:
f.write(result)
f.close()
if __name__ == "__main__":
path = "prof/"
for file in os.listdir(path):
if file.endswith(".pstats"):
in_file = os.path.join(path, file)
out_file = os.path.join(path, file.replace(".pstats", ".csv"))
_convert_to_csv(in_file, out_file)