Full Code of justusschock/delira

Repository: justusschock/delira
Branch: master
Commit: cd3ad277d6fa
Files: 236
Total size: 1.2 MB

Directory structure:
justusschock-delira/

├── .codecov.yml
├── .gitattributes
├── .github/
│   └── ISSUE_TEMPLATE/
│       ├── bug_report.md
│       ├── feature_request.md
│       └── question.md
├── .gitignore
├── .readthedocs.yml
├── .travis.yml
├── AUTHORS.rst
├── CODEOWNERS
├── CONTRIBUTING.md
├── LICENSE
├── MANIFEST.in
├── README.md
├── delira/
│   ├── __init__.py
│   ├── _backends.py
│   ├── _debug_mode.py
│   ├── _version.py
│   ├── data_loading/
│   │   ├── __init__.py
│   │   ├── augmenter.py
│   │   ├── data_loader.py
│   │   ├── data_manager.py
│   │   ├── dataset.py
│   │   ├── load_utils.py
│   │   ├── numba_transform.py
│   │   └── sampler/
│   │       ├── __init__.py
│   │       ├── abstract.py
│   │       ├── batch.py
│   │       ├── random.py
│   │       ├── sequential.py
│   │       └── weighted.py
│   ├── io/
│   │   ├── __init__.py
│   │   ├── chainer.py
│   │   ├── sklearn.py
│   │   ├── tf.py
│   │   └── torch.py
│   ├── logging/
│   │   ├── __init__.py
│   │   ├── base_backend.py
│   │   ├── base_logger.py
│   │   ├── logging_context.py
│   │   ├── registry.py
│   │   ├── tensorboard_backend.py
│   │   ├── visdom_backend.py
│   │   └── writer_backend.py
│   ├── models/
│   │   ├── __init__.py
│   │   ├── abstract_network.py
│   │   └── backends/
│   │       ├── __init__.py
│   │       ├── chainer/
│   │       │   ├── __init__.py
│   │       │   ├── abstract_network.py
│   │       │   └── data_parallel.py
│   │       ├── sklearn/
│   │       │   ├── __init__.py
│   │       │   └── abstract_network.py
│   │       ├── tf_eager/
│   │       │   ├── __init__.py
│   │       │   ├── abstract_network.py
│   │       │   └── data_parallel.py
│   │       ├── tf_graph/
│   │       │   ├── __init__.py
│   │       │   └── abstract_network.py
│   │       ├── torch/
│   │       │   ├── __init__.py
│   │       │   ├── abstract_network.py
│   │       │   ├── data_parallel.py
│   │       │   └── utils.py
│   │       └── torchscript/
│   │           ├── __init__.py
│   │           └── abstract_network.py
│   ├── training/
│   │   ├── __init__.py
│   │   ├── backends/
│   │   │   ├── __init__.py
│   │   │   ├── chainer/
│   │   │   │   ├── __init__.py
│   │   │   │   ├── experiment.py
│   │   │   │   ├── trainer.py
│   │   │   │   └── utils.py
│   │   │   ├── sklearn/
│   │   │   │   ├── __init__.py
│   │   │   │   ├── experiment.py
│   │   │   │   ├── trainer.py
│   │   │   │   └── utils.py
│   │   │   ├── tf_eager/
│   │   │   │   ├── __init__.py
│   │   │   │   ├── experiment.py
│   │   │   │   ├── trainer.py
│   │   │   │   └── utils.py
│   │   │   ├── tf_graph/
│   │   │   │   ├── __init__.py
│   │   │   │   ├── experiment.py
│   │   │   │   ├── trainer.py
│   │   │   │   └── utils.py
│   │   │   ├── torch/
│   │   │   │   ├── __init__.py
│   │   │   │   ├── experiment.py
│   │   │   │   ├── trainer.py
│   │   │   │   └── utils.py
│   │   │   └── torchscript/
│   │   │       ├── __init__.py
│   │   │       ├── experiment.py
│   │   │       └── trainer.py
│   │   ├── base_experiment.py
│   │   ├── base_trainer.py
│   │   ├── callbacks/
│   │   │   ├── __init__.py
│   │   │   ├── abstract_callback.py
│   │   │   ├── early_stopping.py
│   │   │   ├── logging_callback.py
│   │   │   └── pytorch_schedulers.py
│   │   ├── losses.py
│   │   ├── metrics.py
│   │   ├── predictor.py
│   │   └── utils.py
│   └── utils/
│       ├── __init__.py
│       ├── codecs.py
│       ├── config.py
│       ├── context_managers.py
│       ├── decorators.py
│       ├── dict_reductions.py
│       ├── messenger.py
│       ├── path.py
│       └── time.py
├── docker/
│   └── Dockerfile
├── docs/
│   ├── Makefile
│   ├── _api/
│   │   └── _build/
│   │       ├── delira/
│   │       │   ├── backend_resolution.rst
│   │       │   ├── class_hierarchy.rst
│   │       │   ├── data_loading/
│   │       │   │   ├── arbitrary_data.rst
│   │       │   │   ├── data_loading.rst
│   │       │   │   ├── dataloader.rst
│   │       │   │   ├── datamanager.rst
│   │       │   │   ├── dataset.rst
│   │       │   │   ├── nii.rst
│   │       │   │   ├── sampler.rst
│   │       │   │   └── utils.rst
│   │       │   ├── debug_mode.rst
│   │       │   ├── delira.io.rst
│   │       │   ├── delira.rst
│   │       │   ├── delira.utils.rst
│   │       │   ├── logging/
│   │       │   │   ├── backends.rst
│   │       │   │   ├── base_logger.rst
│   │       │   │   ├── handlers.rst
│   │       │   │   ├── logging.rst
│   │       │   │   ├── logging_context.py
│   │       │   │   ├── logging_context.rst
│   │       │   │   ├── registry.py
│   │       │   │   ├── registry.rst
│   │       │   │   ├── tensorboard_backend.py
│   │       │   │   ├── visdom_backend.py
│   │       │   │   └── writer_backend.py
│   │       │   ├── models/
│   │       │   │   ├── chainer.rst
│   │       │   │   ├── models.rst
│   │       │   │   ├── sklearn.rst
│   │       │   │   ├── tfeager.rst
│   │       │   │   ├── tfgraph.rst
│   │       │   │   ├── torch.rst
│   │       │   │   └── torchscript.rst
│   │       │   └── training/
│   │       │       ├── backends/
│   │       │       │   ├── backends.rst
│   │       │       │   ├── chainer.rst
│   │       │       │   ├── sklearn.rst
│   │       │       │   ├── tfeager.rst
│   │       │       │   ├── tfgraph.rst
│   │       │       │   ├── torch.rst
│   │       │       │   └── torchscript.rst
│   │       │       ├── callbacks.rst
│   │       │       ├── experiment.rst
│   │       │       ├── losses.rst
│   │       │       ├── metrics.rst
│   │       │       ├── parameters.rst
│   │       │       ├── predictor.rst
│   │       │       ├── trainer.rst
│   │       │       ├── training.rst
│   │       │       └── utils.rst
│   │       └── modules.rst
│   ├── classification_pytorch.rst
│   ├── conda.yml
│   ├── conf.py
│   ├── custom_backend.rst
│   ├── gan_pytorch.rst
│   ├── getting_started.rst
│   ├── index.rst
│   ├── requirements.txt
│   ├── segmentation_2d_pytorch.rst
│   ├── segmentation_3d_pytorch.rst
│   └── tutorial_delira.rst
├── notebooks/
│   ├── classification_examples/
│   │   ├── chainer.ipynb
│   │   ├── pytorch.ipynb
│   │   ├── sklearn.ipynb
│   │   ├── tf_eager.ipynb
│   │   ├── tf_graph.ipynb
│   │   └── torchscript.ipynb
│   ├── custom_backend.ipynb
│   ├── gan_pytorch.ipynb
│   ├── segmentation_2d_pytorch.ipynb
│   ├── segmentation_3d_pytorch.ipynb
│   └── tutorial_delira.ipynb
├── paper/
│   ├── paper.bib
│   └── paper.md
├── pytest.ini
├── requirements/
│   ├── base.txt
│   ├── chainer.txt
│   ├── tensorflow.txt
│   └── torch.txt
├── scripts/
│   └── ci/
│       ├── build_docs.sh
│       ├── install_before_docs.sh
│       ├── install_before_style_check.sh
│       ├── install_before_tests.sh
│       ├── run_style_checks.sh
│       └── run_tests.sh
├── setup.cfg
├── setup.py
├── tests/
│   ├── __init__.py
│   ├── data_loading/
│   │   ├── __init__.py
│   │   ├── test_augmenters.py
│   │   ├── test_data_loader.py
│   │   ├── test_data_manager.py
│   │   ├── test_dataset.py
│   │   ├── test_numba_transforms.py
│   │   ├── test_sampler.py
│   │   └── utils.py
│   ├── io/
│   │   ├── __init__.py
│   │   ├── test_chainer.py
│   │   ├── test_sklearn.py
│   │   ├── test_tf.py
│   │   └── test_torch.py
│   ├── logging/
│   │   ├── __init__.py
│   │   ├── test_logging_frequency.py
│   │   ├── test_logging_outside_trainer.py
│   │   └── test_single_threaded_logging.py
│   ├── models/
│   │   ├── __init__.py
│   │   ├── data_parallel/
│   │   │   ├── __init__.py
│   │   │   ├── test_chainer.py
│   │   │   └── test_torch.py
│   │   └── test_abstract_models.py
│   ├── training/
│   │   ├── __init__.py
│   │   ├── backends/
│   │   │   ├── __init__.py
│   │   │   ├── test_chainer.py
│   │   │   ├── test_sklearn.py
│   │   │   ├── test_tf_eager.py
│   │   │   ├── test_tf_graph.py
│   │   │   ├── test_torch.py
│   │   │   ├── test_torchscript.py
│   │   │   └── utils.py
│   │   ├── test_losses_torch.py
│   │   └── test_metrics.py
│   └── utils/
│       ├── __init__.py
│       ├── dict_reductions.py
│       ├── test_codecs.py
│       ├── test_config.py
│       └── test_messenger.py
└── versioneer.py

================================================
FILE CONTENTS
================================================

================================================
FILE: .codecov.yml
================================================
comment: off

coverage:
  status:
    project:
      default:
        target: auto
        threshold: 0.50
        base: auto
    patch: off
ignore:
  - "tests/"
  - "notebooks/"
  - "*/__init.py"
  


================================================
FILE: .gitattributes
================================================
delira/_version.py export-subst


================================================
FILE: .github/ISSUE_TEMPLATE/bug_report.md
================================================
---
name: Bug report
about: Report a bug and give us a minimal example to reproduce it
title: "[Bug]"
labels: bug
assignees: ''

---

**Description**
What happens? What should happen?

**Environment**
* OS:
* Python version:
* `delira` version
* How did you install `delira`? [ pip | source | conda | docker ]

**Reproduction**
Give us a minimal example to reproduce the error

**Additional context**
Add any other context about the problem here.


================================================
FILE: .github/ISSUE_TEMPLATE/feature_request.md
================================================
---
name: Feature request
about: Request a feature
title: "[FeatureRequest]"
labels: new feature
assignees: ''

---

**Description**
What should be added/changed?

**Feature History**
What have you tried so far?

**Proposal**
How could the feature be implemented? 
Are you able/willing to implement the feature yourself (with some guidance from us)?

**Additional context**
Add any other context about the feature request here.


================================================
FILE: .github/ISSUE_TEMPLATE/question.md
================================================
---
name: Question
about: Ask a question/for support
title: "[Question]"
labels: question
assignees: ''

---

**Description**
What happens? What should happen?

**Environment**
* OS:
* Python version:
* `delira` version
* How did you install `delira`? [ pip | source | conda | docker ]
* Machine Specs:
* Minimal working Example:


================================================
FILE: .gitignore
================================================
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
#  Usually these files are written by a python script from a template
#  before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/

# pycharm
.idea/
.DS_Store

.idea
.vscode
.pytest_cache

# delira config
*/.delira

# backend extensions
*.pkl
events.*
*.pt
*.pth
*.ptj
*.chain
*.meta

# Test results
*/UnnamedExperiment/*


================================================
FILE: .readthedocs.yml
================================================
# .readthedocs.yml
version: 2

formats: 
    - epub
    - pdf
    - htmlzip

# python:
#     version: 3.7
#     install:
#         - requirements: docs/requirements.txt
#         - method: setuptools
#     system_packages: false

build:
  image: latest
 
conda:
    environment: docs/conda.yml


================================================
FILE: .travis.yml
================================================
language: python

matrix:
    include:
        # basic tests without a backend
        - name: "Unittests Python 3.5 No Backend"
          python: 3.5
          dist: xenial
          env:
              - TEST_TYPE="unittests"
              - BACKEND="None"
        - name: "Unittests Python 3.6 No Backend"
          python: 3.6
          dist: xenial
          env:
              - TEST_TYPE="unittests"
              - BACKEND="None"
        - name: "Unittests Python 3.7 No Backend"
          python: 3.7
          dist: xenial
          env:
              - TEST_TYPE="unittests"
              - BACKEND="None"
    # SCIKIT-LEARN BACKEND TESTS
        - name: "Unittests Python 3.5 Sklearn Backend"
          python: 3.5
          dist: xenial
          env:
              - TEST_TYPE="unittests"
              - BACKEND="Sklearn"
        - name: "Unittests Python 3.6 Sklearn Backend"
          python: 3.6
          dist: xenial
          env:
              - TEST_TYPE="unittests"
              - BACKEND="Sklearn"
        - name: "Unittests Python 3.7 Sklearn Backend"
          python: 3.7
          dist: xenial
          env:
              - TEST_TYPE="unittests"
              - BACKEND="Sklearn"
              
    # TENSORFLOW EAGER BACKEND TESTS
        - name: "Unittests Python 3.5 TF Eager Backend"
          python: 3.5
          dist: xenial
          env:
              - TEST_TYPE="unittests"
              - BACKEND="TFEager"
        - name: "Unittests Python 3.6 TF Eager Backend"
          python: 3.6
          dist: xenial
          env:
              - TEST_TYPE="unittests"
              - BACKEND="TFEager"
        - name: "Unittests Python 3.7 TF Eager Backend"
          python: 3.7
          dist: xenial
          env:
              - TEST_TYPE="unittests"
              - BACKEND="TFEager"

        # TENSORFLOW GRAPH BACKEND TESTS
        -   name: "Unittests Python 3.5 TF Graph Backend"
            python: 3.5
            dist: xenial
            env:
                - TEST_TYPE="unittests"
                - BACKEND="TFGraph"
        -   name: "Unittests Python 3.6 TF Graph Backend"
            python: 3.6
            dist: xenial
            env:
                - TEST_TYPE="unittests"
                - BACKEND="TFGraph"
        -   name: "Unittests Python 3.7 TF Graph Backend"
            python: 3.7
            dist: xenial
            env:
                - TEST_TYPE="unittests"
                - BACKEND="TFGraph"

      # PYTORCH BACKEND TESTS
        - name: "Unittests Python 3.5 Torch Backend"
          python: 3.5
          dist: xenial
          env:
              - TEST_TYPE="unittests"
              - BACKEND="Torch"
        - name: "Unittests Python 3.6 Torch Backend"
          python: 3.6
          dist: xenial
          env:
              - TEST_TYPE="unittests"
              - BACKEND="Torch"
        - name: "Unittests Python 3.7 Torch Backend"
          python: 3.7
          dist: xenial
          env:
              - TEST_TYPE="unittests"
              - BACKEND="Torch"

        # TORCHSCRIPT BACKEND TESTS
        -   name: "Unittests Python 3.5 TorchScript Backend"
            python: 3.5
            dist: xenial
            env:
                - TEST_TYPE="unittests"
                - BACKEND="TorchScript"
        -   name: "Unittests Python 3.6 TorchScript Backend"
            python: 3.6
            dist: xenial
            env:
                - TEST_TYPE="unittests"
                - BACKEND="TorchScript"
        -   name: "Unittests Python 3.7 TorchScript Backend"
            python: 3.7
            dist: xenial
            env:
                - TEST_TYPE="unittests"
                - BACKEND="TorchScript"

      # CHAINER BACKEND TESTS
        - name: "Unittests Python 3.5 Chainer Backend"
          python: 3.5
          dist: xenial
          env:
              - TEST_TYPE="unittests"
              - BACKEND="Chainer"
        - name: "Unittests Python 3.6 Chainer Backend"
          python: 3.6
          dist: xenial
          env:
              - TEST_TYPE="unittests"
              - BACKEND="Chainer"
        - name: "Unittests Python 3.7 Chainer Backend"
          python: 3.7
          dist: xenial
          env:
              - TEST_TYPE="unittests"
              - BACKEND="Chainer"
              
      # STATIC CHECKS
        - name: "Static Style Checks"
          python: 3.7
          dist: xenial
          env:
              - TEST_TYPE="style-check"
        - name: "Documentation"
          python: 3.7
          dist: xenial
          env:
              - TEST_TYPE="docs"

# command to install dependencies
before_install:
    - if [[ "$TEST_TYPE" == "unittests" ]]; then
          bash scripts/ci/install_before_tests.sh;
      elif [[ "$TEST_TYPE" == "docs" ]]; then
          bash scripts/ci/install_before_docs.sh;
      else
          bash scripts/ci/install_before_style_check.sh;
          pip install -r docs/requirements.txt;
      fi

install:
    - pip install --no-deps .
  
# command to run tests
script:
    # run tests or stylechecks
    - if [[ "$TEST_TYPE" == "unittests" ]]; then
          bash scripts/ci/run_tests.sh;
      elif [[ "$TEST_TYPE" == "docs" ]]; then
          bash scripts/ci/build_docs.sh;
      else
          bash scripts/ci/run_style_checks.sh;
      fi

after_script:
  - if [[ "$TEST_TYPE" == "unittests" ]]; then
      codecov;
    fi

before_deploy:
    - cd $TRAVIS_BUILD_DIR

deploy:
        - provider: pages
          skip_cleanup: true
          github_token: $GITHUB_TOKEN  # Set in travis-ci.org dashboard, marked secure
          keep-history: true
          local_dir: docs/_build/html
          on:
              branch: master
              condition: $TEST_TYPE = docs
        - provider: pypi
          user: $PYPI_USERNAME
          password: $PYPI_PASSWORD
          distributions: "sdist bdist_wheel"
          skip_existing: true
          on:
              tags: true
              condition: $TEST_TYPE = style-check


================================================
FILE: AUTHORS.rst
================================================
Authors
==========


**Core Development Team:**

- Justus Schock: `GitHub <https://github.com/justusschock>`_ | `LinkedIn <https://www.linkedin.com/in/justus-schock/>`_ | `Google Scholar <https://scholar.google.de/citations?hl=de&user=KYf-ZHoAAAAJ>`_ | `E-Mail <mailto:justus.schock@rwth-aachen.de>`_
- Michael Baumgartner: `GitHub <https://github.com/mibaumgartner>`_ | `LinkedIn <https://www.linkedin.com/in/michael-baumgartner-/>`_
- Oliver Rippel: `GitHub <https://github.com/ORippler>`_ | `LinkedIn <https://www.linkedin.com/in/oliver-rippel-70361113a/>`_ | `Google Scholar <https://scholar.google.de/citations?user=DaTF8RsAAAAJ&hl=de>`_
- Christoph Haarburger: `GitHub <https://github.com/haarburger>`_ | `LinkedIn <https://www.linkedin.com/in/chaarburger/>`_ | `Google Scholar <https://scholar.google.de/citations?user=Lb8DcccAAAAJ&hl=de>`_ 

**Contributions:**

- Nicolas Horst

- Alexander Moriz


================================================
FILE: CODEOWNERS
================================================
# Use this CODEOWNERS file to automatically request reviews from code owners on PRs.
# For details see https://help.github.com/en/articles/about-code-owners
# The code owners are listed in alphabetical order.

# General Namespace (versioning backend resolution etc.)
/delira/* @justusschock

# DataLoading
/delira/data_loading/ @justusschock @mibaumgartner

# IO
/delira/io/ @justusschock
/delira/io/tf.py @ORippler

# Logging
/delira/logging/ @justusschock @ORippler

# Models
/delira/models/* @justusschock
/delira/models/backends/* @justusschock
/delira/models/backends/chainer/ @justusschock
/delira/models/backends/sklearn/ @justusschock
/delira/models/backends/tf_eager/ @justusschock @ORippler
/delira/models/backends/tf_graph/ @ORippler
/delira/models/backends/torch/ @justusschock @mibaumgartner
/delira/models/backends/torchscript/ @justusschock

# Training
/delira/training/__init__.py @justusschock
/delira/training/base_experiment.py @justusschock @mibaumgartner @ORippler
/delira/training/base_trainer.py @justusschock @mibaumgartner @ORippler
/delira/training/losses.py @mibaumgartner
/delira/training/metrics.py @justusschock @mibaumgartner
/delira/training/parameters.py @justusschock @mibaumgartner
/delira/training/predictor.py @justusschock @mibaumgartner @ORippler
/delira/training/utils.py @justusschock
/delira/training/backends/* @justusschock
/delira/training/backends/chainer/ @justusschock
/delira/training/backends/sklearn/ @justusschock
/delira/training/backends/tf_eager/ @justusschock @ORippler
/delira/training/backends/tf_graph/ @ORippler
/delira/training/backends/torch/ @justusschock @mibaumgartner
/delira/training/backends/torchscript/ @justusschock
/delira/training/callbacks/ @justusschock

# Utils
/delira/utils/ @justusschock @mibaumgartner


# Global repo stuff
/* @justusschock
/docker/ @haarburger
/docs/ @justusschock
/notebooks/* @mibaumgartner
/paper/ @haarburger
/requirements/ @haarburger @justusschock @mibaumgartner @ORippler
/scripts/ci/ @justusschock

# Tests
/tests/* @justusschock
/tests/data_loading @justusschock @mibaumgartner
/tests/io/ @justusschock @ORippler
/tests/logging/ @justusschock @ORippler
/tests/models/ @justusschock
/tests/training/* @mibaumgartner
/tests/training/backends/ @justusschock


================================================
FILE: CONTRIBUTING.md
================================================
# Contributing to `delira`

If you are interested in contributing to `delira`, you will either

* implement a new feature

or 

* fix a bug.

For both types of contribution, the process is roughly the same:

1. File an issue at [this repo] and discuss
the issue with us! Maybe we can give you some hints towards the
implementation or fix.

2. Create your own fork of `delira`

3. In your own fork, start a new branch for the implementation of your issue.
Make sure to include basic unittests (we know that the current code is not
that well tested so far, but we want to change this in the future).

> **Note:** To improve readability and maintainability, [PEP8 Style](https://www.python.org/dev/peps/pep-0008/) should always be followed (no exceptions).

> **Note:** To ensure our CI/CD runs correctly, you should *never* use relative imports but absolute ones (see the sketch below).

> **Note:** If you add a feature, you should also add it to the documentation.
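
As a sketch of what the absolute-import rule means in practice (the imported
name is hypothetical, purely for illustration):

```
# Preferred: absolute import, resolves the same way from any entry point
from delira.data_loading.data_manager import BaseDataManager

# Avoid: relative import, which depends on the importing module's package
# from .data_manager import BaseDataManager
```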

4. After finishing the coding part, send a pull request to
[this repo].

5. Afterwards, keep an eye on your pull request, since we might suggest some
changes.


If you are not familiar with creating a Pull Request, here are some guides:
- http://stackoverflow.com/questions/14680711/how-to-do-a-github-pull-request
- https://help.github.com/articles/creating-a-pull-request/


## Development Install

To develop `delira` on your machine, here are some tips:

1. Uninstall all existing installs of `delira`:
```
conda uninstall delira
pip uninstall delira
pip uninstall delira # run this command twice
```

2. Clone a copy of `delira` from source:

```
git clone https://github.com/justusschock/delira.git
cd delira
```

3. Install `delira` in `build develop` mode:

Install it via 

```
python setup.py build develop
```

or 

```
pip install -e .
```

This mode will symlink the Python files from the current local source tree into
the Python install.

Hence, if you modify a Python file, you do not need to reinstall `delira`
after every change.

In case you want to reinstall, make sure that you uninstall `delira` first by running `pip uninstall delira`
and `python setup.py clean`. Then you can install in `build develop` mode again.
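
To verify that the development install is picked up, a quick sanity check
(a minimal sketch; the printed version string depends on your checkout):

```
python -c "import delira; print(delira.__version__)"
```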


## Unit testing

Unittests are located under `tests/`. Run the entire test suite with

```
python -m pytest tests/
```

or run individual test files, like `python -m pytest tests/training/test_metrics.py`, for individual test suites.

### Local unit tests with unittest
Testing is done with a standard `unittest` suite.
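
If you prefer to bypass `pytest`, standard `unittest` discovery should work as
well (a minimal sketch using stock `unittest` options):

```
python -m unittest discover -s tests -v
```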

## Writing documentation

`delira` uses [numpy style](http://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_numpy.html)
for formatting docstrings. Lines inside docstring blocks must be limited to
80 characters to fit into Jupyter documentation popups.
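
For reference, a minimal numpy-style docstring could look like this (the
function and its parameters are made up purely for illustration):

```
def resample(data, spacing):
    """
    Resample an array to a new spacing

    Parameters
    ----------
    data : numpy.ndarray
        the input array
    spacing : tuple
        the target spacing per dimension

    Returns
    -------
    numpy.ndarray
        the resampled array

    """
    ...
```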

[this repo]: https://github.com/delira-dev/delira


================================================
FILE: LICENSE
================================================
                    GNU AFFERO GENERAL PUBLIC LICENSE
                       Version 3, 19 November 2007

 Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
 Everyone is permitted to copy and distribute verbatim copies
 of this license document, but changing it is not allowed.

                            Preamble

  The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.

  The licenses for most software and other practical works are designed
to take away your freedom to share and change the works.  By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.

  When we speak of free software, we are referring to freedom, not
price.  Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.

  Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.

  A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate.  Many developers of free software are heartened and
encouraged by the resulting cooperation.  However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.

  The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community.  It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server.  Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.

  An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals.  This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.

  The precise terms and conditions for copying, distribution and
modification follow.

                       TERMS AND CONDITIONS

  0. Definitions.

  "This License" refers to version 3 of the GNU Affero General Public License.

  "Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.

  "The Program" refers to any copyrightable work licensed under this
License.  Each licensee is addressed as "you".  "Licensees" and
"recipients" may be individuals or organizations.

  To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy.  The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.

  A "covered work" means either the unmodified Program or a work based
on the Program.

  To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy.  Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.

  To "convey" a work means any kind of propagation that enables other
parties to make or receive copies.  Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.

  An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License.  If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.

  1. Source Code.

  The "source code" for a work means the preferred form of the work
for making modifications to it.  "Object code" means any non-source
form of a work.

  A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.

  The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form.  A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.

  The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities.  However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work.  For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.

  The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.

  The Corresponding Source for a work in source code form is that
same work.

  2. Basic Permissions.

  All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met.  This License explicitly affirms your unlimited
permission to run the unmodified Program.  The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work.  This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.

  You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force.  You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright.  Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.

  Conveying under any other circumstances is permitted solely under
the conditions stated below.  Sublicensing is not allowed; section 10
makes it unnecessary.

  3. Protecting Users' Legal Rights From Anti-Circumvention Law.

  No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.

  When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.

  4. Conveying Verbatim Copies.

  You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.

  You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.

  5. Conveying Modified Source Versions.

  You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:

    a) The work must carry prominent notices stating that you modified
    it, and giving a relevant date.

    b) The work must carry prominent notices stating that it is
    released under this License and any conditions added under section
    7.  This requirement modifies the requirement in section 4 to
    "keep intact all notices".

    c) You must license the entire work, as a whole, under this
    License to anyone who comes into possession of a copy.  This
    License will therefore apply, along with any applicable section 7
    additional terms, to the whole of the work, and all its parts,
    regardless of how they are packaged.  This License gives no
    permission to license the work in any other way, but it does not
    invalidate such permission if you have separately received it.

    d) If the work has interactive user interfaces, each must display
    Appropriate Legal Notices; however, if the Program has interactive
    interfaces that do not display Appropriate Legal Notices, your
    work need not make them do so.

  A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit.  Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.

  6. Conveying Non-Source Forms.

  You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:

    a) Convey the object code in, or embodied in, a physical product
    (including a physical distribution medium), accompanied by the
    Corresponding Source fixed on a durable physical medium
    customarily used for software interchange.

    b) Convey the object code in, or embodied in, a physical product
    (including a physical distribution medium), accompanied by a
    written offer, valid for at least three years and valid for as
    long as you offer spare parts or customer support for that product
    model, to give anyone who possesses the object code either (1) a
    copy of the Corresponding Source for all the software in the
    product that is covered by this License, on a durable physical
    medium customarily used for software interchange, for a price no
    more than your reasonable cost of physically performing this
    conveying of source, or (2) access to copy the
    Corresponding Source from a network server at no charge.

    c) Convey individual copies of the object code with a copy of the
    written offer to provide the Corresponding Source.  This
    alternative is allowed only occasionally and noncommercially, and
    only if you received the object code with such an offer, in accord
    with subsection 6b.

    d) Convey the object code by offering access from a designated
    place (gratis or for a charge), and offer equivalent access to the
    Corresponding Source in the same way through the same place at no
    further charge.  You need not require recipients to copy the
    Corresponding Source along with the object code.  If the place to
    copy the object code is a network server, the Corresponding Source
    may be on a different server (operated by you or a third party)
    that supports equivalent copying facilities, provided you maintain
    clear directions next to the object code saying where to find the
    Corresponding Source.  Regardless of what server hosts the
    Corresponding Source, you remain obligated to ensure that it is
    available for as long as needed to satisfy these requirements.

    e) Convey the object code using peer-to-peer transmission, provided
    you inform other peers where the object code and Corresponding
    Source of the work are being offered to the general public at no
    charge under subsection 6d.

  A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.

  A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling.  In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage.  For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product.  A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.

  "Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source.  The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.

  If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information.  But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).

  The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed.  Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.

  Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.

  7. Additional Terms.

  "Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law.  If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.

  When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it.  (Additional permissions may be written to require their own
removal in certain cases when you modify the work.)  You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.

  Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:

    a) Disclaiming warranty or limiting liability differently from the
    terms of sections 15 and 16 of this License; or

    b) Requiring preservation of specified reasonable legal notices or
    author attributions in that material or in the Appropriate Legal
    Notices displayed by works containing it; or

    c) Prohibiting misrepresentation of the origin of that material, or
    requiring that modified versions of such material be marked in
    reasonable ways as different from the original version; or

    d) Limiting the use for publicity purposes of names of licensors or
    authors of the material; or

    e) Declining to grant rights under trademark law for use of some
    trade names, trademarks, or service marks; or

    f) Requiring indemnification of licensors and authors of that
    material by anyone who conveys the material (or modified versions of
    it) with contractual assumptions of liability to the recipient, for
    any liability that these contractual assumptions directly impose on
    those licensors and authors.

  All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10.  If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term.  If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.

  If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.

  Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.

  8. Termination.

  You may not propagate or modify a covered work except as expressly
provided under this License.  Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).

  However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.

  Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.

  Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License.  If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.

  9. Acceptance Not Required for Having Copies.

  You are not required to accept this License in order to receive or
run a copy of the Program.  Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance.  However,
nothing other than this License grants you permission to propagate or
modify any covered work.  These actions infringe copyright if you do
not accept this License.  Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.

  10. Automatic Licensing of Downstream Recipients.

  Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License.  You are not responsible
for enforcing compliance by third parties with this License.

  An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations.  If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.

  You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License.  For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.

  11. Patents.

  A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based.  The
work thus licensed is called the contributor's "contributor version".

  A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version.  For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.

  Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.

  In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement).  To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.

  If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients.  "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.

  If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.

  A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License.  You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.

  Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.

  12. No Surrender of Others' Freedom.

  If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License.  If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all.  For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.

  13. Remote Network Interaction; Use with the GNU General Public License.

  Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software.  This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.

  Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work.  The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.

  14. Revised Versions of this License.

  The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time.  Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.

  Each version is given a distinguishing version number.  If the
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation.  If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.

  If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.

  Later license versions may give you additional or different
permissions.  However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.

  15. Disclaimer of Warranty.

  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.

  16. Limitation of Liability.

  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.

  17. Interpretation of Sections 15 and 16.

  If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.

                     END OF TERMS AND CONDITIONS

            How to Apply These Terms to Your New Programs

  If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.

  To do so, attach the following notices to the program.  It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.

    <one line to give the program's name and a brief idea of what it does.>
    Copyright (C) <year>  <name of author>

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU Affero General Public License as published
    by the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU Affero General Public License for more details.

    You should have received a copy of the GNU Affero General Public License
    along with this program.  If not, see <https://www.gnu.org/licenses/>.

Also add information on how to contact you by electronic and paper mail.

  If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source.  For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code.  There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.

  You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<https://www.gnu.org/licenses/>.


================================================
FILE: MANIFEST.in
================================================
include requirements/*.txt
include *.md
include LICENSE
include notebooks/*.ipynb
include setup.cfg
include versioneer.py
include delira/_version.py


================================================
FILE: README.md
================================================
[<img src="https://img.shields.io/badge/chat-slack%20channel-75BBC4.svg">](https://join.slack.com/t/deliradev/shared_invite/enQtNjI1MjA4MjQzMzQ2LTUzNTQ0MjQyNjJjNzgyODczY2Y1YjYxNjA3ZmQ0MGFhODhkYzQ4M2RjMGM1YWM3YWU5MDM0ZjdiNTQ4MmQ0ZDk)
[![PyPI version](https://badge.fury.io/py/delira.svg)](https://badge.fury.io/py/delira) [![Build Status](https://travis-ci.com/delira-dev/delira.svg?branch=master)](https://travis-ci.com/delira-dev/delira) [![Documentation Status](https://readthedocs.org/projects/delira/badge/?version=master)](https://delira.readthedocs.io/en/master/?badge=master) [![codecov](https://codecov.io/gh/justusschock/delira/branch/master/graph/badge.svg)](https://codecov.io/gh/delira-dev/delira)
[![DOI](http://joss.theoj.org/papers/10.21105/joss.01488/status.svg)](https://doi.org/10.21105/joss.01488)

![logo](docs/_static/logo/delira.svg "delira - A Backend Agnostic High Level Deep Learning Library")

# delira - A Backend Agnostic High Level Deep Learning Library
Authors: [Justus Schock, Michael Baumgartner, Oliver Rippel, Christoph Haarburger](AUTHORS.rst)

Copyright (C) 2020 by RWTH Aachen University                      
http://www.rwth-aachen.de                                             
                                                                         
License:                                                                                                                                       
This software is dual-licensed under:                                 
• Commercial license (please contact: lfb@lfb.rwth-aachen.de)         
• AGPL (GNU Affero General Public License) open source license        

## Introduction
`delira` is designed to work as a backend agnostic high level deep learning library. You can choose among several computation [backends](#choose-backend).
It allows you to compare different models written for different backends without rewriting them.

To this end, `delira` encapsulates the entire training and prediction logic in backend-agnostic modules, so that training behaves identically across all backends.

`delira` is designed in a very modular way so that almost everything is easily exchangeable or customizable.

A (non-comprehensive) list of the features included in `delira`:
* Dataset loading
* Dataset sampling
* Augmentation (multi-threaded) including 3D images with any number of channels (based on [`batchgenerators`](https://github.com/MIC-DKFZ/batchgenerators))
* A generic trainer class that implements the training process for all [backends](#choose-backend)
* Training monitoring using [Visdom](https://github.com/facebookresearch/visdom) or [Tensorboard](https://www.tensorflow.org/guide/summaries_and_tensorboard)
* Model save and load functions
* Already implemented datasets
* Many operations and utilities for medical imaging

## What about the name?
`delira` started as a library to enable deep learning research and fast prototyping in medical imaging (especially in radiology).
That's also where the name comes from: `delira` was an acronym for **DE**ep **L**earning **I**n **RA**diology.
To accommodate many other use cases, we have since broadened the framework's focus considerably, although the library still contains
many medical utilities, which we are constantly working to factor out.


## Installation

### Choose Backend

You may choose a backend from the list below. If your desired backend is not listed and you want to add it, please open an issue (adding a backend should not be hard at all) and we will guide you through the process.


| Backend                                                   | Binary Installation               | Source Installation                                                                               | Notes                                                                                                                                                 |
|-----------------------------------------------------------|-----------------------------------|---------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------|
| None                                                      | `pip install delira`              | `pip install git+https://github.com/delira-dev/delira.git`                                      | Training not possible if backend is not installed separately                                                                                          |
| [`torch`](https://pytorch.org)                            | `pip install delira[torch]`       | `git clone https://github.com/delira-dev/delira.git && cd delira && pip install .[torch]`       | `delira` with `torch` backend supports mixed-precision training via [NVIDIA/apex](https://github.com/NVIDIA/apex.git) (must be installed separately). |
| [`torchscript`](https://pytorch.org/docs/stable/jit.html) | `pip install delira[torchscript]` | `git clone https://github.com/delira-dev/delira.git && cd delira && pip install .[torchscript]` | The `torchscript` backend currently supports only single-GPU-training                                                                                 |
| [`tensorflow eager`](https://www.tensorflow.org/)         | `pip install delira[tensorflow]`  | `git clone https://github.com/delira-dev/delira.git && cd delira && pip install .[tensorflow]`  | the `tensorflow` backend is still very experimental and lacks some [features](https://github.com/delira-dev/delira/issues/47)                       |
| [`tensorflow graph`](https://www.tensorflow.org/)         | `pip install delira[tensorflow]`  | `git clone https://github.com/delira-dev/delira.git && cd delira && pip install .[tensorflow]`  | the `tensorflow` backend is still very experimental and lacks some [features](https://github.com/delira-dev/delira/issues/47)                       |
| [`scikit-learn`](https://scikit-learn.org/stable/)        | `pip install delira`              | `pip install git+https://github.com/delira-dev/delira.git`                                      | /                                                                                                                                                     |
| [`chainer`](https://chainer.org/)                         | `pip install delira[chainer]`     | `git clone https://github.com/delira-dev/delira.git && cd delira && pip install .[chainer]`     | /
| Full                                                      | `pip install delira[full]`        | `git clone https://github.com/delira-dev/delira.git && cd delira && pip install .[full]`        | All backends will be installed.                                                                                                                       |

### Docker
The easiest way to use `delira` is via Docker (with the [nvidia-runtime](https://github.com/NVIDIA/nvidia-docker) for GPU support), using either the [Dockerfile](docker/Dockerfile) or the [prebuilt images](https://cloud.docker.com/u/justusschock/repository/docker/justusschock/delira).

### Chat
We have a [community chat on slack](https://deliradev.slack.com). If you need an invitation, just follow [this link](https://join.slack.com/t/deliradev/shared_invite/enQtNjI1MjA4MjQzMzQ2LTUzNTQ0MjQyNjJjNzgyODczY2Y1YjYxNjA3ZmQ0MGFhODhkYzQ4M2RjMGM1YWM3YWU5MDM0ZjdiNTQ4MmQ0ZDk).

## Getting Started
The best way to learn how to use `delira` is to have a look at the [tutorial notebook](notebooks/tutorial_delira.ipynb).
Example implementations for classification problems, segmentation approaches and GANs are also provided in the [notebooks](notebooks) folder.
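
For a first impression, here is a minimal sketch of the typical `DataManager` workflow (assuming a plain in-memory list of sample dicts as data and no augmentation transforms; the notebooks cover complete setups):

```python
import numpy as np

from delira import set_debug_mode
from delira.data_loading import DataManager

# toy data: a list of sample dicts, each mapping names to numpy arrays
data = [{"data": np.random.rand(3, 32, 32), "label": np.array([i % 2])}
        for i in range(100)]

set_debug_mode(True)  # force single-process loading for this sketch

manager = DataManager(data, batch_size=4, n_process_augmentation=1,
                      transforms=None)

for batch in manager.get_batchgen(seed=42):
    print(batch["data"].shape)  # (4, 3, 32, 32)
    break
```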

## Documentation
The docs are hosted on [ReadTheDocs/Delira](https://delira.rtfd.io).
The documentation of the latest master branch can always be found at the project's [GitHub page](https://delira-dev.github.io/delira/).

## Contributing
If you find a bug or have an idea for an improvement, please have a look at our [contribution guideline](CONTRIBUTING.md).


================================================
FILE: delira/__init__.py
================================================
from delira._debug_mode import get_current_debug_mode, switch_debug_mode, \
    set_debug_mode
from delira._backends import get_backends, seed_all

from ._version import get_versions as _get_versions

import warnings
warnings.simplefilter('default', DeprecationWarning)
warnings.simplefilter('ignore', ImportWarning)


__version__ = _get_versions()['version']
del _get_versions


================================================
FILE: delira/_backends.py
================================================
import os
import json
from delira._version import get_versions as _get_versions

# to register new possible backends, they have to be added to this list.
# each backend should consist of a tuple of length 2 with the first entry
# being the package import name and the second being the backend abbreviation.
# E.g. TensorFlow's package is named 'tensorflow' but if the package is found,
# it will be considered as 'tf' later on
__POSSIBLE_BACKENDS = (("torch", "torch"),
                       ("tensorflow", "tf"),
                       ("chainer", "chainer"),
                       ("sklearn", "sklearn"))
__BACKENDS = ()


def _determine_backends():
    """
    Internal Helper Function to determine the currently valid backends by
    trying to import them. The valid backends are not returned, but appended
    to the global ``__BACKENDS`` variable

    """

    _config_file = __file__.replace("_backends.py", ".delira")
    # look for config file to determine backend
    # if file exists: load config into environment variables

    if not os.path.isfile(_config_file):
        _backends = {}
        # try to import all possible backends to determine valid backends

        import importlib
        for curr_backend in __POSSIBLE_BACKENDS:
            try:
                assert len(curr_backend) == 2
                assert all([isinstance(_tmp, str) for _tmp in curr_backend]), \
                    "All entries in current backend must be strings"

                # check if backend can be imported
                bcknd = importlib.util.find_spec(curr_backend[0])

                if bcknd is not None:
                    _backends[curr_backend[1]] = True
                else:
                    _backends[curr_backend[1]] = False
                del bcknd

            except ValueError:
                _backends[curr_backend[1]] = False

        with open(_config_file, "w") as f:
            json.dump({"version": _get_versions()['version'],
                       "backend": _backends},
                      f, sort_keys=True, indent=4)

        del _backends

    # set values from config file to variable and empty Backend-List before
    global __BACKENDS
    __BACKENDS = []
    with open(_config_file) as f:
        _config_dict = json.load(f)
    for key, val in _config_dict.pop("backend").items():
        if val:
            __BACKENDS.append(key.upper())
    del _config_dict

    del _config_file

    # make __BACKENDS non mutable
    __BACKENDS = tuple(__BACKENDS)


def get_backends():
    """
    Return List of currently available backends

    Returns
    -------
    list
        list of strings containing the currently installed backends
    """
    global __BACKENDS

    if not __BACKENDS:
        _determine_backends()
    return __BACKENDS


def seed_all(seed):
    """
    Helper Function to seed all available backends

    Parameters
    ----------
    seed : int
        the new random seed

    """
    import sys

    import numpy as np
    np.random.seed(seed)

    import random
    random.seed(seed)

    # seed every loaded backend (independent `if`s, so that multiple
    # installed backends are all seeded)
    if "torch" in sys.modules and "TORCH" in get_backends():
        import torch
        torch.random.manual_seed(seed)

    if "tensorflow" in sys.modules and "TF" in get_backends():
        import tensorflow as tf
        tf.random.set_random_seed(seed)

    if "chainer" in sys.modules and "CHAINER" in get_backends():
        try:
            import cupy
            cupy.random.seed(seed)
        except ImportError:
            pass


================================================
FILE: delira/_debug_mode.py
================================================
__DEBUG_MODE = False

# Functions to get and set the internal __DEBUG_MODE variable. This variable
# currently only defines whether to use multiprocessing or not. At the moment
# this is only used inside the DataManager, which either returns a
# MultiThreadedAugmenter or a SingleThreadedAugmenter depending on the current
# debug mode.
# All other functions using multiprocessing should be aware of this and
# implement a functionality without multiprocessing
# (even if this slows down things a lot!).


def get_current_debug_mode():
    """
    Getter function for the current debug mode
    Returns
    -------
    bool
        current debug mode
    """
    return __DEBUG_MODE


def switch_debug_mode():
    """
    Alternates the current debug mode
    """
    set_debug_mode(not get_current_debug_mode())


def set_debug_mode(mode: bool):
    """
    Sets a new debug mode
    Parameters
    ----------
    mode : bool
        the new debug mode
    """
    global __DEBUG_MODE
    __DEBUG_MODE = mode
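

# Usage sketch (illustrative addition): the debug mode gates multiprocessing
# in the DataManager (sequential vs. parallel augmenter).
if __name__ == "__main__":
    set_debug_mode(True)
    assert get_current_debug_mode() is True
    switch_debug_mode()
    assert get_current_debug_mode() is False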


================================================
FILE: delira/_version.py
================================================
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by GitHub's download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.

# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)

"""Git implementation of _version.py."""

import errno
import os
import re
import subprocess
import sys


def get_keywords():
    """Get the keywords needed to look up the version information."""
    # these strings will be replaced by git during git-archive.
    # setup.py/versioneer.py will grep for the variable names, so they must
    # each be defined on a line of their own. _version.py will just call
    # get_keywords().
    git_refnames = "$Format:%d$"
    git_full = "$Format:%H$"
    git_date = "$Format:%ci$"
    keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
    return keywords


class VersioneerConfig:
    """Container for Versioneer configuration parameters."""


def get_config():
    """Create, populate and return the VersioneerConfig() object."""
    # these strings are filled in when 'setup.py versioneer' creates
    # _version.py
    cfg = VersioneerConfig()
    cfg.VCS = "git"
    cfg.style = "pep440"
    cfg.tag_prefix = ""
    cfg.parentdir_prefix = ""
    cfg.versionfile_source = "delira/_version.py"
    cfg.verbose = False
    return cfg


class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario."""


LONG_VERSION_PY = {}
HANDLERS = {}


def register_vcs_handler(vcs, method):  # decorator
    """Decorator to mark a method as the handler for a particular VCS."""
    def decorate(f):
        """Store f in HANDLERS[vcs][method]."""
        if vcs not in HANDLERS:
            HANDLERS[vcs] = {}
        HANDLERS[vcs][method] = f
        return f
    return decorate


def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                env=None):
    """Call the given command(s)."""
    assert isinstance(commands, list)
    p = None
    for c in commands:
        try:
            dispcmd = str([c] + args)
            # remember shell=False, so use git.cmd on windows, not just git
            p = subprocess.Popen([c] + args, cwd=cwd, env=env,
                                 stdout=subprocess.PIPE,
                                 stderr=(subprocess.PIPE if hide_stderr
                                         else None))
            break
        except EnvironmentError:
            e = sys.exc_info()[1]
            if e.errno == errno.ENOENT:
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(e)
            return None, None
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = p.communicate()[0].strip()
    if sys.version_info[0] >= 3:
        stdout = stdout.decode()
    if p.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, p.returncode
    return stdout, p.returncode


def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes both
    the project name and a version string. We will also support searching up
    two directory levels for an appropriately named parent directory
    """
    rootdirs = []

    for i in range(3):
        dirname = os.path.basename(root)
        if dirname.startswith(parentdir_prefix):
            return {"version": dirname[len(parentdir_prefix):],
                    "full-revisionid": None,
                    "dirty": False, "error": None, "date": None}
        else:
            rootdirs.append(root)
            root = os.path.dirname(root)  # up a level

    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(rootdirs), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")


@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file."""
    # the code embedded in _version.py can just fetch the value of these
    # keywords. When used from setup.py, we don't want to import _version.py,
    # so we do it with a regexp instead. This function is not used from
    # _version.py.
    keywords = {}
    try:
        f = open(versionfile_abs, "r")
        for line in f.readlines():
            if line.strip().startswith("git_refnames ="):
                mo = re.search(r'=\s*"(.*)"', line)
                if mo:
                    keywords["refnames"] = mo.group(1)
            if line.strip().startswith("git_full ="):
                mo = re.search(r'=\s*"(.*)"', line)
                if mo:
                    keywords["full"] = mo.group(1)
            if line.strip().startswith("git_date ="):
                mo = re.search(r'=\s*"(.*)"', line)
                if mo:
                    keywords["date"] = mo.group(1)
        f.close()
    except EnvironmentError:
        pass
    return keywords


@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords."""
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")
    date = keywords.get("date")
    if date is not None:
        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
        # -like" string, which we must then edit to make compliant), because
        # it's been around since git-1.5.3, and it's too difficult to
        # discover which version we're using, or to work around using an
        # older one.
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = set([r.strip() for r in refnames.strip("()").split(",")])
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = set([r for r in refs if re.search(r'\d', r)])
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix):]
            if verbose:
                print("picking %s" % r)
            return {"version": r,
                    "full-revisionid": keywords["full"].strip(),
                    "dirty": False, "error": None,
                    "date": date}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags", "date": None}


@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]

    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
                          hide_stderr=True)
    if rc != 0:
        if verbose:
            print("Directory %s not under git control" % root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")

    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
                                          "--always", "--long",
                                          "--match", "%s*" % tag_prefix],
                                   cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()

    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None

    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out

    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]

    # now we have TAG-NUM-gHEX or HEX

    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                               % describe_out)
            return pieces

        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                               % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]

        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))

        # commit: short hex revision ID
        pieces["short"] = mo.group(3)

    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
                                    cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits

    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
                       cwd=root)[0].strip()
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)

    return pieces


def plus_or_dot(pieces):
    """Return a + if we don't already have one, else return a ."""
    if "+" in pieces.get("closest-tag", ""):
        return "."
    return "+"


def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += plus_or_dot(pieces)
            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
            if pieces["dirty"]:
                rendered += ".dirty"
    else:
        # exception #1
        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
                                          pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered
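
# Worked example (illustrative): three commits past a clean tag "0.5.0" at
# short hash "abc1234", with uncommitted changes, renders as
#   render_pep440({"closest-tag": "0.5.0", "distance": 3,
#                  "short": "abc1234", "dirty": True})
#   -> "0.5.0+3.gabc1234.dirty"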


def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += ".post.dev%d" % pieces["distance"]
    else:
        # exception #1
        rendered = "0.post.dev%d" % pieces["distance"]
    return rendered


def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
            rendered += plus_or_dot(pieces)
            rendered += "g%s" % pieces["short"]
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        rendered += "+g%s" % pieces["short"]
    return rendered


def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
    return rendered


def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always --long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}

    if not style or style == "default":
        style = "pep440"  # the default

    if style == "pep440":
        rendered = render_pep440(pieces)
    elif style == "pep440-pre":
        rendered = render_pep440_pre(pieces)
    elif style == "pep440-post":
        rendered = render_pep440_post(pieces)
    elif style == "pep440-old":
        rendered = render_pep440_old(pieces)
    elif style == "git-describe":
        rendered = render_git_describe(pieces)
    elif style == "git-describe-long":
        rendered = render_git_describe_long(pieces)
    else:
        raise ValueError("unknown style '%s'" % style)

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None,
            "date": pieces.get("date")}


def get_versions():
    """Get version information or return default if unable to do so."""
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.

    cfg = get_config()
    verbose = cfg.verbose

    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass

    try:
        root = os.path.realpath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for i in cfg.versionfile_source.split('/'):
            root = os.path.dirname(root)
    except NameError:
        return {"version": "0+unknown", "full-revisionid": None,
                "dirty": None,
                "error": "unable to find root of source tree",
                "date": None}

    try:
        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
        return render(pieces, cfg.style)
    except NotThisMethod:
        pass

    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass

    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None,
            "error": "unable to compute version", "date": None}


================================================
FILE: delira/data_loading/__init__.py
================================================
# basic imports
from delira.data_loading.data_loader import DataLoader
from delira.data_loading.dataset import AbstractDataset, IterableDataset, \
    DictDataset, BaseCacheDataset, BaseExtendCacheDataset, BaseLazyDataset, \
    ConcatDataset
from delira.data_loading.augmenter import Augmenter
from delira.data_loading.data_manager import DataManager
from delira.data_loading.load_utils import LoadSample, LoadSampleLabel

from delira.data_loading.sampler import *
from delira import get_backends as _get_backends

# if numba is installed: Import Numba Transforms
try:
    from delira.data_loading.numba_transform import NumbaTransform, \
        NumbaTransformWrapper, NumbaCompose
except ImportError:
    pass


================================================
FILE: delira/data_loading/augmenter.py
================================================
import multiprocessing
from multiprocessing import connection as mpconnection
from collections.abc import Callable
import abc
import os
import sys
import numpy as np
import random

from delira.data_loading.sampler import AbstractSampler, BatchSampler
from delira.data_loading.data_loader import DataLoader
from delira import get_current_debug_mode


class AbstractAugmenter(object):
    """
    Basic Augmenter Class providing a general Augmenter API
    """

    def __init__(
            self,
            data_loader,
            batchsize,
            sampler,
            transforms=None,
            seed=1,
            drop_last=False):
        """
        Parameters
        ----------
        data_loader : :class:`DataLoader`
            the dataloader, loading samples for given indices
        batchsize : int
            the batchsize to use for sampling
        sampler : :class:`AbstractSampler`
            the sampler (may be a batch sampler or a plain sampler),
            defining the actual sampling strategy; an iterable yielding
            indices
        transforms : :class:`collections.Callable`
            the transforms to apply; defaults to None
        seed : int
            the basic seed; default: 1
        drop_last : bool
            whether to drop the last (possibly smaller) batch or not
        """

        self._data_loader = data_loader

        if not isinstance(sampler, BatchSampler):
            if isinstance(sampler, AbstractSampler):
                sampler = BatchSampler(sampler, batchsize,
                                       drop_last=drop_last)
            else:
                raise TypeError("Invalid Sampler given: %s" % str(sampler))

        self._sampler = sampler

        self._drop_last = drop_last

        self._transforms = transforms
        self._seed = seed

        # seed numpy.random and random as these are the random number
        # generators, which might be used for sampling
        np.random.seed(seed)
        random.seed(seed)

    @abc.abstractmethod
    def __iter__(self):
        raise NotImplementedError


class _ParallelAugmenter(AbstractAugmenter):
    """
    An Augmenter that loads and augments multiple batches in parallel
    """

    def __init__(self, data_loader, batchsize, sampler, num_processes=None,
                 transforms=None, seed=1, drop_last=False):
        """
        Parameters
        ----------
        data_loader : :class:`DataLoader`
            the dataloader, loading samples for given indices
        batchsize : int
            the batchsize to use for sampling
        sampler : :class:`AbstractSampler`
            the sampler (may be a batch sampler or a plain sampler),
            defining the actual sampling strategy; an iterable yielding
            indices
        num_processes : int
            the number of processes to use for dataloading + augmentation;
            if None: the number of available CPUs will be used as number of
            processes
        transforms : :class:`collections.Callable`
            the transforms to apply; defaults to None
        seed : int
            the basic seed; default: 1
        drop_last : bool
            whether to drop the last (possibly smaller) batch or not
        """

        super().__init__(data_loader, batchsize, sampler, transforms, seed,
                         drop_last)

        if num_processes is None:
            num_processes = os.cpu_count()

        self._num_processes = num_processes

        self._processes = []

        self._index_pipes = []
        self._data_pipes = []

        self._index_pipe_counter = 0
        self._data_pipe_counter = 0
        self._abort_event = None
        self._data_queued = []
        self._processes_running = False

    @property
    def abort_event(self):
        """
        Property to access the abort event

        Returns
        -------
        :class:`multiprocessing.Event`
            the abort event
        """
        return self._abort_event

    @abort_event.setter
    def abort_event(self, new_event):
        """
        Setter for the abort event

        Parameters
        ----------
        new_event : :class:`multiprocessing.Event`
            the new event
        """

        self._abort_event = new_event

    def _start_processes(self):
        """
        Starts new processes and pipes for interprocess communication
        """

        # reset abort event
        self.abort_event = multiprocessing.Event()

        # for each process do:
        for i in range(self._num_processes):
            # start two oneway pipes (one for passing index to workers
            # and one for passing back data to main process)
            recv_conn_out, send_conn_out = multiprocessing.Pipe(duplex=False)
            recv_conn_in, send_conn_in = multiprocessing.Pipe(duplex=False)

            # create the actual process
            process = _WorkerProcess(dataloader=self._data_loader,
                                     output_pipe=send_conn_out,
                                     index_pipe=recv_conn_in,
                                     transforms=self._transforms,
                                     abort_event=self._abort_event,
                                     process_id=i)
            process.daemon = True
            process.start()
            # wait until process was created and started
            while not process.is_alive():
                pass

            # append process and pipes to list
            self._processes.append(process)
            self._index_pipes.append(send_conn_in)
            self._data_pipes.append(recv_conn_out)
            self._data_queued.append(0)
            self._processes_running = True

    def _shutdown_processes(self):
        """
        Shuts down the processes and resets all related flags and counters
        """

        # create copy to avoid modifying the list we iterate over
        worker = list(
            zip(self._data_pipes, self._index_pipes, self._processes))

        for _data_conn, _index_conn, _process in worker:

            _index_conn.send(None)

            _process.join()
            if sys.version_info >= (3, 7):
                _process.close()
            else:
                _process.terminate()

            _index_conn.close()
            _data_conn.close()

            self._data_pipes.pop()
            self._data_queued.pop()
            self._index_pipes.pop()
            self._processes.pop()

        # reset running process flag and counters
        self._processes_running = False
        self._data_pipe_counter = 0
        self._index_pipe_counter = 0

    @property
    def _next_index_pipe(self):
        """
        Property implementing switch to next index pipe
        """
        ctr = self._index_pipe_counter
        new_ctr = (self._index_pipe_counter + 1) % self._num_processes
        self._index_pipe_counter = new_ctr

        return ctr

    @property
    def _next_data_pipe(self):
        """
        Property implementing switch to next data pipe
        """
        ctr = self._data_pipe_counter
        new_ctr = (self._data_pipe_counter + 1) % self._num_processes
        self._data_pipe_counter = new_ctr

        return ctr

    def _enqueue_indices(self, sample_idxs):
        """
        Enqueues a set of indices to the workers, cycling through them in a
        round-robin fashion

        Parameters
        ----------
        sample_idxs : list
            the indices to enqueue to the workers
        """

        # iterating over all batch indices
        for idxs in sample_idxs:
            # switch to next counter
            index_pipe_ctr = self._next_index_pipe
            # increase number of queued batches for current worker
            self._data_queued[index_pipe_ctr] += 1
            # enqueue indices to worker
            self._index_pipes[index_pipe_ctr].send(idxs)

    def _receive_data(self):
        """
        Receives data from worker
        """
        # switching to next worker
        _data_pipe = self._next_data_pipe

        # receive data from worker
        data = self._data_pipes[_data_pipe].recv()
        # decrease number of enqueued batches for current worker
        self._data_queued[_data_pipe] -= 1

        return data

    def __iter__(self):
        self._start_processes()

        sampler_iter = iter(self._sampler)
        all_sampled = False

        try:
            # start by enqueuing two items per process as buffer
            _indices = []
            try:
                for i in range(self._num_processes * 2):
                    idxs = next(sampler_iter)
                    _indices.append(idxs)
            except StopIteration:
                all_sampled = True

            self._enqueue_indices(_indices)

            # iterate while not all data has been sampled and any data is
            # enqueued
            while True:

                if self.abort_event.is_set():
                    raise RuntimeError("Abort Event was set in one of the "
                                       "workers")

                # enqueue additional indices if the sampler was not already
                # exhausted
                try:
                    if not all_sampled:
                        idxs = next(sampler_iter)
                        self._enqueue_indices([idxs])
                except StopIteration:
                    all_sampled = True

                # receive data from workers
                if any(self._data_queued):
                    yield self._receive_data()
                else:
                    break

        except Exception as e:
            # set abort event to shutdown workers
            self._abort_event.set()
            raise e

        finally:
            if self._processes_running:
                self._shutdown_processes()


class _WorkerProcess(multiprocessing.Process):
    """
    A Process running an infinite loop of loading data for given indices
    """

    def __init__(self, dataloader: DataLoader,
                 output_pipe: mpconnection.Connection,
                 index_pipe: mpconnection.Connection,
                 abort_event: multiprocessing.Event,
                 transforms: Callable,
                 process_id):
        """
        Parameters
        ----------
        dataloader : :class:`DataLoader`
            the data loader which loads the data corresponding to the given
            indices
        output_pipe : :class:`multiprocessing.connection.Connection`
            the pipe the loaded data should be sent to
        index_pipe : :class:`multiprocessing.connection.Connection`
            the pipe to accept the indices
        abort_event : :class:`multiprocessing.Event`
            the abort event; set on every exception;
            if set, the worker terminates
        transforms : :class:`collections.Callable`
            the transforms to transform the data
        process_id : int
            the process id
        """
        super().__init__()

        self._data_loader = dataloader
        self._output_pipe = output_pipe
        self._input_pipe = index_pipe
        self._abort_event = abort_event
        self._process_id = process_id
        self._transforms = transforms

    def run(self) -> None:
        # set the process id
        self._data_loader.process_id = self._process_id

        try:
            while True:
                # check if worker should terminate
                if self._abort_event.is_set():
                    raise RuntimeError("Abort Event has been set externally")

                # get indices if available (with timeout to frequently check
                # for abortion)
                if self._input_pipe.poll(timeout=0.2):
                    idxs = self._input_pipe.recv()

                    # final indices -> shutdown workers
                    if idxs is None:
                        break

                    # load data
                    data = self._data_loader(idxs)

                    # apply transforms if given
                    if self._transforms is not None:
                        data = self._transforms(**data)

                    self._output_pipe.send(data)

        except Exception as e:
            self._abort_event.set()
            raise e


class _SequentialAugmenter(AbstractAugmenter):
    """
    An Augmenter that loads and augments batches sequentially without any
    parallelism
    """

    def __init__(
            self,
            data_loader,
            batchsize,
            sampler,
            transforms=None,
            seed=1,
            drop_last=False):
        """
        Parameters
        ----------
        data_loader : :class:`DataLoader`
            the dataloader, loading samples for given indices
        batchsize : int
            the batchsize to use for sampling
        sampler : :class:`AbstractSampler`
            the sampler (may be a batch sampler or a plain sampler),
            defining the actual sampling strategy; an iterable yielding
            indices
        transforms : :class:`collections.Callable`
            the transforms to apply; defaults to None
        seed : int
            the basic seed; default: 1
        drop_last : bool
            whether to drop the last (possibly smaller) batch or not
        """
        super().__init__(data_loader=data_loader, batchsize=batchsize,
                         sampler=sampler, transforms=transforms, seed=seed,
                         drop_last=drop_last)

    def __iter__(self):
        # create sampler iterator
        sampler_iter = iter(self._sampler)

        # for every index load and augment the data
        for idxs in sampler_iter:

            # load data
            data = self._data_loader(idxs)

            # transform data if transforms given
            if self._transforms is not None:
                data = self._transforms(**data)

            yield data


class Augmenter(object):
    """
    The actual Augmenter, wrapping :class:`_SequentialAugmenter` and
    :class:`_ParallelAugmenter` and switching between them based on its
    arguments and the current debug mode
    """

    def __init__(self, data_loader, batchsize, sampler, num_processes=None,
                 transforms=None, seed=1, drop_last=False):
        """
        Parameters
        ----------
        data_loader : :class:`DataLoader`
            the dataloader, loading samples for given indices
        batchsize : int
            the batchsize to use for sampling
        sampler : :class:`AbstractSampler`
            the sampler (may be a batch sampler or a plain sampler),
            defining the actual sampling strategy; an iterable yielding
            indices
        num_processes : int
            the number of processes to use for dataloading + augmentation;
            if None: the number of available CPUs will be used as number of
            processes
        transforms : :class:`collections.Callable`
            the transforms to apply; defaults to None
        seed : int
            the basic seed; default: 1
        drop_last : bool
            whether to drop the last (possibly smaller) batch or not
        """

        self._augmenter = self._resolve_augmenter_cls(num_processes,
                                                      data_loader=data_loader,
                                                      batchsize=batchsize,
                                                      sampler=sampler,
                                                      transforms=transforms,
                                                      seed=seed,
                                                      drop_last=drop_last)

    @staticmethod
    def _resolve_augmenter_cls(num_processes, **kwargs):
        """
        Resolves the augmenter class by the number of specified processes and
        the debug mode and creates an instance of the chosen class
        Parameters
        ----------
        num_processes : int
            the number of processes to use for dataloading + augmentation;
            if None: the number of available CPUs will be used as number of
            processes
        **kwargs :
            additional keyword arguments, used for instantiation of the chosen
            class
        Returns
        -------
        :class:`AbstractAugmenter`
            an instance of the chosen augmenter class
        """
        if get_current_debug_mode() or num_processes == 0:
            return _SequentialAugmenter(**kwargs)
        return _ParallelAugmenter(num_processes=num_processes, **kwargs)

    def __iter__(self):
        """
        Makes the Augmenter iterable

        Returns
        -------
        Generator
            a generator yielding the batches produced by the wrapped
            augmenter
        """
        yield from self._augmenter
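

# Usage sketch (illustrative addition): building an Augmenter directly from a
# plain list of sample dicts. ``num_processes=0`` forces the sequential code
# path, so no worker processes are spawned. Assumes ``IterableDataset`` makes
# the wrapped list indexable and that ``SequentialSampler.from_dataset``
# accepts a dataset, mirroring its use in ``DataManager.get_batchgen``.
if __name__ == "__main__":
    from delira.data_loading.sampler import SequentialSampler

    _samples = [{"data": np.zeros(3), "label": np.array([i])}
                for i in range(6)]
    _loader = DataLoader(_samples)
    _sampler = SequentialSampler.from_dataset(_loader.dataset)
    _augmenter = Augmenter(_loader, batchsize=2, sampler=_sampler,
                           num_processes=0)
    for _batch in _augmenter:
        print(_batch["data"].shape)  # (2, 3) for every batch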


================================================
FILE: delira/data_loading/data_loader.py
================================================
import numpy as np
from delira.data_loading.dataset import AbstractDataset, DictDataset, \
    IterableDataset
from collections import defaultdict
from collections.abc import Iterable


class DataLoader:
    """
    Basic Dataloader class, that returns data for a given set of indices and
    combines it as batches
    """

    def __init__(self, data):
        """
        Parameters
        ----------
        data : Any
            the data to use; ideally this is a dataset, an iterable or a
            dict, but in general it only needs to be indexable, have a
            length and return a dict of arrays when indexed
        """
        self._process_id = None
        if isinstance(data, AbstractDataset):
            dataset = data

        else:
            # wrap it into dataset depending on datatype
            if isinstance(data, dict):
                dataset = DictDataset(data)
            elif isinstance(data, Iterable):
                dataset = IterableDataset(data)
            else:
                raise TypeError("Invalid dataset type: %s"
                                % type(data).__name__)

        self.dataset = dataset

    def __call__(self, indices):
        """
        Loads data for given indices and combines them to batches
        Parameters
        ----------
        indices : list
            a list of integers specifying the data indices
        Returns
        -------
        dict
            a dict of numpy arrays (specifying the batches)
        """

        # get data for all indices
        data = [self.dataset[idx] for idx in indices]

        data_dict = defaultdict(list)

        # collect dict entries by key
        for _result_dict in data:
            for key, val in _result_dict.items():
                data_dict[key].append(val)

        # convert list to numpy arrays
        for key, val_list in data_dict.items():
            data_dict[key] = np.asarray(val_list)

        return data_dict

    @property
    def process_id(self):
        """
        A Property to access the process id
        Returns
        -------
        int
            the process id
        """
        if self._process_id is None:
            return 0
        return self._process_id

    @process_id.setter
    def process_id(self, new_id):
        """
        Setter for the :attr:`process_id`; Makes sure, that the process id is
        only set once
        Parameters
        ----------
        new_id : int
        Raises
        ------
        AttributeError
            if the process id has already been set once
        """
        if self._process_id is not None:
            raise AttributeError("Attribute 'process_id' can be set only once")

        self._process_id = new_id
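

# Usage sketch (illustrative addition): batching two samples from a list of
# dicts. Assumes ``IterableDataset`` makes the wrapped list indexable.
if __name__ == "__main__":
    _loader = DataLoader([{"data": np.full(3, i)} for i in range(4)])
    _batch = _loader([0, 2])
    print(_batch["data"])  # -> [[0 0 0]
                           #     [2 2 2]]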


================================================
FILE: delira/data_loading/data_manager.py
================================================
import logging

from batchgenerators.transforms import AbstractTransform

from delira import get_current_debug_mode
from delira.data_loading.data_loader import DataLoader
from delira.data_loading.sampler import SequentialSampler, AbstractSampler
from delira.data_loading.augmenter import Augmenter
from delira.data_loading.dataset import DictDataset, IterableDataset, \
    AbstractDataset
from collections.abc import Iterable
import inspect

logger = logging.getLogger(__name__)


class DataManager(object):
    """
    Class to Handle Data
    Creates Dataset (if necessary), Dataloader and Augmenter

    """

    def __init__(self, data, batch_size, n_process_augmentation,
                 transforms, sampler_cls=SequentialSampler,
                 drop_last=False, data_loader_cls=None,
                 **sampler_kwargs):
        """

        Parameters
        ----------
        data : str or Dataset
            if str: Path to data samples
            if dataset: Dataset
        batch_size : int
            Number of samples per batch
        n_process_augmentation : int
            Number of processes for augmentations
        transforms :
            Data transformations for augmentation
        sampler_cls : subclass of AbstractSampler
            class defining the sampling strategy
        drop_last : bool
            whether to drop the last (possibly smaller) batch
        data_loader_cls : subclass of DataLoader
            class of the data loader to instantiate
        **sampler_kwargs :
            other keyword arguments (passed to sampler_cls)

        Raises
        ------
        TypeError
            ``data_loader_cls`` is not :obj:`None` and not a subclass of
            ``DataLoader``
        TypeError
            ``data`` is not a Dataset object and not of type dict or iterable

        See Also
        --------
        :class:`AbstractDataset`

        """

        # instantiate hidden variables for property access
        self._batch_size = None
        self._n_process_augmentation = None
        self._transforms = None
        self._data_loader_cls = None
        self._sampler = None
        self.drop_last = drop_last

        # set actual values to properties
        self.batch_size = batch_size

        self.n_process_augmentation = n_process_augmentation
        self.transforms = transforms

        if data_loader_cls is None:
            logger.info("No dataloader Class specified. Using DataLoader")
            data_loader_cls = DataLoader
        else:
            if not inspect.isclass(data_loader_cls):
                raise TypeError(
                    "data_loader_cls must be class not instance of class")

            if not issubclass(data_loader_cls, DataLoader):
                raise TypeError(
                    "data_loader_cls must be subclass of DataLoader")

        self.data_loader_cls = data_loader_cls

        self.data = data

        if not (inspect.isclass(sampler_cls) and issubclass(sampler_cls,
                                                            AbstractSampler)):
            raise TypeError("sampler_cls must be a subclass of "
                            "AbstractSampler")

        self.sampler_cls = sampler_cls
        self.sampler_kwargs = sampler_kwargs

    def get_batchgen(self, seed=1):
        """
        Create DataLoader and Batchgenerator

        Parameters
        ----------
        seed : int
            seed for Random Number Generator

        Returns
        -------
        Augmenter
           The actual iterable batchgenerator

        Raises
        ------
        AssertionError
            :attr:`DataManager.n_batches` is smaller than or equal to zero

        """
        assert self.n_batches > 0

        data_loader = self.data_loader_cls(
            self.data
        )

        sampler = self.sampler_cls.from_dataset(data_loader.dataset,
                                                **self.sampler_kwargs)

        return Augmenter(data_loader=data_loader,
                         batchsize=self.batch_size,
                         sampler=sampler,
                         num_processes=self.n_process_augmentation,
                         transforms=self.transforms,
                         seed=seed,
                         drop_last=self.drop_last
                         )

    def get_subset(self, indices):
        """
        Returns a Subset of the current datamanager based on given indices

        Parameters
        ----------
        indices : iterable
            valid indices to extract subset from current dataset

        Returns
        -------
        :class:`DataManager`
            manager containing the subset

        """

        subset_kwargs = {
            "batch_size": self.batch_size,
            "n_process_augmentation": self.n_process_augmentation,
            "transforms": self.transforms,
            "sampler_cls": self.sampler_cls,
            "data_loader_cls": self.data_loader_cls,
            "drop_last": self.drop_last,
            **self.sampler_kwargs
        }

        return self.__class__(
            self.data.get_subset(indices),
            **subset_kwargs)

    def update_state_from_dict(self, new_state: dict):
        """
        Updates the internal state (and therefore the behavior) from a dict.
        If a key is not specified, the old attribute value will be used

        Parameters
        ----------
        new_state : dict
            The dict to update the state from.
            Valid keys are:

                * ``batch_size``
                * ``n_process_augmentation``
                * ``data_loader_cls``
                * ``sampler_cls``
                * ``sampler_kwargs``
                * ``transforms``

            If a key is not specified, the old value of the corresponding
            attribute will be used

        Raises
        ------
        KeyError
            Invalid keys are specified

        """

        # update batch_size if specified
        self.batch_size = new_state.pop("batch_size", self.batch_size)
        # update n_process_augmentation if specified
        self.n_process_augmentation = new_state.pop(
            "n_process_augmentation", self.n_process_augmentation)
        # update data_loader_cls if specified
        self.data_loader_cls = new_state.pop("data_loader_cls",
                                             self.data_loader_cls)
        # update sampler
        self.sampler_cls = new_state.pop("sampler_cls", self.sampler_cls)
        self.sampler_kwargs = new_state.pop("sampler_kwargs",
                                            self.sampler_kwargs)

        self.transforms = new_state.pop("transforms", self.transforms)

        if new_state:
            raise KeyError("Invalid Keys in new_state given: %s"
                           % (','.join(map(str, new_state.keys()))))

    @property
    def batch_size(self):
        """
        Property to access the batchsize

        Returns
        -------
        int
            the batchsize
        """

        return self._batch_size

    @batch_size.setter
    def batch_size(self, new_batch_size):
        """
        Setter for current batchsize, casts to int before setting the attribute

        Parameters
        ----------
        new_batch_size : int, Any
            the new batchsize; should be int but can be of any type that can be
            casted to an int

        """

        self._batch_size = int(new_batch_size)

    @property
    def n_process_augmentation(self):
        """
        Property to access the number of augmentation processes

        Returns
        -------
        int
            number of augmentation processes
        """

        if get_current_debug_mode():
            return 0
        return self._n_process_augmentation

    @n_process_augmentation.setter
    def n_process_augmentation(self, new_process_number):
        """
        Setter for number of augmentation processes, casts to int before
        setting the attribute


        Parameters
        ----------
        new_process_number : int, Any
            new number of augmentation processes; should be int but can be of
            any type that can be casted to an int

        """

        self._n_process_augmentation = int(new_process_number)

    @property
    def transforms(self):
        """
        Property to access the current data transforms

        Returns
        -------
        None, ``AbstractTransform``
            The transformation, can either be None or an instance of
            ``AbstractTransform``
        """

        return self._transforms

    @transforms.setter
    def transforms(self, new_transforms):
        """
        Setter for data transforms; raises a TypeError if the transforms are
        not of valid type (either None or an instance of
        ``AbstractTransform``)

        Parameters
        ----------
        new_transforms : None, ``AbstractTransform``
            the new transforms

        """

        if new_transforms is not None and not isinstance(
                new_transforms, AbstractTransform):
            raise TypeError("transforms must be None or an instance of "
                            "AbstractTransform")

        self._transforms = new_transforms

    @property
    def data_loader_cls(self):
        """
        Property to access the current data loader class

        Returns
        -------
        type
            Subclass of ``DataLoader``
        """

        return self._data_loader_cls

    @data_loader_cls.setter
    def data_loader_cls(self, new_loader_cls):
        """
        Setter for current data loader class; raises a TypeError if the given
        class is not valid
        (must be a class and a subclass of ``DataLoader``)

        Parameters
        ----------
        new_loader_cls : type
            the new data loader class

        """

        if not (inspect.isclass(new_loader_cls) and issubclass(
                new_loader_cls, DataLoader)):
            raise TypeError("data_loader_cls must be a class and a subclass "
                            "of DataLoader")

        self._data_loader_cls = new_loader_cls

    @property
    def n_samples(self):
        """
        Number of Samples

        Returns
        -------
        int
            Number of Samples

        """
        return len(self.dataset)

    @property
    def n_batches(self):
        """
        Returns Number of Batches based on batchsize and number of samples

        Returns
        -------
        int
            Number of Batches

        Raises
        ------
        AssertionError
            :attr:`DataManager.n_samples` is smaller than or equal to zero

        """
        assert self.n_samples > 0

        n_batches = self.n_samples // self.batch_size

        truncated_batch = self.n_samples % self.batch_size

        n_batches += int(bool(truncated_batch) and not self.drop_last)

        return n_batches
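
    # Example: with 10 samples and a batch size of 3, ``n_batches`` is
    # 10 // 3 + 1 = 4, since the smaller final batch is kept; with
    # ``drop_last=True`` it would be 10 // 3 = 3.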

    @property
    def dataset(self):
        return self.data

    @dataset.setter
    def dataset(self, new_dset):
        if not isinstance(new_dset, AbstractDataset):
            raise TypeError("dataset must be an instance of AbstractDataset")

        self.data = new_dset

    def __iter__(self):
        """
        Built-in function to create an iterator. First creates an
        :class:`Augmenter` and then returns an iterator over the created
        augmenter

        Returns
        -------
        Generator object
            generator object to iterate over the augmented batches

        """
        return iter(self.get_batchgen())
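
# A minimal sketch wiring the pieces together: a list of sample dicts is
# wrapped into an ``IterableDataset`` by the default ``DataLoader``, a
# ``SequentialSampler`` is created from it and an ``Augmenter`` performs the
# batching. The toy samples are assumptions for illustration; transforms are
# omitted.
#
# >>> import numpy as np
# >>> samples = [{"data": np.random.rand(3), "label": i} for i in range(10)]
# >>> manager = DataManager(samples, batch_size=4,
# ...                       n_process_augmentation=1, transforms=None)
# >>> manager.n_batches
# 3
# >>> batchgen = manager.get_batchgen()  # iterable over augmented batches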


================================================
FILE: delira/data_loading/dataset.py
================================================
import abc
import os
import typing

from collections.abc import Iterable
from tqdm import tqdm


class AbstractDataset:
    """
    Base Class for Dataset

    """

    def __init__(self, data_path: str, load_fn: typing.Callable):
        """

        Parameters
        ----------
        data_path : str
            path to data samples
        load_fn : function
            function to load single sample
        """
        self.data_path = data_path
        self._load_fn = load_fn
        self.data = []

    @abc.abstractmethod
    def _make_dataset(self, path: str):
        """
        Create dataset

        Parameters
        ----------
        path : str
            path to data samples

        Returns
        -------
        list
            data: List of sample paths if lazy; List of samples if not

        """
        pass

    @abc.abstractmethod
    def __getitem__(self, index):
        """
        return data with given index (and loads it before if lazy)

        Parameters
        ----------
        index : int
            index of data

        Returns
        -------
        dict
            data

        """
        pass

    def __len__(self):
        """
        Return number of samples

        Returns
        -------
        int
            number of samples
        """
        return len(self.data)

    def __iter__(self):
        """
        Return an iterator for the dataset

        Returns
        -------
        object
            a single sample
        """
        return _DatasetIter(self)

    def get_sample_from_index(self, index):
        """
        Returns the data sample for a given index
        (without any loading if it would be necessary)
        This implements the base case and can be subclassed
        for index mappings.
        The actual loading behaviour (lazy or cached) should be
        implemented in ``__getitem__``

        See Also
        --------
        :meth:`ConcatDataset.get_sample_from_index`
        :meth:`BaseLazyDataset.__getitem__`
        :meth:`BaseCacheDataset.__getitem__`

        Parameters
        ----------
        index : int
            index corresponding to targeted sample

        Returns
        -------
        Any
            sample corresponding to given index
        """

        return self.data[index]

    def get_subset(self, indices):
        """
        Returns a Subset of the current dataset based on given indices

        Parameters
        ----------
        indices : iterable
            valid indices to extract subset from current dataset

        Returns
        -------
        :class:`BlankDataset`
            the subset

        """

        # extract other important attributes from current dataset
        kwargs = {}

        for key, val in vars(self).items():
            if not (key.startswith("__") and key.endswith("__")):

                if key == "data":
                    continue
                kwargs[key] = val

        kwargs["old_getitem"] = self.__class__.__getitem__
        subset_data = [self.get_sample_from_index(idx) for idx in indices]

        return BlankDataset(subset_data, **kwargs)
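
# A minimal sketch of a concrete subclass, assuming in-memory samples:
# ``_make_dataset`` gathers the samples and ``__getitem__`` returns one
# sample dict per index (the names below are illustrative only).
#
# >>> class SquaresDataset(AbstractDataset):
# ...     def __init__(self, n):
# ...         super().__init__(None, None)
# ...         self.data = self._make_dataset(n)
# ...     def _make_dataset(self, n):
# ...         return [{"data": i * i} for i in range(n)]
# ...     def __getitem__(self, index):
# ...         return self.get_sample_from_index(index)
# >>> dset = SquaresDataset(4)
# >>> len(dset), dset[2]
# (4, {'data': 4})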


class _DatasetIter(object):
    """
    Iterator for dataset
    """

    def __init__(self, dset):
        """

        Parameters
        ----------
        dset : :class:`AbstractDataset`
            the dataset which should be iterated
        """
        self._dset = dset
        self._curr_index = 0

    def __iter__(self):
        return self

    def __next__(self):
        if self._curr_index >= len(self._dset):
            raise StopIteration

        sample = self._dset[self._curr_index]
        self._curr_index += 1
        return sample


class DictDataset(AbstractDataset):
    """
    Dataset to wrap a dict of keys and iterables.
    """

    def __init__(self, data: dict):
        """

        Parameters
        ----------
        data : dict
            dictionary consisting of keys and iterables.
            The iterables should contain an item for each index
        """
        super().__init__(None, None)
        self._data = data

    def __getitem__(self, index: int):
        """
        Function to make the dataset indexable. Returns the sample
        corresponding to the given index

        Parameters
        ----------
        index : int
            the index specifying the sample to return

        Returns
        -------
        dict
            the sample corresponding to :param:`index`

        """
        return {k: v[index] for k, v in self._data.items()}

    def get_sample_from_index(self, index):
        """
        Mapping from index to sample

        Parameters
        ----------
        index : int
            the index specifying the sample to return

        Returns
        -------
        dict
            the sample corresponding to :param:`index`

        """
        return self[index]

    def _make_dataset(self, path: str):
        """
        Function to create the dataset
        (not necessary here, since the data is already in memory)

        Parameters
        ----------
        path : str
            the path to load the data from

        """
        pass

    def __len__(self):
        """
        Function to determine the dataset's length

        Returns
        -------
        int
            the number of samples
        """
        return min([len(v) for v in self._data.values()])
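
# Sketch: the dict wrapper indexes every value at ``index`` and its length is
# governed by the shortest value (toy data assumed for illustration).
#
# >>> dset = DictDataset({"data": [1, 2, 3], "label": ["a", "b"]})
# >>> len(dset)
# 2
# >>> dset[1]
# {'data': 2, 'label': 'b'}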


class IterableDataset(AbstractDataset):
    """
    Dataset to wrap a list of dicts.
    """

    def __init__(self, data: Iterable):
        """

        Parameters
        ----------
        data : Iterable
            an iterable of dicts each representing a single sample
        """
        super().__init__(None, None)
        self._data = data

    def __getitem__(self, index):
        """
        Function to make the dataset indexable. Returns the sample
        corresponding to the given index

        Parameters
        ----------
        index : int
           the index specifying the sample to return

        Returns
        -------
        dict
           the sample corresponding to :param:`index`

       """
        return self._data[index]

    def get_sample_from_index(self, index):
        """
        Mapping from index to sample

        Parameters
        ----------
        index : int
            the index specifying the sample to return

        Returns
        -------
        dict
            the sample corresponding to :param:`index`

        """
        return self[index]

    def _make_dataset(self, path: str):
        """
        Function to create the dataset
        (not necessary here, since the data is already in memory)

        Parameters
        ----------
        path : str
            the path to load the data from

        """
        pass

    def __len__(self):
        """
        Function to determine the dataset's length

        Returns
        -------
        int
            the number of samples
        """
        return len(self._data)


class BlankDataset(AbstractDataset):
    """
    Blank Dataset loading the data, which has been passed
    in it's ``__init__`` by it's ``_sample_fn``

    """

    def __init__(self, data, old_getitem, **kwargs):
        """

        Parameters
        ----------
        data : iterable
            data to load
        old_getitem : function
            get item method of previous dataset
        **kwargs :
            additional keyword arguments (are set as class attribute)

        """
        super().__init__(None, None)

        self.data = data
        self._old_getitem = old_getitem

        for key, val in kwargs.items():
            setattr(self, key, val)

    def __getitem__(self, index):
        """
        returns the single sample corresponding to ``index`` via the stored
        ``old_getitem`` of the previous dataset

        Parameters
        ----------
        index : int
            index specifying the data to load

        Returns
        -------
        dict
            dictionary containing a single sample

        """
        return self._old_getitem(self, index)

    def __len__(self):
        """
        returns the length of the dataset

        Returns
        -------
        int
            number of samples

        """
        return len(self.data)


class BaseCacheDataset(AbstractDataset):
    """
    Dataset to preload and cache data

    Notes
    -----
    data needs to fit completely into RAM!

    """

    def __init__(self, data_path: typing.Union[str, list],
                 load_fn: typing.Callable, **load_kwargs):
        """

        Parameters
        ----------
        data_path : str or list
            if data_path is a string, load_fn is called for all items inside
            the specified directory;
            if data_path is a list, load_fn is called for each element of the
            list
        load_fn : function
            function to load a single data sample
        **load_kwargs :
            additional loading keyword arguments (image shape,
            channel number, ...); passed to load_fn

        """
        super().__init__(data_path, load_fn)
        self._load_kwargs = load_kwargs
        self.data = self._make_dataset(data_path)

    def _make_dataset(self, path: typing.Union[str, list]):
        """
        Helper Function to make a dataset containing all samples in a certain
        directory

        Parameters
        ----------
        path : str or list
            if path is a string, load_fn is called for all items inside the
            specified directory;
            if path is a list, load_fn is called for each element of the list

        Returns
        -------
        list
            list of items returned by load_fn (typically dicts)

        Raises
        ------
        AssertionError
            if `path` is not a list and is not a valid directory

        """
        data = []
        if isinstance(path, list):
            # iterate over all elements
            for p in tqdm(path, unit='samples', desc="Loading samples"):
                data.append(self._load_fn(p, **self._load_kwargs))
        else:
            # call load_fn for all elements inside the directory
            assert os.path.isdir(path), '%s is not a valid directory' % path
            for p in tqdm(os.listdir(path), unit='samples',
                          desc="Loading samples"):
                data.append(self._load_fn(os.path.join(path, p),
                                          **self._load_kwargs))
        return data

    def __getitem__(self, index):
        """
        return data sample specified by index

        Parameters
        ----------
        index : int
            index to specify which data sample to return

        Returns
        -------
        dict
            data sample

        """
        data_dict = self.get_sample_from_index(index)
        return data_dict
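
# Sketch of caching and subsetting, assuming a toy ``load_fn`` that builds a
# sample dict instead of reading from disk: everything is loaded eagerly in
# ``__init__`` and ``get_subset`` keeps the cached behaviour.
#
# >>> dset = BaseCacheDataset(list(range(5)), lambda x: {"data": x})
# >>> dset[3]
# {'data': 3}
# >>> sub = dset.get_subset([0, 2, 4])
# >>> len(sub), sub[1]
# (3, {'data': 2})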


class BaseLazyDataset(AbstractDataset):
    """
    Dataset to load data in a lazy way

    """

    def __init__(self, data_path: typing.Union[str, list],
                 load_fn: typing.Callable, **load_kwargs):
        """

        Parameters
        ----------
        data_path : str or list
            if data_path is a string, load_fn is called for all items inside
            the specified directory;
            if data_path is a list, load_fn is called for each element of the
            list
        load_fn : function
            function to load a single data sample
        **load_kwargs :
            additional loading keyword arguments (image shape,
            channel number, ...); passed to load_fn

        """
        super().__init__(data_path, load_fn)
        self._load_kwargs = load_kwargs
        self.data = self._make_dataset(self.data_path)

    def _make_dataset(self, path: typing.Union[str, list]):
        """
        Helper Function to make a dataset containing paths to all images in a
        certain directory

        Parameters
        ----------
        path : str or list
            path to data samples

        Returns
        -------
        list
            list of sample paths

        Raises
        ------
        AssertionError
            if `path` is not a valid directory

        """
        if isinstance(path, list):
            # generate list from iterable
            data = list(path)
        else:
            # generate list from all items
            assert os.path.isdir(path), '%s is not a valid directory' % path
            data = [os.path.join(path, p) for p in os.listdir(path)]
        return data

    def __getitem__(self, index):
        """
        load data sample specified by index

        Parameters
        ----------
        index : int
            index to specify which data sample to load

        Returns
        -------
        dict
            loaded data sample
        """
        data_dict = self._load_fn(self.get_sample_from_index(index),
                                  **self._load_kwargs)
        return data_dict
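
# The lazy counterpart only stores the items and defers ``_load_fn`` to
# access time; sketch with a stand-in loader and hypothetical file names:
#
# >>> dset = BaseLazyDataset(["a.png", "b.png"], lambda p: {"path": p})
# >>> len(dset)
# 2
# >>> dset[1]
# {'path': 'b.png'}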


class BaseExtendCacheDataset(BaseCacheDataset):
    """
    Dataset to preload and cache data. Function to load sample is expected
    to return an iterable which can contain multiple samples

    Notes
    -----
    data needs to fit completely into RAM!

    """

    def __init__(self, data_path: typing.Union[str, list],
                 load_fn: typing.Callable, **load_kwargs):
        """

        Parameters
        ----------
        data_path : str or list
            if data_path is a string, load_fn is called for all items inside
            the specified directory;
            if data_path is a list, load_fn is called for each element of the
            list
        load_fn : function
            function to load multiple data samples at once. Needs to return
            an iterable which extends the internal list.
        **load_kwargs :
            additional loading keyword arguments (image shape,
            channel number, ...); passed to load_fn

        See Also
        --------
        :class:`BaseCacheDataset`

        """
        super().__init__(data_path, load_fn, **load_kwargs)

    def _make_dataset(self, path: typing.Union[str, list]):
        """
        Helper Function to make a dataset containing all samples in a certain
        directory

        Parameters
        ----------
        path : str or list
            if path is a string, load_fn is called for all items inside the
            specified directory;
            if path is a list, load_fn is called for each element of the list

        Returns
        -------
        list
            list of items returned by load_fn (typically dicts)

        Raises
        ------
        AssertionError
            if `path` is not a list and is not a valid directory

        """
        data = []
        if isinstance(path, list):
            # iterate over all elements
            for p in tqdm(path, unit='samples', desc="Loading samples"):
                data.extend(self._load_fn(p, **self._load_kwargs))
        else:
            # call load_fn for all elements inside the directory
            assert os.path.isdir(path), '%s is not a valid directory' % path
            for p in tqdm(os.listdir(path), unit='samples',
                          desc="Loading samples"):
                data.extend(self._load_fn(os.path.join(path, p),
                                          **self._load_kwargs))
        return data


class ConcatDataset(AbstractDataset):
    def __init__(self, *datasets):
        """
        Concatenate multiple datasets to one

        Parameters
        ----------
        *datasets :
            variable number of datasets to concatenate
        """
        super().__init__(None, None)

        # TODO: Why should datasets[0] be a list and not an AbstractDataset?

        # check if first item in datasets is list and datasets is of length 1
        if (len(datasets) == 1) and isinstance(datasets[0], list):
            datasets = datasets[0]

        self.data = datasets

    def get_sample_from_index(self, index):
        """
        Returns the data sample for a given index
        (without any loading if it would be necessary)
        This method implements the index mapping of a global index to
        the subindices for each dataset.
        The actual loading behaviour (lazy or cached) should be
        implemented in ``__getitem__``

        See Also
        --------
        :method:AbstractDataset.get_sample_from_index
        :method:BaseLazyDataset.__getitem__
        :method:BaseCacheDataset.__getitem__

        Parameters
        ----------
        index : int
            index corresponding to targeted sample

        Returns
        -------
        Any
            sample corresponding to given index
        """

        curr_max_index = 0
        for dset in self.data:
            prev_max_index = curr_max_index
            curr_max_index += len(dset)

            if prev_max_index <= index < curr_max_index:
                return dset[index - prev_max_index]

        raise IndexError("Index %d is out of range for %d items in datasets" %
                         (index, len(self)))

    def __getitem__(self, index):
        return self.get_sample_from_index(index)

    def __len__(self):
        return sum([len(dset) for dset in self.data])
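
# Index-mapping sketch: global indices run through the datasets in order
# (toy cache datasets assumed for illustration).
#
# >>> d1 = BaseCacheDataset([0, 1], lambda x: {"data": x})
# >>> d2 = BaseCacheDataset([10, 11, 12], lambda x: {"data": x})
# >>> concat = ConcatDataset(d1, d2)
# >>> len(concat)
# 5
# >>> concat[3]  # second sample of d2
# {'data': 11}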


================================================
FILE: delira/data_loading/load_utils.py
================================================
import collections
import os

import numpy as np


def norm_range(mode):
    """
    Closure function for range normalization
    Parameters
    ----------
    mode : str
        '-1,1' normalizes data to range [-1, 1], while '0,1'
        normalizes data to range [0, 1]
    Returns
    -------
    callable
        normalization function
    """
    def norm_fn(data):
        """
        Returns the input data normalized to the range
        Parameters
        ----------
        data : np.ndarray
            data which should be normalized
        Returns
        -------
        np.ndarary
            normalized data
        """
        norm = data - data.min()
        norm = norm / norm.max()
        if mode == '-1,1':
            norm = norm - 0.5
            norm = norm * 2
        elif mode == '0,1':
            pass
        else:
            raise ValueError('{mode} not supported.')
        return norm
    return norm_fn
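
# Quick check of the closure, assuming a toy array: '-1,1' maps the minimum
# to -1 and the maximum to 1, '0,1' maps them to 0 and 1.
#
# >>> import numpy as np
# >>> norm_range('-1,1')(np.array([0., 5., 10.]))
# array([-1.,  0.,  1.])
# >>> norm_range('0,1')(np.array([0., 5., 10.]))
# array([0. , 0.5, 1. ])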


def norm_zero_mean_unit_std(data):
    """
    Return normalized data with mean 0, standard deviation 1

    Parameters
    ----------
    data : np.ndarray
        data which should be normalized

    Returns
    -------
    np.ndarray
        normalized data
    """
    return (data - np.mean(data)) / np.std(data)


class LoadSample:
    """
    Provides a callable to load a single sample from multiple files in a folder
    """

    def __init__(self,
                 sample_ext: dict,
                 sample_fn: collections.abc.Callable,
                 dtype: dict = None, normalize: tuple = (),
                 norm_fn=norm_range('-1,1'),
                 **kwargs):
        """
        Parameters
        ----------
        sample_ext : dict of iterable
            Defines the data _sample_ext. The dict key defines the position of
            the sample inside the returned data dict, while the list defines
            the the files which should be loaded inside the data dict.
        sample_fn : function
            function to load a single sample
        dtype : dict
            defines the data type which should be used for the respective key
        normalize : iterable of hashable
            list of hashable which should be normalized. Can contain
            entire keys of extension (normalizes each element individually)
            or provide the file name which should be normalized
        norm_fn : function
            function to normalize input. Default: normalize range to [-1, 1]
        kwargs :
            variable number of keyword arguments passed to load function
        Examples
        --------
        Simple loading function which returns a dict with `data`
        >>> from delira.data_loading.nii import load_nii
        >>> load_fn = LoadSample({'data:': ['data.nii']}, load_nii)
        Loading function for data (casted to float32 and normalized) and
        segmentation (casted to unit8)
        >>> from delira.data_loading.nii import load_nii
        >>> load_fn = LoadSample({'data:': ['data.nii'], 'seg': ['seg.nii']},
        >>>                      load_nii, dtype={'data': 'float32',
        >>>                                       'seg': 'uint8'},
        >>>                      normalize=('data',))
        """
        if dtype is None:
            dtype = {}
        self._sample_ext = sample_ext
        self._sample_fn = sample_fn
        self._dtype = dtype
        self._normalize = normalize
        self._norm_fn = norm_fn
        self._kwargs = kwargs

    def __call__(self, path) -> dict:
        """
        Load sample from multiple files
        Parameters
        ----------
        path : str
            defines patch to folder which contain the _sample_ext
        Returns
        -------
        dict
            dict with data defines by _sample_ext
        """
        sample_dict = {}
        for key, item in self._sample_ext.items():
            data_list = []
            for f in item:
                data = self._sample_fn(os.path.join(path, f), **self._kwargs)

                # _normalize data if necessary
                if (key in self._normalize) or (f in self._normalize):
                    data = self._norm_fn(data)

                # cast data to type
                if key in self._dtype:
                    data = data.astype(self._dtype[key])

                # append data
                data_list.append(data)
            if len(data_list) == 1:
                sample_dict[key] = data_list[0][np.newaxis]
            else:
                sample_dict[key] = np.stack(data_list)
        return sample_dict
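
# Sketch with a stand-in ``sample_fn`` that fabricates an array instead of
# reading from disk (the file name and folder below are hypothetical):
#
# >>> import numpy as np
# >>> load_fn = LoadSample({"data": ["img.png"]}, lambda p: np.ones((4, 4)),
# ...                      dtype={"data": "float32"})
# >>> sample = load_fn("/some/folder")
# >>> sample["data"].shape, sample["data"].dtype
# ((1, 4, 4), dtype('float32'))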


class LoadSampleLabel(LoadSample):
    def __init__(self,
                 sample_ext: dict,
                 sample_fn: collections.abc.Callable,
                 label_ext: str,
                 label_fn: collections.abc.Callable,
                 dtype: dict = None, normalize: tuple = (),
                 norm_fn=norm_range('-1,1'),
                 sample_kwargs=None, **kwargs):
        """
        Load sample and label from folder
        Parameters
        ----------
        sample_ext : dict of list
            Defines the data _sample_ext. The dict key defines the position of
            the sample inside the returned data dict, while the list defines
            the the files which should be loaded inside the data dict.
            Passed to LoadSample.
        sample_fn : function
            function to load a single sample
            Passed to LoadSample.
        label_ext : str
            extension for label
        label_fn: function
            functions which returns the label inside a dict
        dtype : dict
            defines the data type which should be used for the respective key
        normalize : iterable of hashable
            list of hashable which should be normalized. Can contain
            entire keys of extension (normalizes each element individually)
            or provide the file name which should be normalized
        norm_fn : function
            function to normalize input. Default: normalize range to [-1, 1]
        sample_kwargs :
            additional keyword arguments passed to LoadSample
        kwargs :
            variable number of keyword arguments passed to _label_fn
        See Also
        --------
        :class: `LoadSample`
        """
        if sample_kwargs is None:
            sample_kwargs = {}

        super().__init__(sample_ext=sample_ext, sample_fn=sample_fn,
                         dtype=dtype, normalize=normalize, norm_fn=norm_fn,
                         **sample_kwargs)
        self._label_ext = label_ext
        self._label_fn = label_fn
        self._label_kwargs = kwargs

    def __call__(self, path) -> dict:
        """
        Loads a sample and a label
        Parameters
        ----------
        path : str
        Returns
        -------
        dict
            dict with data and label
        """
        sample_dict = super().__call__(path)
        label_dict = self._label_fn(os.path.join(path, self._label_ext),
                                    **self._label_kwargs)
        sample_dict.update(label_dict)
        return sample_dict


================================================
FILE: delira/data_loading/numba_transform.py
================================================
from batchgenerators.transforms import AbstractTransform, Compose

import logging
from delira import get_current_debug_mode
import numba

logger = logging.getLogger(__name__)


class NumbaTransformWrapper(AbstractTransform):
    def __init__(self, transform: AbstractTransform, nopython=True,
                 target="cpu", parallel=False, **options):

        if get_current_debug_mode():
            # set options for debug mode
            logger.debug("Debug mode detected. Overwriting numba options "
                         "nopython to False and target to cpu")
            nopython = False
            target = "cpu"

        transform.__call__ = numba.jit(transform.__call__, nopython=nopython,
                                       target=target,
                                       parallel=parallel, **options)
        self._transform = transform

    def __call__(self, **kwargs):
        # explicit attribute access: calling ``self._transform(**kwargs)``
        # would look up ``__call__`` on the type and bypass the jitted
        # version assigned to the instance above
        return self._transform.__call__(**kwargs)


class NumbaTransform(NumbaTransformWrapper):
    def __init__(self, transform_cls, nopython=True, target="cpu",
                 parallel=False, **kwargs):
        trafo = transform_cls(**kwargs)

        super().__init__(trafo, nopython=nopython, target=target,
                         parallel=parallel)


class NumbaCompose(Compose):
    def __init__(self, transforms):
        super().__init__(transforms=[NumbaTransformWrapper(trafo)
                                     for trafo in transforms])


================================================
FILE: delira/data_loading/sampler/__init__.py
================================================
from delira.data_loading.sampler.abstract import AbstractSampler
from delira.data_loading.sampler.batch import BatchSampler
from delira.data_loading.sampler.random import RandomSampler, \
    RandomSamplerNoReplacement, RandomSamplerWithReplacement
from delira.data_loading.sampler.sequential import SequentialSampler
from delira.data_loading.sampler.weighted import WeightedRandomSampler, \
    PrevalenceRandomSampler


================================================
FILE: delira/data_loading/sampler/abstract.py
================================================
from delira.data_loading.dataset import AbstractDataset


class AbstractSampler(object):
    """
    Abstract Class defining a sampler interface
    """

    def __init__(self, indices):
        """

        Parameters
        ----------
        indices : list
            the indices containing the classes to sample from
        """
        self._indices = indices

    def __iter__(self):
        """
        Returns an iterator, must be overwritten in subclasses

        Raises
        ------
        NotImplementedError
            if not overwritten in subclass

        """
        raise NotImplementedError

    def __len__(self):
        """
        Defines the class length

        Returns
        -------
        int
            the number of samples

        """
        return len(self._indices)

    @classmethod
    def from_dataset(cls, dset: AbstractDataset, **kwargs):
        """
        Class Method to create a sampler from a given dataset

        Parameters
        ----------
        dset : :class:`AbstractDataset`
            the dataset to create the sampler from
        **kwargs :
            additional keyword arguments

        """
        if hasattr(dset, "__len__"):
            length = len(dset)
        else:
            length = len([tmp for tmp in dset])
        return cls(list(range(length)), **kwargs)


================================================
FILE: delira/data_loading/sampler/batch.py
================================================
from delira.data_loading.sampler.abstract import AbstractSampler


class BatchSampler(object):
    """
    A Sampler-Wrapper combining the single indices sampled by a sampler
    into batches of a given size
    """

    def __init__(self, sampler: AbstractSampler, batch_size, drop_last=False):
        """

        Parameters
        ----------
        sampler : :class:`AbstractSampler`
            the actual sampler producing single sample indices
        batch_size : int
            the size of each batch
        drop_last : bool
            whether or not to discard the last (possibly smaller) batch
        """
        self._sampler = sampler
        self._batchsize = batch_size
        self._drop_last = drop_last

    def __iter__(self):
        """
        Iterator holding lists of sample-indices. Each list contains indices
        for a single batch

        Yields
        ------
        list
            a list containing the sample indices of the current batch

        """
        batch_idxs = []

        for idx in self._sampler:
            batch_idxs.append(idx)

            if len(batch_idxs) == self._batchsize:
                yield batch_idxs

                batch_idxs = []

        if not self._drop_last and batch_idxs:
            yield batch_idxs

    def __len__(self):
        """
        Defines the class length

        Returns
        -------
        int
            the number of batches

        """
        num_batches = len(self._sampler) // self._batchsize

        if not self._drop_last:
            num_batches += int(bool(len(self._sampler) % self._batchsize))

        return num_batches
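
# Sketch: grouping a sequential stream of indices into batches of two
# (assumes ``SequentialSampler`` from this package is importable):
#
# >>> from delira.data_loading.sampler.sequential import SequentialSampler
# >>> sampler = SequentialSampler(list(range(5)))
# >>> list(BatchSampler(sampler, batch_size=2))
# [[0, 1], [2, 3], [4]]
# >>> list(BatchSampler(sampler, batch_size=2, drop_last=True))
# [[0, 1], [2, 3]]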


================================================
FILE: delira/data_loading/sampler/random.py
================================================
from delira.data_loading.sampler.abstract import AbstractSampler
import numpy as np


class RandomSampler(AbstractSampler):
    """
    A Generic Random Sampler
    """

    def __init__(self, indices, replacement=False, num_samples=None):
        """

        Parameters
        ----------
        indices : list
            the indices containing the classes to sample from
        replacement : bool
            whether to sample with or without replacement
        num_samples : int
            the number of samples to provide; may only be specified
            if :param:`replacement` is True. If not specified, it defaults to
            the number of samples present in :param:`indices`
        """
        super().__init__(indices)

        if replacement and num_samples is None:
            num_samples = len(self._indices)

        self._replacement = replacement
        self._num_samples = num_samples

    def __iter__(self):
        """
        Returns an iterator returning random samples

        Returns
        -------
        Iterator
            an iterator returning random samples

        """
        n = len(self._indices)

        if self._replacement:
            return iter(np.random.randint(n, size=self._num_samples).tolist())

        possible_samples = np.arange(n)
        np.random.shuffle(possible_samples)

        return iter(possible_samples)

    def __len__(self):
        """
        Defines the length of the sampler

        Returns
        -------
        int
            the number of samples
        """
        if self._replacement:
            return self._num_samples
        else:
            return super().__len__()


class RandomSamplerNoReplacement(RandomSampler):
    """
    A Random Sampler without replacement
    """

    def __init__(self, indices):
        """

        Parameters
        ----------
        indices : list
            the indices containing the classes to sample from

        """
        super().__init__(indices, False, None)


class RandomSamplerWithReplacement(RandomSampler):
    """
    A Random Sampler With Replacement
    """

    def __init__(self, indices, num_samples=None):
        """

        Parameters
        ----------
        indices : list
            the indices containing the classes to sample from
        num_samples : int
            number of samples to provide; if not specified, it defaults to
            the number of values given in :param:`indices`

        """
        super().__init__(indices, True, num_samples)


================================================
FILE: delira/data_loading/sampler/sequential.py
================================================
from delira.data_loading.sampler.abstract import AbstractSampler


class SequentialSampler(AbstractSampler):
    """
    Class to implement sequential sampling
    """

    def __iter__(self):
        """
        Creates an iterator returning sequential samples

        Returns
        -------
        Iterator
            iterator returning samples in a sequential manner
        """
        return iter(range(len(self._indices)))


================================================
FILE: delira/data_loading/sampler/weighted.py
================================================
from delira.data_loading.sampler.abstract import AbstractSampler
from delira.data_loading.dataset import AbstractDataset
import numpy as np


class WeightedRandomSampler(AbstractSampler):
    """
    Class implementing Weighted Random Sampling
    """

    def __init__(self, weights, num_samples=None):
        """

        Parameters
        ----------
        weights : list
            per-sample weights
        num_samples : int
            number of samples to provide. If not specified, this defaults to
            the number of values given in :param:`weights`
        """
        if num_samples is None:
            num_samples = len(weights)

        self._num_samples = num_samples
        super().__init__(np.arange(num_samples))
        self._weights = weights

    def __iter__(self):
        """
        Defines the actual weighted random sampling

        Returns
        -------
        Iterator
            iterator producing random samples
        """
        return iter(np.random.choice(self._indices, size=self._num_samples,
                                     p=self._weights))

    def __len__(self):
        """
        Defines the length of the sampler

        Returns
        -------
        int
            the number of samples
        """
        return self._num_samples


class PrevalenceRandomSampler(WeightedRandomSampler):
    """
    Class implementing prevalence weighted sampling
    """

    def __init__(self, indices):
        """

        Parameters
        ----------
        indices : list
            list of class indices to calculate a weighting from
        """

        weights = np.array(indices).astype(float)
        classes, classes_count = np.unique(indices, return_counts=True)

        # compute probabilities
        target_prob = 1 / classes.shape[0]

        # generate weight matrix
        for i, c in enumerate(classes):
            weights[weights == c] = (target_prob / classes_count[i])

        super().__init__(weights, num_samples=len(indices))

    @classmethod
    def from_dataset(cls, dset: AbstractDataset, key="label", **kwargs):
        """
        Class method to create an instance of this sampler from a given
        dataset

        Parameters
        ----------
        dset : :class:`AbstractDataset`
            the dataset to create weightings from
        key : str
            the key holding the class index for each sample
        **kwargs :
            Additional keyword arguments

        """
        return cls([_sample[key] for _sample in dset], **kwargs)
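
# Worked example: labels [0, 0, 0, 1] yield target_prob 1 / 2, so every
# class-0 sample gets weight 0.5 / 3 and the single class-1 sample 0.5 / 1;
# the weights sum to one and act as probabilities in ``np.random.choice``.
#
# >>> sampler = PrevalenceRandomSampler([0, 0, 0, 1])
# >>> sampler._weights
# array([0.16666667, 0.16666667, 0.16666667, 0.5       ])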


================================================
FILE: delira/io/__init__.py
================================================
from delira import get_backends

if "TORCH" in get_backends():
    from delira.io.torch import save_checkpoint_torch as torch_save_checkpoint
    from delira.io.torch import load_checkpoint_torch as torch_load_checkpoint

    from delira.io.torch import save_checkpoint_torchscript \
        as torchscript_save_checkpoint
    from delira.io.torch import load_checkpoint_torchscript \
        as torchscript_load_checkpoint

if "TF" in get_backends():
    from delira.io.tf import save_checkpoint as tf_save_checkpoint
    from delira.io.tf import load_checkpoint as tf_load_checkpoint

    from delira.io.tf import save_checkpoint_eager as tf_eager_save_checkpoint
    from delira.io.tf import load_checkpoint_eager as tf_eager_load_checkpoint

if "CHAINER" in get_backends():
    from delira.io.chainer import save_checkpoint as chainer_save_checkpoint
    from delira.io.chainer import load_checkpoint as chainer_load_checkpoint

if "SKLEARN" in get_backends():
    from delira.io.sklearn import load_checkpoint as sklearn_load_checkpoint
    from delira.io.sklearn import save_checkpoint as sklearn_save_checkpoint


================================================
FILE: delira/io/chainer.py
================================================
import chainer
import zipfile
import os
import json


def save_checkpoint(file, model=None, optimizers=None, epoch=None):
    """
    Saves the given checkpoint

    Parameters
    ----------
    file : str
        string containing the path, the state should be saved to
    model : :class:`AbstractChainerNetwork`
    optimizers : dict
        dictionary containing all optimizers
    epoch : int
        the current epoch

    """
    # config file for path mapping inside the archive
    save_config = {}
    # files to write to archive and delete afterwards
    del_files = []

    # save model to hdf5
    if model is not None:
        # temporary filename
        _curr_file = file.replace("chain", "model")
        # serialize to temporary file
        chainer.serializers.save_hdf5(_curr_file, model)
        # add to config (without path to navigate inside archive)
        save_config["model"] = os.path.basename(_curr_file)
        # append to files to process
        del_files.append(_curr_file)

    # save all optimizers to hdf5
    if optimizers is not None:
        # dict for mapping optimizer names to files
        optim_config = {}
        for k, v in optimizers.items():
            # temporary file
            _curr_file = file.replace("chain", "optim.%s" % str(k))
            # serialize to temporary file
            chainer.serializers.save_hdf5(_curr_file, v)
            # add to optimizer config (without path to navigate inside archive)
            optim_config[k] = os.path.basename(_curr_file)
            # append to files to process
            del_files.append(_curr_file)

        # add optimizer path mapping to config
        save_config["optimizers"] = optim_config

    # add epoch to config
    if epoch is not None:
        save_config["epoch"] = epoch
    # temporary config file
    _curr_file = file.replace("chain", "config")
    # serialize config dict to temporary json config file
    with open(_curr_file, "w") as f:
        json.dump(save_config, f)
    # append to files to process
    del_files.append(_curr_file)

    # create the actual archive
    with zipfile.ZipFile(file, mode="w") as f:
        for _file in del_files:
            # write temporary file to archive and remove it afterwards
            f.write(_file, os.path.basename(_file))
            os.remove(_file)


def _deserialize_and_load(archive: zipfile.ZipFile, file: str, obj,
                          temp_dir: str):
    """
    Helper Function to temporarily extract a file from a given archive,
    deserialize the object in this file and remove the temporary file

    Parameters
    ----------
    archive : :class:`zipfile.ZipFile`
        the archive containing the file to deserialize
    file : str
        identifier specifying the file inside the archive to extract and
        deserialize
    obj : Any
        the object to load the deserialized state to. Must provide a
        `serialize` function
    temp_dir : str
        the directory the file will be temporarily extracted to

    Returns
    -------
    Any
        the object with the loaded and deserialized state

    """
    # temporarily extract the file
    archive.extract(file, temp_dir)
    # deserialize object
    chainer.serializers.load_hdf5(os.path.join(temp_dir, file), obj)
    # remove temporary file
    os.remove(os.path.join(temp_dir, file))
    return obj


def load_checkpoint(file, old_state: dict = None,
                    model: chainer.link.Link = None, optimizers: dict = None):
    """
    Loads a state from a given file

    Parameters
    ----------
    file : str
        string containing the path to the file containing the saved state
    old_state : dict
        dictionary containing the modules to load the states to
    model : :class:`chainer.link.Link`
        the model the state should be loaded to;
        overwrites the ``model`` key in ``old_state`` if not None
    optimizers : dict
        dictionary containing all optimizers.
        overwrites the ``optimizers`` key in ``old_state`` if not None

    Returns
    -------
    dict
        the loaded state

    """
    if old_state is None:
        old_state = {}

    if model is not None:
        old_state["model"] = model
    if optimizers is not None:
        old_state["optimizers"] = optimizers

    loaded_state = {}

    # open zip archive
    with zipfile.ZipFile(file) as f:

        # load config
        _curr_file = file.replace("chain", "config")
        # temporarily extract json file to dir
        f.extract(os.path.basename(_curr_file),
                  os.path.dirname(file))
        # load config dict
        with open(_curr_file) as _file:
            config = json.load(_file)
        # remove temporary json file
        os.remove(_curr_file)

        # load model if path is inside config
        if "model" in config:
            # open file in archive by temporarily extracting it
            loaded_state["model"] = _deserialize_and_load(
                f, config["model"], old_state["model"], os.path.dirname(file))

        # load optimizers if path mapping is inside config
        if "optimizers" in config:
            loaded_state["optimizers"] = {}
            optimizer_config = config["optimizers"]

            for k, v in optimizer_config.items():
                # open file in archive by temporarily extracting it
                loaded_state["optimizers"][k] = _deserialize_and_load(
                    f, v, old_state["optimizers"][k], os.path.dirname(file))

        # load epoch from config if possible
        if "epoch" in config:
            loaded_state["epoch"] = config["epoch"]

    return loaded_state


================================================
FILE: delira/io/sklearn.py
================================================
import logging
import joblib
logger = logging.getLogger(__name__)


def save_checkpoint(file: str, model=None, epoch=None, **kwargs):
    """
    Save model's parameters

    Parameters
    ----------
    file : str
        filepath the model should be saved to
    model : AbstractNetwork or None
        the model which should be saved;
        if None, ``None`` is stored as the model
    epoch : int
        current epoch (will also be pickled)
    **kwargs :
        additional keyword arguments (passed to joblib.dump)

    """

    return_val = joblib.dump({"model": model, "epoch": epoch}, file, **kwargs)
    return return_val


def load_checkpoint(file, **kwargs):
    """
    Loads a saved model

    Parameters
    ----------
    file : str
        filepath to a file containing a saved model
    **kwargs :
        additional keyword arguments (passed to joblib.load)

    Returns
    -------
    dict
        the loaded state (a dict containing model and epoch)

    """
    return joblib.load(file, **kwargs)


================================================
FILE: delira/io/tf.py
================================================
from delira.models.backends.tf_eager import AbstractTfEagerNetwork
import typing
import logging

import tensorflow as tf

logger = logging.getLogger(__name__)


def save_checkpoint(file: str, model=None):
    """
    Save model's parameters contained in its graph

    Parameters
    ----------
    file : str
        filepath the model should be saved to
    model : TfNetwork
        the model which should be saved
    """
    tf.train.Saver().save(model._sess, file)


def load_checkpoint(file: str, model=None):
    """
    Loads a saved model

    Parameters
    ----------
    file : str
        filepath to a file containing a saved model
    model : TfNetwork
        the model which should be loaded
    """

    # following operation adds AssignVariableOps to the graph, keep an eye on
    # this for memory leak
    tf.train.Saver().restore(model._sess, file)
    return {}


def _create_varlist(model: AbstractTfEagerNetwork = None,
                    optimizer: typing.Dict[str, tf.train.Optimizer] = None):
    variable_list = []

    if model is not None:
        variable_list += model.variables

    if optimizer is not None:
        for k, v in optimizer.items():
            variable_list += v.variables()

    return variable_list


def save_checkpoint_eager(file,
                          model: AbstractTfEagerNetwork = None,
                          optimizer: typing.Dict[str,
                                                 tf.train.Optimizer] = None,
                          epoch=None):
    variable_list = _create_varlist(model, optimizer)

    # can only save if variables exist, this is not the case if there was no
    # input forwarded through the network (yet)
    if variable_list:
        saver = tf.contrib.eager.Saver(variable_list)
        saver.save(file, global_step=epoch)
        return
    logger.warning("Could not save any variables because they don't exist "
                   "(yet). If you haven't forwarded any input through your "
                   "network yet, this is not an error, but expected behavior")


def load_checkpoint_eager(file,
                          model: AbstractTfEagerNetwork = None,
                          optimizer: typing.Dict[str,
                                                 tf.train.Optimizer] = None):

    variable_list = _create_varlist(model, optimizer)

    if variable_list:
        saver = tf.contrib.eager.Saver(variable_list)
        saver.restore(file)

        return {"model": model, "optimizer": optimizer}

    raise RuntimeError(
        "No variables found to restore; they probably have not been "
        "created yet. Make sure you have forwarded an input through "
        "your model at least once!")


================================================
FILE: delira/io/torch.py
================================================
from delira.models.backends.torchscript import AbstractTorchScriptNetwork
from delira.models.backends.torch import AbstractPyTorchNetwork
import torch
import logging
import os
from collections import OrderedDict

logger = logging.getLogger(__name__)


def save_checkpoint_torch(file: str, model=None, optimizers=None,
                          epoch=None, **kwargs):
    """
    Save checkpoint

    Parameters
    ----------
    file : str
        filepath the model should be saved to
    model : AbstractNetwork or None
        the model which should be saved
        if None: empty dict will be saved as state dict
    optimizers : dict
        dictionary containing all optimizers
    epoch : int
        current epoch (will also be pickled)

    """
    if optimizers is None:
        optimizers = {}
    if isinstance(model, torch.nn.DataParallel):
        _model = model.module
    else:
        _model = model

    if isinstance(_model, (AbstractPyTorchNetwork,
                           AbstractTorchScriptNetwork)):
        model_state = _model.state_dict()
    else:
        model_state = {}
        logger.debug("Saving checkpoint without Model")

    optim_state = OrderedDict()
    for key, val in optimizers.items():
        if isinstance(val, torch.optim.Optimizer):
            optim_state[key] = val.state_dict()

    if not optim_state:
        logger.debug("Saving checkpoint without Optimizer")

    if epoch is None:
        epoch = 0

    state = {"optimizer": optim_state,
             "model": model_state,
             "epoch": epoch}

    torch.save(state, file, **kwargs)


def load_checkpoint_torch(file, **kwargs):
    """
    Loads a saved model

    Parameters
    ----------
    file : str
        filepath to a file containing a saved model
    **kwargs:
        Additional keyword arguments (passed to torch.load)
        Especially "map_location" is important to change the device the
        state_dict should be loaded to

    Returns
    -------
    OrderedDict or dict
        the plain ``state_dict`` or the full checkpoint dictionary
        containing model and optimizer states and the epoch

    """
    checkpoint = torch.load(file, **kwargs)

    if not all([_key in checkpoint
                for _key in ["model", "optimizer", "epoch"]]):
        return checkpoint['state_dict']
    return checkpoint


def save_checkpoint_torchscript(file: str, model=None, optimizers=None,
                                epoch=None, **kwargs):
    """
    Save the current checkpoint to two different files:
        1.) ``file + ".model.ptj"``: Will include the state of the model
            (including the graph; in contrast to
            :func:`save_checkpoint_torch`, which only stores the state dict)
        2.) ``file + ".trainer_state.pt"``: Will include the states of all
            optimizers and the current epoch (if given)

    Parameters
    ----------
    file : str
        filepath the model should be saved to
    model : AbstractPyTorchJITNetwork or None
        the model which should be saved
        if None: empty dict will be saved as state dict
    optimizers : dict
        dictionary containing all optimizers
    epoch : int
        current epoch (will also be pickled)

    """

    if optimizers is None:
        optimizers = {}

    # remove file extension if given
    if any([file.endswith(ext) for ext in [".pth", ".pt", ".ptj"]]):

        file, old_ext = file.rsplit(".", 1)

        if old_ext != "ptj":
            logger.info("File extension was changed from %s to ptj to "
                        "indicate that the current module is a "
                        "torchscript module (including the graph)" % old_ext)

    if isinstance(model, AbstractTorchScriptNetwork):
        torch.jit.save(model, file + ".model.ptj")

    if optimizers or epoch is not None:
        save_checkpoint_torch(file + ".trainer_state.pt", None,
                              optimizers=optimizers, epoch=epoch, **kwargs)


def load_checkpoint_torchscript(file: str, **kwargs):
    """
    Loads a saved checkpoint consisting of 2 files
    (see :func:`save_checkpoint_torchscript` for details)

    Parameters
    ----------
    file : str
        filepath to a file containing a saved model
    **kwargs:
        Additional keyword arguments (passed to torch.load)
        Especially "map_location" is important to change the device the
        state_dict should be loaded to

    Returns
    -------
    dict
        dictionary containing the loaded model, the optimizer states and
        the current epoch

    """

    # load model
    if os.path.isfile(file):
        model_file = file
    elif os.path.isfile(file.replace(".ptj", ".model.ptj")):
        model_file = file.replace(".ptj", ".model.ptj")
    else:
        raise ValueError("No Model File found for %s" % file)

    # load trainer state (if possible)
    trainer_file = model_file.replace(".model.ptj", ".trainer_state.pt")
    if os.path.isfile(trainer_file):
        trainer_state = load_checkpoint_torch(trainer_file, **kwargs)

    else:
        trainer_state = {"optimizer": {},
                         "epoch": None}

    trainer_state.update({"model": torch.jit.load(model_file)})

    return trainer_state
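
# Usage sketch (illustrative, not part of the original module); assumes
# ``MyNet`` is a concrete AbstractPyTorchNetwork subclass:
#
#   net = MyNet()
#   optim = {"default": torch.optim.Adam(net.parameters())}
#   save_checkpoint_torch("checkpoint.pt", model=net, optimizers=optim,
#                         epoch=10)
#   state = load_checkpoint_torch("checkpoint.pt", map_location="cpu")
#   net.load_state_dict(state["model"])
#   optim["default"].load_state_dict(state["optimizer"]["default"])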


================================================
FILE: delira/logging/__init__.py
================================================
from delira.logging.tensorboard_backend import TensorboardBackend
from delira.logging.visdom_backend import VisdomBackend
from delira.logging.base_backend import BaseBackend
from delira.logging.writer_backend import WriterLoggingBackend
from delira.logging.base_logger import Logger, SingleThreadedLogger, \
    make_logger
from delira.logging.registry import unregister_logger, register_logger, \
    get_logger, logger_exists, log as _log, get_available_loggers
from delira.logging.logging_context import LoggingContext

log = _log


================================================
FILE: delira/logging/base_backend.py
================================================

from queue import Empty
from abc import abstractmethod, ABCMeta
from threading import Event
from queue import Queue
import warnings

_FUNCTIONS_WITHOUT_STEP = ("graph_pytorch", "graph_tf", "graph_onnx",
                           "embedding")

# Deprecated Keys with their future alternative
_DEPRECATED_KEYS = {"img": "image", "picture": "image", "imgs": "images",
                    "pictures": "images", "bounding_boxes": "image_with_boxes",
                    "bboxes": "image_with_boxes", "value": "scalar",
                    "values": "scalar", "hist": "histogram", "fig": "figure",
                    "sound": "audio", "pr": "pr_curve", "curve": "line",
                    "hm": "heatmap"}


class BaseBackend(object, metaclass=ABCMeta):
    """
    The basic logging backend; provides an abstract interface to log
    different value types and some keyword mappings
    """

    class FigureManager:
        """
        A figure manager which creates a figure on entrance and pushes
        it to the logging writer on exit
        """

        def __init__(self, push_fn, figure_kwargs: dict, push_kwargs: dict):
            """

            Parameters
            ----------
            push_fn : function
                A function accepting a figure and some keyword arguments
                to push it to the logging writer
            figure_kwargs : dict
                dictionary containing all keyword arguments to create the
                figure
            push_kwargs : dict
                dictionary containing all keyword arguments to push the figure
                to the logging writer
            """
            self._push_fn = push_fn
            self._figure_kwargs = figure_kwargs
            self._push_kwargs = push_kwargs
            self._fig = None

        def __enter__(self):
            """
            Function to be executed during context-manager entrance;
            Will create a figure with the figure kwargs

            """
            from matplotlib.pyplot import figure
            self._fig = figure(**self._figure_kwargs)

        def __exit__(self, *args):
            """
            Function to be executed during context-manager exit;
            Will push the figure to the logging writer and destroy it
            afterwards

            Parameters
            ----------
            *args :
                arbitrary positional arguments; Necessary to be compatible
                with other context managers, but not used in this one

            """
            from matplotlib.pyplot import close
            self._push_fn(figure=self._fig, **self._push_kwargs)

            close(self._fig)
            self._fig = None

    def __init__(self, abort_event: Event = None, queue: Queue = None):
        """

        Parameters
        ----------
        abort_event : :class:`threading.Event`
            the event to signalize, when the logger must be destroyed
        queue : :class:`queue.Queue`
            the queue to enqueue all tuples of mapped functions and the
            corresponding arguments before their execution

        """
        super().__init__()
        self.KEYWORD_FN_MAPPING = {}

        self.daemon = True

        self._queue = queue
        self._abort_event = abort_event
        self._global_steps = {}
        # create Keyword mapping
        self.KEYWORD_FN_MAPPING.update(**{
            "image": self._image,
            "img": self._image,
            "picture": self._image,
            "images": self._images,
            "imgs": self._images,
            "pictures": self._images,
            "image_with_boxes": self._image_with_boxes,
            "bounding_boxes": self._image_with_boxes,
            "bboxes": self._image_with_boxes,
            "scalar": self._scalar,
            "value": self._scalar,
            "scalars": self._scalars,
            "values": self._scalars,
            "histogram": self._histogram,
            "hist": self._histogram,
            "figure": self._figure,
            "fig": self._figure,
            "audio": self._audio,
            "sound": self._audio,
            "video": self._video,
            "text": self._text,
            "graph_pytorch": self._graph_pytorch,
            "graph_tf": self._graph_tf,
            "graph_onnx": self._graph_onnx,
            "embedding": self._embedding,
            "pr_curve": self._pr_curve,
            "pr": self._pr_curve,
            "scatter": self._scatter,
            "line": self._line,
            "curve": self._line,
            "stem": self._stem,
            "heatmap": self._heatmap,
            "hm": self._heatmap,
            "bar": self._bar,
            "boxplot": self._boxplot,
            "surface": self._surface,
            "contour": self._contour,
            "quiver": self._quiver,
            # "mesh": self._mesh
        })
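
        # Illustrative example (not part of the original code): an item
        # pulled from the queue is a dict mapping one of the keys above to
        # the kwargs of the mapped logging function, e.g. (exact argument
        # names depend on the concrete backend):
        #   {"scalar": {"tag": "train_loss", "scalar_value": 0.42}}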

    def _log_item(self):
        """
        Internal helper function to log an item of the queue

        Raises
        ------
        ValueError
            if the item to log is not a dict

        """
        # get item from the queue
        process_item = self._queue.get(timeout=0.001)
        # log item if item is dict
        if isinstance(process_item, dict):

            for key, val in process_item.items():
                # raise DeprecationWarning for deprecated keys
                if key in _DEPRECATED_KEYS:
                    warnings.warn("The Key %s is deprecated and will"
                                  " be removed in the next release. "
                                  "Please use %s instead!"
                                  % (key, _DEPRECATED_KEYS[key]),
                                  DeprecationWarning)

                # performs the actual mapping
                execute_fn = self.KEYWORD_FN_MAPPING[str(key).lower()]

                # resolve the global step
                val = self._resolve_global_step(str(key).lower(), **val)

                # execute the logging function
                self._call_exec_fn(execute_fn, val)

        # item is no dict -> raise Error
        else:
            raise ValueError("Invalid Value passed for logging: %s"
                             % str(process_item))

    def _resolve_global_step(self, key, **val):
        """
        Helper function to resolve the global step from the given arguments

        Parameters
        ----------
        key : str
            the function key to resolve the step for
        **val :
            kwargs which may contain the step information

        Returns
        -------
        dict
            the updated keyword arguments including the resolved global step

        Raises
        ------
        ValueError
            If no valid tag was found although a tag should exist

        """
        # check if function should be processed statically
        # (no time update possible)
        if str(key).lower() not in _FUNCTIONS_WITHOUT_STEP:

            # check for different step names
            if "tag" in val:
                tag = "tag"
            elif "main_tag" in val:
                tag = "main_tag"
            else:
                raise ValueError("No valid tag found to extract global step")

            # check if global step is given
            if "global_step" not in val or val["global_step"] is None:

                # check if tag is already part of internal global steps
                if val[tag] in self._global_steps:
                    # if already existent: increment step for given tag
                    self._global_steps[val[tag]] += 1
                    step = self._global_steps[val[tag]]

                else:
                    # if not existent: set step for given tag to zero
                    step = 0
                    self._global_steps[val[tag]] = step

                val.update({"global_step": step})

            elif "global_step" in val:
                self._global_steps[tag] = val["global_step"]

        return val
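
    # Illustrative behavior of ``_resolve_global_step`` (not part of the
    # original code): the first message for tag "loss" without an explicit
    # global_step gets step 0, the next one step 1, and so on; an explicit
    # global_step overrides the stored step for that tag.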

    def run(self):
        """
        Main function which executes the logging, catches exceptions and sets
        the abortion event if necessary

        """
        try:
            self._log_item()

        except Empty:
            pass

        except Exception as e:
            self._abort_event.set()
            raise e

    def set_queue(self, queue: Queue):
        """
        Setter Function for the Queue

        Parameters
        ----------
        queue : :class:`queue.Queue`
            the new queue

        """
        self._queue = queue

    def set_event(self, event: Event):
        """
        Setter Function for the abortion event

        Parameters
        ----------
        event : :class:`threading.Event`
            the new abortion event

        """
        self._abort_event = event

    def _call_exec_fn(self, exec_fn, args):
        """
        Helper function calling the actual mapped function

        Parameters
        ----------
        exec_fn : function
            the function which will execute the actual logging
        args : iterable (listlike) or mapping (dictlike)
            the arguments passed to the ``exec_fn``

        Returns
        -------
        Any
            the return value obtained by the ``exec_fn``

        Raises
        ------
        TypeError
            if the given ``args`` are neither of type dict nor tuple/list

        """

        if isinstance(args, dict):
            ret_val = exec_fn(**args)
        elif isinstance(args, (tuple, list)):
            ret_val = exec_fn(*args)

        else:
            raise TypeError("Invalid type for args. Must be either dict, "
                            "tuple or list, but got %s."
                            % args.__class__.__name__)

        return ret_val
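
    # Illustrative dispatch (not part of the original code):
    #   self._call_exec_fn(fn, {"a": 1})   ->  fn(a=1)
    #   self._call_exec_fn(fn, (1, 2))     ->  fn(1, 2)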

    @abstractmethod
    def _image(self, *args, **kwargs):
        """
        Abstract Interface Function to log a single image

        Parameters
        ----------
        *args
            arbitrary positional arguments
        **kwargs
            arbitrary keyword arguments

        Raises
        ------
        NotImplementedError
            If not overwritten in subclass

        """
        raise NotImplementedError

    @abstractmethod
    def _images(self, *args, **kwargs):
        """
        Abstract Interface Function to log multiple images

        Parameters
        ----------
        *args
            arbitrary positional arguments
        **kwargs
            arbitrary keyword arguments

        Raises
        ------
        NotImplementedError
            If not overwritten in subclass

        """
        raise NotImplementedError

    @abstractmethod
    def _image_with_boxes(self, *args, **kwargs):
        """
        Abstract Interface Function to log a single image with bounding boxes

        Parameters
        ----------
        *args
            arbitrary positional arguments
        **kwargs
            arbitrary keyword arguments

        Raises
        ------
        NotImplementedError
            If not overwritten in subclass

        """
        raise NotImplementedError

    @abstractmethod
    def _scalar(self, *args, **kwargs):
        """
        Abstract Interface Function to log a single scalar value

        Parameters
        ----------
        *args
            arbitrary positional arguments
        **kwargs
            arbitrary keyword arguments

        Raises
        ------
        NotImplementedError
            If not overwritten in subclass

        """
        raise NotImplementedError

    @abstractmethod
    def _scalars(self, *args, **kwargs):
        """
        Abstract Interface Function to log multiple scalar values

        Parameters
        ----------
        *args
            arbitrary positional arguments
        **kwargs
            arbitrary keyword arguments

        Raises
        ------
        NotImplementedError
            If not overwritten in subclass

        """
        raise NotImplementedError

    @abstractmethod
    def _histogram(self, *args, **kwargs):
        """
        Abstract Interface Function to create and log a histogram out of given
        values

        Parameters
        ----------
        *args
            arbitrary positional arguments
        **kwargs
            arbitrary keyword arguments

        Raises
        ------
        NotImplementedError
            If not overwritten in subclass

        """
        raise NotImplementedError

    @abstractmethod
    def _figure(self, *args, **kwargs):
        """
        Abstract Interface Function to log a single ``matplotlib`` figure

        Parameters
        ----------
        *args
            arbitrary positional arguments
        **kwargs
            arbitrary keyword arguments

        Raises
        ------
        NotImplementedError
            If not overwritten in subclass

        """
        raise NotImplementedError

    @abstractmethod
    def _audio(self, *args, **kwargs):
        """
        Abstract Interface Function to log a single audio signal

        Parameters
        ----------
        *args
            arbitrary positional arguments
        **kwargs
            arbitrary keyword arguments

        Raises
        ------
        NotImplementedError
            If not overwritten in subclass

        """
        raise NotImplementedError

    @abstractmethod
    def _video(self, *args, **kwargs):
        """
        Abstract Interface Function to log a single video

        Parameters
        ----------
        *args
            arbitrary positional arguments
        **kwargs
            arbitrary keyword arguments

        Raises
        ------
        NotImplementedError
            If not overwritten in subclass

        """
        raise NotImplementedError

    @abstractmethod
    def _text(self, *args, **kwargs):
        """
        Abstract Interface Function to log a single string as text

        Parameters
        ----------
        *args
            arbitrary positional arguments
        **kwargs
            arbitrary keyword arguments

        Raises
        ------
        NotImplementedError
            If not overwritten in subclass

        """
        raise NotImplementedError

    @abstractmethod
    def _graph_pytorch(self, *args, **kwargs):
        """
        Abstract Interface Function to log a ``PyTorch`` Graph

        Parameters
        ----------
        *args
            arbitrary positional arguments
        **kwargs
            arbitrary keyword arguments

        Raises
        ------
        NotImplementedError
            If not overwritten in subclass

        """

        raise NotImplementedError

    @abstractmethod
    def _graph_tf(self, *args, **kwargs):
        """
        Abstract Interface Function to log a TF Graph

        Parameters
        ----------
        *args
            arbitrary positional arguments
        **kwargs
            arbitrary keyword arguments

        Raises
        ------
        NotImplementedError
            If not overwritten in subclass

        """
        raise NotImplementedError

    @abstractmethod
    def _graph_onnx(self, *args, **kwargs):
        """
        Abstract Interface Function to log a ONNX Graph

        Parameters
        ----------
        *args
            arbitrary positional arguments
        **kwargs
            arbitrary keyword arguments

        Raises
        ------
        NotImplementedError
            If not overwritten in subclass

        """
        raise NotImplementedError

    @abstractmethod
    def _embedding(self, *args, **kwargs):
        """
        Abstract Interface Function to create and log an embedding

        Parameters
        ----------
        *args
            arbitrary positional arguments
        **kwargs
            arbitrary keyword arguments

        Raises
        ------
        NotImplementedError
            If not overwritten in subclass

        """
        raise NotImplementedError

    @abstractmethod
    def _pr_curve(self, *args, **kwargs):
        """
        Abstract Interface Function to calculate and log a PR curve out of
        given values

        Parameters
        ----------
        *args
            arbitrary positional arguments
        **kwargs
            arbitrary keyword arguments

        Raises
        ------
        NotImplementedError
            If not overwritten in subclass

        """
        raise NotImplementedError

    def _scatter(self, plot_kwargs=None, figure_kwargs=None, **kwargs):
        """
        Function to create a scatter plot and push it

        Parameters
        ----------
        plot_kwargs : dict
            the arguments for plotting
        figure_kwargs : dict
            the arguments to actually create the figure
        **kwargs :
            additional keyword arguments for pushing the created figure to the
            logging writer

        """

        if plot_kwargs is None:
            plot_kwargs = {}
        if figure_kwargs is None:
            figure_kwargs = {}
        with self.FigureManager(self._figure, figure_kwargs, kwargs):
            from matplotlib.pyplot import scatter
            scatter(**plot_kwargs)

    def _line(self, plot_kwargs=None, figure_kwargs=None, **kwargs):
        """
        Function to create a line plot and push it

        Parameters
        ----------
        plot_kwargs : dict
            the arguments for plotting
        figure_kwargs : dict
            the arguments to actually create the figure
        **kwargs :
            additional keyword arguments for pushing the created figure to the
            logging writer

        """

        if figure_kwargs is None:
            figure_kwargs = {}
        if plot_kwargs is None:
            plot_kwargs = {}
        with self.FigureManager(self._figure, figure_kwargs, kwargs):
            from matplotlib.pyplot import plot
            plot(**plot_kwargs)

    def _stem(self, plot_kwargs=None, figure_kwargs=None, **kwargs):
        """
        Function to create a stem plot and push it

        Parameters
        ----------
        plot_kwargs : dict
            the arguments for plotting
        figure_kwargs : dict
            the arguments to actually create the figure
        **kwargs :
            additional keyword arguments for pushing the created figure to the
            logging writer

        """
        if figure_kwargs is None:
            figure_kwargs = {}
        if plot_kwargs is None:
            plot_kwargs = {}
        with self.FigureManager(self._figure, figure_kwargs, kwargs):
            from matplotlib.pyplot import stem
            stem(**plot_kwargs)

    def _heatmap(self, plot_kwargs=None, figure_kwargs=None, **kwargs):
        """
        Function to create a heatmap plot and push it

        Parameters
        ----------
        plot_kwargs : dict
            the arguments for plotting
        figure_kwargs : dict
            the arguments to actually create the figure
        **kwargs :
            additional keyword arguments for pushing the created figure to the
            logging writer

        """
        if figure_kwargs is None:
            figure_kwargs = {}
        if plot_kwargs is None:
            plot_kwargs = {}
        with self.FigureManager(self._figure, figure_kwargs, kwargs):
            from seaborn import heatmap
            heatmap(**plot_kwargs)

    def _bar(self, plot_kwargs=None, figure_kwargs=None, **kwargs):
        """
        Function to create a bar plot and push it

        Parameters
        ----------
        plot_kwargs : dict
            the arguments for plotting
        figure_kwargs : dict
            the arguments to actually create the figure
        **kwargs :
            additional keyword arguments for pushing the created figure to the
            logging writer

        """
        if figure_kwargs is None:
            figure_kwargs = {}
        if plot_kwargs is None:
            plot_kwargs = {}
        with self.FigureManager(self._figure, figure_kwargs, kwargs):
            from matplotlib.pyplot import bar
            bar(**plot_kwargs)

    def _boxplot(self, plot_kwargs=None, figure_kwargs=None, **kwargs):
        """
        Function to create a boxplot and push it

        Parameters
        ----------
        plot_kwargs : dict
            the arguments for plotting
        figure_kwargs : dict
            the arguments to actually create the figure
        **kwargs :
            additional keyword arguments for pushing the created figure to the
            logging writer

        """
        if plot_kwargs is None:
            plot_kwargs = {}
        if figure_kwargs is None:
            figure_kwargs = {}
        with self.FigureManager(self._figure, figure_kwargs, kwargs):
            from matplotlib.pyplot import boxplot
            boxplot(**plot_kwargs)

    def _surface(self, plot_kwargs=None, figure_kwargs=None, **kwargs):
        """
        Function to create a surface plot and push it

        Parameters
        ----------
        plot_kwargs : dict
            the arguments for plotting
        figure_kwargs : dict
            the arguments to actually create the figure
        **kwargs :
            additional keyword arguments for pushing the created figure to the
            logging writer

        """
        if figure_kwargs is None:
            figure_kwargs = {}
        if plot_kwargs is None:
            plot_kwargs = {}
        with self.FigureManager(self._figure, figure_kwargs, kwargs):
            from seaborn import kdeplot

            kdeplot(**plot_kwargs)

    def _contour(self, plot_kwargs=None, figure_kwargs=None, **kwargs):
        """
        Function to create a contour plot and push it

        Parameters
        ----------
        plot_kwargs : dict
            the arguments for plotting
        figure_kwargs : dict
            the arguments to actually create the figure
        **kwargs :
            additional keyword arguments for pushing the created figure to the
            logging writer

        """
        if figure_kwargs is None:
            figure_kwargs = {}
        if plot_kwargs is None:
            plot_kwargs = {}
        with self.FigureManager(self._figure, figure_kwargs, kwargs):
            from matplotlib.pyplot import contour

            contour(**plot_kwargs)

    def _quiver(self, plot_kwargs=None, figure_kwargs=None, **kwargs):
        """
        Function to create a quiver plot and push it

        Parameters
        ----------
        plot_kwargs : dict
            the arguments for plotting
        figure_kwargs : dict
            the arguments to actually create the figure
        **kwargs :
            additional keyword arguments for pushing the created figure to the
            logging writer

        """
        if plot_kwargs is None:
            plot_kwargs = {}
        if figure_kwargs is None:
            figure_kwargs = {}
        with self.FigureManager(self._figure, figure_kwargs, kwargs):
            from matplotlib.pyplot import quiver
            quiver(**plot_kwargs)

    @property
    def name(self):
        return "BaseBackend"


================================================
FILE: delira/logging/base_logger.py
================================================
from multiprocessing.queues import Queue as MpQueue
from threading import Event
from queue import Queue, Full
from delira.logging.base_backend import BaseBackend
from delira.utils.dict_reductions import get_reduction, possible_reductions, \
    reduce_dict
import logging
from types import FunctionType


class Logger(object):
    """
    The actual Logger frontend, passing logging messages to the assigned
    logging backend if appropriate, or to python's logging module otherwise
    """

    def __init__(self, backend: BaseBackend, max_queue_size: int = None,
                 logging_frequencies=None, reduce_types=None,
                 level=logging.NOTSET):
        """

        Parameters
        ----------
        backend : :class:`delira.logging.base_backend.BaseBackend`
            the logging backend to use
        max_queue_size : int
            the maximum size for the queue; if queue is full, all additional
            logging tasks will be dropped until some tasks inside the queue
            were executed; by default no maximum size is applied
        logging_frequencies : int or dict
            specifies how often to log for each key.
            If int: integer will be applied to all valid keys
            if dict: should contain a frequency per valid key. Missing keys
            will be filled with a frequency of 1 (log every time)
            None is equal to empty dict here.
        reduce_types : str or FunctionType or dict
            Values are logged in each iteration. This argument specifies,
            how to reduce them to a single value if a logging_frequency
            besides 1 is passed

            if str:
                specifies the reduction type to use. Valid types are
                'last' | 'first' | 'mean' | 'median' | 'max' | 'min'.
                The given type will be mapped to all valid keys.
            if FunctionType:
                specifies the actual reduction function. Will be applied for
                all keys.
            if dict: should contain pairs of valid logging keys and either str
                or FunctionType, specifying the reduction per key.
                Missing keys will be filled with a default value of 'last'.
                Valid string values are
                'last' | 'first' | 'mean' | 'median' | 'max' | 'min'.
        level : int
            the logging level to use when passing a logging message to
            python's logging module because it is not appropriate for
            logging with the assigned logging backend

        Warnings
        --------
        Since the intermediate values between two logging steps are stored in
        memory to enable reduction, this might easily cause OOM errors
        (especially if the logged items are still on GPU).
        If this occurs you may want to choose a lower logging frequency.

        """

        # 0 means unlimited size, but None is more readable
        if max_queue_size is None:
            max_queue_size = 0

        # convert to empty dict if None
        if logging_frequencies is None:
            logging_frequencies = {}

        # if int: assign int to all possible keys
        if isinstance(logging_frequencies, int):
            logging_frequencies = {
                k: logging_frequencies
                for k in backend.KEYWORD_FN_MAPPING.keys()}
        # if dict: update missing keys with 1 and make sure other values
        # are ints
        elif isinstance(logging_frequencies, dict):
            for k in backend.KEYWORD_FN_MAPPING.keys():
                if k not in logging_frequencies:
                    logging_frequencies[k] = 1
                else:
                    logging_frequencies[k] = int(logging_frequencies[k])
        else:
            raise TypeError("Invalid Type for logging frequencies: %s"
                            % type(logging_frequencies).__name__)

        # assign frequencies and create empty queues
        self._logging_frequencies = logging_frequencies
        self._logging_queues = {}

        default_reduce_type = "last"
        if reduce_types is None:
            reduce_types = default_reduce_type

        # map string and function to all valid keys
        if isinstance(reduce_types, (str, FunctionType)):
            reduce_types = {
                k: reduce_types
                for k in backend.KEYWORD_FN_MAPPING.keys()}

        # should be dict by now!
        if isinstance(reduce_types, dict):
            # check all valid keys for occurrences
            for k in backend.KEYWORD_FN_MAPPING.keys():
                # use default reduce type if necessary
                if k not in reduce_types:
                    reduce_types[k] = default_reduce_type
                # check it is either a valid string or already a function
                else:
                    if not isinstance(reduce_types[k], FunctionType):
                        assert reduce_types[k] in possible_reductions()
                        reduce_types[k] = str(reduce_types[k])
                # map all strings to actual functions
                if isinstance(reduce_types[k], str):
                    reduce_types[k] = get_reduction(reduce_types[k])

        else:
            raise TypeError("Invalid Type for logging reductions: %s"
                            % type(reduce_types).__name__)

        self._reduce_types = reduce_types

        self._abort_event = Event()
        self._flush_queue = Queue(max_queue_size)
        self._backend = backend
        self._backend.set_queue(self._flush_queue)
        self._backend.set_event(self._abort_event)
        self._level = level

    def log(self, log_message: dict):
        """
        Main Logging Function, Decides whether to log with the assigned
        backend or python's internal module

        Parameters
        ----------
        log_message : dict
            the message to log; Should be a dict, where the keys indicate the
            logging function to execute, and the corresponding value holds
            the arguments necessary to execute this function

        Raises
        ------
        RuntimeError
            If the abort event was set externally

        """

        try:
            if self._abort_event.is_set():
                self.close()
                raise RuntimeError("Abort-Event in logging process was set: %s"
                                   % self._backend.name)

            # convert tuple to dict if necessary
            if isinstance(log_message, (tuple, list)):
                if len(log_message) == 2:
                    log_message = (log_message,)
                log_message = dict(log_message)

            # try logging and drop item if queue is full
            try:
                # logging appropriate message with backend
                if isinstance(log_message, dict):
                    # multiple logging instances at once possible with
                    # different keys
                    for k, v in log_message.items():
                        # append tag if tag is given, because otherwise we
                        # would enqueue same types but different tags in same
                        # queue
                        if "tag" in v:
                            queue_key = k + "." + v["tag"]
                        else:
                            queue_key = k

                        # create queue if necessary
                        if queue_key not in self._logging_queues:
                            self._logging_queues[queue_key] = []

                        # append current message to queue
                        self._logging_queues[queue_key].append({k: v})
                        # check if logging should be executed
                        if (len(self._logging_queues[queue_key])
                                % self._logging_frequencies[k] == 0):
                            # reduce all messages queued for this key to a
                            # single message and flush it to the backend
                            reduce_message = reduce_dict(
                                self._logging_queues[queue_key],
                                self._reduce_types[k])
                            self._flush_queue.put_nowait(reduce_message)
                            # empty the queue after flushing
                            self._logging_queues[queue_key] = []
                else:
                    # message not appropriate for the backend: pass it to
                    # python's logging module instead
                    logging.log(self._level, log_message)
            # drop the item if the flush queue is full
            except Full:
                pass
        except Exception as e:
            # signal abortion before re-raising (mirrors BaseBackend.run)
            self._abort_event.set()
            raise e
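
    # Usage sketch (illustrative, not part of the original module): log the
    # scalar "train_loss" only every 10th call, reduced via 'mean'; assumes
    # ``SomeBackend`` is a concrete BaseBackend subclass (exact argument
    # names inside the message depend on the backend):
    #
    #   logger = Logger(SomeBackend(), logging_frequencies={"scalar": 10},
    #                   reduce_types={"scalar": "mean"})
    #   logger.log({"scalar": {"tag": "train_loss", "scalar_value": 0.5}})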
SYMBOL INDEX (1049 symbols across 110 files)

FILE: delira/_backends.py
  function _determine_backends (line 17) | def _determine_backends():
  function get_backends (line 75) | def get_backends():
  function seed_all (line 91) | def seed_all(seed):

FILE: delira/_debug_mode.py
  function get_current_debug_mode (line 13) | def get_current_debug_mode():
  function switch_debug_mode (line 24) | def switch_debug_mode():
  function set_debug_mode (line 31) | def set_debug_mode(mode: bool):

FILE: delira/_version.py
  function get_keywords (line 19) | def get_keywords():
  class VersioneerConfig (line 32) | class VersioneerConfig:
  function get_config (line 36) | def get_config():
  class NotThisMethod (line 50) | class NotThisMethod(Exception):
  function register_vcs_handler (line 58) | def register_vcs_handler(vcs, method):  # decorator
  function run_command (line 69) | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
  function versions_from_parentdir (line 106) | def versions_from_parentdir(parentdir_prefix, root, verbose):
  function git_get_keywords (line 132) | def git_get_keywords(versionfile_abs):
  function git_versions_from_keywords (line 161) | def git_versions_from_keywords(keywords, tag_prefix, verbose):
  function git_pieces_from_vcs (line 216) | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_comma...
  function plus_or_dot (line 307) | def plus_or_dot(pieces):
  function render_pep440 (line 314) | def render_pep440(pieces):
  function render_pep440_pre (line 339) | def render_pep440_pre(pieces):
  function render_pep440_post (line 355) | def render_pep440_post(pieces):
  function render_pep440_old (line 382) | def render_pep440_old(pieces):
  function render_git_describe (line 404) | def render_git_describe(pieces):
  function render_git_describe_long (line 424) | def render_git_describe_long(pieces):
  function render (line 444) | def render(pieces, style):
  function get_versions (line 476) | def get_versions():

FILE: delira/data_loading/augmenter.py
  class AbstractAugmenter (line 15) | class AbstractAugmenter(object):
    method __init__ (line 20) | def __init__(
    method __iter__ (line 69) | def __iter__(self):
  class _ParallelAugmenter (line 73) | class _ParallelAugmenter(AbstractAugmenter):
    method __init__ (line 78) | def __init__(self, data_loader, batchsize, sampler, num_processes=None,
    method abort_event (line 123) | def abort_event(self):
    method abort_event (line 135) | def abort_event(self, new_event):
    method _start_processes (line 147) | def _start_processes(self):
    method _shutdown_processes (line 182) | def _shutdown_processes(self):
    method _next_index_pipe (line 215) | def _next_index_pipe(self):
    method _next_data_pipe (line 226) | def _next_data_pipe(self):
    method _enqueue_indices (line 236) | def _enqueue_indices(self, sample_idxs):
    method _receive_data (line 255) | def _receive_data(self):
    method __iter__ (line 269) | def __iter__(self):
  class _WorkerProcess (line 320) | class _WorkerProcess(multiprocessing.Process):
    method __init__ (line 325) | def __init__(self, dataloader: DataLoader,
    method run (line 358) | def run(self) -> None:
  class _SequentialAugmenter (line 391) | class _SequentialAugmenter(AbstractAugmenter):
    method __init__ (line 397) | def __init__(
    method __iter__ (line 425) | def __iter__(self):
  class Augmenter (line 442) | class Augmenter(object):
    method __init__ (line 449) | def __init__(self, data_loader, batchsize, sampler, num_processes=None,
    method _resolve_augmenter_cls (line 481) | def _resolve_augmenter_cls(num_processes, **kwargs):
    method __iter__ (line 503) | def __iter__(self):

FILE: delira/data_loading/data_loader.py
  class DataLoader (line 7) | class DataLoader:
    method __init__ (line 13) | def __init__(self, data):
    method __call__ (line 38) | def __call__(self, indices):
    method process_id (line 68) | def process_id(self):
    method process_id (line 81) | def process_id(self, new_id):

FILE: delira/data_loading/data_manager.py
  class DataManager (line 17) | class DataManager(object):
    method __init__ (line 24) | def __init__(self, data, batch_size, n_process_augmentation,
    method get_batchgen (line 103) | def get_batchgen(self, seed=1):
    method get_subset (line 141) | def get_subset(self, indices):
    method update_state_from_dict (line 171) | def update_state_from_dict(self, new_state: dict):
    method batch_size (line 219) | def batch_size(self):
    method batch_size (line 232) | def batch_size(self, new_batch_size):
    method n_process_augmentation (line 247) | def n_process_augmentation(self):
    method n_process_augmentation (line 262) | def n_process_augmentation(self, new_process_number):
    method transforms (line 279) | def transforms(self):
    method transforms (line 293) | def transforms(self, new_transforms):
    method data_loader_cls (line 312) | def data_loader_cls(self):
    method data_loader_cls (line 325) | def data_loader_cls(self, new_loader_cls):
    method n_samples (line 345) | def n_samples(self):
    method n_batches (line 358) | def n_batches(self):
    method dataset (line 384) | def dataset(self):
    method dataset (line 388) | def dataset(self, new_dset):
    method __iter__ (line 394) | def __iter__(self):

FILE: delira/data_loading/dataset.py
  class AbstractDataset (line 14) | class AbstractDataset:
    method __init__ (line 20) | def __init__(self, data_path: str, load_fn: typing.Callable):
    method _make_dataset (line 35) | def _make_dataset(self, path: str):
    method __getitem__ (line 53) | def __getitem__(self, index):
    method __len__ (line 70) | def __len__(self):
    method __iter__ (line 81) | def __iter__(self):
    method get_sample_from_index (line 92) | def get_sample_from_index(self, index):
    method get_subset (line 120) | def get_subset(self, indices):
  class _DatasetIter (line 152) | class _DatasetIter(object):
    method __init__ (line 157) | def __init__(self, dset):
    method __iter__ (line 168) | def __iter__(self):
    method __next__ (line 171) | def __next__(self):
  class DictDataset (line 180) | class DictDataset(AbstractDataset):
    method __init__ (line 185) | def __init__(self, data: dict):
    method __getitem__ (line 197) | def __getitem__(self, index: int):
    method get_sample_from_index (line 215) | def get_sample_from_index(self, index):
    method _make_dataset (line 232) | def _make_dataset(self, path: str):
    method __len__ (line 245) | def __len__(self):
  class IterableDataset (line 257) | class IterableDataset(AbstractDataset):
    method __init__ (line 262) | def __init__(self, data: Iterable):
    method __getitem__ (line 273) | def __getitem__(self, index):
    method get_sample_from_index (line 291) | def get_sample_from_index(self, index):
    method _make_dataset (line 308) | def _make_dataset(self, path: str):
    method __len__ (line 321) | def __len__(self):
  class BlankDataset (line 333) | class BlankDataset(AbstractDataset):
    method __init__ (line 340) | def __init__(self, data, old_getitem, **kwargs):
    method __getitem__ (line 361) | def __getitem__(self, index):
    method __len__ (line 378) | def __len__(self):
  class BaseCacheDataset (line 391) | class BaseCacheDataset(AbstractDataset):
    method __init__ (line 401) | def __init__(self, data_path: typing.Union[str, list],
    method _make_dataset (line 423) | def _make_dataset(self, path: typing.Union[str, list]):
    method __getitem__ (line 461) | def __getitem__(self, index):
  class BaseLazyDataset (line 480) | class BaseLazyDataset(AbstractDataset):
    method __init__ (line 486) | def __init__(self, data_path: typing.Union[str, list],
    method _make_dataset (line 508) | def _make_dataset(self, path: typing.Union[str, list]):
    method __getitem__ (line 538) | def __getitem__(self, index):
  class BaseExtendCacheDataset (line 557) | class BaseExtendCacheDataset(BaseCacheDataset):
    method __init__ (line 568) | def __init__(self, data_path: typing.Union[str, list],
    method _make_dataset (line 593) | def _make_dataset(self, path: typing.Union[str, list]):
  class ConcatDataset (line 632) | class ConcatDataset(AbstractDataset):
    method __init__ (line 633) | def __init__(self, *datasets):
    method get_sample_from_index (line 652) | def get_sample_from_index(self, index):
    method __getitem__ (line 692) | def __getitem__(self, index):
    method __len__ (line 695) | def __len__(self):

FILE: delira/data_loading/load_utils.py
  function norm_range (line 9) | def norm_range(mode):
  function norm_zero_mean_unit_std (line 47) | def norm_zero_mean_unit_std(data):
  class LoadSample (line 61) | class LoadSample:
    method __init__ (line 66) | def __init__(self,
    method __call__ (line 113) | def __call__(self, path) -> dict:
  class LoadSampleLabel (line 148) | class LoadSampleLabel(LoadSample):
    method __init__ (line 149) | def __init__(self,
    method __call__ (line 199) | def __call__(self, path) -> dict:

FILE: delira/data_loading/numba_transform.py
  class NumbaTransformWrapper (line 10) | class NumbaTransformWrapper(AbstractTransform):
    method __init__ (line 11) | def __init__(self, transform: AbstractTransform, nopython=True,
    method __call__ (line 26) | def __call__(self, **kwargs):
  class NumbaTransform (line 30) | class NumbaTransform(NumbaTransformWrapper):
    method __init__ (line 31) | def __init__(self, transform_cls, nopython=True, target="cpu",
  class NumbaCompose (line 39) | class NumbaCompose(Compose):
    method __init__ (line 40) | def __init__(self, transforms):

FILE: delira/data_loading/sampler/abstract.py
  class AbstractSampler (line 4) | class AbstractSampler(object):
    method __init__ (line 9) | def __init__(self, indices):
    method __iter__ (line 19) | def __iter__(self):
    method __len__ (line 31) | def __len__(self):
    method from_dataset (line 44) | def from_dataset(cls, dset: AbstractDataset, **kwargs):

FILE: delira/data_loading/sampler/batch.py
  class BatchSampler (line 4) | class BatchSampler(object):
    method __init__ (line 10) | def __init__(self, sampler: AbstractSampler, batch_size, drop_last=Fal...
    method __iter__ (line 26) | def __iter__(self):
    method __len__ (line 50) | def __len__(self):

FILE: delira/data_loading/sampler/random.py
  class RandomSampler (line 5) | class RandomSampler(AbstractSampler):
    method __init__ (line 10) | def __init__(self, indices, replacement=False, num_samples=None):
    method __iter__ (line 32) | def __iter__(self):
    method __len__ (line 52) | def __len__(self):
  class RandomSamplerNoReplacement (line 67) | class RandomSamplerNoReplacement(RandomSampler):
    method __init__ (line 72) | def __init__(self, indices):
  class RandomSamplerWithReplacement (line 84) | class RandomSamplerWithReplacement(RandomSampler):
    method __init__ (line 89) | def __init__(self, indices, num_samples=None):

FILE: delira/data_loading/sampler/sequential.py
  class SequentialSampler (line 4) | class SequentialSampler(AbstractSampler):
    method __iter__ (line 9) | def __iter__(self):
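
  A sketch of how the samplers compose: AbstractSampler subclasses are built
  from a list of indices (see __init__(self, indices) above) and BatchSampler
  groups a sampler's output into fixed-size index batches. The exact element
  type yielded per batch is an assumption.

    from delira.data_loading.sampler.batch import BatchSampler
    from delira.data_loading.sampler.sequential import SequentialSampler

    sampler = SequentialSampler(list(range(10)))
    batched = BatchSampler(sampler, batch_size=4, drop_last=True)
    for batch in batched:
        print(batch)  # expected: [0, 1, 2, 3], then [4, 5, 6, 7]; rest dropped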

FILE: delira/data_loading/sampler/weighted.py
  class WeightedRandomSampler (line 6) | class WeightedRandomSampler(AbstractSampler):
    method __init__ (line 11) | def __init__(self, weights, num_samples=None):
    method __iter__ (line 29) | def __iter__(self):
    method __len__ (line 41) | def __len__(self):
  class PrevalenceRandomSampler (line 53) | class PrevalenceRandomSampler(WeightedRandomSampler):
    method __init__ (line 58) | def __init__(self, indices):
    method from_dataset (line 80) | def from_dataset(cls, dset: AbstractDataset, key="label", **kwargs):
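
  A sketch of weighted sampling, following the (weights, num_samples=None)
  signature above; "one weight per index, higher means drawn more often" is
  assumed from the class name.

    from delira.data_loading.sampler.weighted import WeightedRandomSampler

    sampler = WeightedRandomSampler(weights=[0.1, 0.1, 0.8], num_samples=5)
    print(list(sampler))  # 5 indices from {0, 1, 2}, mostly 2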

FILE: delira/io/chainer.py
  function save_checkpoint (line 7) | def save_checkpoint(file, model=None, optimizers=None, epoch=None):
  function _deserialize_and_load (line 74) | def _deserialize_and_load(archive: zipfile.ZipFile, file: str, obj,
  function load_checkpoint (line 108) | def load_checkpoint(file, old_state: dict = None,

FILE: delira/io/sklearn.py
  function save_checkpoint (line 6) | def save_checkpoint(file: str, model=None, epoch=None, **kwargs):
  function load_checkpoint (line 26) | def load_checkpoint(file, **kwargs):
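
  A sketch of the sklearn checkpoint helpers, following the signatures above;
  whether a bare estimator or the SklearnEstimator wrapper is expected, and
  the layout of the returned state, are assumptions.

    from sklearn.linear_model import LogisticRegression

    from delira.io.sklearn import load_checkpoint, save_checkpoint

    model = LogisticRegression()
    save_checkpoint("estimator.ckpt", model=model, epoch=0)
    state = load_checkpoint("estimator.ckpt")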

FILE: delira/io/tf.py
  function save_checkpoint (line 10) | def save_checkpoint(file: str, model=None):
  function load_checkpoint (line 24) | def load_checkpoint(file: str, model=None):
  function _create_varlist (line 42) | def _create_varlist(model: AbstractTfEagerNetwork = None,
  function save_checkpoint_eager (line 56) | def save_checkpoint_eager(file,
  function load_checkpoint_eager (line 74) | def load_checkpoint_eager(file,

FILE: delira/io/torch.py
  function save_checkpoint_torch (line 11) | def save_checkpoint_torch(file: str, model=None, optimizers=None,
  function load_checkpoint_torch (line 61) | def load_checkpoint_torch(file, **kwargs):
  function save_checkpoint_torchscript (line 88) | def save_checkpoint_torchscript(file: str, model=None, optimizers=None,
  function load_checkpoint_torchscript (line 132) | def load_checkpoint_torchscript(file: str, **kwargs):
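
  A sketch of the torch checkpoint helpers. The dict-of-optimizers form
  mirrors the optimizers: dict typing used throughout the trainer and closure
  signatures in this index; the layout of the returned state is an assumption.

    import torch

    from delira.io.torch import load_checkpoint_torch, save_checkpoint_torch

    model = torch.nn.Linear(4, 2)
    optims = {"default": torch.optim.Adam(model.parameters())}
    save_checkpoint_torch("model.ckpt", model=model, optimizers=optims, epoch=3)
    state = load_checkpoint_torch("model.ckpt")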

FILE: delira/logging/base_backend.py
  class BaseBackend (line 20) | class BaseBackend(object, metaclass=ABCMeta):
    class FigureManager (line 26) | class FigureManager:
      method __init__ (line 32) | def __init__(self, push_fn, figure_kwargs: dict, push_kwargs: dict):
      method __enter__ (line 52) | def __enter__(self):
      method __exit__ (line 61) | def __exit__(self, *args):
    method __init__ (line 80) | def __init__(self, abort_event: Event = None, queue: Queue = None):
    method _log_item (line 143) | def _log_item(self):
    method _resolve_global_step (line 181) | def _resolve_global_step(self, key, **val):
    method run (line 236) | def run(self):
    method set_queue (line 252) | def set_queue(self, queue: Queue):
    method set_event (line 264) | def set_event(self, event: Event):
    method _call_exec_fn (line 276) | def _call_exec_fn(self, exec_fn, args):
    method _image (line 312) | def _image(self, *args, **kwargs):
    method _images (line 332) | def _images(self, *args, **kwargs):
    method _image_with_boxes (line 350) | def _image_with_boxes(self, *args, **kwargs):
    method _scalar (line 370) | def _scalar(self, *args, **kwargs):
    method _scalars (line 390) | def _scalars(self, *args, **kwargs):
    method _histogram (line 410) | def _histogram(self, *args, **kwargs):
    method _figure (line 431) | def _figure(self, *args, **kwargs):
    method _audio (line 451) | def _audio(self, *args, **kwargs):
    method _video (line 471) | def _video(self, *args, **kwargs):
    method _text (line 491) | def _text(self, *args, **kwargs):
    method _graph_pytorch (line 511) | def _graph_pytorch(self, *args, **kwargs):
    method _graph_tf (line 532) | def _graph_tf(self, *args, **kwargs):
    method _graph_onnx (line 552) | def _graph_onnx(self, *args, **kwargs):
    method _embedding (line 572) | def _embedding(self, *args, **kwargs):
    method _pr_curve (line 592) | def _pr_curve(self, *args, **kwargs):
    method _scatter (line 612) | def _scatter(self, plot_kwargs: dict, figure_kwargs: dict = None,
    method _line (line 636) | def _line(self, plot_kwargs=None, figure_kwargs=None, **kwargs):
    method _stem (line 660) | def _stem(self, plot_kwargs=None, figure_kwargs=None, **kwargs):
    method _heatmap (line 683) | def _heatmap(self, plot_kwargs=None, figure_kwargs=None, **kwargs):
    method _bar (line 706) | def _bar(self, plot_kwargs=None, figure_kwargs=None, **kwargs):
    method _boxplot (line 729) | def _boxplot(self, plot_kwargs=None, figure_kwargs=None, **kwargs):
    method _surface (line 752) | def _surface(self, plot_kwargs=None, figure_kwargs=None, **kwargs):
    method _contour (line 776) | def _contour(self, plot_kwargs=None, figure_kwargs=None, **kwargs):
    method _quiver (line 800) | def _quiver(self, plot_kwargs=None, figure_kwargs=None, **kwargs):
    method name (line 824) | def name(self):

FILE: delira/logging/base_logger.py
  class Logger (line 11) | class Logger(object):
    method __init__ (line 17) | def __init__(self, backend: BaseBackend, max_queue_size: int = None,
    method log (line 135) | def log(self, log_message: dict):
    method __call__ (line 209) | def __call__(self, log_message: dict):
    method close (line 228) | def close(self):
    method __del__ (line 242) | def __del__(self):
  class SingleThreadedLogger (line 252) | class SingleThreadedLogger(Logger):
    method log (line 258) | def log(self, log_message: dict):
  function make_logger (line 275) | def make_logger(backend: BaseBackend, max_queue_size: int = None,
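
  A sketch of constructing a logger via make_logger. The message layout is an
  assumption inferred from the backend signatures above: each key selects a
  log type, its value holds the kwargs for the matching _<log_type> method.

    from delira.logging.base_logger import make_logger
    from delira.logging.tensorboard_backend import TensorboardBackend

    logger = make_logger(TensorboardBackend())
    logger.log({"scalar": {"tag": "train/loss",
                           "scalar_value": 0.5,
                           "global_step": 1}})
    logger.close()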

FILE: delira/logging/logging_context.py
  class LoggingContext (line 8) | class LoggingContext(object):
    method __init__ (line 13) | def __init__(
    method __enter__ (line 59) | def __enter__(self):
    method __exit__ (line 73) | def __exit__(self, *args):
    method log (line 94) | def log(self, msg: dict):
    method __call__ (line 109) | def __call__(self, log_message: dict):

FILE: delira/logging/registry.py
  function log (line 9) | def log(msg: dict, name=None):
  function logger_exists (line 49) | def logger_exists(name: str):
  function register_logger (line 67) | def register_logger(logger: Logger, name: str, overwrite=False):
  function unregister_logger (line 93) | def unregister_logger(name: str):
  function get_logger (line 110) | def get_logger(name):
  function get_available_loggers (line 128) | def get_available_loggers():
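
  A sketch of the logger registry, following the signatures above; the same
  assumed message layout as in the base_logger sketch applies.

    from delira.logging.base_logger import make_logger
    from delira.logging.registry import (get_available_loggers, log,
                                         register_logger, unregister_logger)
    from delira.logging.tensorboard_backend import TensorboardBackend

    register_logger(make_logger(TensorboardBackend()), name="default")
    print(get_available_loggers())
    log({"scalar": {"tag": "val/acc", "scalar_value": 0.9}}, name="default")
    unregister_logger("default")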

FILE: delira/logging/tensorboard_backend.py
  class TensorboardBackend (line 16) | class TensorboardBackend(WriterLoggingBackend):
    method __init__ (line 21) | def __init__(self, writer_kwargs=None,
    method _call_exec_fn (line 46) | def _call_exec_fn(self, exec_fn, args):
    method __del__ (line 70) | def __del__(self):
    method _graph_pytorch (line 78) | def _graph_pytorch(self, model, input_to_model=None, verbose=False,
    method _graph_tf (line 101) | def _graph_tf(self, graph, run_metadata=None):
    method _graph_onnx (line 140) | def _graph_onnx(self, prototxt):
    method _embedding (line 154) | def _embedding(self, mat, metadata=None, label_img=None, global_step=N...
    method _scalars (line 183) | def _scalars(self, main_tag: str, tag_scalar_dict: dict, global_step=N...
    method name (line 214) | def name(self):

FILE: delira/logging/visdom_backend.py
  class VisdomBackend (line 8) | class VisdomBackend(WriterLoggingBackend):
    method __init__ (line 13) | def __init__(self, writer_kwargs: dict = None,
    method name (line 37) | def name(self):

FILE: delira/logging/writer_backend.py
  class WriterLoggingBackend (line 7) | class WriterLoggingBackend(BaseBackend):
    method __init__ (line 12) | def __init__(self, writer_cls, writer_kwargs: dict,
    method convert_to_npy (line 19) | def convert_to_npy(*args, **kwargs):
    method _image (line 40) | def _image(self, tag, img_tensor, global_step=None, walltime=None,
    method _images (line 66) | def _images(self, tag, img_tensor, global_step=None, walltime=None,
    method _image_with_boxes (line 92) | def _image_with_boxes(self, tag, img_tensor, box_tensor, global_step=N...
    method _scalar (line 124) | def _scalar(self, tag, scalar_value, global_step=None, walltime=None):
    method _scalars (line 145) | def _scalars(self, main_tag, tag_scalar_dict, global_step=None,
    method _histogram (line 169) | def _histogram(self, tag, values, global_step=None, bins='tensorflow',
    method _figure (line 194) | def _figure(self, tag, figure, global_step=None, close=True,
    method _audio (line 218) | def _audio(self, tag, snd_tensor, global_step=None, sample_rate=44100,
    method _text (line 243) | def _text(self, tag, text_string, global_step=None, walltime=None):
    method _pr_curve (line 264) | def _pr_curve(self, tag, labels, predictions, global_step=None,
    method _video (line 297) | def _video(self, tag, vid_tensor, global_step=None, fps=4, walltime=No...
    method name (line 322) | def name(self):

FILE: delira/models/abstract_network.py
  class AbstractNetwork (line 7) | class AbstractNetwork(object):
    method __init__ (line 16) | def __init__(self, **kwargs):
    method __call__ (line 32) | def __call__(self, *args, **kwargs):
    method closure (line 54) | def closure(model, data_dict: dict, optimizers: dict, losses: dict,
    method prepare_batch (line 94) | def prepare_batch(batch: dict, input_device, output_device):
    method init_kwargs (line 124) | def init_kwargs(self):
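
  An illustrative skeleton of the AbstractNetwork contract sketched by the
  signatures above: __call__ produces a dict of named outputs, the static
  closure runs a single training step, and the static prepare_batch moves a
  batch dict between devices. The trailing closure parameters are truncated
  above and absorbed via **kwargs here; the return layout is an assumption.

    from delira.models.abstract_network import AbstractNetwork

    class IdentityNet(AbstractNetwork):
        """Illustrative skeleton only."""

        def __call__(self, *args, **kwargs):
            return {"pred": args[0]}  # networks return dicts of named outputs

        @staticmethod
        def closure(model, data_dict, optimizers, losses, **kwargs):
            # one step: forward, compute losses, step optimizers (backend-
            # specific); return layout (metrics, predictions) is assumed
            preds = model(data_dict["data"])
            return {}, preds

        @staticmethod
        def prepare_batch(batch, input_device, output_device):
            return batch  # nothing to move for plain numpy data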

FILE: delira/models/backends/chainer/abstract_network.py
  class ChainerMixin (line 12) | class ChainerMixin(AbstractNetwork):
  class AbstractChainerNetwork (line 16) | class AbstractChainerNetwork(chainer.Chain, ChainerMixin):
    method __init__ (line 21) | def __init__(self, **kwargs):
    method forward (line 35) | def forward(self, *args, **kwargs) -> dict:
    method __call__ (line 54) | def __call__(self, *args, **kwargs) -> dict:
    method prepare_batch (line 76) | def prepare_batch(batch: dict, input_device, output_device):
    method closure (line 112) | def closure(model, data_dict: dict, optimizers: dict, losses: dict,

FILE: delira/models/backends/chainer/data_parallel.py
  function _apply_scatter (line 6) | def _apply_scatter(inputs: chainer.Variable, target_devices: list,
  function _apply_gather (line 91) | def _apply_gather(target_device, dim, *outputs):
  function _scatter (line 98) | def _scatter(inputs, target_devices: list, dim):
  function _gather (line 170) | def _gather(outputs, target_device, dim=0):
  class DataParallelChainerNetwork (line 199) | class DataParallelChainerNetwork(AbstractChainerNetwork):
    method __init__ (line 205) | def __init__(self, module: AbstractChainerNetwork, devices: list,
    method forward (line 248) | def forward(self, *args, **kwargs):
    method params (line 283) | def params(self, include_uninit=True):
    method _scatter (line 300) | def _scatter(inputs, kwargs, target_devices: list, dim=0):
    method _gather (line 342) | def _gather(predictions, dim, target_device):
    method cleargrads (line 363) | def cleargrads(self):
    method zerograds (line 367) | def zerograds(self):
    method closure (line 372) | def closure(self):
    method prepare_batch (line 376) | def prepare_batch(self):
  class ParallelOptimizerCumulateGradientsHook (line 380) | class ParallelOptimizerCumulateGradientsHook(object):
    method __call__ (line 390) | def __call__(self, optimizer: chainer.Optimizer):
  class ParallelOptimizerUpdateModelParameters (line 407) | class ParallelOptimizerUpdateModelParameters(object):
    method __call__ (line 417) | def __call__(self, optimizer: chainer.Optimizer):
  class DataParallelChainerOptimizer (line 423) | class DataParallelChainerOptimizer(chainer.Optimizer):
    method __init__ (line 433) | def __init__(self, optimizer):
    method from_optimizer_class (line 451) | def from_optimizer_class(cls, optim_cls, *args, **kwargs):
    method setup (line 475) | def setup(self, link):
    method target (line 492) | def target(self):
    method epoch (line 496) | def epoch(self):
    method _pre_update_hooks (line 500) | def _pre_update_hooks(self):
    method _loss_scale (line 504) | def _loss_scale(self):
    method _loss_scale_max (line 508) | def _loss_scale_max(self):
    method _loss_scaling_is_dynamic (line 512) | def _loss_scaling_is_dynamic(self):
    method use_auto_new_epoch (line 516) | def use_auto_new_epoch(self):
    method update (line 520) | def update(self):
    method new_epoch (line 524) | def new_epoch(self):
    method add_hook (line 528) | def add_hook(self):
    method remove_hook (line 532) | def remove_hook(self):
    method call_hooks (line 536) | def call_hooks(self):
    method serialize (line 540) | def serialize(self):
    method loss_scaling (line 544) | def loss_scaling(self):
    method set_loss_scale (line 548) | def set_loss_scale(self):
    method check_nan_in_grads (line 552) | def check_nan_in_grads(self):
    method is_safe_to_update (line 556) | def is_safe_to_update(self):
    method update_loss_scale (line 560) | def update_loss_scale(self):

FILE: delira/models/backends/sklearn/abstract_network.py
  class SklearnEstimator (line 7) | class SklearnEstimator(AbstractNetwork):
    method __init__ (line 13) | def __init__(self, module: BaseEstimator):
    method __call__ (line 40) | def __call__(self, *args, **kwargs):
    method iterative_training (line 60) | def iterative_training(self):
    method prepare_batch (line 75) | def prepare_batch(batch: dict, input_device, output_device):
    method closure (line 106) | def closure(model, data_dict: dict, optimizers: dict, losses: dict,

FILE: delira/models/backends/tf_eager/abstract_network.py
  class AbstractTfEagerNetwork (line 8) | class AbstractTfEagerNetwork(AbstractNetwork, tf.keras.layers.Layer):
    method __init__ (line 15) | def __init__(self, data_format="channels_first", trainable=True,
    method call (line 42) | def call(self, *args, **kwargs):
    method __call__ (line 61) | def __call__(self, *args, **kwargs):
    method prepare_batch (line 77) | def prepare_batch(batch: dict, input_device, output_device):
    method closure (line 112) | def closure(model, data_dict: dict,

FILE: delira/models/backends/tf_eager/data_parallel.py
  class DataParallelTfEagerNetwork (line 6) | class DataParallelTfEagerNetwork(AbstractTfEagerNetwork):
    method __init__ (line 15) | def __init__(self, module, devices):
    method call (line 32) | def call(self, *args, **kwargs):
    method closure (line 47) | def closure(self):
    method prepare_batch (line 51) | def prepare_batch(self):

FILE: delira/models/backends/tf_graph/abstract_network.py
  class AbstractTfGraphNetwork (line 9) | class AbstractTfGraphNetwork(AbstractNetwork, metaclass=abc.ABCMeta):
    method __init__ (line 20) | def __init__(self, sess=tf.Session, **kwargs):
    method __call__ (line 39) | def __call__(self, *args, **kwargs):
    method run (line 59) | def run(self, *args, **kwargs):
    method _add_losses (line 91) | def _add_losses(self, losses: dict):
    method _add_optims (line 121) | def _add_optims(self, optims: dict):
    method prepare_batch (line 144) | def prepare_batch(batch: dict, input_device, output_device):
    method closure (line 170) | def closure(model, data_dict: dict, optimizers: dict, losses: dict,

FILE: delira/models/backends/torch/abstract_network.py
  class AbstractPyTorchNetwork (line 8) | class AbstractPyTorchNetwork(AbstractNetwork, torch.nn.Module):
    method __init__ (line 19) | def __init__(self, **kwargs):
    method forward (line 33) | def forward(self, *inputs):
    method __call__ (line 49) | def __call__(self, *args, **kwargs):
    method prepare_batch (line 69) | def prepare_batch(batch: dict, input_device, output_device):
    method closure (line 102) | def closure(model, data_dict: dict, optimizers: dict, losses: dict,
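
  An illustrative AbstractPyTorchNetwork subclass, assuming __call__
  dispatches to forward as in torch.nn.Module and that forward returns a dict
  of named tensors (as the chainer counterpart's forward(...) -> dict
  annotation above suggests).

    import torch

    from delira.models.backends.torch.abstract_network import \
        AbstractPyTorchNetwork

    class TinyTorchNet(AbstractPyTorchNetwork):
        """Illustrative subclass; returns a dict of named tensors."""

        def __init__(self, in_feats=4, out_feats=2):
            super().__init__(in_feats=in_feats, out_feats=out_feats)
            self.fc = torch.nn.Linear(in_feats, out_feats)

        def forward(self, x):
            return {"pred": self.fc(x)}

    net = TinyTorchNet()
    print(net(torch.rand(8, 4))["pred"].shape)  # torch.Size([8, 2])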

FILE: delira/models/backends/torch/data_parallel.py
  class DataParallelPyTorchNetwork (line 7) | class DataParallelPyTorchNetwork(AbstractPyTorchNetwork,
    method __init__ (line 14) | def __init__(self, module: AbstractPyTorchNetwork,
    method forward (line 41) | def forward(self, *args, **kwargs):
    method closure (line 63) | def closure(self):
    method prepare_batch (line 67) | def prepare_batch(self):

FILE: delira/models/backends/torch/utils.py
  function scale_loss (line 13) | def scale_loss(loss,

FILE: delira/models/backends/torchscript/abstract_network.py
  class AbstractTorchScriptNetwork (line 6) | class AbstractTorchScriptNetwork(AbstractNetwork, torch.jit.ScriptModule):
    method __init__ (line 19) | def __init__(self, optimize=True, **kwargs):
    method __call__ (line 33) | def __call__(self, *args, **kwargs):
    method prepare_batch (line 53) | def prepare_batch(batch: dict, input_device, output_device):
    method closure (line 86) | def closure(model, data_dict: dict, optimizers: dict, losses: dict,

FILE: delira/training/backends/chainer/experiment.py
  class ChainerExperiment (line 14) | class ChainerExperiment(BaseExperiment):
    method __init__ (line 15) | def __init__(self,
    method test (line 79) | def test(self, network: AbstractChainerNetwork,

FILE: delira/training/backends/chainer/trainer.py
  class ChainerNetworkTrainer (line 17) | class ChainerNetworkTrainer(BaseNetworkTrainer):
    method __init__ (line 27) | def __init__(self,
    method _setup (line 189) | def _setup(self, network, optim_fn, optimizer_cls, optimizer_params,
    method _at_training_begin (line 325) | def _at_training_begin(self, *args, **kwargs):
    method _at_training_end (line 344) | def _at_training_end(self, *args, **kwargs):
    method _at_epoch_end (line 364) | def _at_epoch_end(self, metrics_val, val_score_key, epoch, is_best,
    method _train_single_epoch (line 408) | def _train_single_epoch(self, batchgen: MultiThreadedAugmenter, epoch,
    method predict_data_mgr (line 427) | def predict_data_mgr(self, datamgr, batchsize=None, metrics={},
    method save_state (line 462) | def save_state(self, file_name, epoch, **kwargs):
    method load_state (line 485) | def load_state(file_name, **kwargs):
    method update_state (line 508) | def update_state(self, file_name, *args, **kwargs):
    method _update_state (line 533) | def _update_state(self, new_state):
    method _search_for_prev_state (line 561) | def _search_for_prev_state(path, extensions=None):

FILE: delira/training/backends/chainer/utils.py
  function _single_element_tensor_conversion (line 7) | def _single_element_tensor_conversion(element):
  function convert_to_numpy (line 12) | def convert_to_numpy(*args, **kwargs):
  function create_optims_default (line 39) | def create_optims_default(model, optim_cls, **optimizer_params):

FILE: delira/training/backends/sklearn/experiment.py
  class SklearnExperiment (line 15) | class SklearnExperiment(BaseExperiment):
    method __init__ (line 16) | def __init__(self,
    method _setup_training (line 80) | def _setup_training(self, config, **kwargs):
    method _setup_test (line 120) | def _setup_test(self, config, model, convert_batch_to_npy_fn,

FILE: delira/training/backends/sklearn/trainer.py
  class SklearnEstimatorTrainer (line 20) | class SklearnEstimatorTrainer(BaseNetworkTrainer):
    method __init__ (line 30) | def __init__(self,
    method _setup (line 157) | def _setup(self, estimator, key_mapping, convert_batch_to_npy_fn,
    method _at_training_begin (line 206) | def _at_training_begin(self, *args, **kwargs):
    method _at_training_end (line 225) | def _at_training_end(self, *args, **kwargs):
    method _at_epoch_end (line 245) | def _at_epoch_end(self, metrics_val, val_score_key, epoch, is_best,
    method _get_classes_if_necessary (line 285) | def _get_classes_if_necessary(self, dmgr: DataManager, verbose,
    method train (line 330) | def train(self, num_epochs, datamgr_train, datamgr_valid=None,
    method save_state (line 400) | def save_state(self, file_name, epoch, **kwargs):
    method load_state (line 422) | def load_state(file_name, *args, **kwargs):
    method _update_state (line 445) | def _update_state(self, new_state):
    method _search_for_prev_state (line 470) | def _search_for_prev_state(path, extensions=None):
    method calc_metrics (line 498) | def calc_metrics(batch, metrics: dict = None, metric_keys=None):

FILE: delira/training/backends/sklearn/utils.py
  function create_optims_default (line 1) | def create_optims_default(*args, **kwargs):

FILE: delira/training/backends/tf_eager/experiment.py
  class TfEagerExperiment (line 17) | class TfEagerExperiment(BaseExperiment):
    method __init__ (line 18) | def __init__(self,
    method kfold (line 83) | def kfold(self, data: DataManager, metrics: dict, num_epochs=None,
    method test (line 203) | def test(self, network, test_data: DataManager,
    method setup (line 264) | def setup(self, config, training=True, **kwargs):

FILE: delira/training/backends/tf_eager/trainer.py
  class TfEagerNetworkTrainer (line 18) | class TfEagerNetworkTrainer(BaseNetworkTrainer):
    method __init__ (line 19) | def __init__(self,
    method _setup (line 178) | def _setup(self, network, optim_fn, optimizer_cls, optimizer_params,
    method _at_training_end (line 254) | def _at_training_end(self, *args, **kwargs):
    method _train_single_epoch (line 274) | def _train_single_epoch(self, batchgen, epoch, verbose=False):
    method predict_data_mgr (line 290) | def predict_data_mgr(self, datamgr, batchsize=None, metrics=None,
    method save_state (line 320) | def save_state(self, file_name, *args, **kwargs):
    method load_state (line 332) | def load_state(self, file_name, *args, **kwargs):
    method _search_for_prev_state (line 349) | def _search_for_prev_state(path, extensions=None):

FILE: delira/training/backends/tf_eager/utils.py
  function _single_element_tensor_conversion (line 7) | def _single_element_tensor_conversion(element):
  function convert_to_numpy (line 11) | def convert_to_numpy(*args, **kwargs):
  function create_optims_default (line 38) | def create_optims_default(optim_cls, **optim_params):

FILE: delira/training/backends/tf_graph/experiment.py
  class TfGraphExperiment (line 17) | class TfGraphExperiment(TfEagerExperiment):
    method __init__ (line 18) | def __init__(self,
    method test (line 88) | def test(self, network, test_data: DataManager,

FILE: delira/training/backends/tf_graph/trainer.py
  class TfGraphNetworkTrainer (line 20) | class TfGraphNetworkTrainer(BaseNetworkTrainer):
    method __init__ (line 30) | def __init__(self,
    method _setup (line 187) | def _setup(self, network, optim_fn, optimizer_cls, optimizer_params,
    method _at_training_end (line 276) | def _at_training_end(self, *args, **kwargs):
    method _train_single_epoch (line 298) | def _train_single_epoch(self, dmgr_train: DataManager, epoch,
    method predict_data_mgr (line 315) | def predict_data_mgr(self, datamgr, batch_size=None, metrics=None,
    method save_state (line 346) | def save_state(self, file_name, *args, **kwargs):
    method load_state (line 357) | def load_state(self, file_name, *args, **kwargs):
    method _search_for_prev_state (line 372) | def _search_for_prev_state(path, extensions=None):

FILE: delira/training/backends/tf_graph/utils.py
  function initialize_uninitialized (line 4) | def initialize_uninitialized(sess):

FILE: delira/training/backends/torch/experiment.py
  class PyTorchExperiment (line 17) | class PyTorchExperiment(BaseExperiment):
    method __init__ (line 18) | def __init__(self,
    method kfold (line 82) | def kfold(self, data: DataManager, metrics: dict, num_epochs=None,
    method test (line 202) | def test(self, network, test_data: DataManager,

FILE: delira/training/backends/torch/trainer.py
  class PyTorchNetworkTrainer (line 23) | class PyTorchNetworkTrainer(BaseNetworkTrainer):
    method __init__ (line 33) | def __init__(self,
    method _setup (line 248) | def _setup(self, network, optim_fn, optimizer_cls, optimizer_params,
    method _at_training_begin (line 366) | def _at_training_begin(self, *args, **kwargs):
    method _at_training_end (line 382) | def _at_training_end(self, *args, **kwargs):
    method _at_epoch_end (line 402) | def _at_epoch_end(self, metrics_val, val_score_key, epoch, is_best,
    method _train_single_epoch (line 444) | def _train_single_epoch(self, batchgen: MultiThreadedAugmenter, epoch,
    method predict_data_mgr (line 463) | def predict_data_mgr(self, datamgr, batchsize=None, metrics=None,
    method save_state (line 501) | def save_state(self, file_name, epoch, **kwargs):
    method load_state (line 521) | def load_state(file_name, **kwargs):
    method _update_state (line 544) | def _update_state(self, new_state):
    method _search_for_prev_state (line 575) | def _search_for_prev_state(path, extensions=None):

FILE: delira/training/backends/torch/utils.py
  function create_optims_default (line 9) | def create_optims_default(model, optim_cls, **optim_params):
  function _single_element_tensor_conversion (line 31) | def _single_element_tensor_conversion(element):
  function convert_to_numpy (line 35) | def convert_to_numpy(*args, **kwargs):

FILE: delira/training/backends/torchscript/experiment.py
  class TorchScriptExperiment (line 13) | class TorchScriptExperiment(PyTorchExperiment):
    method __init__ (line 14) | def __init__(self,

FILE: delira/training/backends/torchscript/trainer.py
  class TorchScriptNetworkTrainer (line 19) | class TorchScriptNetworkTrainer(PyTorchNetworkTrainer):
    method __init__ (line 20) | def __init__(self,
    method save_state (line 190) | def save_state(self, file_name, epoch, **kwargs):
    method load_state (line 212) | def load_state(file_name, **kwargs):
    method _update_state (line 231) | def _update_state(self, new_state):
    method _search_for_prev_state (line 252) | def _search_for_prev_state(path, extensions=None):

FILE: delira/training/base_experiment.py
  class BaseExperiment (line 26) | class BaseExperiment(object):
    method __init__ (line 41) | def __init__(self,
    method setup (line 144) | def setup(self, config, training=True, **kwargs):
    method _setup_training (line 178) | def _setup_training(self, config, **kwargs):
    method _setup_test (line 245) | def _setup_test(self, config, model, convert_batch_to_npy_fn,
    method run (line 277) | def run(self, train_data: DataManager,
    method resume (line 326) | def resume(self, save_path: str, train_data: DataManager,
    method test (line 365) | def test(self, network, test_data: DataManager,
    method kfold (line 417) | def kfold(self, data: DataManager, metrics: dict, num_epochs=None,
    method __str__ (line 604) | def __str__(self):
    method __call__ (line 619) | def __call__(self, *args, **kwargs):
    method save (line 638) | def save(self):
    method load (line 650) | def load(file_name):
    method _resolve_params (line 663) | def _resolve_params(self, config: typing.Union[DeliraConfig, None]):
    method _resolve_kwargs (line 691) | def _resolve_kwargs(self, kwargs: typing.Union[dict, None]):
    method __getstate__ (line 719) | def __getstate__(self):
    method __setstate__ (line 722) | def __setstate__(self, state):

FILE: delira/training/base_trainer.py
  class BaseNetworkTrainer (line 21) | class BaseNetworkTrainer(Predictor):
    method __init__ (line 37) | def __init__(self,
    method _setup (line 188) | def _setup(self, network, lr_scheduler_cls, lr_scheduler_params, gpu_ids,
    method _at_training_begin (line 212) | def _at_training_begin(self, *args, **kwargs):
    method _at_training_end (line 230) | def _at_training_end(self, *args, **kwargs):
    method _at_epoch_begin (line 252) | def _at_epoch_begin(self, val_score_key, epoch, num_epochs,
    method _at_epoch_end (line 276) | def _at_epoch_end(self, metrics_val, val_score_key, epoch, is_best,
    method _at_iter_begin (line 310) | def _at_iter_begin(self, iter_num, epoch=0, **kwargs):
    method _at_iter_end (line 333) | def _at_iter_end(self, iter_num, data_dict, metrics, epoch=0, **kwargs):
    method _train_single_epoch (line 365) | def _train_single_epoch(self, dmgr_train: DataManager, epoch,
    method train (line 439) | def train(self, num_epochs, datamgr_train, datamgr_valid=None,
    method fold (line 566) | def fold(self):
    method fold (line 579) | def fold(self, fold):
    method register_callback (line 601) | def register_callback(self, callback: AbstractCallback):
    method save_state (line 630) | def save_state(self, file_name, *args, **kwargs):
    method load_state (line 648) | def load_state(file_name, *args, **kwargs):
    method _update_state (line 671) | def _update_state(self, new_state):
    method update_state (line 699) | def update_state(self, file_name, *args, **kwargs):
    method _is_better_val_scores (line 721) | def _is_better_val_scores(old_val_score, new_val_score,
    method name (line 751) | def name(self):
    method _reinitialize_logging (line 755) | def _reinitialize_logging(self, logging_type, logging_kwargs: dict,
    method _search_for_prev_state (line 854) | def _search_for_prev_state(path, extensions=None):
    method register_callback (line 910) | def register_callback(self, callback: AbstractCallback):

FILE: delira/training/callbacks/abstract_callback.py
  class AbstractCallback (line 1) | class AbstractCallback(object):
    method __init__ (line 12) | def __init__(self, *args, **kwargs):
    method at_epoch_begin (line 25) | def at_epoch_begin(self, trainer, *args, **kwargs):
    method at_epoch_end (line 48) | def at_epoch_end(self, trainer, *args, **kwargs):
    method at_training_begin (line 71) | def at_training_begin(self, trainer, *args, **kwargs):
    method at_training_end (line 89) | def at_training_end(self, trainer, *args, **kwargs):
    method at_iter_begin (line 108) | def at_iter_begin(self, trainer, *args, **kwargs):
    method at_iter_end (line 135) | def at_iter_end(self, trainer, *args, **kwargs):

FILE: delira/training/callbacks/early_stopping.py
  class EarlyStopping (line 4) | class EarlyStopping(AbstractCallback):
    method __init__ (line 14) | def __init__(self, monitor_key,
    method _is_better (line 51) | def _is_better(self, metric):
    method at_epoch_end (line 72) | def at_epoch_end(self, trainer, **kwargs):
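
  A sketch of wiring the callback into a trainer; BaseNetworkTrainer exposes
  register_callback (see base_trainer.py above). The monitored key name is
  hypothetical and the remaining EarlyStopping parameters are truncated above.

    from delira.training.callbacks.early_stopping import EarlyStopping

    def attach_early_stopping(trainer, key="val_loss"):
        """Register early stopping on an existing BaseNetworkTrainer."""
        trainer.register_callback(EarlyStopping(key))
        return trainer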

FILE: delira/training/callbacks/logging_callback.py
  class DefaultLoggingCallback (line 6) | class DefaultLoggingCallback(AbstractCallback):
    method __init__ (line 12) | def __init__(self, backend: BaseBackend, max_queue_size: int = None,
    method at_iter_end (line 53) | def at_iter_end(self, trainer, iter_num=None, data_dict=None, train=Fa...
    method create_tag (line 87) | def create_tag(tag: str, train: bool):

FILE: delira/training/callbacks/pytorch_schedulers.py
  class DefaultPyTorchSchedulerCallback (line 9) | class DefaultPyTorchSchedulerCallback(AbstractCallback):
    method __init__ (line 16) | def __init__(self, *args, **kwargs):
    method at_epoch_end (line 31) | def at_epoch_end(self, trainer, **kwargs):
  class OneCycleLRCallback (line 51) | class OneCycleLRCallback(DefaultPyTorchSchedulerCallback):
    method __init__ (line 57) | def __init__(
    method at_iter_begin (line 144) | def at_iter_begin(self, trainer, train,
    method at_epoch_end (line 167) | def at_epoch_end(self, trainer, **kwargs):
  class ReduceLROnPlateauCallback (line 170) | class ReduceLROnPlateauCallback(DefaultPyTorchSchedulerCallback):
    method __init__ (line 176) | def __init__(self, optimizer, mode='min', factor=0.1, patience=10,
    method at_epoch_end (line 238) | def at_epoch_end(self, trainer,
  class CosineAnnealingLRCallback (line 266) | class CosineAnnealingLRCallback(DefaultPyTorchSchedulerCallback):
    method __init__ (line 272) | def __init__(self, optimizer, T_max, eta_min=0, last_epoch=-1):
  class ExponentialLRCallback (line 292) | class ExponentialLRCallback(DefaultPyTorchSchedulerCallback):
    method __init__ (line 298) | def __init__(self, optimizer, gamma, last_epoch=-1):
  class LambdaLRCallback (line 315) | class LambdaLRCallback(DefaultPyTorchSchedulerCallback):
    method __init__ (line 321) | def __init__(self, optimizer, lr_lambda, last_epoch=-1):
  class MultiStepLRCallback (line 340) | class MultiStepLRCallback(DefaultPyTorchSchedulerCallback):
    method __init__ (line 346) | def __init__(self, optimizer, milestones, gamma=0.1, last_epoch=-1):
  class StepLRCallback (line 367) | class StepLRCallback(DefaultPyTorchSchedulerCallback):
    method __init__ (line 373) | def __init__(self, optimizer, step_size, gamma=0.1, last_epoch=-1):

FILE: delira/training/losses.py
  class BCEFocalLossPyTorch (line 7) | class BCEFocalLossPyTorch(torch.nn.Module):
    method __init__ (line 13) | def __init__(self, alpha=None, gamma=2, reduction='elementwise_mean'):
    method forward (line 45) | def forward(self, p, t):
  class BCEFocalLossLogitPyTorch (line 73) | class BCEFocalLossLogitPyTorch(torch.nn.Module):
    method __init__ (line 79) | def __init__(self, alpha=None, gamma=2, reduction='elementwise_mean'):
    method forward (line 110) | def forward(self, p, t):
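
  A sketch of the focal loss, assuming forward(p, t) expects probabilities in
  (0, 1) for p (the companion BCEFocalLossLogitPyTorch takes raw logits) and
  binary targets for t.

    import torch

    from delira.training.losses import BCEFocalLossPyTorch

    p = torch.rand(8, 1)                     # predicted probabilities
    t = torch.randint(0, 2, (8, 1)).float()  # binary targets
    loss = BCEFocalLossPyTorch(gamma=2)(p, t)
    print(loss.item())  # scalar under the default 'elementwise_mean' reduction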

FILE: delira/training/metrics.py
  class SklearnClassificationMetric (line 11) | class SklearnClassificationMetric(object):
    method __init__ (line 12) | def __init__(self, score_fn, gt_logits=False, pred_logits=True, **kwar...
    method __call__ (line 32) | def __call__(self, y_true, y_pred, **kwargs):
  class SklearnAccuracyScore (line 62) | class SklearnAccuracyScore(SklearnClassificationMetric):
    method __init__ (line 67) | def __init__(self, gt_logits=False, pred_logits=True, **kwargs):
  class SklearnBalancedAccuracyScore (line 71) | class SklearnBalancedAccuracyScore(SklearnClassificationMetric):
    method __init__ (line 76) | def __init__(self, gt_logits=False, pred_logits=True, **kwargs):
  class SklearnF1Score (line 81) | class SklearnF1Score(SklearnClassificationMetric):
    method __init__ (line 86) | def __init__(self, gt_logits=False, pred_logits=True, **kwargs):
  class SklearnFBetaScore (line 90) | class SklearnFBetaScore(SklearnClassificationMetric):
    method __init__ (line 95) | def __init__(self, gt_logits=False, pred_logits=True, **kwargs):
  class SklearnHammingLoss (line 99) | class SklearnHammingLoss(SklearnClassificationMetric):
    method __init__ (line 104) | def __init__(self, gt_logits=False, pred_logits=True, **kwargs):
  class SklearnJaccardSimilarityScore (line 108) | class SklearnJaccardSimilarityScore(SklearnClassificationMetric):
    method __init__ (line 113) | def __init__(self, gt_logits=False, pred_logits=True, **kwargs):
  class SklearnLogLoss (line 118) | class SklearnLogLoss(SklearnClassificationMetric):
    method __init__ (line 123) | def __init__(self, gt_logits=False, pred_logits=True, **kwargs):
  class SklearnMatthewsCorrCoeff (line 127) | class SklearnMatthewsCorrCoeff(SklearnClassificationMetric):
    method __init__ (line 132) | def __init__(self, gt_logits=False, pred_logits=True, **kwargs):
  class SklearnPrecisionScore (line 136) | class SklearnPrecisionScore(SklearnClassificationMetric):
    method __init__ (line 141) | def __init__(self, gt_logits=False, pred_logits=True, **kwargs):
  class SklearnRecallScore (line 145) | class SklearnRecallScore(SklearnClassificationMetric):
    method __init__ (line 150) | def __init__(self, gt_logits=False, pred_logits=True, **kwargs):
  class SklearnZeroOneLoss (line 154) | class SklearnZeroOneLoss(SklearnClassificationMetric):
    method __init__ (line 159) | def __init__(self, gt_logits=False, pred_logits=True, **kwargs):
  class AurocMetric (line 163) | class AurocMetric(object):
    method __init__ (line 164) | def __init__(self, classes=(0, 1), **kwargs):
    method __call__ (line 187) | def __call__(self, y_true, y_pred, **kwargs):
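
  A sketch of the sklearn metric wrappers; that pred_logits=True means
  predictions arrive as per-class logits and are argmax-ed before being
  passed to the sklearn score function is an assumption.

    import numpy as np

    from delira.training.metrics import SklearnAccuracyScore

    y_true = np.array([0, 1, 1, 0])
    y_logits = np.array([[2., -1.], [0., 3.], [2., 1.], [.5, -.5]])
    metric = SklearnAccuracyScore(gt_logits=False, pred_logits=True)
    print(metric(y_true, y_logits))  # 0.75 if the argmax assumption holds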

FILE: delira/training/predictor.py
  class Predictor (line 16) | class Predictor(object):
    method __init__ (line 29) | def __init__(
    method _setup (line 67) | def _setup(self, network, key_mapping, convert_batch_args_kwargs_to_np...
    method __call__ (line 102) | def __call__(self, data: dict, **kwargs):
    method predict (line 120) | def predict(self, data: dict, already_prepared=False, **kwargs):
    method _at_iter_begin (line 159) | def _at_iter_begin(self, iter_num, **kwargs):
    method _at_iter_end (line 185) | def _at_iter_end(self, iter_num, data_dict, metrics, **kwargs):
    method predict_data_mgr (line 217) | def predict_data_mgr(
    method predict_data_mgr_cache_metrics_only (line 334) | def predict_data_mgr_cache_metrics_only(self, datamgr, batchsize=None,
    method predict_data_mgr_cache_all (line 385) | def predict_data_mgr_cache_all(self, datamgr, batchsize=None, metrics=...
    method predict_data_mgr_cache (line 434) | def predict_data_mgr_cache(self, datamgr, batchsize=None, metrics=None,
    method __convert_dict (line 521) | def __convert_dict(old_dict, new_dict):
    method __concatenate_dict_items (line 566) | def __concatenate_dict_items(dict_like: dict):
    method __setattr__ (line 589) | def __setattr__(self, key, value):
    method calc_metrics (line 616) | def calc_metrics(batch: LookupConfig, metrics=None, metric_keys=None):
    method register_callback (line 647) | def register_callback(self, callback: AbstractCallback):

FILE: delira/training/utils.py
  function recursively_convert_elements (line 5) | def recursively_convert_elements(element, check_type, conversion_fn):
  function _correct_zero_shape (line 58) | def _correct_zero_shape(arg):
  function convert_to_numpy_identity (line 80) | def convert_to_numpy_identity(*args, **kwargs):

FILE: delira/utils/codecs.py
  class Encoder (line 12) | class Encoder:
    method __call__ (line 18) | def __call__(self, obj) -> typing.Any:
    method encode (line 34) | def encode(self, obj) -> typing.Any:
    method _encode_list (line 79) | def _encode_list(self, obj) -> list:
    method _encode_dict (line 95) | def _encode_dict(self, obj) -> dict:
    method _encode_array (line 112) | def _encode_array(self, obj) -> dict:
    method _encode_mapping (line 132) | def _encode_mapping(self, obj) -> dict:
    method _encode_iterable (line 154) | def _encode_iterable(self, obj) -> dict:
    method _encode_module (line 176) | def _encode_module(self, obj) -> dict:
    method _encode_type (line 193) | def _encode_type(self, obj) -> dict:
    method _encode_function (line 213) | def _encode_function(self, obj) -> dict:
    method _encode_class (line 233) | def _encode_class(self, obj) -> dict:
  class Decoder (line 257) | class Decoder:
    method __init__ (line 262) | def __init__(self):
    method __call__ (line 275) | def __call__(self, obj) -> typing.Any:
    method decode (line 291) | def decode(self, obj) -> typing.Any:
    method _decode_dict (line 314) | def _decode_dict(self, obj) -> dict:
    method _decode_list (line 335) | def _decode_list(self, obj) -> list:
    method _decode_array (line 351) | def _decode_array(self, obj) -> np.ndarray:
    method _decode_convert (line 367) | def _decode_convert(self, obj: dict) -> typing.Union[
    method _decode_module (line 387) | def _decode_module(self, obj: dict) -> types.ModuleType:
    method _decode_type (line 403) | def _decode_type(self, obj) -> typing.Any:
    method _decode_function (line 422) | def _decode_function(self, obj: dict) -> typing.Union[
    method _decode_class (line 442) | def _decode_class(self, obj: dict) -> typing.Any:
    method _decode_classargs (line 477) | def _decode_classargs(self, obj: dict) -> typing.Any:
    method _decode_functionargs (line 508) | def _decode_functionargs(self, obj: dict) -> typing.Any:
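
  A sketch of an encode/decode round trip, following the __call__ signatures
  above; the intermediate encoded layout (e.g. how arrays are marked) is
  internal and assumed to be invertible by Decoder.

    import numpy as np

    from delira.utils.codecs import Decoder, Encoder

    original = {"weights": np.arange(6).reshape(2, 3), "name": "demo"}
    encoded = Encoder()(original)   # nested plain types suitable for yaml/json
    restored = Decoder()(encoded)
    print(np.array_equal(restored["weights"], original["weights"]))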

FILE: delira/utils/config.py
  function non_string_warning (line 15) | def non_string_warning(func):
  class Config (line 42) | class Config(dict):
    method __init__ (line 47) | def __init__(self, dict_like=None, **kwargs):
    method __setattr__ (line 85) | def __setattr__(self, key, value):
    method __setitem__ (line 100) | def __setitem__(self, key, value):
    method _traverse_keys (line 121) | def _traverse_keys(self, keys, create=False):
    method _set_internal_item (line 150) | def _set_internal_item(self, key, item, deepcopy=False):
    method _to_config (line 170) | def _to_config(cls, item):
    method _create_internal_dict (line 193) | def _create_internal_dict(*args, **kwargs):
    method __getitem__ (line 206) | def __getitem__(self, key):
    method __contains__ (line 229) | def __contains__(self, key):
    method update (line 251) | def update(self, update_dict, deepcopy=False, overwrite=False):
    method _update (line 274) | def _update(self, key, item, deepcopy=False, overwrite=False):
    method _raise_overwrite (line 300) | def _raise_overwrite(self, key, overwrite):
    method dump (line 321) | def dump(self, path, formatter=yaml.dump, encoder_cls=Encoder, **kwargs):
    method dumps (line 342) | def dumps(self, formatter=yaml.dump, encoder_cls=Encoder, **kwargs):
    method load (line 361) | def load(self, path, formatter=yaml.load, decoder_cls=Decoder, **kwargs):
    method loads (line 382) | def loads(self, data, formatter=yaml.load, decoder_cls=Decoder, **kwar...
    method create_from_dict (line 403) | def create_from_dict(cls, value, deepcopy=False):
    method create_from_argparse (line 432) | def create_from_argparse(cls, value, deepcopy=False, **kwargs):
    method create_from_file (line 465) | def create_from_file(cls, path, formatter=yaml.load, decoder_cls=Decoder,
    method create_from_str (line 493) | def create_from_str(cls, data, formatter=yaml.load, decoder_cls=Decoder,
    method create_argparser (line 520) | def create_argparser(self):
    method _add_unknown_args (line 556) | def _add_unknown_args(unknown_args):
    method update_from_argparse (line 602) | def update_from_argparse(self, parser=None, add_unknown_items=False):
  class LookupConfig (line 645) | class LookupConfig(Config):
    method _create_internal_dict (line 651) | def _create_internal_dict(*args, **kwargs):
    method __contains__ (line 664) | def __contains__(self, key):
    method nested_get (line 686) | def nested_get(self, key, *args, allow_multiple=False, **kwargs):
  class DeliraConfig (line 735) | class DeliraConfig(LookupConfig):
    method __init__ (line 741) | def __init__(self, dict_like=None, fixed_model=None, fixed_training=None,
    method generate_dict (line 774) | def generate_dict(value):
    method params (line 794) | def params(self):
    method variable_params (line 809) | def variable_params(self):
    method variable_params (line 822) | def variable_params(self, new_params: dict):
    method fixed_params (line 847) | def fixed_params(self):
    method fixed_params (line 860) | def fixed_params(self, new_params: dict):
    method model_params (line 884) | def model_params(self):
    method model_params (line 897) | def model_params(self, new_params: dict):
    method training_params (line 921) | def training_params(self):
    method training_params (line 934) | def training_params(self, new_params: dict):
    method log_as_string (line 957) | def log_as_string(self, full_config=False, **kwargs):
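
  A sketch of basic Config usage; dotted nested access is an assumption based
  on _traverse_keys above, and config.yaml is a hypothetical output path.

    from delira.utils.config import Config

    cfg = Config({"model": {"num_classes": 10}}, batch_size=32)
    print(cfg["model.num_classes"])   # dotted nested access: assumed
    cfg.update({"training": {"lr": 1e-3}})
    cfg.dump("config.yaml")           # yaml.dump is the default formatter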

FILE: delira/utils/context_managers.py
  class DebugMode (line 4) | class DebugMode(object):
    method __init__ (line 11) | def __init__(self, mode):
    method _switch_to_new_mode (line 21) | def _switch_to_new_mode(self):
    method __enter__ (line 31) | def __enter__(self):
    method __exit__ (line 37) | def __exit__(self, *args, **kwargs):
  class DebugEnabled (line 56) | class DebugEnabled(DebugMode):
    method __init__ (line 62) | def __init__(self):
  class DebugDisabled (line 66) | class DebugDisabled(DebugMode):
    method __init__ (line 71) | def __init__(self):

FILE: delira/utils/decorators.py
  function dtype_func (line 9) | def dtype_func(class_object):
  function classtype_func (line 42) | def classtype_func(class_object):
  function make_deprecated (line 74) | def make_deprecated(new_func):

FILE: delira/utils/dict_reductions.py
  function reduce_last (line 7) | def reduce_last(items: list) -> Union[float, int, np.ndarray]:
  function reduce_first (line 25) | def reduce_first(items: list) -> Union[float, int, np.ndarray]:
  function reduce_mean (line 43) | def reduce_mean(items: list) -> Union[float, int, np.ndarray]:
  function reduce_median (line 61) | def reduce_median(items: list) -> Union[float, int, np.ndarray]:
  function reduce_max (line 79) | def reduce_max(items: list) -> Union[float, int, np.ndarray]:
  function reduce_min (line 97) | def reduce_min(items: list) -> Union[float, int, np.ndarray]:
  function flatten_dict (line 115) | def flatten_dict(d: dict, parent_key: str = '', sep: str = '.') -> dict:
  function unflatten_dict (line 146) | def unflatten_dict(dictionary: dict, sep: str = ".") -> dict:
  function reduce_dict (line 174) | def reduce_dict(items: list, reduce_fn) -> dict:
  function possible_reductions (line 234) | def possible_reductions() -> tuple:
  function get_reduction (line 246) | def get_reduction(reduce_type: str) -> Callable:
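
  A sketch of the dict helpers, following the signatures above: flatten_dict
  and unflatten_dict convert between nested and dot-separated flat dicts, and
  reduce_dict collapses a list of metric dicts key-by-key with a reduce_*
  function.

    from delira.utils.dict_reductions import (flatten_dict, reduce_dict,
                                              reduce_mean, unflatten_dict)

    nested = {"val": {"loss": 0.3, "acc": 0.9}}
    flat = flatten_dict(nested)             # {"val.loss": 0.3, "val.acc": 0.9}
    assert unflatten_dict(flat) == nested

    print(reduce_dict([{"loss": 0.2}, {"loss": 0.4}], reduce_mean))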

FILE: delira/utils/messenger.py
  class BaseMessenger (line 9) | class BaseMessenger(ABC):
    method __init__ (line 15) | def __init__(self, experiment: BaseExperiment, notify_epochs: int = No...
    method emit_message (line 31) | def emit_message(self, msg: str) -> dict:
    method __getattr__ (line 49) | def __getattr__(self, attr):
    method run (line 70) | def run(self, *args, **kwargs):
    method resume (line 107) | def resume(self, *args, **kwargs):
    method test (line 143) | def test(self, *args, **kwargs):
    method kfold (line 173) | def kfold(self, *args, **kwargs):
  class MessengerEpochCallback (line 218) | class MessengerEpochCallback(AbstractCallback):
    method __init__ (line 227) | def __init__(self, n_epochs: int, messenger: BaseMessenger):
    method at_epoch_end (line 241) | def at_epoch_end(self, trainer, **kwargs) -> dict:
  class MessengerFoldCallback (line 265) | class MessengerFoldCallback(AbstractCallback):
    method __init__ (line 274) | def __init__(self, messenger: BaseMessenger):
    method at_training_begin (line 285) | def at_training_begin(self, trainer, **kwargs) -> dict:
    method at_training_end (line 305) | def at_training_end(self, trainer, **kwargs) -> dict:
  class SlackMessenger (line 326) | class SlackMessenger(BaseMessenger):
    method __init__ (line 341) | def __init__(self, experiment: BaseExperiment, token: str,
    method emit_message (line 401) | def emit_message(self, msg, **kwargs):
    method _emit_message_v1 (line 434) | def _emit_message_v1(self, msg, **kwargs) -> dict:
    method _emit_message_v2 (line 462) | def _emit_message_v2(self, msg, **kwargs):

FILE: delira/utils/path.py
  function subdirs (line 4) | def subdirs(d):

FILE: delira/utils/time.py
  function now (line 4) | def now():

FILE: docs/_api/_build/delira/logging/logging_context.py
  class LoggingContext (line 8) | class LoggingContext(object):
    method __init__ (line 13) | def __init__(
    method __enter__ (line 59) | def __enter__(self):
    method __exit__ (line 73) | def __exit__(self, *args):
    method log (line 94) | def log(self, msg: dict):
    method __call__ (line 109) | def __call__(self, log_message: dict):

FILE: docs/_api/_build/delira/logging/registry.py
  function log (line 9) | def log(msg: dict, name=None):
  function logger_exists (line 49) | def logger_exists(name: str):
  function register_logger (line 67) | def register_logger(logger: Logger, name: str, overwrite=False):
  function unregister_logger (line 93) | def unregister_logger(name: str):
  function get_logger (line 110) | def get_logger(name):
  function get_available_loggers (line 128) | def get_available_loggers():

FILE: docs/_api/_build/delira/logging/tensorboard_backend.py
  class TensorboardBackend (line 8) | class TensorboardBackend(WriterLoggingBackend):
    method __init__ (line 13) | def __init__(self, writer_kwargs=None,
    method _call_exec_fn (line 33) | def _call_exec_fn(self, exec_fn, args):
    method __del__ (line 57) | def __del__(self):
    method _graph_pytorch (line 65) | def _graph_pytorch(self, model, input_to_model=None, verbose=False,
    method _graph_tf (line 88) | def _graph_tf(self, graph, run_metadata=None):
    method _graph_onnx (line 127) | def _graph_onnx(self, prototxt):
    method _embedding (line 141) | def _embedding(self, mat, metadata=None, label_img=None, global_step=N...
    method _scalars (line 170) | def _scalars(self, main_tag: str, tag_scalar_dict: dict, global_step=N...
    method name (line 201) | def name(self):

FILE: docs/_api/_build/delira/logging/visdom_backend.py
  class VisdomBackend (line 8) | class VisdomBackend(WriterLoggingBackend):
    method __init__ (line 13) | def __init__(self, writer_kwargs: dict = None,
    method name (line 37) | def name(self):

FILE: docs/_api/_build/delira/logging/writer_backend.py
  class WriterLoggingBackend (line 7) | class WriterLoggingBackend(BaseBackend):
    method __init__ (line 12) | def __init__(self, writer_cls, writer_kwargs: dict,
    method convert_to_npy (line 19) | def convert_to_npy(*args, **kwargs):
    method _image (line 40) | def _image(self, tag, img_tensor, global_step=None, walltime=None,
    method _images (line 66) | def _images(self, tag, img_tensor, global_step=None, walltime=None,
    method _image_with_boxes (line 92) | def _image_with_boxes(self, tag, img_tensor, box_tensor, global_step=N...
    method _scalar (line 124) | def _scalar(self, tag, scalar_value, global_step=None, walltime=None):
    method _scalars (line 145) | def _scalars(self, main_tag, tag_scalar_dict, global_step=None,
    method _histogram (line 169) | def _histogram(self, tag, values, global_step=None, bins='tensorflow',
    method _figure (line 194) | def _figure(self, tag, figure, global_step=None, close=True,
    method _audio (line 218) | def _audio(self, tag, snd_tensor, global_step=None, sample_rate=44100,
    method _text (line 243) | def _text(self, tag, text_string, global_step=None, walltime=None):
    method _pr_curve (line 264) | def _pr_curve(self, tag, labels, predictions, global_step=None,
    method _video (line 297) | def _video(self, tag, vid_tensor, global_step=None, fps=4, walltime=No...
    method name (line 322) | def name(self):

FILE: docs/conf.py
  function read_file (line 31) | def read_file(file):

FILE: setup.py
  function resolve_requirements (line 6) | def resolve_requirements(file):
  function read_file (line 21) | def read_file(file):
  function unify_requirements (line 27) | def unify_requirements(base_requirements: list, *additional_requirement_...
  function parse_all_requirements (line 36) | def parse_all_requirements(backend_requirement_dict: dict):

FILE: tests/data_loading/test_augmenters.py
  class TestAugmenters (line 10) | class TestAugmenters(unittest.TestCase):
    method setUp (line 11) | def setUp(self) -> None:
    method _aug_test (line 31) | def _aug_test(self):
    method test_parallel (line 60) | def test_parallel(self):
    method test_parallel_drop_last (line 66) | def test_parallel_drop_last(self):
    method test_sequential (line 72) | def test_sequential(self):
    method test_sequential_drop_last (line 78) | def test_sequential_drop_last(self):
    method _test_sampler_indices (line 81) | def _test_sampler_indices(self, parallel: bool):
    method test_sampling_order_parallel (line 115) | def test_sampling_order_parallel(self):
    method test_sampling_order_sequential (line 121) | def test_sampling_order_sequential(self):

FILE: tests/data_loading/test_data_loader.py
  class DataLoaderTest (line 8) | class DataLoaderTest(unittest.TestCase):
    method _test_data_loader (line 10) | def _test_data_loader(self, data):
    method test_data_loader_dset (line 34) | def test_data_loader_dset(self):
    method test_data_loader_dict (line 41) | def test_data_loader_dict(self):
    method test_data_loader_iterable (line 49) | def test_data_loader_iterable(self):

FILE: tests/data_loading/test_data_manager.py
  class DataManagerTest (line 12) | class DataManagerTest(unittest.TestCase):
    method test_datamanager (line 17) | def test_datamanager(self):

FILE: tests/data_loading/test_dataset.py
  class DataSubsetConcatTest (line 12) | class DataSubsetConcatTest(unittest.TestCase):
    method load_dummy_sample (line 15) | def load_dummy_sample(path, label_load_fct):
    method test_data_subset_concat (line 34) | def test_data_subset_concat(self):
    method test_cache_dataset (line 89) | def test_cache_dataset(self):
    method test_lazy_dataset (line 149) | def test_lazy_dataset(self):
    method test_load_sample (line 175) | def test_load_sample(self):

FILE: tests/data_loading/test_numba_transforms.py
  class NumbaTest (line 13) | class NumbaTest(unittest.TestCase):
    method setUp (line 14) | def setUp(self) -> None:
    method compare_transform_outputs (line 30) | def compare_transform_outputs(self, transform, numba_transform):
    method test_zoom (line 42) | def test_zoom(self):
    method test_pad (line 50) | def test_pad(self):
    method test_compose (line 58) | def test_compose(self):

FILE: tests/data_loading/test_sampler.py
  class SamplerTest (line 11) | class SamplerTest(unittest.TestCase):
    method setUp (line 12) | def setUp(self) -> None:
    method test_batch_sampler (line 18) | def test_batch_sampler(self):
    method test_sequential (line 41) | def test_sequential(self):
    method test_random_replacement (line 54) | def test_random_replacement(self):
    method test_random_no_replacement (line 71) | def test_random_no_replacement(self):
    method test_prevalence_sampler (line 88) | def test_prevalence_sampler(self):
    method test_abstract_sampler_iter (line 107) | def test_abstract_sampler_iter(self):

FILE: tests/data_loading/utils.py
  class DummyDataset (line 7) | class DummyDataset(AbstractDataset):
    method __init__ (line 8) | def __init__(self, length=600, class_weights=[0.5, 0.3, 0.2]):
    method __getitem__ (line 20) | def __getitem__(self, index):
    method __len__ (line 23) | def __len__(self):

FILE: tests/io/test_chainer.py
  class Model (line 10) | class Model(AbstractChainerNetwork):
    method __init__ (line 11) | def __init__(self):
    method forward (line 17) | def forward(self, x):
  class IoChainerTest (line 25) | class IoChainerTest(unittest.TestCase):
    method test_load_save (line 30) | def test_load_save(self):

FILE: tests/io/test_sklearn.py
  class IoSklearnTest (line 6) | class IoSklearnTest(unittest.TestCase):
    method test_load_save (line 11) | def test_load_save(self):

FILE: tests/io/test_tf.py
  class IoTfTest (line 6) | class IoTfTest(unittest.TestCase):
    method setUp (line 8) | def setUp(self) -> None:
    method test_load_save (line 19) | def test_load_save(self):
    method test_load_save_eager (line 72) | def test_load_save_eager(self):
    method tearDown (line 114) | def tearDown(self) -> None:

FILE: tests/io/test_torch.py
  class IoTorchTest (line 6) | class IoTorchTest(unittest.TestCase):
    method test_load_save (line 11) | def test_load_save(self):
    method test_torchscript_save (line 41) | def test_torchscript_save(self):

FILE: tests/logging/test_logging_frequency.py
  class DummyBackend (line 6) | class DummyBackend(BaseBackend):
    method _text (line 7) | def _text(self, logging_no: int, tag: str, global_step=None):
    method _image (line 11) | def _image(self, *args, **kwargs):
    method _images (line 14) | def _images(self, *args, **kwargs):
    method _image_with_boxes (line 17) | def _image_with_boxes(self, *args, **kwargs):
    method _scalar (line 20) | def _scalar(self, *args, **kwargs):
    method _scalars (line 23) | def _scalars(self, *args, **kwargs):
    method _histogram (line 26) | def _histogram(self, *args, **kwargs):
    method _figure (line 29) | def _figure(self, *args, **kwargs):
    method _audio (line 32) | def _audio(self, *args, **kwargs):
    method _video (line 35) | def _video(self, *args, **kwargs):
    method _graph_pytorch (line 38) | def _graph_pytorch(self, *args, **kwargs):
    method _graph_tf (line 41) | def _graph_tf(self, *args, **kwargs):
    method _graph_onnx (line 44) | def _graph_onnx(self, *args, **kwargs):
    method _embedding (line 47) | def _embedding(self, *args, **kwargs):
    method _pr_curve (line 50) | def _pr_curve(self, *args, **kwargs):
  class LoggingFrequencyTestCase (line 54) | class LoggingFrequencyTestCase(unittest.TestCase):
    method _logging_freq_test (line 56) | def _logging_freq_test(self, frequencies, num_runs: int, check_freq=No...
    method test_logging_freq (line 77) | def test_logging_freq(self):

FILE: tests/logging/test_logging_outside_trainer.py
  class LoggingOutsideTrainerTestCase (line 14) | class LoggingOutsideTrainerTestCase(unittest.TestCase):
    method test_logging_freq (line 18) | def test_logging_freq(self):

FILE: tests/logging/test_single_threaded_logging.py
  class TestTensorboardLogging (line 27) | class TestTensorboardLogging(unittest.TestCase):
    method setUp (line 29) | def setUp(self) -> None:
    method _setup_logger (line 87) | def _setup_logger(self):
    method _check_for_tag (line 92) | def _check_for_tag(self, tag, logdir=None):
    method _destroy_logger (line 117) | def _destroy_logger(logger: Logger):
    method test_image_npy (line 122) | def test_image_npy(self):
    method test_image_torch (line 129) | def test_image_torch(self):
    method test_img_npy (line 135) | def test_img_npy(self):
    method test_img_torch (line 142) | def test_img_torch(self):
    method test_picture_npy (line 148) | def test_picture_npy(self):
    method test_picture_torch (line 155) | def test_picture_torch(self):
    method test_images_npy (line 162) | def test_images_npy(self):
    method test_images_torch (line 169) | def test_images_torch(self):
    method test_imgs_npy (line 175) | def test_imgs_npy(self):
    method test_imgs_torch (line 182) | def test_imgs_torch(self):
    method test_pictures_npy (line 188) | def test_pictures_npy(self):
    method test_pictures_torch (line 195) | def test_pictures_torch(self):
    method test_image_with_boxes_npy (line 201) | def test_image_with_boxes_npy(self):
    method test_image_with_boxes_torch (line 211) | def test_image_with_boxes_torch(self):
    method test_bounding_boxes_npy (line 219) | def test_bounding_boxes_npy(self):
    method test_bounding_boxes_torch (line 229) | def test_bounding_boxes_torch(self):
    method test_bboxes_npy (line 238) | def test_bboxes_npy(self):
    method test_bboxes_torch (line 248) | def test_bboxes_torch(self):
    method test_scalar (line 256) | def test_scalar(self):
    method test_scalar_npy (line 266) | def test_scalar_npy(self):
    method test_scalar_torch (line 279) | def test_scalar_torch(self):
    method test_value (line 288) | def test_value(self):
    method test_value_npy (line 298) | def test_value_npy(self):
    method test_value_torch (line 310) | def test_value_torch(self):
    method test_scalars (line 320) | def test_scalars(self):
    method test_scalars_npy (line 333) | def test_scalars_npy(self):
    method test_scalars_torch (line 349) | def test_scalars_torch(self):
    method test_values (line 363) | def test_values(self):
    method test_values_npy (line 376) | def test_values_npy(self):
    method test_values_torch (line 392) | def test_values_torch(self):
    method test_histogram_npy (line 406) | def test_histogram_npy(self):
    method test_histogram_torch (line 418) | def test_histogram_torch(self):
    method test_hist_npy (line 428) | def test_hist_npy(self):
    method test_hist_torch (line 440) | def test_hist_torch(self):
    method test_figure (line 450) | def test_figure(self):
    method test_fig (line 464) | def test_fig(self):
    method test_audio_npy (line 478) | def test_audio_npy(self):
    method test_audio_torch (line 488) | def test_audio_torch(self):
    method test_sound_npy (line 496) | def test_sound_npy(self):
    method test_sound_torch (line 506) | def test_sound_torch(self):
    method test_video_npy (line 514) | def test_video_npy(self):
    method test_video_torch (line 527) | def test_video_torch(self):
    method test_text (line 539) | def test_text(self):
    method test_graph_tf (line 549) | def test_graph_tf(self):
    method test_graph_torch (line 573) | def test_graph_torch(self):
    method test_graph_onnx (line 588) | def test_graph_onnx(self):
    method test_embedding_npy (line 599) | def test_embedding_npy(self):
    method test_embedding_torch (line 606) | def test_embedding_torch(self):
    method test_pr_curve_npy (line 611) | def test_pr_curve_npy(self):
    method test_pr_curve_torch (line 621) | def test_pr_curve_torch(self):
    method test_pr_npy (line 629) | def test_pr_npy(self):
    method test_pr_torch (line 639) | def test_pr_torch(self):
    method tearDown (line 647) | def tearDown(self) -> None:

FILE: tests/models/data_parallel/test_chainer.py
  class TestDataParallelChainer (line 5) | class TestDataParallelChainer(unittest.TestCase):
    method setUp (line 7) | def setUp(self) -> None:
    method test_update (line 46) | def test_update(self):
    method test_keyword_arguments_different_batchsize (line 79) | def test_keyword_arguments_different_batchsize(self):
    method test_positional_arguments (line 100) | def test_positional_arguments(self):

FILE: tests/models/data_parallel/test_torch.py
  class TestDataParallelTorch (line 8) | class TestDataParallelTorch(unittest.TestCase):
    method setUp (line 10) | def setUp(self) -> None:
    method test_update (line 39) | def test_update(self):

FILE: tests/models/test_abstract_models.py
  class TestAbstractModels (line 8) | class TestAbstractModels(unittest.TestCase):
    method _setup_torch (line 11) | def _setup_torch(*args):
    method _setup_torchscript (line 27) | def _setup_torchscript(*args):
    method _setup_tfeager (line 45) | def _setup_tfeager(*args):
    method _setup_tfgraph (line 63) | def _setup_tfgraph(*args):
    method _setup_chainer (line 95) | def _setup_chainer(*args):
    method _setup_sklearn (line 116) | def _setup_sklearn(*args):
    method run_model_arg (line 134) | def run_model_arg(self, device=None):
    method run_model_kwarg (line 141) | def run_model_kwarg(self, device=None, keyword="data"):
    method setUp (line 148) | def setUp(self) -> None:
    method test_sklearn (line 174) | def test_sklearn(self):
    method test_chainer (line 180) | def test_chainer(self):
    method test_pytorch (line 187) | def test_pytorch(self):
    method test_torchscript (line 193) | def test_torchscript(self):
    method test_tf_eager (line 199) | def test_tf_eager(self):
    method test_tf_graph (line 205) | def test_tf_graph(self):
    method tearDown (line 209) | def tearDown(self) -> None:

FILE: tests/training/backends/test_chainer.py
  class DummyNetworkChainer (line 15) | class DummyNetworkChainer(AbstractChainerNetwork):
    method __init__ (line 16) | def __init__(self):
    method forward (line 23) | def forward(self, x):
  class TestChainerBackend (line 31) | class TestChainerBackend(
    method setUp (line 34) | def setUp(self) -> None:

FILE: tests/training/backends/test_sklearn.py
  class TestSklearnBackend (line 9) | class TestSklearnBackend(
    method setUp (line 12) | def setUp(self) -> None:
    method test_experiment_test (line 67) | def test_experiment_test(self):

FILE: tests/training/backends/test_tf_eager.py
  class DummyNetworkTfEager (line 13) | class DummyNetworkTfEager(AbstractTfEagerNetwork):
    method __init__ (line 14) | def __init__(self):
    method call (line 27) | def call(self, x: tf.Tensor):
  class TestTfEagerBackend (line 31) | class TestTfEagerBackend(
    method setUp (line 34) | def setUp(self) -> None:
    method tearDown (line 78) | def tearDown(self):

FILE: tests/training/backends/test_tf_graph.py
  class DummyNetworkTfGraph (line 13) | class DummyNetworkTfGraph(AbstractTfGraphNetwork):
    method __init__ (line 14) | def __init__(self):
  class TestTfGraphBackend (line 40) | class TestTfGraphBackend(
    method setUp (line 43) | def setUp(self) -> None:
    method tearDown (line 87) | def tearDown(self):

FILE: tests/training/backends/test_torch.py
  class DummyNetworkTorch (line 12) | class DummyNetworkTorch(AbstractPyTorchNetwork):
    method __init__ (line 13) | def __init__(self):
    method forward (line 22) | def forward(self, x):
  class TestTorchBackend (line 29) | class TestTorchBackend(
    method setUp (line 32) | def setUp(self) -> None:

FILE: tests/training/backends/test_torchscript.py
  class DummyNetworkTorchScript (line 12) | class DummyNetworkTorchScript(AbstractTorchScriptNetwork):
    method __init__ (line 15) | def __init__(self):
    method forward (line 25) | def forward(self, x):
  class TestTorchScriptBackend (line 32) | class TestTorchScriptBackend(
    method setUp (line 35) | def setUp(self) -> None:

FILE: tests/training/backends/utils.py
  class DummyDataset (line 25) | class DummyDataset(AbstractDataset):
    method __init__ (line 26) | def __init__(self, length):
    method __getitem__ (line 30) | def __getitem__(self, index):
    method __len__ (line 34) | def __len__(self):
    method get_sample_from_index (line 37) | def get_sample_from_index(self, index):
  class LoggingCallback (line 41) | class LoggingCallback():
    method at_epoch_begin (line 42) | def at_epoch_begin(self, trainer, curr_epoch, **kwargs):
    method at_epoch_end (line 46) | def at_epoch_end(self, trainer, curr_epoch, **kwargs):
    method at_training_begin (line 50) | def at_training_begin(self, trainer, **kwargs):
    method at_training_end (line 54) | def at_training_end(self, trainer, **kwargs):
    method at_iter_begin (line 58) | def at_iter_begin(self, trainer, iter_num, **kwargs):
    method at_iter_end (line 62) | def at_iter_end(self, trainer, iter_num, **kwargs):
  function add_logging_callback (line 67) | def add_logging_callback(dict_like):
  function run_experiment (line 74) | def run_experiment(experiment_cls, config, network_cls, len_train, len_t...
  function test_experiment (line 88) | def test_experiment(experiment_cls, config, network_cls, len_test, **kwa...
  function kfold_experiment (line 102) | def kfold_experiment(experiment_cls, config, network_cls, len_data,
  function create_experiment_test_template_for_backend (line 120) | def create_experiment_test_template_for_backend(backend: str):

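Note: create_experiment_test_template_for_backend suggests a factory that builds one unittest.TestCase per backend, which the Test*Backend classes above then subclass. A sketch of that pattern with a skip guard; backend_available is a hypothetical stand-in for delira's real availability check.

    import unittest

    def backend_available(backend):  # hypothetical stand-in
        return backend == "SKLEARN"

    def make_backend_test_template(backend):
        # skip the whole case when the backend is not installed/enabled
        @unittest.skipUnless(backend_available(backend),
                             "backend %s not installed/enabled" % backend)
        class BackendTestTemplate(unittest.TestCase):
            def test_experiment_run(self):
                # concrete Test*Backend classes fill in real networks/configs
                self.assertTrue(backend_available(backend))

        BackendTestTemplate.__name__ = "Test%sBackend" % backend.capitalize()
        return BackendTestTemplate

    TestSklearnBackendSketch = make_backend_test_template("SKLEARN")
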
FILE: tests/training/test_losses_torch.py
  class FocalLossTestPyTorch (line 7) | class FocalLossTestPyTorch(unittest.TestCase):
    method test_focalloss (line 11) | def test_focalloss(self):

FILE: tests/training/test_metrics.py
  class TestMetrics (line 11) | class TestMetrics(unittest.TestCase):
    method test_sklearn_classification_metric (line 16) | def test_sklearn_classification_metric(self):
    method test_auroc_metric (line 38) | def test_auroc_metric(self):

FILE: tests/utils/__init__.py
  function check_for_environment_variable (line 5) | def check_for_environment_variable(variable: str, value: str):
  function check_for_backend (line 11) | def check_for_backend(backend_name, environment_variable):
  function check_for_torch_backend (line 19) | def check_for_torch_backend():
  function check_for_torchscript_backend (line 23) | def check_for_torchscript_backend():
  function check_for_tf_eager_backend (line 27) | def check_for_tf_eager_backend():
  function check_for_tf_graph_backend (line 31) | def check_for_tf_graph_backend():
  function check_for_chainer_backend (line 35) | def check_for_chainer_backend():
  function check_for_sklearn_backend (line 39) | def check_for_sklearn_backend():
  function check_for_no_backend (line 43) | def check_for_no_backend():

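Note: one plausible reading of these helpers is that a backend's tests run only when the backend is importable and the CI job opted in via an environment variable. The function names mirror the listing; the bodies and the expected value "1" are assumptions.

    import os

    def check_for_environment_variable(variable, value):
        # true only when the variable is set to exactly the expected value
        return os.environ.get(variable, None) == value

    def check_for_backend(backend_name, environment_variable):
        try:
            installed = backend_name in __import__("delira").get_backends()
        except ImportError:
            installed = False
        # assumed convention: CI sets the variable to "1" to enable the job
        return installed and check_for_environment_variable(
            environment_variable, "1")
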
FILE: tests/utils/dict_reductions.py
  class TestDictReductions (line 8) | class TestDictReductions(unittest.TestCase):
    method setUp (line 9) | def setUp(self) -> None:
    method test_dict_flatten (line 54) | def test_dict_flatten(self):
    method test_dict_unflatten (line 58) | def test_dict_unflatten(self):
    method test_dict_flatten_unflatten (line 62) | def test_dict_flatten_unflatten(self):
    method test_reduction_fuctions (line 69) | def test_reduction_fuctions(self):
    method test_reduce_dict (line 80) | def test_reduce_dict(self):

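Note: a sketch of the flatten/unflatten round trip these tests cover; the tuple-key layout is an assumption (delira's implementation may use separator strings instead).

    def dict_flatten(d, prefix=()):
        # collapse a nested dict into {key-path-tuple: leaf-value}
        flat = {}
        for key, value in d.items():
            path = prefix + (key,)
            if isinstance(value, dict):
                flat.update(dict_flatten(value, path))
            else:
                flat[path] = value
        return flat

    def dict_unflatten(flat):
        # rebuild the nesting from the key paths
        nested = {}
        for path, value in flat.items():
            node = nested
            for key in path[:-1]:
                node = node.setdefault(key, {})
            node[path[-1]] = value
        return nested

    nested = {"a": {"b": 1, "c": {"d": 2}}, "e": 3}
    assert dict_unflatten(dict_flatten(nested)) == nested  # lossless round trip
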
FILE: tests/utils/test_codecs.py
  class CodecsTest (line 10) | class CodecsTest(unittest.TestCase):
    method test_encoder (line 14) | def test_encoder(self):
    method test_decoder (line 49) | def test_decoder(self):

FILE: tests/utils/test_config.py
  class ConfigTest (line 17) | class ConfigTest(unittest.TestCase):
    method setUp (line 18) | def setUp(self):
    method _setup_logger (line 37) | def _setup_logger(self):
    method test_config_access (line 45) | def test_config_access(self):
    method test_config_access_with_non_existing_keys (line 103) | def test_config_access_with_non_existing_keys(self):
    method test_update (line 115) | def test_update(self):
    method test_dump_and_load (line 154) | def test_dump_and_load(self):
    method test_copy (line 182) | def test_copy(self):
    method test_create_from_argparse (line 200) | def test_create_from_argparse(self):
    method test_internal_type (line 217) | def test_internal_type(self):
    method test_create_argparser (line 224) | def test_create_argparser(self):
    method test_update_from_argparse (line 244) | def test_update_from_argparse(self):
  class LookupConfigTest (line 264) | class LookupConfigTest(ConfigTest):
    method setUp (line 265) | def setUp(self):
    method test_nested_lookpup (line 272) | def test_nested_lookpup(self):
  class DeliraConfigTest (line 304) | class DeliraConfigTest(LookupConfigTest):
    method setUp (line 305) | def setUp(self):
    method test_property_params (line 312) | def test_property_params(self):
    method test_logging_as_string (line 344) | def test_logging_as_string(self):
    method test_internal_type (line 387) | def test_internal_type(self):

FILE: tests/utils/test_messenger.py
  class DummyNetwork (line 23) | class DummyNetwork(AbstractNetwork):
    method __init__ (line 28) | def __init__(self, **kwargs):
    method __call__ (line 31) | def __call__(self, *args, **kwargs):
    method closure (line 35) | def closure(model, data_dict: dict, optimizers: dict, losses=None,
    method prepare_batch (line 40) | def prepare_batch(batch: dict, input_device, output_device):
  class DummyTrainer (line 44) | class DummyTrainer(BaseNetworkTrainer):
    method __init__ (line 49) | def __init__(self, *args, **kwargs):
    method train (line 59) | def train(self, *args, num_epochs=2, **kwargs):
    method test (line 68) | def test(self, *args, **kwargs):
    method save_state (line 71) | def save_state(self, file_name, *args, **kwargs):
  class DummyPredictor (line 75) | class DummyPredictor(Predictor):
    method predict (line 80) | def predict(self, *args, **kwargs):
    method predict_data_mgr (line 83) | def predict_data_mgr(self, *args, **kwargs):
  class DummyExperiment (line 88) | class DummyExperiment(BaseExperiment):
    method __init__ (line 89) | def __init__(self):
    method run (line 108) | def run(self, *args, raise_error=False, **kwargs):
    method resume (line 114) | def resume(self, *args, raise_error=False, **kwargs):
    method test (line 120) | def test(self, *args, raise_error=False, **kwargs):
    method kfold (line 126) | def kfold(self, *args, raise_error=False, **kwargs):
  class LoggingBaseMessenger (line 133) | class LoggingBaseMessenger(BaseMessenger):
    method __init__ (line 134) | def __init__(
    method emit_message (line 145) | def emit_message(self, msg):
  class TestBaseMessenger (line 149) | class TestBaseMessenger(unittest.TestCase):
    method setUp (line 150) | def setUp(self) -> None:
    method create_experiment (line 195) | def create_experiment(self, expected_msg=None):
    method run_experiment (line 209) | def run_experiment(self, raise_error=False, expected_msg=None):
    method t_experiment (line 237) | def t_experiment(self, raise_error=False, expected_msg=None):
    method kfold_experiment (line 263) | def kfold_experiment(self, raise_error=False, expected_msg=None):
    method test_create_experiment (line 295) | def test_create_experiment(self):
    method test_run_successful (line 302) | def test_run_successful(self):
    method test_run_failed (line 310) | def test_run_failed(self):
    method test_test_successful (line 318) | def test_test_successful(self):
    method test_test_failed (line 326) | def test_test_failed(self):
    method test_kfold_successful (line 334) | def test_kfold_successful(self):
    method test_kfold_failed (line 342) | def test_kfold_failed(self):
  class LoggingSlackMessenger (line 347) | class LoggingSlackMessenger(SlackMessenger):
    method emit_message (line 348) | def emit_message(self, msg):
  class TestSlackMessenger (line 353) | class TestSlackMessenger(TestBaseMessenger):
    method setUp (line 354) | def setUp(self) -> None:

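Note: the messenger tests wrap a dummy experiment and assert that a message is emitted on success and on failure. A sketch of that wrapper pattern with an overridable emit_message hook (this mirrors the structure of the listing, not delira's actual code).

    class MessengerSketch:
        def __init__(self, experiment):
            self._experiment = experiment

        def emit_message(self, msg):  # overridden per backend, e.g. Slack
            print(msg)

        def run(self, *args, **kwargs):
            # forward the call, report the outcome either way
            try:
                result = self._experiment.run(*args, **kwargs)
            except Exception as e:
                self.emit_message("run failed: %r" % e)
                raise
            self.emit_message("run finished successfully")
            return result

    class _Exp:  # stand-in for a delira experiment
        def run(self):
            return "ok"

    assert MessengerSketch(_Exp()).run() == "ok"  # prints the success message
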
FILE: versioneer.py
  class VersioneerConfig (line 292) | class VersioneerConfig:
  function get_root (line 296) | def get_root():
  function get_config_from_root (line 335) | def get_config_from_root(root):
  class NotThisMethod (line 364) | class NotThisMethod(Exception):
  function register_vcs_handler (line 373) | def register_vcs_handler(vcs, method):  # decorator
  function run_command (line 384) | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
  function git_get_keywords (line 945) | def git_get_keywords(versionfile_abs):
  function git_versions_from_keywords (line 974) | def git_versions_from_keywords(keywords, tag_prefix, verbose):
  function git_pieces_from_vcs (line 1029) | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_comma...
  function do_vcs_install (line 1120) | def do_vcs_install(manifest_in, versionfile_source, ipy):
  function versions_from_parentdir (line 1158) | def versions_from_parentdir(parentdir_prefix, root, verbose):
  function versions_from_file (line 1201) | def versions_from_file(filename):
  function write_to_version_file (line 1218) | def write_to_version_file(filename, versions):
  function plus_or_dot (line 1229) | def plus_or_dot(pieces):
  function render_pep440 (line 1236) | def render_pep440(pieces):
  function render_pep440_pre (line 1261) | def render_pep440_pre(pieces):
  function render_pep440_post (line 1277) | def render_pep440_post(pieces):
  function render_pep440_old (line 1304) | def render_pep440_old(pieces):
  function render_git_describe (line 1326) | def render_git_describe(pieces):
  function render_git_describe_long (line 1346) | def render_git_describe_long(pieces):
  function render (line 1366) | def render(pieces, style):
  class VersioneerBadRootError (line 1398) | class VersioneerBadRootError(Exception):
  function get_versions (line 1402) | def get_versions(verbose=False):
  function get_version (line 1478) | def get_version():
  function get_cmdclass (line 1483) | def get_cmdclass():
  function do_setup (line 1697) | def do_setup():
  function scan_setup_py (line 1779) | def scan_setup_py():
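
Note: versioneer renders version "pieces" (closest tag, distance in commits, short hash, dirty flag) into several styles. A simplified sketch of the pep440 style, where commits past a tag and/or a dirty tree become a local version segment; the tag 0.5.0 below is illustrative, while cd3ad27 is this checkout's short commit hash.

    def render_pep440_sketch(pieces):
        # tagged, clean tree -> the tag itself; otherwise append
        # "+<distance>.g<short>[.dirty]" as a local version segment
        tag = pieces["closest-tag"]
        if tag:
            rendered = tag
            if pieces["distance"] or pieces["dirty"]:
                rendered += "+" if "+" not in rendered else "."
                rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
                if pieces["dirty"]:
                    rendered += ".dirty"
        else:
            rendered = "0+untagged.%d.g%s" % (pieces["distance"],
                                              pieces["short"])
            if pieces["dirty"]:
                rendered += ".dirty"
        return rendered

    assert render_pep440_sketch({"closest-tag": "0.5.0", "distance": 0,
                                 "short": "cd3ad27", "dirty": False}) == "0.5.0"
    assert render_pep440_sketch({"closest-tag": "0.5.0", "distance": 3,
                                 "short": "cd3ad27", "dirty": True}) \
        == "0.5.0+3.gcd3ad27.dirty"
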
Condensed preview — 236 files, each showing path, character count, and a content snippet (1,344K chars of structured content in total).
[
  {
    "path": ".codecov.yml",
    "chars": 200,
    "preview": "comment: off\n\ncoverage:\n  status:\n    project:\n      default:\n        target: auto\n        threshold: 0.50\n        base:"
  },
  {
    "path": ".gitattributes",
    "chars": 32,
    "preview": "delira/_version.py export-subst\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/bug_report.md",
    "chars": 447,
    "preview": "---\nname: Bug report\nabout: Report a bug and give us a minimal example to reproduce it\ntitle: \"[Bug]\"\nlabels: bug\nassign"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/feature_request.md",
    "chars": 429,
    "preview": "---\nname: Feature request\nabout: Request a feature\ntitle: \"[FeatureRequest]\"\nlabels: new feature\nassignees: ''\n\n---\n\n**D"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/question.md",
    "chars": 330,
    "preview": "---\nname: Question\nabout: Ask a question/for support\ntitle: \"[Question]\"\nlabels: question\nassignees: ''\n\n---\n\n**Descript"
  },
  {
    "path": ".gitignore",
    "chars": 1394,
    "preview": "# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[cod]\n*$py.class\n\n# C extensions\n*.so\n\n# Distribution / packagi"
  },
  {
    "path": ".readthedocs.yml",
    "chars": 294,
    "preview": "# .readthedocs.yml\nversion: 2\n\nformats: \n    - epub\n    - pdf\n    - htmlzip\n\n# python:\n#     version: 3.7\n#     install:"
  },
  {
    "path": ".travis.yml",
    "chars": 6008,
    "preview": "language: python\n\nmatrix:\n    include:\n        # basic tests withut a backend\n        - name: \"Unittests Python 3.5 No B"
  },
  {
    "path": "AUTHORS.rst",
    "chars": 905,
    "preview": "Authors\n==========\n\n\n**Core Development Team:**\n\n- Justus Schock: `GitHub <https://github.com/justusschock>`_ | `LinkedI"
  },
  {
    "path": "CODEOWNERS",
    "chars": 2257,
    "preview": "# Use this CODEOWNERS file for automatically request reviews from owners at PRs. \n# For Details see https://help.github."
  },
  {
    "path": "CONTRIBUTING.md",
    "chars": 2798,
    "preview": "# Contributing to `delira`\n\nIf you are interested in contributing to `delira`, you will either\n\n* implement a new featur"
  },
  {
    "path": "LICENSE",
    "chars": 34523,
    "preview": "                    GNU AFFERO GENERAL PUBLIC LICENSE\n                       Version 3, 19 November 2007\n\n Copyright (C)"
  },
  {
    "path": "MANIFEST.in",
    "chars": 149,
    "preview": "include requirements/*.txt\ninclude *.md\ninclude LICENSE\ninclude notebooks/*.ipynb\ninclude setup.cfg\ninclude versioneer.p"
  },
  {
    "path": "README.md",
    "chars": 8080,
    "preview": "[<img src=\"https://img.shields.io/badge/chat-slack%20channel-75BBC4.svg\">](https://join.slack.com/t/deliradev/shared_inv"
  },
  {
    "path": "delira/__init__.py",
    "chars": 378,
    "preview": "from delira._debug_mode import get_current_debug_mode, switch_debug_mode, \\\n    set_debug_mode\nfrom delira._backends imp"
  },
  {
    "path": "delira/_backends.py",
    "chars": 3505,
    "preview": "import os\nimport json\nfrom delira._version import get_versions as _get_versions\n\n# to register new possible backends, th"
  },
  {
    "path": "delira/_debug_mode.py",
    "chars": 1010,
    "preview": "__DEBUG_MODE = False\n\n# Functions to get and set the internal __DEBUG_MODE variable. This variable\n# currently only defi"
  },
  {
    "path": "delira/_version.py",
    "chars": 18442,
    "preview": "# This file helps to compute a version number in source trees obtained from\n# git-archive tarball (such as those provide"
  },
  {
    "path": "delira/data_loading/__init__.py",
    "chars": 713,
    "preview": "# basic imports\nfrom delira.data_loading.data_loader import DataLoader\nfrom delira.data_loading.dataset import AbstractD"
  },
  {
    "path": "delira/data_loading/augmenter.py",
    "chars": 16782,
    "preview": "import multiprocessing\nfrom multiprocessing import connection as mpconnection\nfrom collections import Callable\nimport ab"
  },
  {
    "path": "delira/data_loading/data_loader.py",
    "chars": 2724,
    "preview": "import numpy as np\nfrom delira.data_loading.dataset import AbstractDataset, DictDataset, \\\n    IterableDataset\nfrom coll"
  },
  {
    "path": "delira/data_loading/data_manager.py",
    "chars": 11323,
    "preview": "import logging\n\nfrom batchgenerators.transforms import AbstractTransform\n\nfrom delira import get_current_debug_mode\nfrom"
  },
  {
    "path": "delira/data_loading/dataset.py",
    "chars": 17622,
    "preview": "import abc\nimport os\nimport typing\n\nimport numpy as np\nfrom skimage.transform import resize\nfrom sklearn.model_selection"
  },
  {
    "path": "delira/data_loading/load_utils.py",
    "chars": 7160,
    "preview": "import collections\nimport os\n\nimport numpy as np\nfrom skimage.io import imread\nfrom skimage.transform import resize\n\n\nde"
  },
  {
    "path": "delira/data_loading/numba_transform.py",
    "chars": 1447,
    "preview": "from batchgenerators.transforms import AbstractTransform, Compose\n\nimport logging\nfrom delira import get_current_debug_m"
  },
  {
    "path": "delira/data_loading/sampler/__init__.py",
    "chars": 420,
    "preview": "from delira.data_loading.sampler.abstract import AbstractSampler\nfrom delira.data_loading.sampler.batch import BatchSamp"
  },
  {
    "path": "delira/data_loading/sampler/abstract.py",
    "chars": 1346,
    "preview": "from delira.data_loading.dataset import AbstractDataset\n\n\nclass AbstractSampler(object):\n    \"\"\"\n    Abstract Class defi"
  },
  {
    "path": "delira/data_loading/sampler/batch.py",
    "chars": 1634,
    "preview": "from delira.data_loading.sampler.abstract import AbstractSampler\n\n\nclass BatchSampler(object):\n    \"\"\"\n    A Sampler-Wra"
  },
  {
    "path": "delira/data_loading/sampler/random.py",
    "chars": 2524,
    "preview": "from delira.data_loading.sampler.abstract import AbstractSampler\nimport numpy as np\n\n\nclass RandomSampler(AbstractSample"
  },
  {
    "path": "delira/data_loading/sampler/sequential.py",
    "chars": 433,
    "preview": "from delira.data_loading.sampler.abstract import AbstractSampler\n\n\nclass SequentialSampler(AbstractSampler):\n    \"\"\"\n   "
  },
  {
    "path": "delira/data_loading/sampler/weighted.py",
    "chars": 2563,
    "preview": "from delira.data_loading.sampler.abstract import AbstractSampler\nfrom delira.data_loading.dataset import AbstractDataset"
  },
  {
    "path": "delira/io/__init__.py",
    "chars": 1119,
    "preview": "from delira import get_backends\n\nif \"TORCH\" in get_backends():\n    from delira.io.torch import save_checkpoint_torch as "
  },
  {
    "path": "delira/io/chainer.py",
    "chars": 5651,
    "preview": "import chainer\nimport zipfile\nimport os\nimport json\n\n\ndef save_checkpoint(file, model=None, optimizers=None, epoch=None)"
  },
  {
    "path": "delira/io/sklearn.py",
    "chars": 1039,
    "preview": "import logging\nimport joblib\nlogger = logging.getLogger(__name__)\n\n\ndef save_checkpoint(file: str, model=None, epoch=Non"
  },
  {
    "path": "delira/io/tf.py",
    "chars": 2754,
    "preview": "from delira.models.backends.tf_eager import AbstractTfEagerNetwork\nimport typing\nimport logging\n\nimport tensorflow as tf"
  },
  {
    "path": "delira/io/torch.py",
    "chars": 5025,
    "preview": "from delira.models.backends.torchscript import AbstractTorchScriptNetwork\nfrom delira.models.backends.torch import Abstr"
  },
  {
    "path": "delira/logging/__init__.py",
    "chars": 534,
    "preview": "from delira.logging.tensorboard_backend import TensorboardBackend\nfrom delira.logging.visdom_backend import VisdomBacken"
  },
  {
    "path": "delira/logging/base_backend.py",
    "chars": 23039,
    "preview": "\nfrom queue import Empty\nfrom abc import abstractmethod, ABCMeta\nfrom threading import Event\nfrom queue import Queue\nimp"
  },
  {
    "path": "delira/logging/base_logger.py",
    "chars": 12466,
    "preview": "from multiprocessing.queues import Queue as MpQueue\nfrom threading import Event\nfrom queue import Queue, Full\nfrom delir"
  },
  {
    "path": "delira/logging/logging_context.py",
    "chars": 3401,
    "preview": "from delira.logging.registry import logger_exists, register_logger, \\\n    unregister_logger, log as _log\nfrom delira.log"
  },
  {
    "path": "delira/logging/registry.py",
    "chars": 2998,
    "preview": "from delira.logging.base_logger import Logger\nfrom collections import OrderedDict\n\n# Registry dict containing all regist"
  },
  {
    "path": "delira/logging/tensorboard_backend.py",
    "chars": 6601,
    "preview": "from threading import Event\nfrom queue import Queue\n\nfrom delira.logging.writer_backend import WriterLoggingBackend\n\n# u"
  },
  {
    "path": "delira/logging/visdom_backend.py",
    "chars": 927,
    "preview": "import tensorboardX\nfrom threading import Event\nfrom queue import Queue\n\nfrom delira.logging.writer_backend import Write"
  },
  {
    "path": "delira/logging/writer_backend.py",
    "chars": 10542,
    "preview": "\nfrom delira.logging.base_backend import BaseBackend\nfrom queue import Queue\nfrom threading import Event\n\n\nclass WriterL"
  },
  {
    "path": "delira/models/__init__.py",
    "chars": 96,
    "preview": "from delira.models.abstract_network import AbstractNetwork\nfrom delira.models.backends import *\n"
  },
  {
    "path": "delira/models/abstract_network.py",
    "chars": 3364,
    "preview": "import abc\nimport logging\n\nfile_logger = logging.getLogger(__name__)\n\n\nclass AbstractNetwork(object):\n    \"\"\"\n    Abstra"
  },
  {
    "path": "delira/models/backends/__init__.py",
    "chars": 476,
    "preview": "from delira import get_backends as _get_backends\n\nif \"CHAINER\" in _get_backends():\n    from delira.models.backends.chain"
  },
  {
    "path": "delira/models/backends/chainer/__init__.py",
    "chars": 598,
    "preview": "from delira import get_backends as _get_backends\n\nif \"CHAINER\" in _get_backends():\n    from delira.models.backends.chain"
  },
  {
    "path": "delira/models/backends/chainer/abstract_network.py",
    "chars": 4803,
    "preview": "import abc\nimport chainer\nimport numpy as np\n\nfrom delira.models.abstract_network import AbstractNetwork\n\n\n# Use this Mi"
  },
  {
    "path": "delira/models/backends/chainer/data_parallel.py",
    "chars": 16942,
    "preview": "from delira.models.backends.chainer.abstract_network import \\\n    AbstractChainerNetwork\nimport chainer\n\n\ndef _apply_sca"
  },
  {
    "path": "delira/models/backends/sklearn/__init__.py",
    "chars": 173,
    "preview": "from delira import get_backends as _get_backends\nif \"SKLEARN\" in _get_backends():\n    from delira.models.backends.sklear"
  },
  {
    "path": "delira/models/backends/sklearn/abstract_network.py",
    "chars": 4742,
    "preview": "from inspect import signature as get_signature\nfrom sklearn.base import BaseEstimator\n\nfrom delira.models.abstract_netwo"
  },
  {
    "path": "delira/models/backends/tf_eager/__init__.py",
    "chars": 275,
    "preview": "from delira import get_backends as _get_backends\n\nif \"TF\" in _get_backends():\n    from delira.models.backends.tf_eager.a"
  },
  {
    "path": "delira/models/backends/tf_eager/abstract_network.py",
    "chars": 5118,
    "preview": "import abc\nimport typing\nimport tensorflow as tf\nimport numpy as np\nfrom delira.models.abstract_network import AbstractN"
  },
  {
    "path": "delira/models/backends/tf_eager/data_parallel.py",
    "chars": 1296,
    "preview": "import tensorflow as tf\nfrom delira.models.backends.tf_eager.abstract_network import \\\n    AbstractTfEagerNetwork\n\n\nclas"
  },
  {
    "path": "delira/models/backends/tf_graph/__init__.py",
    "chars": 176,
    "preview": "from delira import get_backends as _get_backends\n\nif \"TF\" in _get_backends():\n    from delira.models.backends.tf_graph.a"
  },
  {
    "path": "delira/models/backends/tf_graph/abstract_network.py",
    "chars": 6442,
    "preview": "import abc\nimport logging\nimport tensorflow as tf\nimport numpy as np\n\nfrom delira.models.abstract_network import Abstrac"
  },
  {
    "path": "delira/models/backends/torch/__init__.py",
    "chars": 334,
    "preview": "from delira import get_backends as _get_backends\n\nif \"TORCH\" in _get_backends():\n    from delira.models.backends.torch.a"
  },
  {
    "path": "delira/models/backends/torch/abstract_network.py",
    "chars": 4467,
    "preview": "import abc\nimport torch\nfrom delira.models.abstract_network import AbstractNetwork\n\nfrom delira.models.backends.torch.ut"
  },
  {
    "path": "delira/models/backends/torch/data_parallel.py",
    "chars": 2147,
    "preview": "import torch\n\nfrom delira.models.backends.torch.abstract_network import \\\n    AbstractPyTorchNetwork\n\n\nclass DataParalle"
  },
  {
    "path": "delira/models/backends/torch/utils.py",
    "chars": 2564,
    "preview": "import contextlib\n\ntry:\n    # use apex loss scaling if possible\n    # (and enabled, this is done internally by apex)\n   "
  },
  {
    "path": "delira/models/backends/torchscript/__init__.py",
    "chars": 142,
    "preview": "from delira import get_backends as _get_backends\n\nif \"TORCH\" in _get_backends():\n    from .abstract_network import Abstr"
  },
  {
    "path": "delira/models/backends/torchscript/abstract_network.py",
    "chars": 4208,
    "preview": "import abc\nimport torch\nfrom delira.models.abstract_network import AbstractNetwork\n\n\nclass AbstractTorchScriptNetwork(Ab"
  },
  {
    "path": "delira/training/__init__.py",
    "chars": 208,
    "preview": "\nfrom delira.training.base_experiment import BaseExperiment\nfrom delira.training.base_trainer import BaseNetworkTrainer\n"
  },
  {
    "path": "delira/training/backends/__init__.py",
    "chars": 489,
    "preview": "from delira import get_backends as _get_backends\n\n\nif \"CHAINER\" in _get_backends():\n    from delira.training.backends.ch"
  },
  {
    "path": "delira/training/backends/chainer/__init__.py",
    "chars": 470,
    "preview": "from delira import get_backends as _get_backends\n\nif \"CHAINER\" in _get_backends():\n    from delira.training.backends.cha"
  },
  {
    "path": "delira/training/backends/chainer/experiment.py",
    "chars": 5844,
    "preview": "import typing\nfrom functools import partial\n\nfrom delira.models.backends.chainer import AbstractChainerNetwork\nfrom deli"
  },
  {
    "path": "delira/training/backends/chainer/trainer.py",
    "chars": 21449,
    "preview": "from delira.training.backends.chainer.utils import convert_to_numpy\nfrom delira.training.backends.chainer.utils import c"
  },
  {
    "path": "delira/training/backends/chainer/utils.py",
    "chars": 1878,
    "preview": "import chainer\nfrom delira.models.backends.chainer import DataParallelChainerOptimizer\nfrom delira.training.utils import"
  },
  {
    "path": "delira/training/backends/sklearn/__init__.py",
    "chars": 372,
    "preview": "from delira import get_backends as _get_backends\n\nif \"SKLEARN\" in _get_backends():\n    from delira.training.backends.skl"
  },
  {
    "path": "delira/training/backends/sklearn/experiment.py",
    "chars": 5721,
    "preview": "from functools import partial\nimport typing\nimport os\n\nfrom sklearn.base import BaseEstimator\n\nfrom delira.models.backen"
  },
  {
    "path": "delira/training/backends/sklearn/trainer.py",
    "chars": 17961,
    "preview": "from delira.training.backends.sklearn.utils import create_optims_default\nfrom delira.training.utils import convert_to_nu"
  },
  {
    "path": "delira/training/backends/sklearn/utils.py",
    "chars": 410,
    "preview": "def create_optims_default(*args, **kwargs):\n    \"\"\"\n    Function returning an empty optimizer dict\n\n    Parameters\n    -"
  },
  {
    "path": "delira/training/backends/tf_eager/__init__.py",
    "chars": 469,
    "preview": "from delira import get_backends as _get_backends\n\nif \"TF\" in _get_backends():\n    from delira.training.backends.tf_eager"
  },
  {
    "path": "delira/training/backends/tf_eager/experiment.py",
    "chars": 11353,
    "preview": "import typing\nfrom functools import partial\n\nimport tensorflow as tf\n\nfrom delira.data_loading import DataManager\nfrom d"
  },
  {
    "path": "delira/training/backends/tf_eager/trainer.py",
    "chars": 14428,
    "preview": "from delira.training.backends.tf_eager.utils import create_optims_default\nfrom delira.training.backends.tf_eager.utils i"
  },
  {
    "path": "delira/training/backends/tf_eager/utils.py",
    "chars": 1416,
    "preview": "import tensorflow as tf\n\nfrom delira.training.utils import convert_to_numpy_identity, \\\n    recursively_convert_elements"
  },
  {
    "path": "delira/training/backends/tf_graph/__init__.py",
    "chars": 328,
    "preview": "from delira import get_backends as _get_backends\n\nif \"TF\" in _get_backends():\n    from delira.training.backends.tf_graph"
  },
  {
    "path": "delira/training/backends/tf_graph/experiment.py",
    "chars": 5711,
    "preview": "import typing\nfrom functools import partial\n\nimport tensorflow as tf\n\nfrom delira.models.backends.tf_graph import Abstra"
  },
  {
    "path": "delira/training/backends/tf_graph/trainer.py",
    "chars": 14977,
    "preview": "from delira.training.backends.tf_graph.utils import initialize_uninitialized\nfrom delira.training.backends.tf_eager.util"
  },
  {
    "path": "delira/training/backends/tf_graph/utils.py",
    "chars": 547,
    "preview": "import tensorflow as tf\n\n\ndef initialize_uninitialized(sess):\n    \"\"\"\n    Function to initialize only uninitialized vari"
  },
  {
    "path": "delira/training/backends/torch/__init__.py",
    "chars": 458,
    "preview": "from delira import get_backends as _get_backends\n\nif \"TORCH\" in _get_backends():\n    from delira.training.backends.torch"
  },
  {
    "path": "delira/training/backends/torch/experiment.py",
    "chars": 10564,
    "preview": "from functools import partial\nimport typing\n\nimport torch\n\nfrom delira.models.backends.torch import AbstractPyTorchNetwo"
  },
  {
    "path": "delira/training/backends/torch/trainer.py",
    "chars": 23005,
    "preview": "import logging\nimport os\nfrom functools import partial\nimport warnings\n\nimport torch\nfrom batchgenerators.dataloading im"
  },
  {
    "path": "delira/training/backends/torch/utils.py",
    "chars": 1699,
    "preview": "import torch\n\nfrom delira.utils.decorators import dtype_func\nfrom delira.training.utils import convert_to_numpy_identity"
  },
  {
    "path": "delira/training/backends/torchscript/__init__.py",
    "chars": 274,
    "preview": "from delira import get_backends as _get_backends\n\nif \"TORCH\" in _get_backends():\n    from delira.training.backends.torch"
  },
  {
    "path": "delira/training/backends/torchscript/experiment.py",
    "chars": 3215,
    "preview": "import typing\n\nfrom delira.models.backends.torchscript import AbstractTorchScriptNetwork\n\nfrom delira.utils import Delir"
  },
  {
    "path": "delira/training/backends/torchscript/trainer.py",
    "chars": 10560,
    "preview": "import logging\n\nfrom delira.io.torch import load_checkpoint_torchscript, \\\n    save_checkpoint_torchscript\nfrom delira.m"
  },
  {
    "path": "delira/training/base_experiment.py",
    "chars": 24744,
    "preview": "import typing\nimport logging\nimport pickle\nimport os\nfrom datetime import datetime\nimport warnings\n\nimport copy\n\nimport "
  },
  {
    "path": "delira/training/base_trainer.py",
    "chars": 31578,
    "preview": "import logging\nimport os\nimport pickle\nimport typing\nimport warnings\n\nfrom delira.utils.config import LookupConfig\n\nimpo"
  },
  {
    "path": "delira/training/callbacks/__init__.py",
    "chars": 1243,
    "preview": "from delira import get_backends\n\nfrom delira.training.callbacks.logging_callback import DefaultLoggingCallback\nfrom deli"
  },
  {
    "path": "delira/training/callbacks/abstract_callback.py",
    "chars": 4289,
    "preview": "class AbstractCallback(object):\n    \"\"\"\n    Implements abstract callback interface.\n    All callbacks should be derived "
  },
  {
    "path": "delira/training/callbacks/early_stopping.py",
    "chars": 2723,
    "preview": "from delira.training.callbacks.abstract_callback import AbstractCallback\n\n\nclass EarlyStopping(AbstractCallback):\n    \"\""
  },
  {
    "path": "delira/training/callbacks/logging_callback.py",
    "chars": 3373,
    "preview": "from delira.training.callbacks.abstract_callback import AbstractCallback\nfrom delira.logging import make_logger, BaseBac"
  },
  {
    "path": "delira/training/callbacks/pytorch_schedulers.py",
    "chars": 13622,
    "preview": "from delira import get_backends\nfrom delira.training.callbacks.abstract_callback import AbstractCallback\n\nif 'TORCH' in "
  },
  {
    "path": "delira/training/losses.py",
    "chars": 4894,
    "preview": "from delira import get_backends\n\nif \"TORCH\" in get_backends():\n    import torch\n    import torch.nn.functional as F\n\n   "
  },
  {
    "path": "delira/training/metrics.py",
    "chars": 6714,
    "preview": "\nfrom sklearn.metrics import accuracy_score, balanced_accuracy_score, \\\n    f1_score, fbeta_score, hamming_loss, jaccard"
  },
  {
    "path": "delira/training/predictor.py",
    "chars": 22421,
    "preview": "import logging\nimport gc\n\nimport numpy as np\nfrom tqdm import tqdm\n\nfrom delira.data_loading import DataManager\nfrom del"
  },
  {
    "path": "delira/training/utils.py",
    "chars": 2831,
    "preview": "import collections\nimport numpy as np\n\n\ndef recursively_convert_elements(element, check_type, conversion_fn):\n    \"\"\"\n  "
  },
  {
    "path": "delira/utils/__init__.py",
    "chars": 125,
    "preview": "from delira.utils.config import DeliraConfig, Config\nfrom delira.utils.path import subdirs\nfrom delira.utils.time import"
  },
  {
    "path": "delira/utils/codecs.py",
    "chars": 14089,
    "preview": "import importlib\nimport types\nimport collections\nimport inspect\nimport numpy as np\nimport logging\nimport typing\nfrom fun"
  },
  {
    "path": "delira/utils/config.py",
    "chars": 30576,
    "preview": "import copy\nfrom delira._version import get_versions\nfrom delira.utils.time import now\nfrom nested_lookup import nested_"
  },
  {
    "path": "delira/utils/context_managers.py",
    "chars": 1748,
    "preview": "from delira import get_current_debug_mode, set_debug_mode\n\n\nclass DebugMode(object):\n    \"\"\"\n    Context Manager to set "
  },
  {
    "path": "delira/utils/decorators.py",
    "chars": 3253,
    "preview": "import warnings\nfrom functools import wraps\n\nimport numpy as np\n\nfrom delira import get_backends\n\n\ndef dtype_func(class_"
  },
  {
    "path": "delira/utils/dict_reductions.py",
    "chars": 5649,
    "preview": "from collections import MutableMapping\nfrom typing import Union, Dict, Callable\nimport numpy as np\n\n\n# Reduction Functio"
  },
  {
    "path": "delira/utils/messenger.py",
    "chars": 13923,
    "preview": "import logging\nimport warnings\nfrom abc import ABC, abstractmethod\n\nfrom delira.training import BaseExperiment\nfrom deli"
  },
  {
    "path": "delira/utils/path.py",
    "chars": 398,
    "preview": "import os\n\n\ndef subdirs(d):\n    \"\"\"For a given directory, return a list of all subdirectories (full paths)\n\n    Paramete"
  },
  {
    "path": "delira/utils/time.py",
    "chars": 210,
    "preview": "import datetime\n\n\ndef now():\n    \"\"\"Return current time as YYYY-MM-DD_HH-MM-SS\n\n    Returns\n    -------\n    string\n     "
  },
  {
    "path": "docker/Dockerfile",
    "chars": 1020,
    "preview": "FROM nvidia/cuda:9.2-base-ubuntu18.04\n\nRUN apt-get update && apt-get install -y \\\n    curl \\\n    ca-certificates \\\n    s"
  },
  {
    "path": "docs/Makefile",
    "chars": 603,
    "preview": "# Minimal makefile for Sphinx documentation\n#\n\n# You can set these variables from the command line.\nSPHINXOPTS    =\nSPHI"
  },
  {
    "path": "docs/_api/_build/delira/backend_resolution.rst",
    "chars": 469,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira._backends\n\nBackend Resolution\n==================\n"
  },
  {
    "path": "docs/_api/_build/delira/class_hierarchy.rst",
    "chars": 193,
    "preview": "Class Hierarchy Diagrams\n========================\n\n.. contents::\n\n* `Coarse <../../../_static/class_hierarchy/delira_coa"
  },
  {
    "path": "docs/_api/_build/delira/data_loading/arbitrary_data.rst",
    "chars": 358,
    "preview": "Arbitrary Data\n--------------\n\nThe following classes are implemented to work with every kind of data. You can\nuse every "
  },
  {
    "path": "docs/_api/_build/delira/data_loading/data_loading.rst",
    "chars": 164,
    "preview": "Data Loading\n============\n\nThis module provides Utilities to load the Data\n\n.. toctree::\n\n    Arbitrary Data <arbitrary_"
  },
  {
    "path": "docs/_api/_build/delira/data_loading/dataloader.rst",
    "chars": 391,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.data_loading\n\nDataloader\n**********\n\nThe Dataload"
  },
  {
    "path": "docs/_api/_build/delira/data_loading/datamanager.rst",
    "chars": 473,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.data_loading.data_manager\n\n\nDatamanager\n*********"
  },
  {
    "path": "docs/_api/_build/delira/data_loading/dataset.rst",
    "chars": 1820,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.data_loading\n\nDatasets\n********\n\nThe Dataset the "
  },
  {
    "path": "docs/_api/_build/delira/data_loading/nii.rst",
    "chars": 650,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.data_loading.nii\n\nNii-Data\n--------\n\nSince ``deli"
  },
  {
    "path": "docs/_api/_build/delira/data_loading/sampler.rst",
    "chars": 1645,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n\nSampler\n-------\n\nSampler define the way of iterating over the dataset and "
  },
  {
    "path": "docs/_api/_build/delira/data_loading/utils.rst",
    "chars": 617,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.data_loading.load_utils\n\nUtils\n*****\n\n:hidden:`no"
  },
  {
    "path": "docs/_api/_build/delira/debug_mode.rst",
    "chars": 995,
    "preview": "def get_current_debug_mode():\n    \"\"\"\n    Getter function for the current debug mode\n    Returns\n    -------\n    bool\n  "
  },
  {
    "path": "docs/_api/_build/delira/delira.io.rst",
    "chars": 1788,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\nIO\n==\n\n.. currentmodule:: delira.io\n\nif \"CHAINER\" in get_backends():\n    fr"
  },
  {
    "path": "docs/_api/_build/delira/delira.rst",
    "chars": 360,
    "preview": "Delira\n======\n\n.. toctree::\n    :maxdepth: 10\n    :glob:\n\n    Data Loading <data_loading/data_loading>\n    IO <delira.io"
  },
  {
    "path": "docs/_api/_build/delira/delira.utils.rst",
    "chars": 804,
    "preview": "Utils\n=====\n\nThis package provides utility functions as image operations, various decorators,\npath operations and time o"
  },
  {
    "path": "docs/_api/_build/delira/logging/backends.rst",
    "chars": 800,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.logging.base_backend\n\n:hidden:`BaseBackend`\n~~~~~"
  },
  {
    "path": "docs/_api/_build/delira/logging/base_logger.rst",
    "chars": 440,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.logging.base_logger\n\n:hidden:`Logger`\n~~~~~~~~~~~"
  },
  {
    "path": "docs/_api/_build/delira/logging/handlers.rst",
    "chars": 363,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.logging\n\n:hidden:`MultiStreamHandler`\n~~~~~~~~~~~"
  },
  {
    "path": "docs/_api/_build/delira/logging/logging.rst",
    "chars": 253,
    "preview": "Logging\n=======\n\nThe logging module provides the utilities for logging arbitrary values to\ndifferent backends and a logg"
  },
  {
    "path": "docs/_api/_build/delira/logging/logging_context.py",
    "chars": 3401,
    "preview": "from delira.logging.registry import logger_exists, register_logger, \\\n    unregister_logger, log as _log\nfrom delira.log"
  },
  {
    "path": "docs/_api/_build/delira/logging/logging_context.rst",
    "chars": 235,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.logging.logging_context\n\n:hidden:`LoggingContext`"
  },
  {
    "path": "docs/_api/_build/delira/logging/registry.py",
    "chars": 2998,
    "preview": "from delira.logging.base_logger import Logger\nfrom collections import OrderedDict\n\n# Registry dict containing all regist"
  },
  {
    "path": "docs/_api/_build/delira/logging/registry.rst",
    "chars": 584,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. :currentmodule:: delira.logging.registry\n\n:hidden:`log`\n~~~~~~~~~~~~~\n\n."
  },
  {
    "path": "docs/_api/_build/delira/logging/tensorboard_backend.py",
    "chars": 6145,
    "preview": "import tensorboardX\nfrom threading import Event\nfrom queue import Queue\n\nfrom delira.logging.writer_backend import Write"
  },
  {
    "path": "docs/_api/_build/delira/logging/visdom_backend.py",
    "chars": 927,
    "preview": "import tensorboardX\nfrom threading import Event\nfrom queue import Queue\n\nfrom delira.logging.writer_backend import Write"
  },
  {
    "path": "docs/_api/_build/delira/logging/writer_backend.py",
    "chars": 10542,
    "preview": "\nfrom delira.logging.base_backend import BaseBackend\nfrom queue import Queue\nfrom threading import Event\n\n\nclass WriterL"
  },
  {
    "path": "docs/_api/_build/delira/models/chainer.rst",
    "chars": 1057,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.models.backends.chainer\n\nChainer\n.......\n\n\n:hidde"
  },
  {
    "path": "docs/_api/_build/delira/models/models.rst",
    "chars": 616,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\nModels\n======\n\n``delira`` comes with it's own model-structure tree - with\n:"
  },
  {
    "path": "docs/_api/_build/delira/models/sklearn.rst",
    "chars": 267,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.models.backends.sklearn\n\nSciKit-Learn\n..........."
  },
  {
    "path": "docs/_api/_build/delira/models/tfeager.rst",
    "chars": 489,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.models.backends.tf_eager\n\nTensorFlow Eager Execut"
  },
  {
    "path": "docs/_api/_build/delira/models/tfgraph.rst",
    "chars": 314,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.models.backends.tf_graph\n\nTensorFlow Graph Execut"
  },
  {
    "path": "docs/_api/_build/delira/models/torch.rst",
    "chars": 521,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.models.backends.torch\n\nPyTorch\n.......\n\n:hidden:`"
  },
  {
    "path": "docs/_api/_build/delira/models/torchscript.rst",
    "chars": 298,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.models.backends.torchscript\n\nTorchScript\n........"
  },
  {
    "path": "docs/_api/_build/delira/training/backends/backends.rst",
    "chars": 485,
    "preview": "Backends\n========\n\nThe following section contains all backends which are implemented,\ndeveloped and maintained for usage"
  },
  {
    "path": "docs/_api/_build/delira/training/backends/chainer.rst",
    "chars": 666,
    "preview": "Chainer\n.......\n\n.. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.training.backends.chainer\n\n:hidd"
  },
  {
    "path": "docs/_api/_build/delira/training/backends/sklearn.rst",
    "chars": 568,
    "preview": "SciKit-Learn\n............\n\n.. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.training.backends.skle"
  },
  {
    "path": "docs/_api/_build/delira/training/backends/tfeager.rst",
    "chars": 705,
    "preview": "TensorFlow Eager Execution\n..........................\n\n.. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: d"
  },
  {
    "path": "docs/_api/_build/delira/training/backends/tfgraph.rst",
    "chars": 576,
    "preview": "TensorFlow Graph Execution\n..........................\n\n.. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: d"
  },
  {
    "path": "docs/_api/_build/delira/training/backends/torch.rst",
    "chars": 659,
    "preview": "PyTorch\n.......\n\n.. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.training.backends.torch\n\n:hidden"
  },
  {
    "path": "docs/_api/_build/delira/training/backends/torchscript.rst",
    "chars": 458,
    "preview": "TorchScript\n...........\n\n.. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.training.backends.torchs"
  },
  {
    "path": "docs/_api/_build/delira/training/callbacks.rst",
    "chars": 3038,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.training.callbacks\n\nCallbacks\n=========\n\nCallback"
  },
  {
    "path": "docs/_api/_build/delira/training/experiment.rst",
    "chars": 462,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.training\n\nExperiments\n===========\n\nExperiments ar"
  },
  {
    "path": "docs/_api/_build/delira/training/losses.rst",
    "chars": 455,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.training.losses\n\nCustom Loss Functions\n=========="
  },
  {
    "path": "docs/_api/_build/delira/training/metrics.rst",
    "chars": 2143,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.training.metrics\n\nMetrics\n=======\n\n:hidden:`Sklea"
  },
  {
    "path": "docs/_api/_build/delira/training/parameters.rst",
    "chars": 239,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.training\n\nParameters\n===============\n\n:hidden:`Pa"
  },
  {
    "path": "docs/_api/_build/delira/training/predictor.rst",
    "chars": 448,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.training\n\n\nPredictor\n=========\n\nThe predictor imp"
  },
  {
    "path": "docs/_api/_build/delira/training/trainer.rst",
    "chars": 436,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.training\n\n\nNetworkTrainer\n==============\nThe netw"
  },
  {
    "path": "docs/_api/_build/delira/training/training.rst",
    "chars": 387,
    "preview": "Training\n========\nThe training subpackage implements Callbacks, a class for Hyperparameters,\ntraining routines and wrapp"
  },
  {
    "path": "docs/_api/_build/delira/training/utils.rst",
    "chars": 474,
    "preview": ".. role:: hidden\n    :class: hidden-section\n\n.. currentmodule:: delira.training.utils\n\n\ndef recursively_convert_elements"
  },
  {
    "path": "docs/_api/_build/modules.rst",
    "chars": 85,
    "preview": "API Documentation\n=================\n\n.. toctree::\n   :maxdepth: 10\n\n   delira/delira\n"
  },
  {
    "path": "docs/classification_pytorch.rst",
    "chars": 8080,
    "preview": "\nClassification with Delira - A very short introduction\n======================================================\n\n*Author:"
  },
  {
    "path": "docs/conda.yml",
    "chars": 99,
    "preview": "name: delira-docs\ndependencies:\n  - python=3.7\n  - pip:\n    - sphinx==1.8.4\n    - sphinx-rtd-theme\n"
  },
  {
    "path": "docs/conf.py",
    "chars": 9084,
    "preview": "# -*- coding: utf-8 -*-\n#\n# Configuration file for the Sphinx documentation builder.\n#\n# This file does only contain a s"
  },
  {
    "path": "docs/custom_backend.rst",
    "chars": 49396,
    "preview": "\nHow To: Integrate your own Computation Backend\n==============================================\n\n*Author: Justus Schock*\n"
  },
  {
    "path": "docs/gan_pytorch.rst",
    "chars": 6940,
    "preview": "\nGenerative Adversarial Nets with Delira - A very short introduction\n==================================================="
  },
  {
    "path": "docs/getting_started.rst",
    "chars": 4950,
    "preview": "Getting started\n===============\n\nBackends\n--------\n\nBefore installing ``delira``, you have to choose a suitable backend."
  },
  {
    "path": "docs/index.rst",
    "chars": 932,
    "preview": ".. delira documentation master file, created by\n   sphinx-quickstart on Sat Dec  1 20:56:35 2018.\n   You can adapt this "
  },
  {
    "path": "docs/requirements.txt",
    "chars": 31,
    "preview": "sphinx==1.8.4\nsphinx-rtd-theme\n"
  },
  {
    "path": "docs/segmentation_2d_pytorch.rst",
    "chars": 9246,
    "preview": "\nSegmentation in 2D using U-Nets with Delira - A very short introduction\n==============================================="
  },
  {
    "path": "docs/segmentation_3d_pytorch.rst",
    "chars": 8283,
    "preview": "\nSegmentation in 3D using U-Nets with Delira - A very short introduction\n==============================================="
  },
  {
    "path": "docs/tutorial_delira.rst",
    "chars": 27862,
    "preview": "\nDelira Introduction\n===================\n\n*Last updated: 09.05.2019*\n\nAuthors: Justus Schock, Christoph Haarburger\n\nLoad"
  },
  {
    "path": "notebooks/classification_examples/chainer.ipynb",
    "chars": 23170,
    "preview": "{\n  \"cells\": [\n    {\n      \"cell_type\": \"markdown\",\n      \"metadata\": {\n        \"pycharm\": {}\n      },\n      \"source\": ["
  },
  {
    "path": "notebooks/classification_examples/pytorch.ipynb",
    "chars": 15677,
    "preview": "{\n  \"cells\": [\n    {\n      \"cell_type\": \"markdown\",\n      \"metadata\": {\n        \"pycharm\": {}\n      },\n      \"source\": ["
  },
  {
    "path": "notebooks/classification_examples/sklearn.ipynb",
    "chars": 14868,
    "preview": "{\n  \"cells\": [\n    {\n      \"cell_type\": \"markdown\",\n      \"metadata\": {\n        \"pycharm\": {}\n      },\n      \"source\": ["
  },
  {
    "path": "notebooks/classification_examples/tf_eager.ipynb",
    "chars": 23527,
    "preview": "{\n  \"cells\": [\n    {\n      \"cell_type\": \"markdown\",\n      \"metadata\": {\n        \"pycharm\": {}\n      },\n      \"source\": ["
  },
  {
    "path": "notebooks/classification_examples/tf_graph.ipynb",
    "chars": 23308,
    "preview": "{\n  \"cells\": [\n    {\n      \"cell_type\": \"markdown\",\n      \"metadata\": {\n        \"pycharm\": {}\n      },\n      \"source\": ["
  },
  {
    "path": "notebooks/classification_examples/torchscript.ipynb",
    "chars": 16071,
    "preview": "{\n  \"cells\": [\n    {\n      \"cell_type\": \"markdown\",\n      \"metadata\": {\n        \"pycharm\": {}\n      },\n      \"source\": ["
  },
  {
    "path": "notebooks/custom_backend.ipynb",
    "chars": 57166,
    "preview": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# How To: Integrate your own Comput"
  },
  {
    "path": "notebooks/gan_pytorch.ipynb",
    "chars": 9544,
    "preview": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {\n    \"pycharm\": {}\n   },\n   \"source\": [\n    \"# Generative "
  },
  {
    "path": "notebooks/segmentation_2d_pytorch.ipynb",
    "chars": 13251,
    "preview": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {\n    \"pycharm\": {}\n   },\n   \"source\": [\n    \"# Segmentatio"
  },
  {
    "path": "notebooks/segmentation_3d_pytorch.ipynb",
    "chars": 11833,
    "preview": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {\n    \"pycharm\": {}\n   },\n   \"source\": [\n    \"# Segmentatio"
  },
  {
    "path": "notebooks/tutorial_delira.ipynb",
    "chars": 34252,
    "preview": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {\n    \"pycharm\": {}\n   },\n   \"source\": [\n    \"# Delira Intr"
  },
  {
    "path": "paper/paper.bib",
    "chars": 1470,
    "preview": "@online{batchgenerators,\n  author = {MIC-DKFZ},\n  title = {batchgenerators},\n  year = 2019,\n  url = {https://github.com/"
  },
  {
    "path": "paper/paper.md",
    "chars": 2984,
    "preview": "---\ntitle: 'Delira: A High-Level Framework for Deep Learning in Medical Image Analysis'\ntags:\n  - python\n  - deep learni"
  },
  {
    "path": "pytest.ini",
    "chars": 70,
    "preview": "[pytest]\ntestpaths = tests\naddopts = --cov=delira\npython_files = *.py\n"
  },
  {
    "path": "requirements/base.txt",
    "chars": 168,
    "preview": "numpy>=1.15.0\nscikit-learn>=0.20.0\njupyter>=1.0.0\nipython\njoblib\npylint\ntqdm\nvisdom>=0.1.8.5\npyyaml\nbatchgenerators>=0.1"
  },
  {
    "path": "requirements/chainer.txt",
    "chars": 22,
    "preview": "chainer >= 6.0.0\nh5py\n"
  },
  {
    "path": "requirements/tensorflow.txt",
    "chars": 21,
    "preview": "tensorflow-gpu==1.14\n"
  },
  {
    "path": "requirements/torch.txt",
    "chars": 32,
    "preview": "torchvision>=0.2.1\ntorch>=1.0.0\n"
  },
  {
    "path": "scripts/ci/build_docs.sh",
    "chars": 94,
    "preview": "#!/usr/bin/env bash\n\ncd ./docs;\nmake html;\nmake html;\nmake html;\ntouch _build/html/.nojekyll;\n"
  },
  {
    "path": "scripts/ci/install_before_docs.sh",
    "chars": 58,
    "preview": "#!/usr/bin/env bash\npip install -r docs/requirements.txt;\n"
  },
  {
    "path": "scripts/ci/install_before_style_check.sh",
    "chars": 68,
    "preview": "#!/usr/bin/env bash\n\npip install pycodestyle;\npip install autopep8;\n"
  },
  {
    "path": "scripts/ci/install_before_tests.sh",
    "chars": 742,
    "preview": "#!/usr/bin/env bash\n\npip install -U pip wheel;\npip install -r requirements/base.txt;\n\nif [[ \"$BACKEND\" == \"TFEager\" ]]; "
  },
  {
    "path": "scripts/ci/run_style_checks.sh",
    "chars": 1233,
    "preview": "#!/usr/bin/env bash\n\n# based onhttps://gist.github.com/MichaelCurrie/802ce28c993ff2dd632c\n\n# find pep8 errors and ignore"
  },
  {
    "path": "scripts/ci/run_tests.sh",
    "chars": 46,
    "preview": "#!/usr/bin/env bash\n\ncoverage run -m unittest\n"
  },
  {
    "path": "setup.cfg",
    "chars": 245,
    "preview": "[pycodestyle]\nexclude = .eggs,*.egg,build,docs/*,.git,versioneer.py,*/conf.py\nignore = E721\n\n[versioneer]\nVCS = git\nstyl"
  },
  {
    "path": "setup.py",
    "chars": 2785,
    "preview": "import os\nfrom setuptools import find_packages, setup\nimport versioneer\n\n\ndef resolve_requirements(file):\n    if not os."
  },
  {
    "path": "tests/__init__.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "tests/data_loading/__init__.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "tests/data_loading/test_augmenters.py",
    "chars": 4194,
    "preview": "from delira.data_loading import Augmenter, DataLoader, SequentialSampler, \\\n    AbstractDataset\nimport numpy as np\nfrom "
  },
  {
    "path": "tests/data_loading/test_data_loader.py",
    "chars": 1991,
    "preview": "import unittest\nfrom delira.data_loading import DataLoader, SequentialSampler, BatchSampler\nfrom .utils import DummyData"
  }
]

// ... and 36 more files (download for full content)
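
The listing above is a JSON array of objects with three fields per file: "path", "chars", and "preview". For anyone who wants to work with the index programmatically rather than read it, here is a minimal Python sketch. It assumes the array has been copied out and saved as manifest.json (a hypothetical file name; GitExtract's own download is a single .txt file, so splitting the index into its own file is an assumption of this sketch):

    import json

    # Hypothetical input: the JSON array above, saved to its own file.
    with open("manifest.json", encoding="utf-8") as f:
        entries = json.load(f)  # list of {"path", "chars", "preview"} dicts

    # Rank files by size to see where the bulk of the 1.2 MB lives.
    for entry in sorted(entries, key=lambda e: e["chars"], reverse=True)[:5]:
        print(f'{entry["chars"]:>7}  {entry["path"]}')

    # Keep only the library code, dropping docs, notebooks, tests, and
    # CI scripts, e.g. to trim the context handed to an LLM.
    core = [e for e in entries if e["path"].startswith("delira/")]
    print(f'{len(core)} core files, {sum(e["chars"] for e in core)} chars')

Filtering on "path" prefixes like this is the cheapest way to cut token usage: judging by the character counts listed above, the docs/ and notebooks/ entries alone account for a sizeable share of the total.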

About this extraction

This page contains the full source code of the justusschock/delira GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 236 files (1.2 MB), approximately 280.3k tokens, and a symbol index with 1049 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
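
Those figures imply roughly 4.5 characters per token (1.2 MB over 280.3k tokens), in line with the common rule of thumb of about 4 characters per token for mixed English and source code. A quick back-of-the-envelope check, useful for judging whether an extraction of a given size will fit a model's context window before pasting it in (the 1.2 MB and 280.3k values are taken from the summary above; reading MB as 2**20 bytes is an assumption of this sketch):

    # Figures quoted in the extraction summary above.
    size_bytes = 1.2 * 2**20      # "1.2 MB", read as mebibytes
    tokens = 280_300              # "approximately 280.3k tokens"

    # Bytes per token (bytes ~ chars for mostly-ASCII source code).
    print(round(size_bytes / tokens, 2))  # -> 4.49

    # Invert the ratio to estimate token counts for other repos:
    # tokens_estimate = size_in_bytes / 4.5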

Extracted by GitExtract, a free GitHub-repo-to-text converter for AI. Built by Nikandr Surkov.