Repository: attack68/rateslib
Branch: main
Commit: 1c4e6f060e1b
Files: 379
Total size: 6.9 MB
Directory structure:
gitextract_nnjeuw27/
├── .ai-opt-out
├── .aiignore
├── .gitattributes
├── .github/
│ └── workflows/
│ ├── release-all.yml
│ ├── release-linux.yml
│ ├── release-macos.yml
│ ├── release-musllinux.yml
│ ├── release-sdist.yml
│ ├── release-windows.yml
│ ├── ubuntu-latest-python-specific.yml
│ ├── ubuntu-latest-rust-specific.yml
│ ├── ubuntu-latest.yml
│ ├── ubuntu-minimum.yml
│ ├── windows-latest.yml
│ └── windows-minimum.yml
├── .gitignore
├── COMMERCIAL_LICENCE
├── COMMERCIAL_LICENCE_ADDENDUM1
├── Cargo.toml
├── LICENCE
├── README.md
├── docs/
│ └── source/
│ └── z_ir_vol_time_to_expiry.rst
├── notebooks/
│ ├── coding/
│ │ ├── ch5_fx.ipynb
│ │ ├── curves.ipynb
│ │ └── scheduling.ipynb
│ └── coding_2/
│ ├── AutomaticDifferentiation.ipynb
│ ├── Calendars.ipynb
│ ├── Cookbook.ipynb
│ ├── CurveSolving.ipynb
│ ├── Curves.ipynb
│ ├── FXRates.ipynb
│ ├── FXVolatility.ipynb
│ ├── Instruments.ipynb
│ ├── InterpolationAndSplines.ipynb
│ ├── Legs.ipynb
│ ├── Periods.ipynb
│ └── Scheduling.ipynb
├── pyproject.toml
├── python/
│ ├── rateslib/
│ │ ├── __init__.py
│ │ ├── _spec_loader.py
│ │ ├── curves/
│ │ │ ├── __init__.py
│ │ │ ├── _parsers.py
│ │ │ ├── academic/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── ns.py
│ │ │ │ ├── nss.py
│ │ │ │ └── sw.py
│ │ │ ├── curves.py
│ │ │ ├── interpolation.py
│ │ │ ├── rs.py
│ │ │ └── utils.py
│ │ ├── data/
│ │ │ ├── __instrument_spec.csv
│ │ │ ├── fixings.py
│ │ │ ├── historical/
│ │ │ │ ├── aud_rfr.csv
│ │ │ │ ├── cad_rfr.csv
│ │ │ │ ├── corra.csv
│ │ │ │ ├── estr.csv
│ │ │ │ ├── eur_rfr.csv
│ │ │ │ ├── gbp_rfr.csv
│ │ │ │ ├── inr_rfr.csv
│ │ │ │ ├── jpy_rfr.csv
│ │ │ │ ├── nok_rfr.csv
│ │ │ │ ├── nowa.csv
│ │ │ │ ├── sek_rfr.csv
│ │ │ │ ├── sofr.csv
│ │ │ │ ├── sonia.csv
│ │ │ │ ├── swestr.csv
│ │ │ │ └── usd_rfr.csv
│ │ │ └── loader.py
│ │ ├── default.py
│ │ ├── dual/
│ │ │ ├── __init__.py
│ │ │ ├── ift.py
│ │ │ ├── newton.py
│ │ │ ├── quadratic.py
│ │ │ ├── utils.py
│ │ │ └── variable.py
│ │ ├── enums/
│ │ │ ├── __init__.py
│ │ │ ├── generics.py
│ │ │ └── parameters.py
│ │ ├── errors.py
│ │ ├── fx/
│ │ │ ├── __init__.py
│ │ │ ├── fx_forwards.py
│ │ │ └── fx_rates.py
│ │ ├── instruments/
│ │ │ ├── __init__.py
│ │ │ ├── bonds/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── bill.py
│ │ │ │ ├── bond_future.py
│ │ │ │ ├── conventions/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── accrued.py
│ │ │ │ │ └── discounting.py
│ │ │ │ ├── fixed_rate_bond.py
│ │ │ │ ├── float_rate_note.py
│ │ │ │ ├── index_fixed_rate_bond.py
│ │ │ │ └── protocols/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── accrued.py
│ │ │ │ ├── cashflows.py
│ │ │ │ ├── duration.py
│ │ │ │ ├── oaspread.py
│ │ │ │ ├── repo.py
│ │ │ │ └── ytm.py
│ │ │ ├── cds.py
│ │ │ ├── fee.py
│ │ │ ├── fly.py
│ │ │ ├── fra.py
│ │ │ ├── fx_forward.py
│ │ │ ├── fx_options/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── brokerfly.py
│ │ │ │ ├── call_put.py
│ │ │ │ ├── risk_reversal.py
│ │ │ │ ├── straddle.py
│ │ │ │ ├── strangle.py
│ │ │ │ └── vol_value.py
│ │ │ ├── fx_swap.py
│ │ │ ├── iirs.py
│ │ │ ├── ir_options/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── call_put.py
│ │ │ │ ├── risk_reversal.py
│ │ │ │ ├── straddle.py
│ │ │ │ ├── strangle.py
│ │ │ │ └── vol_value.py
│ │ │ ├── irs.py
│ │ │ ├── loan.py
│ │ │ ├── ndf.py
│ │ │ ├── ndxcs.py
│ │ │ ├── portfolio.py
│ │ │ ├── protocols/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── analytic_delta.py
│ │ │ │ ├── analytic_fixings.py
│ │ │ │ ├── cashflows.py
│ │ │ │ ├── fixings.py
│ │ │ │ ├── kwargs.py
│ │ │ │ ├── npv.py
│ │ │ │ ├── pricing.py
│ │ │ │ ├── rate.py
│ │ │ │ ├── sensitivities.py
│ │ │ │ └── utils.py
│ │ │ ├── sbs.py
│ │ │ ├── spread.py
│ │ │ ├── stir_future.py
│ │ │ ├── value.py
│ │ │ ├── xcs.py
│ │ │ ├── yoyis.py
│ │ │ ├── zcis.py
│ │ │ └── zcs.py
│ │ ├── legs/
│ │ │ ├── __init__.py
│ │ │ ├── amortization.py
│ │ │ ├── credit.py
│ │ │ ├── custom.py
│ │ │ ├── fixed.py
│ │ │ ├── float.py
│ │ │ └── protocols/
│ │ │ ├── __init__.py
│ │ │ ├── analytic_delta.py
│ │ │ ├── analytic_fixings.py
│ │ │ ├── cashflows.py
│ │ │ ├── fixings.py
│ │ │ └── npv.py
│ │ ├── local_types.py
│ │ ├── mutability/
│ │ │ └── __init__.py
│ │ ├── periods/
│ │ │ ├── __init__.py
│ │ │ ├── cashflow.py
│ │ │ ├── credit.py
│ │ │ ├── fixed_period.py
│ │ │ ├── float_period.py
│ │ │ ├── float_rate.py
│ │ │ ├── fx_volatility.py
│ │ │ ├── ir_volatility.py
│ │ │ ├── parameters/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── credit.py
│ │ │ │ ├── fx_volatility.py
│ │ │ │ ├── index.py
│ │ │ │ ├── ir_volatility.py
│ │ │ │ ├── mtm.py
│ │ │ │ ├── period.py
│ │ │ │ ├── rate.py
│ │ │ │ └── settlement.py
│ │ │ ├── protocols/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── analytic_delta.py
│ │ │ │ ├── analytic_fixings.py
│ │ │ │ ├── analytic_greeks.py
│ │ │ │ ├── cashflows.py
│ │ │ │ ├── fixings.py
│ │ │ │ └── npv.py
│ │ │ └── utils.py
│ │ ├── py.typed
│ │ ├── rs.pyi
│ │ ├── scheduling/
│ │ │ ├── __init__.py
│ │ │ ├── adjuster.py
│ │ │ ├── calendars.py
│ │ │ ├── convention.py
│ │ │ ├── dcfs.py
│ │ │ ├── frequency.py
│ │ │ ├── imm.py
│ │ │ ├── rollday.py
│ │ │ └── schedule.py
│ │ ├── serialization/
│ │ │ ├── __init__.py
│ │ │ ├── json.py
│ │ │ └── utils.py
│ │ ├── solver.py
│ │ ├── splines/
│ │ │ ├── __init__.py
│ │ │ └── evaluate.py
│ │ ├── utils/
│ │ │ └── calendars.py
│ │ ├── verify.py
│ │ └── volatility/
│ │ ├── __init__.py
│ │ ├── fx/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── delta_vol.py
│ │ │ ├── sabr.py
│ │ │ └── utils.py
│ │ ├── ir/
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── sabr.py
│ │ │ ├── spline.py
│ │ │ └── utils.py
│ │ └── utils.py
│ └── tests/
│ ├── curves/
│ │ ├── test_curves.py
│ │ ├── test_curvesrs.py
│ │ ├── test_ns.py
│ │ ├── test_nss.py
│ │ └── test_sw.py
│ ├── instruments/
│ │ ├── test_instruments_bonds_legacy.py
│ │ └── test_instruments_legacy.py
│ ├── legs/
│ │ ├── test_analytic_delta.py
│ │ ├── test_init.py
│ │ ├── test_leg_fixings.py
│ │ └── test_legs_legacy.py
│ ├── periods/
│ │ ├── test_fixings_exposure.py
│ │ ├── test_fixings_load.py
│ │ ├── test_float_rate.py
│ │ ├── test_periods_init.py
│ │ ├── test_periods_legacy.py
│ │ └── test_static_npv.py
│ ├── scheduling/
│ │ ├── test_calendars.py
│ │ ├── test_calendarsrs.py
│ │ ├── test_frequency.py
│ │ ├── test_imm.py
│ │ ├── test_schedule.py
│ │ └── test_schedulers.py
│ ├── serialization/
│ │ ├── test_json.py
│ │ ├── test_pickle.py
│ │ └── test_repr.py
│ ├── test_default.py
│ ├── test_dual.py
│ ├── test_dualpy.py
│ ├── test_dualrs.py
│ ├── test_enums.py
│ ├── test_fixings.py
│ ├── test_fx.py
│ ├── test_fx_volatility.py
│ ├── test_fxrs.py
│ ├── test_ir_volatility.py
│ ├── test_serialization.py
│ ├── test_solver.py
│ ├── test_splines.py
│ └── test_to_fix.py
├── robots.txt
└── rust/
├── _README.txt
├── curves/
│ ├── curve.rs
│ ├── curve_py.rs
│ ├── interpolation/
│ │ ├── interpolation_py.rs
│ │ ├── intp_flat_backward.rs
│ │ ├── intp_flat_forward.rs
│ │ ├── intp_linear.rs
│ │ ├── intp_linear_zero_rate.rs
│ │ ├── intp_log_cubic.rs
│ │ ├── intp_log_linear.rs
│ │ ├── intp_null.rs
│ │ ├── mod.rs
│ │ └── utils.rs
│ ├── mod.rs
│ ├── nodes.rs
│ └── serde.rs
├── dual/
│ ├── docs.rs
│ ├── dual.rs
│ ├── dual_ops/
│ │ ├── add.rs
│ │ ├── convert.rs
│ │ ├── div.rs
│ │ ├── eq.rs
│ │ ├── from.rs
│ │ ├── math_funcs.rs
│ │ ├── mod.rs
│ │ ├── mul.rs
│ │ ├── neg.rs
│ │ ├── num.rs
│ │ ├── numeric_ops.rs
│ │ ├── one.rs
│ │ ├── ord.rs
│ │ ├── pow.rs
│ │ ├── rem.rs
│ │ ├── signed.rs
│ │ ├── sub.rs
│ │ ├── sum.rs
│ │ └── zero.rs
│ ├── dual_py.rs
│ ├── enums.rs
│ ├── linalg/
│ │ ├── linalg_dual.rs
│ │ ├── linalg_f64.rs
│ │ └── mod.rs
│ ├── linalg_py.rs
│ └── mod.rs
├── enums/
│ ├── mod.rs
│ ├── parameters.rs
│ └── py/
│ ├── float_fixing_method.rs
│ ├── ir_option_metric.rs
│ ├── leg_index_base.rs
│ └── mod.rs
├── fx/
│ ├── mod.rs
│ ├── rates/
│ │ ├── ccy.rs
│ │ ├── fxpair.rs
│ │ ├── fxrate.rs
│ │ └── mod.rs
│ └── rates_py.rs
├── fx_volatility/
│ ├── mod.rs
│ └── sabr_funcs.rs
├── json/
│ ├── json_py.rs
│ └── mod.rs
├── lib.rs
├── main.rs
├── scheduling/
│ ├── calendars/
│ │ ├── adjuster.rs
│ │ ├── cal.rs
│ │ ├── calendar.rs
│ │ ├── dateroll.rs
│ │ ├── manager.rs
│ │ ├── mod.rs
│ │ ├── named/
│ │ │ ├── all.rs
│ │ │ ├── bjs.rs
│ │ │ ├── bjs_script.py
│ │ │ ├── bus.rs
│ │ │ ├── fed.rs
│ │ │ ├── fed_script.py
│ │ │ ├── ldn.rs
│ │ │ ├── ldn_script.py
│ │ │ ├── mex.rs
│ │ │ ├── mex_script.py
│ │ │ ├── mod.rs
│ │ │ ├── mum.rs
│ │ │ ├── mum_script.py
│ │ │ ├── nsw.rs
│ │ │ ├── nsw_script.py
│ │ │ ├── nyc.rs
│ │ │ ├── nyc_script.py
│ │ │ ├── osl.rs
│ │ │ ├── osl_script.py
│ │ │ ├── stk.rs
│ │ │ ├── stk_script.py
│ │ │ ├── syd.rs
│ │ │ ├── syd_script.py
│ │ │ ├── tgt.rs
│ │ │ ├── tgt_script.py
│ │ │ ├── tro.rs
│ │ │ ├── tro_script.py
│ │ │ ├── tyo.rs
│ │ │ ├── tyo_script.py
│ │ │ ├── wlg.rs
│ │ │ ├── wlg_script.py
│ │ │ ├── zur.rs
│ │ │ └── zur_script.py
│ │ ├── named_cal.rs
│ │ └── union_cal.rs
│ ├── convention.rs
│ ├── frequency/
│ │ ├── frequency.rs
│ │ ├── imm.rs
│ │ ├── mod.rs
│ │ └── rollday.rs
│ ├── mod.rs
│ ├── py/
│ │ ├── adjuster.rs
│ │ ├── calendar.rs
│ │ ├── convention.rs
│ │ ├── frequency.rs
│ │ ├── imm.rs
│ │ ├── mod.rs
│ │ ├── rollday.rs
│ │ └── schedule.rs
│ ├── schedule.rs
│ └── serde.rs
├── splines/
│ ├── mod.rs
│ ├── spline.rs
│ └── spline_py.rs
└── tests/
├── dual1.rs
├── mod.rs
└── splines/
└── mod.rs
================================================
FILE CONTENTS
================================================
================================================
FILE: .ai-opt-out
================================================
opt-out
================================================
FILE: .aiignore
================================================
# Block all files from AI training
*
# Specifically block metadata and documentation
**/*.md
issues/**
discussions/**
CONTRIBUTING.md
================================================
FILE: .gitattributes
================================================
* ai-training=false
* linguist-generated=true
================================================
FILE: .github/workflows/ubuntu-latest-python-specific.yml
================================================
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
name: Python linting and typing
on:
push:
branches: [ "dev" ]
pull_request:
branches: [ "dev" ]
jobs:
build:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ["3.13"]
steps:
- uses: actions/checkout@v4
- name: Set up Rust
uses: actions-rust-lang/setup-rust-toolchain@v1
- name: Set up uv
uses: astral-sh/setup-uv@v5
with:
enable-cache: true
python-version: ${{ matrix.python-version }}
- name: Python Ruff linting
run: |
uv run --group lint ruff check
- name: Python Ruff formatting
run: |
uv run --group lint ruff format --check
- name: Python static typing
run: |
uv run --group typing mypy --config-file pyproject.toml
================================================
FILE: .github/workflows/ubuntu-latest-rust-specific.yml
================================================
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
name: Ruff linting and tests
on:
push:
branches: [ "dev" ]
pull_request:
branches: [ "dev" ]
jobs:
build:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ["3.13"]
steps:
- uses: actions/checkout@v4
- name: Set up Rust
uses: actions-rust-lang/setup-rust-toolchain@v1
- name: Rust linting checks
run: |
cargo fmt --check
- name: Rust library tests
run: |
cargo test --lib
- name: Rust doc tests
run: |
cargo test --doc
================================================
FILE: .github/workflows/ubuntu-latest.yml
================================================
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
name: Ubuntu Pytest
on:
push:
branches: [ "dev" ]
pull_request:
branches: [ "dev" ]
jobs:
build:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: [ "3.10", "3.11", "3.12", "3.13", "3.14" ]
steps:
- uses: actions/checkout@v4
- name: Set up Rust
uses: actions-rust-lang/setup-rust-toolchain@v1
- name: Set up uv
uses: astral-sh/setup-uv@v5
with:
enable-cache: true
python-version: ${{ matrix.python-version }}
# - name: Test with pytest and display Coverage
# env:
# RATESLIB_LICENCE: ${{ secrets.RATESLIB_LICENCE }}
# run: |
# uv run --group test coverage run -m --source=rateslib pytest
# uv run coverage report -m
- name: Test with pytest
env:
RATESLIB_LICENCE: ${{ secrets.RATESLIB_LICENCE }}
run: |
uv run --group test pytest
================================================
FILE: .github/workflows/ubuntu-minimum.yml
================================================
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
name: Ubuntu minimum support
on:
push:
branches: [ "dev" ]
pull_request:
branches: [ "dev" ]
jobs:
build:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ["3.10"]
steps:
- uses: actions/checkout@v4
- name: Set up Rust
uses: actions-rust-lang/setup-rust-toolchain@v1
- name: Set up uv
uses: astral-sh/setup-uv@v5
with:
enable-cache: true
python-version: ${{ matrix.python-version }}
- name: Pytest minimum dependencies
# --resolution=lowest-direct picks the oldest allowed versions
# --group test ensures your test dependencies (like pytest) are included
env:
RATESLIB_LICENCE: ${{ secrets.RATESLIB_LICENCE }}
RATESLIB_DEVELOPMENT: False
run: uv run --resolution=lowest-direct --group test pytest
================================================
FILE: .github/workflows/windows-latest.yml
================================================
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
name: Windows Pytest
on:
push:
branches: [ "dev" ]
pull_request:
branches: [ "dev" ]
jobs:
build:
runs-on: windows-latest
strategy:
fail-fast: false
matrix:
python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]
env:
MPLBACKEND: Agg # https://github.com/orgs/community/discussions/26434
steps:
- uses: actions/checkout@v4
- name: Set up Rust
uses: actions-rust-lang/setup-rust-toolchain@v1
- name: Set up uv
uses: astral-sh/setup-uv@v5
with:
enable-cache: true
python-version: ${{ matrix.python-version }}
- name: Test with pytest
env:
RATESLIB_LICENCE: ${{ secrets.RATESLIB_LICENCE }}
run: |
uv run --group test pytest
================================================
FILE: .github/workflows/windows-minimum.yml
================================================
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
name: Windows minimum
on:
push:
branches: [ "dev" ]
pull_request:
branches: [ "dev" ]
jobs:
build:
runs-on: windows-latest
strategy:
fail-fast: false
matrix:
python-version: ["3.10"]
env:
MPLBACKEND: Agg # https://github.com/orgs/community/discussions/26434
steps:
- uses: actions/checkout@v4
- name: Set up Rust
uses: actions-rust-lang/setup-rust-toolchain@v1
- name: Set up uv
uses: astral-sh/setup-uv@v5
with:
enable-cache: true
python-version: ${{ matrix.python-version }}
- name: Pytest minimum dependencies
# --resolution=lowest-direct picks the oldest allowed versions
# --group test ensures your test dependencies (like pytest) are included
env:
RATESLIB_LICENCE: ${{ secrets.RATESLIB_LICENCE }}
RATESLIB_DEVELOPMENT: False
run: uv run --resolution=lowest-direct --group test pytest
================================================
FILE: .gitignore
================================================
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Rust extensions
src/bin
config.toml
# Distribution / packaging
.Python
local_resources/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
.asv/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
.pypirc
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.dual_log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv*/
venv11/
venv311/
venv312/
venv11+/
venv9/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# scractch files
scratch.py
scratch1.py
scratch2.py
scratch3.py
scratch4.py
scratch5.py
scratch6.py
.idea/
/*.ipynb
Cargo.lock
.devcontainer
.cargo/
================================================
FILE: COMMERCIAL_LICENCE
================================================
Commercial Subscription Licence Agreement
=========================================
This Commercial Subscription Licence Agreement (“Agreement”) is entered into between:
Licensor:
Siffrorna Technology Limited
42 Town Street, Sutton, Retford, DN22 8PT, UK
and
Licensee:
The individual or legal entity accepting this Agreement.
1. Grant of Licence
-------------------
Subject to payment of applicable fees and compliance with this Agreement, the Licensor grants
the Licensee a non-exclusive, non-transferable, non-sublicensable licence to use the software
identified below (the “Software”) during the Subscription Term.
The Licensee may install and use the Software only within the scope of the subscription
purchased. Unless otherwise agreed in writing as per any purchase order or licence key file,
each subscription permits use by a single internal user or a single designated system environment.
This licence permits the Licensee to:
- use the Software for internal commercial and internal professional purposes only
- deploy the Software in internal production environments
- integrate the Software with internal systems solely for the Licensee’s internal
business purposes
- modify the Software and create derivative works for internal use
- distribute the Software as part of an internal product or service
Except as expressly permitted, all rights are reserved by the Licensor.
The Licensor shall have no responsibility for any modified versions of the Software
created by the Licensee.
2. Subscription Term
--------------------
This Agreement is effective for the duration of the active subscription (“Subscription Term”).
Upon expiration or termination of the Subscription Term, all rights granted under this
Agreement automatically terminate unless renewed in writing.
3. Fees and Payment
-------------------
Use of the Software under this Agreement requires payment of the applicable subscription fees,
as agreed separately or displayed at the time of purchase.
Failure to pay fees when due constitutes a material breach of this Agreement.
4. Ownership
------------
The Software is licensed, not sold.
All right, title, and interest in and to the Software, including all intellectual property
rights, remain with the Licensor.
5. Restrictions
---------------
The Licensee may not:
- remove or obscure copyright or licence notices
- misrepresent ownership of the Software
- use the Software in violation of applicable laws or regulations
6. Termination
--------------
This Agreement may be terminated:
- automatically upon expiration of the Subscription Term
- immediately by the Licensor in the event of material breach
- by the Licensee by ceasing use and not renewing the subscription
Upon termination, the Licensee must cease use of the Software and delete all copies, except
where continued use is expressly permitted in writing.
7. Disclaimer of Warranty
-------------------------
THE SOFTWARE IS PROVIDED “AS IS” AND “AS AVAILABLE”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED.
TO THE MAXIMUM EXTENT PERMITTED BY LAW, THE LICENSOR DISCLAIMS ALL WARRANTIES, INCLUDING BUT
NOT LIMITED TO WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT,
AND ANY WARRANTIES ARISING FROM COURSE OF DEALING OR USAGE OF TRADE.
THE LICENSOR DOES NOT WARRANT THAT THE SOFTWARE WILL BE ERROR-FREE, THAT DEFECTS WILL BE
CORRECTED, OR THAT ANY RESULTS, OUTPUTS, CALCULATIONS, OR ANALYTICAL RESULTS GENERATED BY
THE SOFTWARE WILL BE ACCURATE, COMPLETE, OR SUITABLE FOR ANY PARTICULAR PURPOSE.
8. No Investment Advice and Independent Judgment
------------------------------------------------
The Software and any outputs, calculations, models, analytics or data generated by the
Software are provided solely for informational and analytical purposes. They do not constitute
investment advice, financial advice, trading advice, or a recommendation to buy, sell, or
hold any security, financial instrument, or investment.
The Licensee acknowledges that it is solely responsible for evaluating the accuracy,
completeness, and usefulness of the Software and any outputs generated by it. The Licensee
must exercise its own independent judgment when making investment, trading, or
financial decisions and must not rely solely on the Software.
The Licensee acknowledges that the Software is a tool designed to assist analysis and
that all investment, trading, and financial decisions remain the sole responsibility
of the Licensee.
9. Data and Input Responsibility
--------------------------------
The Licensee is solely responsible for all data, assumptions, parameters,
configurations, and other inputs used with the Software. The Licensor shall not be
responsible for any errors or losses arising from inaccurate or incomplete inputs
supplied by the Licensee or any third party.
10. Intended Use
----------------
The Software is provided as a general analytical tool. The Licensee is responsible for
determining whether the Software is appropriate for its intended use. The Licensor shall
not be liable for any losses arising from use of the Software in applications or
contexts for which it was not designed.
11. Limitation of Liability
--------------------------
TO THE MAXIMUM EXTENT PERMITTED BY LAW, THE LICENSOR SHALL NOT BE LIABLE FOR ANY INDIRECT,
INCIDENTAL, SPECIAL, CONSEQUENTIAL, OR PUNITIVE DAMAGES, INCLUDING BUT NOT LIMITED TO
LOSS OF PROFITS, TRADING LOSSES, LOSS OF BUSINESS OPPORTUNITY, LOSS OF DATA,
OR BUSINESS INTERRUPTION, ARISING OUT OF OR IN CONNECTION WITH THIS AGREEMENT OR THE
USE OF THE SOFTWARE.
THE TOTAL AGGREGATE LIABILITY OF THE LICENSOR ARISING OUT OF OR IN CONNECTION WITH THIS
AGREEMENT OR THE SOFTWARE, WHETHER IN CONTRACT, TORT (INCLUDING NEGLIGENCE), OR
OTHERWISE, SHALL NOT EXCEED THE TOTAL FEES PAID BY THE LICENSEE FOR THE SOFTWARE
DURING THE TWELVE (12) MONTHS PRECEDING THE EVENT GIVING RISE TO THE CLAIM.
12. Governing Law
----------------
This Agreement shall be governed by and construed in accordance with the laws of England
and Wales, excluding its conflict of law principles.
13. Severability
----------------
If any provision of this Agreement is held to be invalid or unenforceable, the remaining
provisions shall remain in full force and effect.
14. Entire Agreement
--------------------
This Agreement constitutes the entire agreement between the parties regarding the Software and
supersedes all prior or contemporaneous agreements or understandings relating to its
subject matter.
15. Language
------------
This Agreement is written in English. Any translations are provided for convenience only.
In the event of any conflict, the English version shall prevail.
END OF AGREEMENT
================================================
FILE: COMMERCIAL_LICENCE_ADDENDUM1
================================================
Continuity of Licence Addendum
==============================
(Commercial Subscription Licence)
This Continuity of Licence Addendum (“Addendum”) forms part of the Commercial Subscription
Licence Agreement (“Agreement”) between the Licensor and the Licensee.
1. Continuity Event
-------------------
A Continuity Event occurs if any of the following circumstances arise:
a) the Licensor enters liquidation, dissolution, or bankruptcy proceedings and ceases to carry
on business;
b) the Licensor permanently ceases operations and is no longer offering commercial licences for
the Software; or
c) where the Licensor is a sole proprietor or single-employee entity, the death or permanent
incapacity of that individual results in the Licensor being unable to continue licensing or
supporting the Software.
2. Effect of Continuity Event
-----------------------------
Upon the occurrence of a Continuity Event:
- any valid and paid-up Commercial Subscription Licence held by the Licensee shall
automatically convert into a perpetual, non-exclusive, royalty-free licence to use
the Software; and
- the Licensee may continue to exercise the rights granted under the Commercial Subscription
Licence as they existed immediately prior to the Continuity Event.
3. Scope of Continued Rights
----------------------------
Following a Continuity Event, the Licensee may:
- continue to use the Software for commercial and internal business purposes;
- deploy the Software in production environments;
- maintain, modify, and create derivative works of the Software for its own internal use;
- continue distributing the Software internally solely as part of its existing products
or services.
The Licensee may not:
- resell, sublicense, or otherwise make the Software available on a standalone basis; or
- represent itself as the owner of the Software or its intellectual property.
4. No Obligation to Provide Support
-----------------------------------
Nothing in this Addendum obligates the Licensor
(or any successor or estate) to provide maintenance, updates, support, or
warranties following a Continuity Event.
5. Survival
-----------
This Addendum shall survive termination or expiration of the Agreement and shall take
effect only upon the occurrence of a Continuity Event.
6. No Early Trigger
-------------------
The occurrence of a Continuity Event shall not be deemed to have occurred solely due to:
- a temporary suspension of business;
- a delay in responding to communications;
- a change in ownership or corporate structure where licensing continues; or
- the discontinuation of a particular product version while the Licensor continues to operate.
7. Governing Law
----------------
This Addendum shall be governed by and construed in accordance with the laws of
England and Wales, excluding its conflict of law principles.
END OF ADDENDUM
================================================
FILE: Cargo.toml
================================================
[package]
name = "rateslib"
version = "2.7.1"
edition = "2021"
exclude = [
".github/*",
"benches/*",
"benchmarks/*",
"notebooks/*",
"docs/*",
"robots.txt",
]
[lib]
name = "rateslib"
path = "rust/lib.rs"
crate-type = ["cdylib", "rlib"] # "lib" alone works but this is more explicit
[[bin]]
name = "main"
path = "rust/main.rs"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
#pyo3 = { version = "0.20.3", features = ["abi3-py39", "extension-module"] }
serde = { version = "1.0", features = ["derive", "rc"] }
chrono = { version = "0.4", features = ["serde"] }
indexmap = { version = "2.7", features = ["serde"] }
ndarray = { version = "0.17", features = ["serde"] }
internment = { version = "0.8", features = ["serde"] }
pyo3 = "0.28"
num-traits = "0.2"
auto_ops = "0.3"
numpy = "0.28"
itertools = "0.14"
statrs = "0.18"
bincode = { version = "2.0", features = ["serde"] }
serde_json = "1.0"
[features]
# multiple-pymethods = ["pyo3/multiple-pymethods"]
abi3-py310 = ["pyo3/abi3-py310"]
pyo3-chrono = ["pyo3/chrono"]
pyo3-indexmap = ["pyo3/indexmap"]
default = ["abi3-py310", "pyo3-chrono", "pyo3-indexmap"]
# 'extension-module' has been added to 'features' of [tool.maturin] in pyproject.toml
#extension-module = ["pyo3/extension-module"]
#default = ["extension-module", "abi3-py39", "chrono"]
# ------------- When building commment the below out.
#[dev-dependencies]
#criterion = { version = "0.4", features = ["html_reports"] }
#[[bench]]
#name = "my_benchmark"
#harness = false
================================================
FILE: LICENCE
================================================
LICENCE
=======
Dual Licensing – Source-Available Non-Commercial Licence
and Commercial Subscription Licence
Copyright © 2023 Siffrorna Technology Limited
All rights reserved.
Licence Acceptance
------------------
By downloading, installing, copying, accessing, or otherwise using this software, you
acknowledge that you have read, understood, and agree to be bound by the terms of one of the
licences below.
This software is not open source.
Dual-Licensing Overview
-----------------------
This software is offered under two alternative licences:
1. Non-Commercial Source-Available Licence (free, default)
2. Commercial Subscription Licence (paid, required for business use)
You may use this software only if you comply with the terms of one of these licences.
1. Non-Commercial Source-Available Licence
------------------------------------------
(Personal and Educational Use Only)
1.1 Grant of Rights
Subject to the restrictions below, permission is granted to view, download, and run the
software solely for non-commercial purposes, including:
- personal use
- academic or educational use
This licence does not grant any right to distribute, modify, or commercially exploit the software.
1.2 Restrictions
You may not, directly or indirectly:
- Install or use the software for any purpose in a commercial environment
- Sell, license, sublicense, rent, lease, or monetize the software
- Distribute or redistribute the software, in source or binary form
- Modify, adapt, translate, or create derivative works
- Incorporate the software into any other software, library, service, or product
- Use the software to provide services to third parties
- Circumvent, remove, or obscure copyright or licence notices
For the purposes of this licence, “commercial” means any use primarily intended for or
directed toward commercial advantage, monetary compensation, or business operations,
whether direct or indirect.
1.3 Ownership
The software is licensed, not sold.
All right, title, and interest in and to the software remain with the copyright holder.
1.4 Termination
Any violation of this licence automatically terminates the rights granted herein.
Upon termination, you must immediately cease all use of the software and delete all copies
in your possession or control.
2. Commercial Subscription Licence
----------------------------------
(Required for Business or Revenue-Generating Use)
Any use of the software in a commercial, professional, or revenue-generating context requires
a valid Commercial Subscription Licence.
A Commercial Subscription Licence may permit, subject to separate written terms:
- Commercial and internal business use
- Deployment in production environments
- Integration with proprietary or open-source systems
- Modification and derivative works
- Distribution as part of a product or service
Commercial licences are offered under separate written terms and are typically provided on
a subscription basis.
To obtain a Commercial Subscription Licence, visit https://rateslib.com/licence
3. No Implied Rights
--------------------
Except as expressly granted in writing, no rights are granted under this licence, whether by
implication, estoppel, or otherwise.
No patent, trademark, or other intellectual property rights are granted under the
Non-Commercial Source-Available Licence.
4. Restriction on Machine Learning, AI Training and Generative AI
-----------------------------------------------------------------
The Source Code and all related assets in this repository may not be used, directly or indirectly,
for the purpose of training, developing, or improving any artificial intelligence,
machine learning model, or large language model. This includes, but is not limited to,
using the Source Code for data mining, scraping, or as part of a training dataset for generative
AI tools. Any such use is an unauthorized reproduction and a violation of this license.
5. Disclaimer of Warranty
-------------------------
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
BUT NOT LIMITED TO WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE,
AND NON-INFRINGEMENT.
6. Limitation of Liability
--------------------------
TO THE MAXIMUM EXTENT PERMITTED BY LAW, IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR
ANY CLAIM, DAMAGES, OR OTHER LIABILITY ARISING FROM OR IN CONNECTION WITH THE USE OF THE SOFTWARE.
7. Governing Law
----------------
This licence and any dispute arising out of or in connection with it shall be governed by and
construed in accordance with the laws of England and Wales, excluding its conflict of
law principles.
Nothing in this section limits the copyright holder’s right to seek injunctive or equitable
relief in any jurisdiction.
8. Severability
---------------
If any provision of this licence is held to be invalid, illegal, or unenforceable by a court of
competent jurisdiction, the remaining provisions shall remain in full force and effect.
9. No Waiver
------------
Failure to enforce any provision of this licence shall not constitute a waiver of future
enforcement of that or any other provision.
10. Language
-----------
This licence is written in English. Any translations are provided for convenience only.
In the event of any inconsistency or dispute, the English version shall prevail.
END OF TERMS
================================================
FILE: README.md
================================================
# Rateslib
``Rateslib`` is a state-of-the-art **fixed income library** designed for Python.
Its purpose is to provide advanced, flexible and efficient fixed income analysis
with a high level, well documented API.
The techniques and object interaction within *rateslib* were inspired by
the requirements of multi-disciplined fixed income teams working, both cooperatively
and independently, within global investment banks.
Licence
=======
This library is released under specific Dual Licensing Terms - Source-Available Non-Commercial Licence
and Commercial Subscription Licence. See [latest licence](https://rateslib.com/py/en/latest/i_licence.html)
This project is source-available, **not** open source. Commercial use requires a paid licence.
Get Started
===========
Read the documentation at
[rateslib.com/py](https://rateslib.com/py/)
================================================
FILE: docs/source/z_ir_vol_time_to_expiry.rst
================================================
.. _cook-ir-vol-time-doc:
.. ipython:: python
:suppress:
from rateslib.curves import *
from rateslib.instruments import *
from rateslib.solver import Solver
from rateslib import calendars
from itertools import product
from rateslib.volatility import IRSabrCube, IRSplineCube, IRSplineSmile, IRSabrSmile
import matplotlib.pyplot as plt
from datetime import datetime as dt
import numpy as np
from pandas import DataFrame, option_context, Series
import pandas as pd
IR Volatility Time To Expiry Remapping
**********************************************************************
This page presents examples for working with time to expiry for IR volatility products.
**Key Points**
- Every *time to expiry* is an Act365 calendar day measure unless remapped.
- At each ``expiry`` on any *Cube* *time to expiry* is Act365 calendar day measure assuming the associated
volatility is calibrated to market.
- Any intermediate *time to expiry* between the chosen ``expiries`` on a *Cube* can be remapped.
Introduction
-------------
Every *IR volatility* pricing object has an ``eval_date`` as part of its ``meta`` parameters.
This allows any :class:`~rateslib.volatility._BaseIRSmile` to make a natural measure of time to expiry
using the equation:
.. math::
t = \frac{days(expiry - eval date)}{365}
When a :class:`~rateslib.volatility._BaseIRSmile` yields a volatility value for a specific strike,
that volatility value is assumed to be associated with that *time to expiry* that
that :class:`~rateslib.volatility._BaseIRSmile` calculates.
Most of the time a user will not need to be aware of that. Lets create a basic swaption and analyse
different pricing models. For example:
.. ipython:: python
curve = Curve({dt(2001, 1, 1): 1.0, dt(2004, 1, 1): 0.90}, convention="act360", calendar="nyc")
iro = IRSCall(expiry=dt(2002, 1, 1), tenor="1y", irs_series="usd_irs", strike=3.30)
.. tabs::
.. group-tab:: IRSplineSmile
.. ipython:: python
irss = IRSplineSmile(
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
tenor="1y",
nodes={-25.0: 52, 0: 50, 25: 53},
k=4,
irs_series="usd_irs"
)
print(irss.get_from_strike(k=2.4, f=2.1))
print(iro.rate(curves=curve, vol=irss, metric="percentnotional"))
.. group-tab:: IRSabrSmile
.. ipython:: python
irss = IRSabrSmile(
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
tenor="1y",
nodes={"alpha": 0.35, "rho": -0.05, "nu": 0.65},
beta=0.5,
irs_series="usd_irs",
)
print(irss.get_from_strike(k=2.4, f=2.1))
print(iro.rate(curves=curve, vol=irss, metric="percentnotional"))
The pair of values here :math:`(t, \sigma)` are used in pricing models such as the Black76 or Bachelier model directly.
Time Scaling
--------------
It is possible, however, to apply a scaling parameter to the calendar day measure to arrive at a different
*time to expiry*. Doing so yields a pair :math:`(\hat{t}, \hat{\sigma})`
.. math::
\hat{t} = \xi t
.. tabs::
.. group-tab:: IRSplineSmile
.. ipython:: python
irss = IRSplineSmile(
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
tenor="1y",
nodes={-25.0: 52, 0: 50, 25: 53},
k=4,
irs_series="usd_irs",
time_scalar=0.98,
)
print(irss.get_from_strike(k=2.4, f=2.1))
print(iro.rate(curves=curve, vol=irss, metric="percentnotional"))
.. group-tab:: IRSabrSmile
.. ipython:: python
irss = IRSabrSmile(
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
tenor="1y",
nodes={"alpha": 0.35, "rho": -0.05, "nu": 0.65},
beta=0.5,
irs_series="usd_irs",
time_scalar=0.98,
)
print(irss.get_from_strike(k=2.4, f=2.1))
print(iro.rate(curves=curve, vol=irss, metric="percentnotional"))
Working with a Cube
----------------------
Typically the *time scalar* is not a quantity one will add to a *Smile* directly.
Instead it exists to allow *Cubes* to handle time interpolation.
The ``weights`` argument on a *Cube* can apportion volatility to specific dates in between
chosen ``expiries``. It is **assumed** that on every given expiry the time scalar equals one
and the *Cube* is calibrated to market *Instruments*.
.. tabs::
.. tab:: Calendar Days
.. ipython:: python
irsc1 = IRSabrCube(
eval_date=dt(2001, 1, 1),
expiries=[dt(2001, 2, 1), dt(2001, 3, 1), dt(2001, 4, 1), dt(2001, 7, 1)],
tenors=["1y"],
alpha=0.35,
beta=0.5,
rho=-0.05,
nu=0.45,
irs_series="usd_irs",
)
.. tab:: Business Days
.. ipython:: python
nyc = calendars.get("nyc")
weights = Series( # set the weight of non-business days to zero
index=[_ for _ in nyc.cal_date_range(dt(2001, 1, 1), dt(2001, 8, 1)) if nyc.is_non_bus_day(_)],
data=0.0
)
irsc2 = IRSabrCube(
eval_date=dt(2001, 1, 1),
expiries=[dt(2001, 2, 1), dt(2001, 3, 1), dt(2001, 4, 1), dt(2001, 7, 1)],
tenors=["1y"],
alpha=0.35,
beta=0.5,
rho=-0.05,
nu=0.45,
irs_series="usd_irs",
weights=weights,
)
.. tab:: Semi-Business Days
.. ipython:: python
weights2 = Series( # set the weight of non-business days to 0.5
index=[_ for _ in nyc.cal_date_range(dt(2001, 1, 1), dt(2001, 8, 1)) if nyc.is_non_bus_day(_)],
data=0.5
)
irsc3 = IRSabrCube(
eval_date=dt(2001, 1, 1),
expiries=[dt(2001, 2, 1), dt(2001, 3, 1), dt(2001, 4, 1), dt(2001, 7, 1)],
tenors=["1y"],
alpha=0.35,
beta=0.5,
rho=-0.05,
nu=0.45,
irs_series="usd_irs",
weights=weights2,
)
Prices of Options
-------------------
With the different models above we plot the prices of ATM Payer Swaptions. In fact these graphs show the
differences in prices of percent of notional for an option of every expiry date. After the end of the ``weights``
*Series* the prices converge as both models fall back to calendar day type.
.. ipython:: python
x, y, y2 = [], [], []
for expiry in nyc.cal_date_range(dt(2001, 1, 5), dt(2001, 9, 1)):
iro = IRSCall(
expiry=expiry,
tenor="1y",
strike="atm",
irs_series="usd_irs",
)
x.append(expiry)
y.append(iro.rate(curves=curve, vol=irsc1, metric="percentnotional") - iro.rate(curves=curve, vol=irsc2, metric="percentnotional"))
y2.append(iro.rate(curves=curve, vol=irsc1, metric="percentnotional") - iro.rate(curves=curve, vol=irsc3, metric="percentnotional"))
.. plot::
from rateslib import dt, Curve, IRSabrCube, calendars, IRSCall
from pandas import Series
curve = Curve({dt(2001, 1, 1): 1.0, dt(2004, 1, 1): 0.90}, convention="act360", calendar="nyc")
irsc1 = IRSabrCube(
eval_date=dt(2001, 1, 1),
expiries=[dt(2001, 2, 1), dt(2001, 3, 1), dt(2001, 4, 1), dt(2001, 7, 1)],
tenors=["1y"],
alpha=0.35,
beta=0.5,
rho=-0.05,
nu=0.45,
irs_series="usd_irs",
)
nyc = calendars.get("nyc")
weights = Series( # set the weight of non-business days to zero
index=[_ for _ in nyc.cal_date_range(dt(2001, 1, 1), dt(2001, 8, 1)) if nyc.is_non_bus_day(_)],
data=0.0
)
weights2 = Series( # set the weight of non-business days to 0.5
index=[_ for _ in nyc.cal_date_range(dt(2001, 1, 1), dt(2001, 8, 1)) if nyc.is_non_bus_day(_)],
data=0.5
)
irsc2 = IRSabrCube(
eval_date=dt(2001, 1, 1),
expiries=[dt(2001, 2, 1), dt(2001, 3, 1), dt(2001, 4, 1), dt(2001, 7, 1)],
tenors=["1y"],
alpha=0.35,
beta=0.5,
rho=-0.05,
nu=0.45,
irs_series="usd_irs",
weights=weights,
)
irsc3 = IRSabrCube(
eval_date=dt(2001, 1, 1),
expiries=[dt(2001, 2, 1), dt(2001, 3, 1), dt(2001, 4, 1), dt(2001, 7, 1)],
tenors=["1y"],
alpha=0.35,
beta=0.5,
rho=-0.05,
nu=0.45,
irs_series="usd_irs",
weights=weights2,
)
x, y, y2 = [], [], []
for expiry in nyc.cal_date_range(dt(2001, 1, 5), dt(2001, 9, 1)):
iro = IRSCall(
expiry=expiry,
tenor="1y",
strike="atm",
irs_series="usd_irs",
)
x.append(expiry)
y.append(iro.rate(curves=curve, vol=irsc1, metric="percentnotional") - iro.rate(curves=curve, vol=irsc2, metric="percentnotional"))
y2.append(iro.rate(curves=curve, vol=irsc1, metric="percentnotional") - iro.rate(curves=curve, vol=irsc3, metric="percentnotional"))
from matplotlib import pyplot as plt
fig, ax = plt.subplots(1,1)
ax.plot(x,y)
ax.plot(x,y2)
ax.scatter([dt(2001, 2, 1), dt(2001, 3, 1), dt(2001, 4, 1), dt(2001, 7, 1)], [0, 0, 0, 0], s=25, c='r')
plt.show()
plt.close()
================================================
FILE: notebooks/coding/ch5_fx.ipynb
================================================
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"from rateslib.fx import FXRates, FXForwards\n",
"from rateslib.dual import Dual\n",
"from rateslib.curves import Curve\n",
"from datetime import datetime as dt"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Chapter 5 - FX Rates\n",
"\n",
"### Unsuitable initialisation"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"FXRates({\"usdeur\": 1.0, \"noksek\":1.0})"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"FXRates({\"usdeur\": 1.0, \"gbpusd\":1.0, \"gbpeur\": 1.0})"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"FXRates({\"usdeur\": 1.0, \"eurusd\":1.0, \"noksek\": 1.0})"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## FX Rates Array\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fxr = FXRates({\"usdeur\": 2.0, \"usdgbp\": 2.5})\n",
"fxr.rates_table()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fxr.rate(\"eurgbp\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Representation via Dual"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fxr = FXRates({\"usdnok\": 8.0})\n",
"fxr.convert(1000000, \"nok\", \"usd\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Equivalence of Cash Positions and Base Value"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fxr.currencies"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"base_value = fxr.convert_positions([0, 1000000], \"usd\")\n",
"base_value"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"positions = fxr.positions(base_value, \"usd\")\n",
"positions"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Introduce a third currency"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fxr = FXRates({\"usdeur\": 0.9, \"eurnok\": 8.888889})\n",
"fxr.currencies"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"base_value = fxr.convert_positions([0, 0, 1000000], \"usd\")\n",
"base_value"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fxr.positions(base_value, \"usd\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"base_value = Dual(125000, \"fx_usdnok\", [-15625])\n",
"positions = fxr.positions(base_value, \"usd\")\n",
"positions"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fxr.convert_positions(positions, \"usd\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Re-expression in Majors"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fxr_crosses = FXRates({\"eurusd\": 1.0, \"gbpjpy\": 100, \"eurjpy\": 100})\n",
"fxr_crosses.convert(1, \"usd\", \"jpy\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fxr_majors = fxr_crosses.restate([\"eurusd\", \"usdjpy\", \"gbpusd\"])\n",
"fxr_majors.convert(1, \"usd\", \"jpy\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## FX Forwards"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fx_rates = FXRates({\"usdeur\": 0.9, \"eurnok\": 8.888889}, dt(2022, 1, 3))\n",
"fx_curves = {\n",
" \"usdusd\": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.96}),\n",
" \"eureur\": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}),\n",
" \"eurusd\": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.991}),\n",
" \"noknok\": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}),\n",
" \"nokeur\": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.978}),\n",
"}\n",
"fxf = FXForwards(fx_rates, fx_curves)\n",
"fxf.rate(\"usdnok\", dt(2022, 8, 15))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Equivalence of Delta Risk"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fx_rates = FXRates({\"usdeur\": 0.9, \"eurnok\": 8.888889}, dt(2022, 1, 3))\n",
"start, end = dt(2022, 1, 1), dt(2023, 1,1)\n",
"fx_curves = {\n",
" \"usdusd\": Curve({start: 1.0, end: 0.96}, id=\"uu\", ad=1),\n",
" \"eureur\": Curve({start: 1.0, end: 0.99}, id=\"ee\", ad=1),\n",
" \"eurusd\": Curve({start: 1.0, end: 0.991}, id=\"eu\", ad=1),\n",
" \"noknok\": Curve({start: 1.0, end: 0.98}, id=\"nn\", ad=1),\n",
" \"nokeur\": Curve({start: 1.0, end: 0.978}, id=\"ne\", ad=1),\n",
"}\n",
"fxf = FXForwards(fx_rates, fx_curves)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"discounted_nok = fx_curves[\"nokeur\"][dt(2022, 8, 15)] * 1000\n",
"base_value = discounted_nok * fxf.rate(\"nokusd\", dt(2022, 1, 1))\n",
"base_value"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"forward_eur = fxf.rate(\"nokeur\", dt(2022, 8, 15)) * 1000\n",
"discounted_eur = forward_eur * fx_curves[\"eureur\"][dt(2022, 8, 15)]\n",
"base_value = discounted_eur * fxf.rate(\"eurusd\", dt(2022, 1, 1))\n",
"base_value"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"base_value.gradient([\"uu1\", \"ee1\", \"eu1\", \"nn1\", \"ne1\", \"fx_usdeur\", \"fx_eurnok\"])\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Combining Settlement Dates\n",
"\n",
"### Separable system"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fxr1 = FXRates({\"eurusd\": 1.05}, settlement=dt(2022, 1, 3))\n",
"fxr2 = FXRates({\"usdcad\": 1.1}, settlement=dt(2022, 1, 2))\n",
"fxf = FXForwards(\n",
" fx_rates=[fxr1, fxr2],\n",
" fx_curves={\n",
" \"usdusd\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"eureur\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"cadcad\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"usdeur\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"cadusd\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" }\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fxf.rate(\"eurcad\", dt(2022, 2, 1))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Acyclic Dependent Systems"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fxf = FXForwards(\n",
" fx_rates=[fxr1, fxr2],\n",
" fx_curves={\n",
" \"usdusd\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"eureur\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"cadcad\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"usdeur\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"cadeur\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" }\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fxf.rate(\"eurcad\", dt(2022, 2, 1))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Cyclic Dependent Systems Fail"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fxr1 = FXRates({\"eurusd\": 1.05, \"gbpusd\": 1.25}, settlement=dt(2022, 1, 3))\n",
"fxf = FXForwards(\n",
" fx_rates=[fxr1, fxr2],\n",
" fx_curves={\n",
" \"usdusd\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"eureur\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"cadcad\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"usdeur\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"cadeur\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"gbpcad\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"gbpgbp\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" }\n",
")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"But cyclic systems can be restructured"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fxr1 = FXRates({\"eurusd\": 1.05}, settlement=dt(2022, 1, 3))\n",
"fxr3 = FXRates({\"gbpusd\": 1.25}, settlement=dt(2022, 1, 3))\n",
"fxf = FXForwards(\n",
" fx_rates=[fxr1, fxr2, fxr3],\n",
" fx_curves={\n",
" \"usdusd\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"eureur\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"cadcad\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"usdeur\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"cadeur\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"gbpcad\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"gbpgbp\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" }\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fxf.rate(\"eurcad\", dt(2022, 2, 1))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Unsolvable System"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fxr1 = FXRates({\"eurusd\": 1.05, \"gbpusd\": 1.25}, settlement=dt(2022, 1, 3))\n",
"fxr3 = FXRates({\"gbpjpy\": 100}, settlement=dt(2022, 1, 4))\n",
"FXForwards(\n",
" fx_rates=[fxr1, fxr2, fxr3],\n",
" fx_curves={\n",
" \"usdusd\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"eureur\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"cadcad\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"gbpgbp\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"usdjpy\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"eurcad\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"eurjpy\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"gbpcad\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" }\n",
")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Dual Representation"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"\n",
"\n",
"
\n",
" \n",
" \n",
" | \n",
" 2022-01-01 | \n",
" 2022-01-02 | \n",
" 2022-01-03 | \n",
"
\n",
" \n",
" \n",
" \n",
" | cad | \n",
" 0.0 | \n",
" 181500.0 | \n",
" 0.0 | \n",
"
\n",
" \n",
" | eur | \n",
" 0.0 | \n",
" 0.0 | \n",
" -100000.0 | \n",
"
\n",
" \n",
" | usd | \n",
" 100000.0 | \n",
" -165000.0 | \n",
" 105000.0 | \n",
"
\n",
" \n",
"
\n",
"
"
],
"text/plain": [
" 2022-01-01 2022-01-02 2022-01-03\n",
"cad 0.0 181500.0 0.0\n",
"eur 0.0 0.0 -100000.0\n",
"usd 100000.0 -165000.0 105000.0"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"fxr1 = FXRates({\"eurusd\": 1.05}, settlement=dt(2022, 1, 3))\n",
"fxr2 = FXRates({\"usdcad\": 1.1}, settlement=dt(2022, 1, 2))\n",
"fxf = FXForwards(\n",
" fx_rates=[fxr1, fxr2],\n",
" fx_curves={\n",
" \"usdusd\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"eureur\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"cadcad\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"usdeur\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" \"cadusd\": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),\n",
" }\n",
")\n",
"pv = Dual(100000, [\"fx_eurusd\", \"fx_usdcad\"], [-100000, -150000])\n",
"fxf.positions(pv, base=\"usd\")"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"cad 181500.0\n",
"eur -100000.0\n",
"usd 40000.0\n",
"dtype: float64"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"fxf.positions(pv, base=\"usd\", aggregate=True)"
]
},
{
"cell_type": "code",
"execution_count": 18,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
""
]
},
"execution_count": 18,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"fxf.convert_positions(fxf.positions(pv, base=\"usd\"))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.1"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
================================================
FILE: notebooks/coding/curves.ipynb
================================================
{
"cells": [
{
"cell_type": "markdown",
"id": "f8825706-c252-40d7-8075-b438f5756093",
"metadata": {},
"source": [
"# Curves\n",
"\n",
"### CompositeCurve example\n",
"\n",
"The first section here regards efficient operations and compositing two curves."
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "c88c3ce0-72f1-4182-a6c0-36209ccc9954",
"metadata": {},
"outputs": [],
"source": [
"from rateslib import dt\n",
"from rateslib.curves import Curve, LineCurve, CompositeCurve"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "9cbc5699-fa68-46cd-8e75-3752d078977c",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"3.75"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"line_curve1 = LineCurve({dt(2022, 1, 1): 2.0, dt(2022, 1, 3): 4.0}, id=\"C1_\")\n",
"line_curve2 = LineCurve({dt(2022, 1, 1): 0.5, dt(2022, 1, 3): 1.0}, id=\"C2_\")\n",
"composite_curve = CompositeCurve(curves=(line_curve1, line_curve2))\n",
"composite_curve.rate(dt(2022, 1, 2))"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "fd5f49ac-ed99-4422-844a-13c657b823f1",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
""
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"line_curve1._set_ad_order(1)\n",
"line_curve2._set_ad_order(1)\n",
"composite_curve.rate(dt(2022, 1, 2))"
]
},
{
"cell_type": "markdown",
"id": "8cb305f5-19a5-46b6-a9f2-82a2bd1f6592",
"metadata": {},
"source": [
"The code above demonstrates the summing of individual rates and of interoperability with Dual datatypes."
]
},
{
"cell_type": "markdown",
"id": "b658689c-65f2-4aae-992a-7fbf61f5d2c4",
"metadata": {},
"source": [
"### Error in approximated rates and execution time"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "973e0754-edfc-42ce-9d0c-d2272c69465f",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(array([ 3455, 3451, 22875, 21294, 48033, 892]),\n",
" array([0.e+00, 5.e-07, 1.e-06, 5.e-06, 1.e-05, 5.e-05, 1.e+00]))"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import numpy as np\n",
"MIN, MAX, SAMPLES, DAYS, d = 0, 4, 100000, 3, 1.0/365\n",
"c1 = np.random.rand(DAYS, SAMPLES) * (MAX - MIN) + MIN\n",
"c2 = np.random.rand(DAYS, SAMPLES) * (MAX - MIN) + MIN\n",
"r_true=((1 + d * (c1 + c2) / 100).prod(axis=0) - 1) * 100 / (d * DAYS)\n",
"c1_bar = ((1 + d * c1 / 100).prod(axis=0)**(1/DAYS) - 1) * 100 / d\n",
"c2_bar = ((1 + d * c2 / 100).prod(axis=0)**(1/DAYS) - 1) * 100 / d\n",
"r_bar = ((1 + d * (c1_bar + c2_bar) / 100) ** DAYS - 1) * 100 / (d * DAYS)\n",
"np.histogram(np.abs(r_true-r_bar), bins=[0, 5e-7, 1e-6, 5e-6, 1e-5, 5e-5, 1]) "
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "c56f3b7d-07ce-4007-bf57-bda4bc2259cb",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"50.3 µs ± 1.22 µs per loop (mean ± std. dev. of 7 runs, 10,000 loops each)\n"
]
}
],
"source": [
"composite_curve = CompositeCurve(\n",
" (\n",
" Curve({dt(2022, 1, 1): 1.0, dt(2024, 1, 1): 0.95}, id=\"C1_\"),\n",
" Curve({dt(2022, 1, 1): 1.0, dt(2024, 1, 1): 0.99}, id=\"C2_\"),\n",
" )\n",
")\n",
"%timeit composite_curve.rate(dt(2022, 6, 1), \"1y\", approximate=True) "
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "7e74017e-41f0-424a-bd17-aed0d168a8df",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"21.9 ms ± 890 µs per loop (mean ± std. dev. of 7 runs, 10 loops each)\n"
]
}
],
"source": [
"%timeit composite_curve.rate(dt(2022, 6, 1), \"1y\", approximate=False)"
]
},
{
"cell_type": "markdown",
"id": "5f2769bb-0f25-4d5e-996f-5684e1f18a26",
"metadata": {},
"source": [
"### Curve operations: shift"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "64caaec4-072a-4dd5-a9ef-ac4b95852a7f",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"1.9926509362075961\n",
"2.4926509362108717\n"
]
}
],
"source": [
"curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98})\n",
"print(curve.rate(dt(2022, 2, 1), \"1d\"))\n",
"print(curve.shift(50).rate(dt(2022, 2, 1), \"1d\"))"
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "5a386ff1-1577-42b1-8d13-7d53dc509aa5",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"2.050958904109589\n",
"2.550958904109589\n"
]
}
],
"source": [
"line_curve = LineCurve({dt(2022, 1, 1): 2.0, dt(2023, 1, 1): 2.6})\n",
"print(line_curve.rate(dt(2022, 2, 1), \"1d\"))\n",
"print(line_curve.shift(50).rate(dt(2022, 2, 1), \"1d\"))"
]
},
{
"cell_type": "markdown",
"id": "44ebfa6c-72ee-473e-9299-e1727a8884b7",
"metadata": {},
"source": [
"### Curve operations: translate"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "bdf0aca9-39e0-406b-9d64-9bfcfc9ffed6",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"3.8711064912719806 3.8711064912719806\n",
"3.8709012910311813 3.8709012910311813\n",
"3.8706902731000525 3.870690273092059\n",
"0.0 0.0\n",
"0.0 0.0\n",
"3.8971038951416404 3.9052558203165333\n"
]
}
],
"source": [
"for interpolation in [\n",
" \"linear\", \"log_linear\", \"linear_index\", \"flat_forward\", \"flat_backward\", \"linear_zero_rate\"\n",
"]:\n",
" curve = Curve(\n",
" nodes={dt(2022, 1, 1): 1.0, dt(2022, 2, 1):0.998, dt(2022, 3, 1): 0.995}, \n",
" interpolation=interpolation\n",
" )\n",
" curve_translated = curve.translate(dt(2022, 1, 15)) \n",
" print(\n",
" curve.rate(dt(2022, 2, 15), \"1d\"),\n",
" curve_translated.rate(dt(2022, 2, 15), \"1d\") \n",
" )"
]
},
{
"cell_type": "markdown",
"id": "327672ae-28af-4e15-bfe1-0b5a52cedcc8",
"metadata": {},
"source": [
"### Curve operations: roll"
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "6e4e86f8-ff29-48bb-a7f7-9985aa2f0748",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"2.1111503451809455\n",
"2.1111503451809455\n"
]
}
],
"source": [
"curve = Curve(\n",
" nodes={dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98, dt(2024, 1, 1): 0.97},\n",
" t=[dt(2022, 1, 1), dt(2022, 1, 1), dt(2022, 1, 1), dt(2022, 1, 1),\n",
" dt(2023, 1, 1),\n",
" dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1)]\n",
")\n",
"print(curve.rate(dt(2022, 6, 1), \"1d\"))\n",
"print(curve.roll(\"30d\").rate(dt(2022, 7, 1), \"1d\"))"
]
},
{
"cell_type": "code",
"execution_count": 11,
"id": "d4e5fc59-aa88-48ec-a7f6-e5d13b10b1f3",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"2.3082258965546494\n",
"2.3082258965546494\n"
]
}
],
"source": [
"line_curve = LineCurve(\n",
" nodes={dt(2022, 1, 1): 2.0, dt(2023, 1, 1): 2.6, dt(2024, 1, 1): 2.5},\n",
" t=[dt(2022, 1, 1), dt(2022, 1, 1), dt(2022, 1, 1), dt(2022, 1, 1),\n",
" dt(2023, 1, 1),\n",
" dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1)]\n",
")\n",
"print(line_curve.rate(dt(2022, 6, 1)))\n",
"print(line_curve.roll(\"-31d\").rate(dt(2022, 5, 1), \"1d\"))"
]
},
{
"cell_type": "markdown",
"id": "ee9951a7-1eea-4255-9e5c-2a4818983598",
"metadata": {},
"source": [
"### Operations on CompositeCurves"
]
},
{
"cell_type": "code",
"execution_count": 14,
"id": "8fff533b-4ee2-4405-b6e3-bdf4fe53aadf",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"3.0252576094156325"
]
},
"execution_count": 14,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"composite_curve.rate(dt(2022, 6, 1), \"1d\")"
]
},
{
"cell_type": "code",
"execution_count": 15,
"id": "53409cb5-9512-43f1-8e9c-cb1886ed1f6e",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"3.525257609418908"
]
},
"execution_count": 15,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"composite_curve.shift(50).rate(dt(2022, 6, 1), \"1d\")"
]
},
{
"cell_type": "code",
"execution_count": 16,
"id": "89680e78-5318-4b56-af1d-8be7dae90ca4",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"3.025257609407639"
]
},
"execution_count": 16,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"composite_curve.roll(\"30d\").rate(dt(2022, 7, 1), \"1d\")"
]
},
{
"cell_type": "code",
"execution_count": 17,
"id": "b23d6958-903a-4442-98a7-d5585fe4d56c",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"3.0252576094156325"
]
},
"execution_count": 17,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"composite_curve.translate(dt(2022, 5, 1)).rate(dt(2022, 6, 1), \"1d\")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.1"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
================================================
FILE: notebooks/coding/scheduling.ipynb
================================================
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "88003b38-8369-4263-b409-fe548b1250cb",
"metadata": {},
"outputs": [],
"source": [
"from rateslib import dt\n",
"from rateslib.scheduling import _get_unadjusted_roll"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "ac36569f-fa13-49ae-829d-458104892eed",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"15"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"_get_unadjusted_roll(ueffective=dt(2022, 3, 15), utermination=dt(2023, 3, 15), eom=False)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "e5181913-6ea2-4ccc-893f-fdf8621e4534",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"28"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"_get_unadjusted_roll(ueffective=dt(2022, 2, 28), utermination=dt(2023, 2, 28), eom=False)"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "8bb47315-3042-4c9a-a8eb-3766be6a2fe7",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'eom'"
]
},
"execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"_get_unadjusted_roll(ueffective=dt(2022, 2, 28), utermination=dt(2023, 2, 28), eom=True)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "c46d63ff-99c8-48fa-a03d-93e90e0b7b1c",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 60,
"id": "bf3b7df1-9cd1-4a0d-988a-d7d80e7b9b08",
"metadata": {},
"outputs": [],
"source": [
"from rateslib.scheduling import _generate_regular_schedule_unadjusted"
]
},
{
"cell_type": "code",
"execution_count": 61,
"id": "19802482-fe5d-4e21-aa7b-8b419fe6245e",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"[datetime.datetime(2023, 3, 15, 0, 0),\n",
" datetime.datetime(2023, 4, 19, 0, 0),\n",
" datetime.datetime(2023, 5, 17, 0, 0),\n",
" datetime.datetime(2023, 6, 21, 0, 0),\n",
" datetime.datetime(2023, 7, 19, 0, 0),\n",
" datetime.datetime(2023, 8, 16, 0, 0),\n",
" datetime.datetime(2023, 9, 20, 0, 0)]"
]
},
"execution_count": 61,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"dates = [\n",
" d for d in \n",
" _generate_regular_schedule_unadjusted(\n",
" ueffective=dt(2023, 3, 15),\n",
" utermination=dt(2023, 9, 20),\n",
" frequency=\"M\",\n",
" roll=\"imm\"\n",
" )\n",
"]\n",
"dates"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3b0dbdd3-5daa-4933-8cc2-5d08605bf57f",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "04b4befd-8430-4471-9b44-b367343268b0",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "995ebd97-495e-4736-a5a1-780b5d87b0d1",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "4e3a0787-082b-45eb-a769-6347e90a5190",
"metadata": {},
"outputs": [],
"source": [
"from rateslib.scheduling import _check_unadjusted_regular_swap"
]
},
{
"cell_type": "code",
"execution_count": 13,
"id": "d0d910d9-a974-4167-bfd3-f4a9c772de9a",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(False, 'Roll day could not be inferred from given dates.')"
]
},
"execution_count": 13,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"_check_unadjusted_regular_swap(\n",
" ueffective=dt(2022, 3, 16), utermination=dt(2022, 9, 21),\n",
" frequency=\"M\", roll=None, eom=False\n",
" )"
]
},
{
"cell_type": "code",
"execution_count": 12,
"id": "67d9c5e4-84c7-4f0b-afa6-896c8eebf521",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(True,\n",
" {'ueffective': datetime.datetime(2022, 2, 28, 0, 0),\n",
" 'utermination': datetime.datetime(2023, 2, 28, 0, 0),\n",
" 'frequency': 'M',\n",
" 'roll': 'eom',\n",
" 'eom': True})"
]
},
"execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"_check_unadjusted_regular_swap(\n",
" ueffective=dt(2022, 2, 28), utermination=dt(2023, 2, 28),\n",
" frequency=\"M\", eom=True, roll=None\n",
" )"
]
},
{
"cell_type": "code",
"execution_count": 14,
"id": "cc11e373-87c8-49d8-859c-06610031c44a",
"metadata": {},
"outputs": [],
"source": [
"from rateslib.scheduling import _get_unadjusted_short_stub_date"
]
},
{
"cell_type": "code",
"execution_count": 24,
"id": "d02c470c-727b-4878-9658-5f63e5d4bace",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"datetime.datetime(2022, 6, 30, 0, 0)"
]
},
"execution_count": 24,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"_get_unadjusted_short_stub_date(\n",
" ueffective=dt(2022, 6, 15), utermination=dt(2023, 2, 28),\n",
" frequency=\"M\", eom=True, roll=None, stub_side=\"FRONT\"\n",
" )"
]
},
{
"cell_type": "code",
"execution_count": 25,
"id": "de5285b8-50bf-4a84-af4e-a0113a63bf50",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"datetime.datetime(2022, 6, 28, 0, 0)"
]
},
"execution_count": 25,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"_get_unadjusted_short_stub_date(\n",
" ueffective=dt(2022, 6, 15), utermination=dt(2023, 2, 28),\n",
" frequency=\"M\", eom=False, roll=None, stub_side=\"FRONT\"\n",
" )"
]
},
{
"cell_type": "code",
"execution_count": 26,
"id": "0135ff22-784f-4c43-8162-2577073a1927",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"datetime.datetime(2022, 6, 29, 0, 0)"
]
},
"execution_count": 26,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"_get_unadjusted_short_stub_date(\n",
" ueffective=dt(2022, 6, 15), utermination=dt(2023, 2, 28),\n",
" frequency=\"M\", eom=True, roll=29, stub_side=\"FRONT\"\n",
" )"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ab24b046-b86b-4945-8767-381c53d00a04",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 47,
"id": "19505125-6eed-43da-a541-32aab0f845d6",
"metadata": {},
"outputs": [],
"source": [
"from rateslib.scheduling import _get_unadjusted_stub_date"
]
},
{
"cell_type": "code",
"execution_count": 50,
"id": "daff6659-f94a-4cd6-ab65-018fb20af567",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"datetime.datetime(2022, 7, 31, 0, 0)"
]
},
"execution_count": 50,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"_get_unadjusted_stub_date(\n",
" ueffective=dt(2022, 6, 15), utermination=dt(2023, 2, 28),\n",
" frequency=\"M\", eom=True, roll=None, stub=\"LONGFRONT\"\n",
" )"
]
},
{
"cell_type": "code",
"execution_count": 51,
"id": "94e4ff12-a80a-44d1-85cd-b7ea2435cb52",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"datetime.datetime(2022, 7, 28, 0, 0)"
]
},
"execution_count": 51,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"_get_unadjusted_stub_date(\n",
" ueffective=dt(2022, 6, 15), utermination=dt(2023, 2, 28),\n",
" frequency=\"M\", eom=False, roll=None, stub=\"LONGFRONT\"\n",
" )"
]
},
{
"cell_type": "code",
"execution_count": 52,
"id": "2f6ddb5e-6322-43f0-9d76-6a989c3617a7",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"datetime.datetime(2022, 7, 29, 0, 0)"
]
},
"execution_count": 52,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"_get_unadjusted_stub_date(\n",
" ueffective=dt(2022, 6, 15), utermination=dt(2023, 2, 28),\n",
" frequency=\"M\", eom=True, roll=29, stub=\"LONGFRONT\"\n",
" )"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "519ff189-e0df-44ba-a8f2-05da66123906",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 56,
"id": "8afde93d-6f6d-4656-a456-bc69900e53f0",
"metadata": {},
"outputs": [],
"source": [
"from rateslib.calendars import get_calendar\n",
"from rateslib.scheduling import _check_regular_swap"
]
},
{
"cell_type": "code",
"execution_count": 58,
"id": "ee7487e8-2b6d-437b-954f-26eb2ae913c4",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(True,\n",
" {'ueffective': datetime.datetime(2022, 6, 5, 0, 0),\n",
" 'utermination': datetime.datetime(2022, 12, 5, 0, 0),\n",
" 'frequency': 'Q',\n",
" 'roll': 5,\n",
" 'eom': False})"
]
},
"execution_count": 58,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"_check_regular_swap(\n",
" effective=dt(2022, 6, 6),\n",
" termination=dt(2022, 12, 5),\n",
" frequency=\"Q\",\n",
" modifier=\"MF\",\n",
" eom=False,\n",
" roll=None,\n",
" calendar=get_calendar(\"bus\"),\n",
")\n",
" "
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3957a681-a10a-4925-8fe9-07d227a46786",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.1"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
================================================
FILE: notebooks/coding_2/AutomaticDifferentiation.ipynb
================================================
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "207f47dd-6e8d-4a49-8d4c-c775b157f8cb",
"metadata": {},
"outputs": [],
"source": [
"from rateslib import *"
]
},
{
"cell_type": "markdown",
"id": "4a2c4aa5-99a9-4a63-8b8a-3dc9fe2785ae",
"metadata": {},
"source": [
"# Definitions of dual numbers"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b5149cbc-1a29-4f20-a40b-3980866b6914",
"metadata": {},
"outputs": [],
"source": [
"z_x = Dual2(0.0, [\"x\"], [], [])\n",
"z_x"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "26ce32a9-9f08-4477-8fd7-f98fed699362",
"metadata": {},
"outputs": [],
"source": [
"z_x * z_x"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5e6b947f-1ea1-44d6-8c7b-a7d9445f5158",
"metadata": {},
"outputs": [],
"source": [
"(z_x * z_x).dual2"
]
},
{
"cell_type": "markdown",
"id": "3364bb0a-7fe0-43fa-bda4-c67c7e6e4630",
"metadata": {},
"source": [
"# General functions of dual numbers"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e48f6a4d-fe10-4ea5-9f39-85c310423e76",
"metadata": {},
"outputs": [],
"source": [
"import math\n",
"def dual_sin(x: float | Dual) -> float | Dual:\n",
" if isinstance(x, Dual):\n",
" return Dual(math.sin(x.real), x.vars, math.cos(x.real) * x.dual)\n",
" return math.sin(x)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d3cd2d23-0538-41e3-b94d-401bb7d2d35c",
"metadata": {},
"outputs": [],
"source": [
"x = Dual(2.1, [\"y\"], [])\n",
"dual_sin(x)"
]
},
{
"cell_type": "markdown",
"id": "e5cda5f5-c336-4e1c-abb6-2bf84db40352",
"metadata": {},
"source": [
"# Upcasting and dynamic variables"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "79196a21-0418-47a3-9d65-a6448b57df06",
"metadata": {},
"outputs": [],
"source": [
"first_dual = Dual(11.0, [\"x\", \"y\"], [3, 8])\n",
"second_dual = Dual(-3.0, [\"y\", \"z\"], [-2, 5])\n",
"first_dual + second_dual + 2.65"
]
},
{
"cell_type": "markdown",
"id": "93465777-50f0-4b1d-ad6d-00b054a52a37",
"metadata": {},
"source": [
"# First order derivatives and performance"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e66cf2c2-26a9-4b87-9600-265d43dd98ce",
"metadata": {},
"outputs": [],
"source": [
"def func(x, y, z):\n",
" return x**6 + dual_exp(x/y) + dual_log(z)\n",
"\n",
"x, y, z = 2.0, 1.0, 2.0\n",
"func(x, y, z)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ff535dd2-92af-4141-8343-78025f101278",
"metadata": {},
"outputs": [],
"source": [
"%timeit func(x, y, z)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "827b29f9-9314-40f3-a02d-44072643ee75",
"metadata": {},
"outputs": [],
"source": [
"x, y, z = Dual(2.0, [\"x\"], []), Dual(1.0, [\"y\"], []), Dual(2.0, [\"z\"], [])\n",
"func(x, y, z)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5e5d6fd4-ea9d-4cad-888c-935a6eec92c5",
"metadata": {},
"outputs": [],
"source": [
"%timeit func(x, y, z)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "1a1fb920-993f-4f9f-a023-7b946051f31c",
"metadata": {},
"outputs": [],
"source": [
"x = Dual(2.0, [\"x\", \"y\", \"z\"], [1.0, 0.0, 0.0])\n",
"y = Dual(1.0, [\"x\", \"y\", \"z\"], [0.0, 1.0, 0.0])\n",
"z = Dual(2.0, [\"x\", \"y\", \"z\"], [0.0, 0.0, 1.0])"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "6dbce6a4-54b5-46dd-ae43-7011aa21703e",
"metadata": {},
"outputs": [],
"source": [
"%timeit func(x, y, z)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ceb92f32-8064-46e1-a3ce-54b888759744",
"metadata": {},
"outputs": [],
"source": [
"x = Dual(2.0, [\"x\", \"y\", \"z\"], [1.0, 0.0, 0.0])\n",
"y = Dual.vars_from(x, 1.0, [\"x\", \"y\", \"z\"], [0.0, 1.0, 0.0])\n",
"z = Dual.vars_from(x, 2.0, [\"x\", \"y\", \"z\"], [0.0, 0.0, 1.0])"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "94cc0d63-5705-4848-9207-76d1885ba7d4",
"metadata": {},
"outputs": [],
"source": [
"%timeit func(x, y, z)"
]
},
{
"cell_type": "markdown",
"id": "170edec4-7a07-43ed-812f-2681225fd9b0",
"metadata": {},
"source": [
"# Numerical differentiation"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "a616687b-e18f-491a-867e-efa6a4352262",
"metadata": {},
"outputs": [],
"source": [
"def df_fwd_diff(f, x, y, z):\n",
" base = f(x, y, z)\n",
" dh = 1e-10\n",
" dx = f(x+dh, y, z) - base\n",
" dy = f(x, y+dh, z) - base\n",
" dz = f(x, y, z+dh) - base\n",
" return base, dx/dh, dy/dh, dz/dh\n",
"\n",
"%timeit df_fwd_diff(func, 2.0, 1.0, 2.0) "
]
},
{
"cell_type": "markdown",
"id": "5841581d-1acf-4a97-b4c5-f5136ee9b4b1",
"metadata": {},
"source": [
"# Functions with execution line delay"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "a47f8cea-1e05-4387-8e8f-f09030b39aa0",
"metadata": {},
"outputs": [],
"source": [
"import time\n",
"def func_complex(x, y, z):\n",
" time.sleep(0.000025)\n",
" return x**6 + dual_exp(x/y) + dual_log(z)\n",
"\n",
"%timeit func_complex(2.0, 1.0, 2.0)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "16867e87-f124-45de-81c6-ddf251bc07c8",
"metadata": {},
"outputs": [],
"source": [
"%timeit func_complex(x, y, z)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e3273e1f-af3f-4b4f-bd37-93893cfd2055",
"metadata": {},
"outputs": [],
"source": [
"%timeit df_fwd_diff(func_complex, 2.0, 1.0, 2.0)"
]
},
{
"cell_type": "markdown",
"id": "3dd7f29a-5c85-4b51-8877-c198ca0c52f4",
"metadata": {},
"source": [
"# Second order derivatives"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "65332b6c-e18d-4cd2-b4a7-434b4098a70f",
"metadata": {},
"outputs": [],
"source": [
"x = Dual2(2.0, [\"x\", \"y\", \"z\"], [1.0, 0.0, 0.0], [])\n",
"y = Dual2(1.0, [\"x\", \"y\", \"z\"], [0.0, 1.0, 0.0], [])\n",
"z = Dual2(2.0, [\"x\", \"y\", \"z\"], [0.0, 0.0, 1.0], [])\n",
"func(x, y, z)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "0ed2c033-9373-469c-a857-47aa3a32892c",
"metadata": {},
"outputs": [],
"source": [
"gradient(func(x, y, z), [\"x\", \"y\"], order=2)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "72618649-a785-40d0-b8ea-f297a1f50621",
"metadata": {},
"outputs": [],
"source": [
"%timeit func(x, y, z)"
]
},
{
"cell_type": "markdown",
"id": "17ff6e34-0a43-4158-961a-5c3fe51d167b",
"metadata": {},
"source": [
"# Exogenous Variables"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "959084c7-cc2a-4206-adaa-7d663a2c6a7e",
"metadata": {},
"outputs": [],
"source": [
"x = Variable(1.5, [\"x\"])\n",
"y = Variable(3.9, [\"y\"])\n",
"x * y"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3c30894e-04a8-429a-bbea-4d8d3fc2144c",
"metadata": {},
"outputs": [],
"source": [
"defaults._global_ad_order = 2"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "dcbe5c20-ce23-4546-a9c5-bf4fe3753f01",
"metadata": {},
"outputs": [],
"source": [
"x * y"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "a78d3ed5-96e5-4c87-8933-62245877a379",
"metadata": {},
"outputs": [],
"source": [
"(x * y).dual2"
]
},
{
"cell_type": "markdown",
"id": "ec449dab-7251-4225-b2e0-eb5212f9095a",
"metadata": {},
"source": [
"# One Dimensional Newton-Raphson Algorithm"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "04dc714b-49d8-4f77-b199-da3bd10416f8",
"metadata": {},
"outputs": [],
"source": [
"from rateslib.dual import newton_1dim\n",
"\n",
"def f(g, s):\n",
" f0 = g**2 - s # Function value\n",
" f1 = 2*g # Analytical derivative is required\n",
" return f0, f1\n",
"\n",
"s = Dual(2.0, [\"s\"], [])\n",
"newton_1dim(f, g0=1.0, args=(s,))"
]
},
{
"cell_type": "markdown",
"id": "9b6df9e8-f5ae-4d3d-ac86-2cee332bf1de",
"metadata": {},
"source": [
"# One Dimensional Inverse Function Theorem"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "289e0c45-7a40-4c24-b9f1-f1efe21b1966",
"metadata": {},
"outputs": [],
"source": [
"from rateslib.dual import ift_1dim\n",
"\n",
"def s(g):\n",
" return dual_exp(g) + g**2\n",
"\n",
"s_tgt = Dual(2.0, [\"s\"], [])\n",
"ift_1dim(s, s_tgt, h=\"modified_brent\", ini_h_args=(0.0, 2.0))"
]
},
{
"cell_type": "markdown",
"id": "82100c0b-63bc-4d19-bf62-848bb6cc91b3",
"metadata": {},
"source": [
"# Normal functions"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "06f5fa1d-8084-46a2-b9a1-94db300ec28e",
"metadata": {},
"outputs": [],
"source": [
"from rateslib.dual import dual_norm_pdf, dual_norm_cdf, dual_inv_norm_cdf"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ed9396d9-88d6-4817-bd86-c6a93fc3222e",
"metadata": {},
"outputs": [],
"source": [
"dual_norm_pdf(Variable(1.5, [\"u\"]))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5e661276-6c82-4f99-9a18-0e1bc0a71c83",
"metadata": {},
"outputs": [],
"source": [
"dual_norm_cdf(Variable(1.5, [\"u\"]))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ced0f08c-0ebf-4274-82e9-fe81480ab0fa",
"metadata": {},
"outputs": [],
"source": [
"dual_inv_norm_cdf(Variable(0.933193, [\"v\"]))"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.13.0"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
================================================
FILE: notebooks/coding_2/Calendars.ipynb
================================================
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "2362d250-6e1f-43d3-a853-4e53db61ef19",
"metadata": {},
"outputs": [],
"source": [
"from rateslib import *"
]
},
{
"cell_type": "markdown",
"id": "7c691656-ba4f-4278-8849-a75fc13b83f8",
"metadata": {},
"source": [
"# Timings\n",
"\n",
"Get a calendar straight from a hash table."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "0d8d2453-d45d-44a1-9794-31f9315f2de4",
"metadata": {},
"outputs": [],
"source": [
"%timeit get_calendar(\"ldn\")"
]
},
{
"cell_type": "markdown",
"id": "09dcad09-c185-48c3-9773-818f3af0d5db",
"metadata": {},
"source": [
"Construct a ``Cal`` directly from a list of holidays and week mask."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "375dc5c5-afbb-43a4-bd45-abb989ce3057",
"metadata": {},
"outputs": [],
"source": [
"cal = get_calendar(\"ldn\")\n",
"holidays = cal.holidays\n",
"%timeit Cal(holidays=holidays, week_mask=[5,6])"
]
},
{
"cell_type": "markdown",
"id": "e17cff9e-1685-491c-8b68-d43a5bd6f6d4",
"metadata": {},
"source": [
"Get a ``NamedCal`` parsed and constructed in Python."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "9096fea1-5cf6-4866-8f2d-2f549092be48",
"metadata": {},
"outputs": [],
"source": [
"%timeit get_calendar(\"ldn,tgt\")"
]
},
{
"cell_type": "markdown",
"id": "359e90af-641f-4753-a031-105a5fe0d54e",
"metadata": {},
"source": [
"Construct a ``UnionCal`` directly from multiple ``Cal``."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "07ef7035-3406-4f6f-91fe-5232599ee91c",
"metadata": {},
"outputs": [],
"source": [
"c1 = Cal(holidays=get_calendar(\"ldn\", named=False).holidays, week_mask=[5,6])\n",
"c2 = Cal(holidays=get_calendar(\"tgt\", named=False).holidays, week_mask=[5,6])\n",
"\n",
"%timeit UnionCal([c1, c2])"
]
},
{
"cell_type": "markdown",
"id": "ad0bd111-3c5d-4f93-a097-ecdf3cdb6090",
"metadata": {},
"source": [
"Add a new calendar to ``defaults.calendars`` and fetch that directly."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3db567be-5110-41e5-a36e-c8a7c8fd9445",
"metadata": {},
"outputs": [],
"source": [
"defaults.calendars[\"ldn,tgt\"] = get_calendar(\"ldn,tgt\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e1a030ff-3403-4b5d-a271-7154c62e5597",
"metadata": {},
"outputs": [],
"source": [
"%timeit get_calendar(\"ldn,tgt\")"
]
},
{
"cell_type": "markdown",
"id": "97dd17e2-5b84-4a9c-ba5d-017ba5815ee2",
"metadata": {},
"source": [
"# Tenor Manipulations"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c1afb68c-f364-47cb-8818-63736ec0a911",
"metadata": {},
"outputs": [],
"source": [
"add_tenor(dt(2001, 9, 28), \"-6m\", modifier=\"MF\", calendar=\"LDN\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "993f5d28-6945-44ea-a83d-d7a66a80256f",
"metadata": {},
"outputs": [],
"source": [
"add_tenor(dt(2001, 9, 28), \"-6m\", modifier=\"MF\", calendar=\"LDN\", roll=31)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "8bcddb24-17ab-46ca-a8c1-77c6e4fe2ac8",
"metadata": {},
"outputs": [],
"source": [
"add_tenor(dt(2001, 9, 28), \"-6m\", modifier=\"MF\", calendar=\"LDN\", roll=29)"
]
},
{
"cell_type": "markdown",
"id": "32a400ca-6e1c-4f0d-880f-bf256e6ce776",
"metadata": {},
"source": [
"# Associated Settlement Calendars"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "f38317db-2936-496e-a29e-3a7a5fcaa6b2",
"metadata": {},
"outputs": [],
"source": [
"tgt_and_nyc = get_calendar(\"tgt,nyc\")\n",
"tgt_and_nyc.add_bus_days(dt(2009, 11, 10), 2, True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "adc402b7-de57-4bc8-afb0-652a61c150d9",
"metadata": {},
"outputs": [],
"source": [
"tgt_plus_nyc_settle = get_calendar(\"tgt|nyc\")\n",
"tgt_plus_nyc_settle.add_bus_days(dt(2009, 11, 10), 2, True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "79f4e7e4-c5ed-4d1e-ad55-669838a1b2ff",
"metadata": {},
"outputs": [],
"source": [
"tgt_plus_nyc_settle.add_bus_days(dt(2009, 11, 10), 1, settlement=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "90585e34-914d-4292-a6e8-9d146ba432de",
"metadata": {},
"outputs": [],
"source": [
"tgt_plus_nyc_settle.add_bus_days(dt(2009, 11, 10), 1, settlement=False)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.0"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
================================================
FILE: notebooks/coding_2/Cookbook.ipynb
================================================
{
"cells": [
{
"cell_type": "markdown",
"id": "a731e2a1-7df1-4627-87a4-eece8b11f3ec",
"metadata": {},
"source": [
"# Turns"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b19074cb-c470-4da9-8c03-5db7f134bd4d",
"metadata": {},
"outputs": [],
"source": [
"from rateslib import *"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "bba57742-a6f2-4c05-b221-6af17deab2bf",
"metadata": {},
"outputs": [],
"source": [
"curve = Curve(\n",
" nodes={dt(2022, 12, 1): 1.0, dt(2023, 2, 1): 1.0}, \n",
" interpolation=\"log_linear\"\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e7d887d4-7ed4-477e-b64b-08707dd27d27",
"metadata": {},
"outputs": [],
"source": [
"curve = Curve({\n",
" dt(2022, 12, 1): 1.0,\n",
" dt(2022, 12, 31): 1.0,\n",
" dt(2023, 1, 1): 1.0,\n",
" dt(2023, 2, 1): 1.0,\n",
"}, interpolation=\"log_linear\")\n",
"instruments = [\n",
" IRS(dt(2022, 12, 1), \"1d\", \"A\", curves=curve),\n",
" Spread(\n",
" IRS(dt(2022, 12, 30), \"1d\", \"A\", curves=curve),\n",
" IRS(dt(2022, 12, 31), \"1d\", \"A\", curves=curve),\n",
" ),\n",
" Spread(\n",
" IRS(dt(2022, 12, 31), \"1d\", \"A\", curves=curve),\n",
" IRS(dt(2023, 1, 1), \"1d\", \"A\", curves=curve),\n",
" ), \n",
"]\n",
"solver = Solver(curves=[curve], instruments=instruments, s=[0.0, -0.5, 0.5])"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ba791a79-f913-44ff-8631-c4c65b9a9a28",
"metadata": {},
"outputs": [],
"source": [
"instruments = [\n",
" IRS(dt(2022, 12, 1), \"1d\", \"A\", curves=curve),\n",
" Spread(\n",
" IRS(dt(2022, 12, 30), \"1d\", \"A\", curves=curve),\n",
" IRS(dt(2022, 12, 31), \"1d\", \"A\", curves=curve),\n",
" ),\n",
" IRS(dt(2023, 1, 1), \"1d\", \"A\", curves=curve),\n",
"]\n",
"solver = Solver(curves=[curve], instruments=instruments, s=[0.0, -50.0, 0.0])"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c662bd2b-118c-4016-9c24-d8141d5c3a2c",
"metadata": {},
"outputs": [],
"source": [
"curve.plot(\"1b\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "93a4f1b0-cacc-4e3c-933f-6305b21b3d0f",
"metadata": {},
"outputs": [],
"source": [
"linecurve = LineCurve({\n",
" dt(2022, 12, 1): 0.0,\n",
" dt(2022, 12, 31): -50.0,\n",
" dt(2023, 1, 1): 0.0,\n",
"}, interpolation=\"flat_forward\")\n",
"instruments = [\n",
" Value(dt(2022, 12, 1), curves=linecurve),\n",
" Value(dt(2022, 12, 31), curves=linecurve),\n",
" Value(dt(2023, 1, 1), curves=linecurve),\n",
"]\n",
"solver = Solver(curves=[linecurve], instruments=instruments, s=[0.0, -0.5, 0.0])\n",
"linecurve.plot(\"1b\", right=dt(2023, 2, 1))"
]
},
{
"cell_type": "markdown",
"id": "596da5c8-04c9-4668-a8ec-755c788e5d77",
"metadata": {},
"source": [
"### Injecting turns to spline curves"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "2545ff72-0a2c-46e3-a1bb-ab0ef94f211d",
"metadata": {},
"outputs": [],
"source": [
"turn_curve = Curve({\n",
" dt(2022, 12, 1): 1.0,\n",
" dt(2022, 12, 31): 1.0,\n",
" dt(2023, 1, 1): 1.0,\n",
" dt(2023, 2, 1): 1.0,\n",
"}, interpolation=\"log_linear\")\n",
"cubic_curve = Curve({\n",
" dt(2022, 12, 1): 1.0,\n",
" dt(2022, 12, 21): 1.0,\n",
" dt(2023, 1, 11): 1.0,\n",
" dt(2023, 2, 1): 1.0,\n",
"}, t = [\n",
" dt(2022, 12, 1), dt(2022, 12, 1), dt(2022, 12, 1), dt(2022, 12, 1),\n",
" dt(2022, 12, 21),\n",
" dt(2023, 1, 11),\n",
" dt(2023, 2, 1), dt(2023, 2, 1), dt(2023, 2, 1), dt(2023, 2, 1),\n",
"])\n",
"composite_curve = CompositeCurve([turn_curve, cubic_curve])\n",
"instruments = [\n",
" IRS(dt(2022, 12, 1), \"1d\", \"A\", curves=turn_curve),\n",
" Spread(\n",
" IRS(dt(2022, 12, 30), \"1d\", \"A\", curves=turn_curve),\n",
" IRS(dt(2022, 12, 31), \"1d\", \"A\", curves=turn_curve),\n",
" ),\n",
" IRS(dt(2023, 1, 1), \"1d\", \"A\", curves=turn_curve),\n",
" IRS(dt(2022, 12, 1), \"20d\", \"A\", curves=composite_curve),\n",
" IRS(dt(2022, 12, 21), \"20d\", \"A\", curves=composite_curve),\n",
" IRS(dt(2023, 1, 11), \"18d\", \"A\", curves=composite_curve),\n",
"]\n",
"solver = Solver(\n",
" curves=[turn_curve, cubic_curve, composite_curve], \n",
" instruments=instruments, \n",
" s=[0.0, -50.0, 0.0, 2.01, 2.175, 2.35],\n",
" instrument_labels=[\"zero1\", \"turn\", \"zero2\", \"irs1\", \"irs2\", \"irs3\"],\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "f418e922-efc7-4eb6-a715-46a5b12ae319",
"metadata": {},
"outputs": [],
"source": [
"composite_curve.plot(\"1b\")"
]
},
{
"cell_type": "markdown",
"id": "a95663da-9904-4d57-9990-ffb3f9509672",
"metadata": {},
"source": [
"### Irrational turns on tenor curves"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e94c4fbc-7a59-46bc-9770-3c115c82c298",
"metadata": {},
"outputs": [],
"source": [
"turn_curve = LineCurve({\n",
" dt(2022, 9, 15): 0.0,\n",
" dt(2022, 10, 1): -0.20,\n",
" dt(2023, 1, 1): 0.0,\n",
"}, interpolation=\"flat_forward\")\n",
"fading_turn_curve = LineCurve({\n",
" dt(2022, 9, 15): 0.0,\n",
" dt(2022, 9, 30): 0.0,\n",
" dt(2022, 10, 1): -0.20,\n",
" dt(2022, 12, 31): -0.04,\n",
" dt(2023, 1, 1): 0.0,\n",
" dt(2023, 3, 15): 0.0,\n",
"}, interpolation=\"linear\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "322b92ab-e10f-47b0-8847-5ae6e0eb5768",
"metadata": {},
"outputs": [],
"source": [
"line_curve = LineCurve({\n",
" dt(2022, 9, 15): 1.0,\n",
" dt(2022, 12, 15): 1.0,\n",
" dt(2023, 3, 15): 1.0,\n",
"}, interpolation=\"linear\")\n",
"composite_curve=CompositeCurve([fading_turn_curve, line_curve], id=\"cc\")\n",
"instruments = [\n",
" Value(dt(2022, 9, 15), curves=fading_turn_curve),\n",
" Value(dt(2022, 9, 30), curves=fading_turn_curve),\n",
" Value(dt(2022, 10, 1), curves=fading_turn_curve),\n",
" Value(dt(2022, 12, 31), curves=fading_turn_curve),\n",
" Value(dt(2023, 1, 1), curves=fading_turn_curve),\n",
" Value(dt(2023, 3, 15), curves=fading_turn_curve),\n",
" Value(dt(2022, 9, 15), curves=composite_curve),\n",
" Value(dt(2022, 12, 15), curves=composite_curve),\n",
" Value(dt(2023, 3, 15), curves=composite_curve),\n",
"]\n",
"solver = Solver(\n",
" curves=[fading_turn_curve, line_curve, composite_curve], \n",
" instruments=instruments, \n",
" s=[0.0, 0.0, -0.2, -0.04, 0.0, 0.0, 3.5, 3.7, 4.05],\n",
" instrument_labels=[\"zero1\", \"zero2\", \"turnA\", \"turnB\", \"zero3\", \"zero4\", \"fra1\", \"fra2\", \"fra3\"],\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "de87d60c-351e-4e19-b380-a4d24b97b956",
"metadata": {},
"outputs": [],
"source": [
"composite_curve.plot(\"1b\")"
]
},
{
"cell_type": "markdown",
"id": "5b810412-99f3-4192-8b67-a3beeee18fb4",
"metadata": {},
"source": [
"# Analysing roll on trade strategies"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "74f4c34a-ff5d-4c34-9fa1-ad208e13d8cf",
"metadata": {},
"outputs": [],
"source": [
"curve = Curve(\n",
" nodes={\n",
" dt(2024, 1, 1): 1.0,\n",
" dt(2025, 1, 1): 0.96,\n",
" dt(2026, 1, 1): 0.935,\n",
" dt(2027, 1, 1): 0.915,\n",
" },\n",
" convention=\"act360\",\n",
" t=[\n",
" dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1),\n",
" dt(2025, 1, 1), dt(2026, 1, 1),\n",
" dt(2027, 1, 1), dt(2027, 1, 1), dt(2027, 1, 1), dt(2027, 1, 1)\n",
" ],\n",
")\n",
"irs = IRS(\n",
" effective=dt(2024, 1, 1),\n",
" termination=\"18m\",\n",
" spec=\"usd_irs\",\n",
")\n",
"irs.rate(curve)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "179e8bac-63d9-4eef-a61d-25d284dd7a76",
"metadata": {},
"outputs": [],
"source": [
"irs.rate(curve.roll(\"6w\"))"
]
},
{
"cell_type": "markdown",
"id": "066f8cf4-47e3-46cf-adff-8487abbf3e17",
"metadata": {},
"source": [
"# Stepping underspecified Curves on central bank effective dates"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "118da85c-15bb-46ff-902a-c2cd40893073",
"metadata": {},
"outputs": [],
"source": [
"curve = Curve(\n",
" nodes={\n",
" dt(2024, 1, 31): 1.00, dt(2024, 2, 2): 1.00, dt(2024, 3, 13): 1.00, \n",
" dt(2024, 4, 17): 1.0, dt(2024, 6, 12): 1.0, dt(2024, 7, 24): 1.0,\n",
" dt(2024, 9, 18): 1.0, dt(2024, 10, 23): 1.0, dt(2024, 12, 18): 1.0,\n",
" dt(2025, 1, 29): 1.0, dt(2025, 7, 31): 1.0,\n",
" },\n",
" convention=\"act360\", interpolation=\"log_linear\", calendar=\"tgt\", id=\"estr\",\n",
")\n",
"instruments = [\n",
" IRS(dt(2024, 1, 31), \"1b\", spec=\"eur_irs\", curves=\"estr\"), # O/N rate\n",
" IRS(dt(2024, 2, 2), dt(2024, 3, 13), spec=\"eur_irs\", curves=\"estr\"), # MPC\n",
" IRS(dt(2024, 3, 13), dt(2024, 4, 17), spec=\"eur_irs\", curves=\"estr\"), # MPC\n",
" IRS(dt(2024, 3, 20), dt(2024, 6, 19), spec=\"eur_irs\", curves=\"estr\"), # IMM\n",
" IRS(dt(2024, 6, 19), dt(2024, 9, 18), spec=\"eur_irs\", curves=\"estr\"), # IMM\n",
" IRS(dt(2024, 9, 18), dt(2024, 12, 18), spec=\"eur_irs\", curves=\"estr\"), # IMM\n",
" IRS(dt(2024, 12, 18), dt(2025, 3, 19), spec=\"eur_irs\", curves=\"estr\"), # IMM\n",
"]\n",
"pps = [ # policy periods\n",
" IRS(dt(2024, 2, 2), dt(2024, 3, 13), spec=\"eur_irs\", curves=\"estr\"), # MPC\n",
" IRS(dt(2024, 3, 13), dt(2024, 4, 17), spec=\"eur_irs\", curves=\"estr\"), # MPC\n",
" IRS(dt(2024, 4, 17), dt(2024, 6, 12), spec=\"eur_irs\", curves=\"estr\"), # MPC\n",
" IRS(dt(2024, 6, 12), dt(2024, 7, 24), spec=\"eur_irs\", curves=\"estr\"), # MPC\n",
" IRS(dt(2024, 7, 24), dt(2024, 9, 18), spec=\"eur_irs\", curves=\"estr\"), # MPC\n",
" IRS(dt(2024, 9, 18), dt(2024, 10, 2), spec=\"eur_irs\", curves=\"estr\"), # MPC\n",
" IRS(dt(2024, 10, 23), dt(2024, 12, 18), spec=\"eur_irs\", curves=\"estr\"), # MPC\n",
" IRS(dt(2024, 12, 18), dt(2025, 1, 29), spec=\"eur_irs\", curves=\"estr\"), # MPC\n",
" IRS(dt(2025, 1, 29), dt(2025, 3, 15), spec=\"eur_irs\", curves=\"estr\"), # MPC\n",
"]\n",
"curvature = [\n",
" Fly(pps[2], pps[3], pps[4]), \n",
" Fly(pps[4], pps[5], pps[6]), \n",
" Fly(pps[6], pps[7], pps[8]),\n",
"]\n",
"solver = Solver(\n",
" curves=[curve],\n",
" instruments=instruments+curvature,\n",
" weights=[1.0] * 7 + [1e-8] * 3,\n",
" s=[3.899, 3.904, 3.859, 3.692, 3.215, 2.725, 2.37] + [0.0] * 3,\n",
" instrument_labels=[\n",
" \"depo\", \"1r\", \"2r\", \"1f\", \"2f\", \"3f\", \"4f\", \"cv0\", \"cv1\", \"cv2\"\n",
" ],\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "bc6cdbaa-255c-4c33-b163-0d2a14a0f57a",
"metadata": {},
"outputs": [],
"source": [
"curve.plot(\"1b\")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.4"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
================================================
FILE: notebooks/coding_2/CurveSolving.ipynb
================================================
{
"cells": [
{
"cell_type": "markdown",
"id": "95844b55-1388-4bed-ae0e-63dd3296d868",
"metadata": {},
"source": [
"### This chapter on Curve Solving has no actionable 'rateslib' code listing"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.4"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
================================================
FILE: notebooks/coding_2/Curves.ipynb
================================================
{
"cells": [
{
"cell_type": "markdown",
"id": "f8825706-c252-40d7-8075-b438f5756093",
"metadata": {},
"source": [
"# Curves\n",
"\n",
"### CompositeCurve example\n",
"\n",
"The first section here regards efficient operations and compositing two curves."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c88c3ce0-72f1-4182-a6c0-36209ccc9954",
"metadata": {},
"outputs": [],
"source": [
"from rateslib import dt, defaults\n",
"from rateslib.curves import Curve, LineCurve, CompositeCurve, MultiCsaCurve"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "9cbc5699-fa68-46cd-8e75-3752d078977c",
"metadata": {},
"outputs": [],
"source": [
"line_curve1 = LineCurve({dt(2022, 1, 1): 2.0, dt(2022, 1, 3): 4.0}, id=\"C1_\")\n",
"line_curve2 = LineCurve({dt(2022, 1, 1): 0.5, dt(2022, 1, 3): 1.0}, id=\"C2_\")\n",
"composite_curve = CompositeCurve(curves=(line_curve1, line_curve2))\n",
"composite_curve.rate(dt(2022, 1, 2))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "fd5f49ac-ed99-4422-844a-13c657b823f1",
"metadata": {},
"outputs": [],
"source": [
"line_curve1._set_ad_order(1)\n",
"line_curve2._set_ad_order(1)\n",
"composite_curve.rate(dt(2022, 1, 2))"
]
},
{
"cell_type": "markdown",
"id": "8cb305f5-19a5-46b6-a9f2-82a2bd1f6592",
"metadata": {},
"source": [
"The code above demonstrates the summing of individual rates and of interoperability with Dual datatypes.\n",
"\n",
"Below measures rate lookup."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "cd18dd64-5f2c-47ed-8039-284be1c8fc33",
"metadata": {},
"outputs": [],
"source": [
"defaults.curve_caching = False\n",
"\n",
"composite_curve = CompositeCurve(\n",
" (\n",
" Curve({dt(2022, 1, 1): 1.0, dt(2024, 1, 1): 0.95}, id=\"C1_\"),\n",
" Curve({dt(2022, 1, 1): 1.0, dt(2024, 1, 1): 0.99}, id=\"C2_\"),\n",
" )\n",
")\n",
"%timeit composite_curve.rate(dt(2022, 6, 1), \"1y\") "
]
},
{
"cell_type": "markdown",
"id": "ef56982a-2ffc-45f5-9c2b-08517f22f026",
"metadata": {},
"source": [
"### MultiCsaCurve"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ec3bb527-c507-43bf-ad2e-e744ad40e351",
"metadata": {},
"outputs": [],
"source": [
"c1 = Curve({dt(2022, 1, 1): 1.0, dt(2052, 1, 1): 0.5})\n",
"c2 = Curve({dt(2022, 1, 1): 1.0, dt(2032, 1, 1): 0.4, dt(2052, 1, 1):0.39}) \n",
"mcc = MultiCsaCurve([c1, c2])\n",
"\n",
"%timeit c2[dt(2052, 1, 1)]"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "78b70fb7-d04d-478c-8509-0e1f5c42573f",
"metadata": {},
"outputs": [],
"source": [
"%timeit mcc[dt(2052, 1, 1)]"
]
},
{
"cell_type": "markdown",
"id": "b658689c-65f2-4aae-992a-7fbf61f5d2c4",
"metadata": {},
"source": [
"### Error in approximated rates and execution time"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "973e0754-edfc-42ce-9d0c-d2272c69465f",
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"MIN, MAX, SAMPLES, DAYS, d = 0, 4, 100000, 3, 1.0/365\n",
"c1 = np.random.rand(DAYS, SAMPLES) * (MAX - MIN) + MIN\n",
"c2 = np.random.rand(DAYS, SAMPLES) * (MAX - MIN) + MIN\n",
"r_true=((1 + d * (c1 + c2) / 100).prod(axis=0) - 1) * 100 / (d * DAYS)\n",
"c1_bar = ((1 + d * c1 / 100).prod(axis=0)**(1/DAYS) - 1) * 100 / d\n",
"c2_bar = ((1 + d * c2 / 100).prod(axis=0)**(1/DAYS) - 1) * 100 / d\n",
"r_bar = ((1 + d * (c1_bar + c2_bar) / 100) ** DAYS - 1) * 100 / (d * DAYS)\n",
"np.histogram(np.abs(r_true-r_bar), bins=[0, 5e-7, 1e-6, 5e-6, 1e-5, 5e-5, 1]) "
]
},
{
"cell_type": "markdown",
"id": "5f2769bb-0f25-4d5e-996f-5684e1f18a26",
"metadata": {},
"source": [
"### Curve operations: shift"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "64caaec4-072a-4dd5-a9ef-ac4b95852a7f",
"metadata": {},
"outputs": [],
"source": [
"curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}, convention=\"Act365F\", id=\"v\", ad=1)\n",
"curve.rate(dt(2022, 6, 1), \"1b\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "90928588-dc74-4886-a044-8d6f69b9cfcf",
"metadata": {},
"outputs": [],
"source": [
"shifted_curve = curve.shift(50)\n",
"shifted_curve.rate(dt(2022, 6, 1), \"1b\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b488431e-7e87-4195-9cf0-82f10a3d9bd0",
"metadata": {},
"outputs": [],
"source": [
"type(shifted_curve)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "9803bfc3-52ce-4480-a972-a6aee2f9f100",
"metadata": {},
"outputs": [],
"source": [
"%timeit curve.rate(dt(2022, 6, 1), \"1b\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d66ee277-43e7-4f5a-9d2f-2c72c6e76a15",
"metadata": {},
"outputs": [],
"source": [
"%timeit shifted_curve.rate(dt(2022, 6, 1), \"1b\")"
]
},
{
"cell_type": "markdown",
"id": "327672ae-28af-4e15-bfe1-0b5a52cedcc8",
"metadata": {},
"source": [
"### Curve operations: roll"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "6e4e86f8-ff29-48bb-a7f7-9985aa2f0748",
"metadata": {},
"outputs": [],
"source": [
"curve = Curve(\n",
" nodes={dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98, dt(2024, 1, 1): 0.97},\n",
" t=[dt(2022, 1, 1), dt(2022, 1, 1), dt(2022, 1, 1), dt(2022, 1, 1),\n",
" dt(2023, 1, 1),\n",
" dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1)]\n",
")\n",
"print(curve.rate(dt(2022, 6, 1), \"1d\"))\n",
"print(curve.roll(\"30d\").rate(dt(2022, 7, 1), \"1d\"))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d4e5fc59-aa88-48ec-a7f6-e5d13b10b1f3",
"metadata": {},
"outputs": [],
"source": [
"line_curve = LineCurve(\n",
" nodes={dt(2022, 1, 1): 2.0, dt(2023, 1, 1): 2.6, dt(2024, 1, 1): 2.5},\n",
" t=[dt(2022, 1, 1), dt(2022, 1, 1), dt(2022, 1, 1), dt(2022, 1, 1),\n",
" dt(2023, 1, 1),\n",
" dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1)]\n",
")\n",
"print(line_curve.rate(dt(2022, 6, 1)))\n",
"print(line_curve.roll(\"-31d\").rate(dt(2022, 5, 1), \"1d\"))"
]
},
{
"cell_type": "markdown",
"id": "44ebfa6c-72ee-473e-9299-e1727a8884b7",
"metadata": {},
"source": [
"### Curve operations: translate"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "bdf0aca9-39e0-406b-9d64-9bfcfc9ffed6",
"metadata": {},
"outputs": [],
"source": [
"for interpolation in [\n",
" \"linear\", \"log_linear\", \"linear_index\", \"flat_forward\", \"flat_backward\", \"linear_zero_rate\"\n",
"]:\n",
" curve = Curve(\n",
" nodes={dt(2022, 1, 1): 1.0, dt(2022, 2, 1):0.998, dt(2022, 3, 1): 0.995}, \n",
" interpolation=interpolation\n",
" )\n",
" curve_translated = curve.translate(dt(2022, 1, 15)) \n",
" print(\n",
" curve.rate(dt(2022, 2, 15), \"1d\"),\n",
" curve_translated.rate(dt(2022, 2, 15), \"1d\") \n",
" )"
]
},
{
"cell_type": "markdown",
"id": "ee9951a7-1eea-4255-9e5c-2a4818983598",
"metadata": {},
"source": [
"### Operations on CompositeCurves"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "8fff533b-4ee2-4405-b6e3-bdf4fe53aadf",
"metadata": {},
"outputs": [],
"source": [
"composite_curve.rate(dt(2022, 6, 1), \"1d\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "53409cb5-9512-43f1-8e9c-cb1886ed1f6e",
"metadata": {},
"outputs": [],
"source": [
"composite_curve.shift(50).rate(dt(2022, 6, 1), \"1d\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "89680e78-5318-4b56-af1d-8be7dae90ca4",
"metadata": {},
"outputs": [],
"source": [
"composite_curve.roll(\"30d\").rate(dt(2022, 7, 1), \"1d\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b23d6958-903a-4442-98a7-d5585fe4d56c",
"metadata": {},
"outputs": [],
"source": [
"composite_curve.translate(dt(2022, 5, 1)).rate(dt(2022, 6, 1), \"1d\")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.4"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
================================================
FILE: notebooks/coding_2/FXRates.ipynb
================================================
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "48397019-8e34-4802-9f82-eba040e083fd",
"metadata": {},
"outputs": [],
"source": [
"from rateslib import FXRates, FXForwards, Dual, dt, Curve, gradient\n",
"import numpy as np"
]
},
{
"cell_type": "markdown",
"id": "26a71ddc-8f7a-4b70-a032-e80abfeded61",
"metadata": {},
"source": [
"# Defined FXRates Systems - Errors"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "9562c03a-5c29-4260-8470-392cc7ba21c1",
"metadata": {},
"outputs": [],
"source": [
"try:\n",
" FXRates(fx_rates={\"usdeur\": 0.9, \"noksek\": 1.10})\n",
"except ValueError as e:\n",
" print(e)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "0d5680fe-26fc-4b37-babe-9b6156195eac",
"metadata": {},
"outputs": [],
"source": [
"try:\n",
" FXRates(fx_rates={\"usdeur\": 0.9, \"gbpusd\": 1.10, \"eurgbp\": 1.124})\n",
"except ValueError as e:\n",
" print(e)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "8e89f984-8ae4-490a-8dd1-dad6e72159bb",
"metadata": {},
"outputs": [],
"source": [
"try:\n",
" FXRates ( fx_rates ={\" usdeur \": 0.90 , \" eurusd \": 1.11 , \" noksek \": 1.10})\n",
"except ValueError as e:\n",
" print(e)"
]
},
{
"cell_type": "markdown",
"id": "79afcdf3-34ca-440b-94c1-85d58ad8303c",
"metadata": {},
"source": [
"# FXRates Array"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "9de92e9f-a7cd-4701-9900-510ba9d72cba",
"metadata": {},
"outputs": [],
"source": [
"fxr = FXRates({\"usdeur\": 2.0, \"usdgbp\": 2.5})\n",
"from rateslib.dual.utils import _dual_float\n",
"np.reshape([_dual_float(_) for _ in fxr.fx_array.ravel()], (3,3))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e4595d60-9093-410c-a256-49e5faab4bc1",
"metadata": {},
"outputs": [],
"source": [
"fxr.rate(\"eurgbp\")"
]
},
{
"cell_type": "markdown",
"id": "7512e613-2166-4cdb-a48e-648ebb47fcfe",
"metadata": {},
"source": [
"# Representation via Dual"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e9e9569b-2de9-49bb-8978-e6421721768f",
"metadata": {},
"outputs": [],
"source": [
"1e6 * (1/8.0)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "4d5d77d6-8b33-4b1b-8153-fa16531149f1",
"metadata": {},
"outputs": [],
"source": [
"fxr = FXRates({\"usdnok\": 8.0})\n",
"fxr.convert(1e6, \"nok\", \"usd\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "9ba025b2-e121-4875-8d93-daebc796e967",
"metadata": {},
"outputs": [],
"source": [
"fxr._set_ad_order(2)\n",
"fxr.convert(1e6, \"nok\", \"usd\")"
]
},
{
"cell_type": "markdown",
"id": "40b16a07-dd1d-46e3-a3cc-413dd874fe6f",
"metadata": {},
"source": [
"# Cash positions and base value"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "88061cf6-1fca-4171-87bc-a1f1c1b20819",
"metadata": {},
"outputs": [],
"source": [
"fxr = FXRates({\"usdnok\": 8.0})\n",
"fxr.currencies"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "f4fa9823-f17a-4c2d-b4fc-cb7ea8ce213f",
"metadata": {},
"outputs": [],
"source": [
"# convert cash positions into an aggregated NOK value\n",
"base_nok_value = fxr . convert_positions ([0 , 1000000] , \"nok\")\n",
"base_nok_value"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "8fec05c6-f5c2-4e39-957e-eacb87b6a323",
"metadata": {},
"outputs": [],
"source": [
"# Convert cash positions into an aggregated USD value\n",
"base_usd_value = fxr.convert_positions ([0 , 1000000] , \"usd\")\n",
"base_usd_value"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "058b7041-784d-44ab-8224-6b529e7d8a18",
"metadata": {},
"outputs": [],
"source": [
"# Convert an aggregated USD value back to cash positions\n",
"positions = fxr.positions(base_usd_value , \"usd\")\n",
"positions"
]
},
{
"cell_type": "markdown",
"id": "d6a071c6-9629-47f5-90cd-e12c9f1d363c",
"metadata": {},
"source": [
"### Introducing additional currency exposures"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "a3929f34-f705-4bfa-baaf-e4fe39d10360",
"metadata": {},
"outputs": [],
"source": [
"fxr = FXRates ({\"usdeur\": 0.9 , \"eurnok \": 8.888889})\n",
"fxr.currencies"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "132afa56-41cf-4832-a72f-f42e1bc2af69",
"metadata": {},
"outputs": [],
"source": [
"base_value = fxr.convert_positions ([0 , 0, 1000000] , \"usd\")\n",
"base_value"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d8ce0e36-9e05-444f-a4a9-5e5f7194cf0c",
"metadata": {},
"outputs": [],
"source": [
"positions = fxr.positions(base_value, \"usd\")\n",
"positions"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "75a8e694-06de-49d5-ac54-fc6cfc766058",
"metadata": {},
"outputs": [],
"source": [
"base_usd_value = Dual(125000 , [\"fx_usdnok\"], [-15625])\n",
"positions = fxr.positions(base_usd_value, \"usd\")\n",
"positions"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e310fc72-cd0b-4ec2-accd-fa8bc9d864bf",
"metadata": {},
"outputs": [],
"source": [
"fxr.convert_positions(positions, \"usd\")"
]
},
{
"cell_type": "markdown",
"id": "fb72ccf5-49c2-49a8-80f5-f66e2da6c800",
"metadata": {},
"source": [
"### Re-expression in Majors or Crosses"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e8c647e5-387b-4da2-962d-57b6b6ec6edd",
"metadata": {},
"outputs": [],
"source": [
"fxr_crosses = FXRates({\"eurusd\": 1.0 , \"gbpjpy\": 100 , \"eurjpy\": 100})\n",
"fxr_crosses.convert(1, \"usd\", \"jpy\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "8d88c1c0-b5d4-4021-b9ca-36efd17e5710",
"metadata": {},
"outputs": [],
"source": [
"fxr_majors = fxr_crosses.restate ([\"eurusd\", \"usdjpy\", \"gbpusd\"])\n",
"fxr_majors.convert(1, \"usd\", \"jpy\")"
]
},
{
"cell_type": "markdown",
"id": "8726169a-e1e2-4f75-a5c0-4dc83f37aa02",
"metadata": {},
"source": [
"# FXForwards"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "8256869c-9b79-4018-9793-65d9f067c464",
"metadata": {},
"outputs": [],
"source": [
"fx_rates = FXRates ({\"usdeur\": 0.9 , \"eurnok\": 8.888889} , dt(2022, 1, 3))\n",
"fx_curves = {\n",
" # local currency curves first\n",
" \"usdusd\": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.96}),\n",
" \"eureur\": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}),\n",
" \"noknok\": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}),\n",
" # cross - currency collateral curves next\n",
" \"eurusd\": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.991}) ,\n",
" \"nokeur\": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.978}) ,\n",
"}\n",
"fxf = FXForwards(fx_rates, fx_curves)\n",
"fxf.rate(\"usdnok\", dt(2022, 8, 15))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "308b549d-6c64-4c50-bf85-71cbdb8e838d",
"metadata": {},
"outputs": [],
"source": [
"fxf.currencies"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c86ef618-595e-4690-9849-70a86af32a02",
"metadata": {},
"outputs": [],
"source": [
"# Paths are expressed by indexed currencies: 1 = \"EUR\"\n",
"fxf._paths"
]
},
{
"cell_type": "markdown",
"id": "1c64ab97-058b-4e16-bfa0-6238d36c0a60",
"metadata": {},
"source": [
"### Equivalence of Delta Risk"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "14ba7ac1-3597-455d-b638-16dc75b62155",
"metadata": {},
"outputs": [],
"source": [
"fx_rates = FXRates({\"usdeur\": 0.9, \"eurnok\": 8.888889}, dt(2022 , 1, 3))\n",
"start, end = dt(2022, 1, 1), dt(2023, 1, 1)\n",
"fx_curves = {\n",
" \"usdusd\": Curve({start: 1.0 , end: 0.96}, id=\"uu\", ad=1) ,\n",
" \"eureur\": Curve({start: 1.0 , end: 0.99}, id=\"ee\", ad=1) ,\n",
" \"eurusd\": Curve({start: 1.0 , end: 0.991}, id=\"eu\", ad=1) ,\n",
" \"noknok\": Curve({start: 1.0 , end: 0.98}, id=\"nn\", ad=1) ,\n",
" \"nokeur\": Curve({start: 1.0 , end: 0.978}, id=\"ne\", ad=1) ,\n",
"}\n",
"fxf = FXForwards(fx_rates, fx_curves)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "a307d877-f53e-4d1c-bdfd-3910dbcf044e",
"metadata": {},
"outputs": [],
"source": [
"discounted_nok = fx_curves[\"nokeur\"][dt(2022, 8, 15)] * 1000\n",
"base_value_1 = discounted_nok * fxf.rate(\"nokusd\", dt(2022 , 1, 1))\n",
"base_value_1"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "feb226d6-fb62-4b50-b000-1dd7d986c5ad",
"metadata": {},
"outputs": [],
"source": [
"gradient(base_value_1, [\"uu1\", \"ee1\", \"eu1\", \"nn1\", \"ne1\", \"fx_usdeur\", \"fx_eurnok\"])"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ff513fb7-82ac-4ab7-a0b9-db13522b1052",
"metadata": {},
"outputs": [],
"source": [
"forward_eur = fxf.rate(\"nokeur\", dt(2022, 8, 15)) * 1000\n",
"discounted_eur = forward_eur * fx_curves[\"eureur\"][dt(2022, 8, 15)]\n",
"base_value_2 = discounted_eur * fxf.rate(\"eurusd\", dt(2022, 1, 1))\n",
"base_value_2"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "13543dab-b1c3-472f-a64d-641c8014de2b",
"metadata": {},
"outputs": [],
"source": [
"gradient(base_value_2, [\"uu1\", \"ee1\", \"eu1\", \"nn1\", \"ne1\", \"fx_usdeur\", \"fx_eurnok\"])"
]
},
{
"cell_type": "markdown",
"id": "e68ee47b-b557-4ad0-bfa8-61660922841c",
"metadata": {},
"source": [
"# Combining Settlement dates"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ce5bb6b0-aa86-41ba-bd60-8244ee9ded1a",
"metadata": {},
"outputs": [],
"source": [
"curve = Curve ({ dt (2000 , 1, 1): 1.0 , dt (2001 , 1, 1): 0.99})\n",
"fxr1 = FXRates ({\"eurusd\": 1.10 , \"gbpusd\": 1.30} , settlement =dt (2000 , 1, 1))\n",
"fxr2 = FXRates ({\"usdcad\": 1.05} , settlement =dt (2000 , 1, 2))\n",
"fxr3 = FXRates ({\"gbpjpy\": 100.0} , settlement =dt (2000 , 1, 3))\n",
"try:\n",
" fxf = FXForwards (\n",
" fx_curves ={\n",
" \"usdusd\": curve, \"eureur\": curve, \"gbpgbp\": curve,\n",
" \"jpyjpy\": curve, \"cadcad\": curve, \"usdjpy\": curve,\n",
" \"eurjpy\": curve, \"eurcad\": curve, \"gbpcad\": curve,\n",
" },\n",
" fx_rates =[fxr1, fxr2, fxr3]\n",
" )\n",
"except ValueError as e:\n",
" print(e)"
]
},
{
"cell_type": "markdown",
"id": "19c3f55b-6e41-4fea-b057-5c71d1457f38",
"metadata": {},
"source": [
"### Dual represenation"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "2cc50ab7-f56f-4f2a-bacb-d7c98604a854",
"metadata": {},
"outputs": [],
"source": [
"pv = Dual(100000 , [\"fx_eurusd\", \"fx_usdcad\"], [-100000 , 150000]) # base is USD"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "4eb92a49-fd50-4fb6-a23a-9111e5a6d5e1",
"metadata": {},
"outputs": [],
"source": [
"fxr1 = FXRates ({\"eurusd\": 1.05} , settlement=dt(2022, 1, 3))\n",
"fxr2 = FXRates ({\"usdcad\": 1.1} , settlement=dt(2022, 1, 2))\n",
"fxf = FXForwards (\n",
" fx_rates =[fxr1, fxr2],\n",
" fx_curves ={\n",
" \"usdusd\": Curve ({dt(2022, 1, 1): 1.0 , dt(2022, 2, 1): 0.999}) ,\n",
" \"eureur\": Curve ({dt(2022, 1, 1): 1.0 , dt(2022, 2, 1): 0.999}) ,\n",
" \"cadcad\": Curve ({dt(2022, 1, 1): 1.0 , dt(2022, 2, 1): 0.999}) ,\n",
" \"usdeur\": Curve ({dt(2022, 1, 1): 1.0 , dt(2022, 2, 1): 0.999}) ,\n",
" \"cadusd\": Curve ({dt(2022, 1, 1): 1.0 , dt(2022, 2, 1): 0.999}) ,\n",
" }\n",
")\n",
"fxf.positions(pv, base=\"usd\")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.0"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
================================================
FILE: notebooks/coding_2/FXVolatility.ipynb
================================================
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "477eccd6-a966-41f8-b6db-954a2e3a09b0",
"metadata": {},
"outputs": [],
"source": [
"from rateslib import *\n",
"from pandas import Series"
]
},
{
"cell_type": "markdown",
"id": "ffb96542-04f8-46cb-865f-4ebc8681cb93",
"metadata": {},
"source": [
"### Time Weighting for Volatility Surface"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "89f7bc83-a5e0-41be-8f6f-36cd9694a684",
"metadata": {},
"outputs": [],
"source": [
"fxv = FXDeltaVolSurface( \n",
" eval_date=dt(2024, 7, 25),\n",
" expiries=[dt(2024, 7, 30), dt(2024, 8, 5)], \n",
" delta_indexes=[0.5],\n",
" node_values =[[10.0] , [10.0]] , \n",
" weights=Series(0.1, index=[\n",
" dt(2024, 7, 27), dt(2024, 7, 28), dt(2024, 8, 3), dt(2024, 8, 4)]\n",
" ),\n",
" delta_type=\"forward\", \n",
")\n",
"print(fxv.meta.weights[dt(2024, 7, 25):dt(2024, 8, 5)])"
]
},
{
"cell_type": "markdown",
"id": "67cdacf9-ba83-4433-aada-76b489ba78f0",
"metadata": {},
"source": [
"### Sticky strike, sticky delta and Solver delta"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "9d3e0eef-4831-4536-a7ee-789db442a18d",
"metadata": {},
"outputs": [],
"source": [
"# Define Curves\n",
"usd = Curve({dt(2024, 5, 7): 1.0, dt(2024, 5, 30): 1.0}, calendar=\"nyc\", id=\"usd\") \n",
"eur = Curve({dt(2024, 5, 7): 1.0, dt(2024, 5, 30): 1.0}, calendar=\"tgt\", id=\"eur\") \n",
"eurusd = Curve({dt(2024, 5, 7): 1.0, dt(2024, 5, 30): 1.0}, id=\"eurusd\")\n",
"\n",
"# Create an FX Forward market with spot FX rate data\n",
"spot = dt(2024, 5, 9)\n",
"fxr = FXRates({\"eurusd\": 1.0760}, settlement=spot) \n",
"fxf = FXForwards(\n",
" fx_rates=fxr, \n",
" fx_curves={\"eureur\": eur, \"usdusd\": usd, \"eurusd\": eurusd},\n",
")\n",
"\n",
"# Solve the Curves to market\n",
"pre_solver = Solver(\n",
" curves=[eur, eurusd, usd], \n",
" instruments=[\n",
" IRS(spot, \"3W\", spec=\"eur_irs\", curves=\"eur\"),\n",
" IRS(spot, \"3W\", spec=\"usd_irs\", curves=\"usd\"),\n",
" FXSwap(spot, \"3W\", pair=\"eurusd\", curves=[None, \"eurusd\", None, \"usd\"]),\n",
" ],\n",
" s=[3.90, 5.32, 8.85], \n",
" fx=fxf,\n",
" id=\"fxf\",\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "98f143f4-adbc-40f0-a205-6291488cec5c",
"metadata": {},
"outputs": [],
"source": [
"# Define the Vol Smile\n",
"smile = FXSabrSmile(\n",
" nodes={\"alpha\": 0.05, \"beta\": 1.0, \"rho\": 0.01, \"nu\": 0.03}, \n",
" eval_date=dt(2024, 5, 7),\n",
" expiry=dt(2024, 5, 28),\n",
" id=\"smile\",\n",
" pair=\"eurusd\",\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "4aa1bb20-522a-4c49-9ebf-88f11f214d5d",
"metadata": {},
"outputs": [],
"source": [
"# Collect FXOption arguments\n",
"option_args = dict(\n",
" pair=\"eurusd\",\n",
" expiry=dt(2024, 5, 28), \n",
" calendar=\"tgt|fed\", \n",
" delta_type=\"spot\",\n",
" curves=[None, \"eurusd\", None, \"usd\"], \n",
" vol=\"smile\",\n",
")\n",
"# Calibrate the Smile to market option data\n",
"solver = Solver( \n",
" pre_solvers=[pre_solver], \n",
" curves=[smile],\n",
" instruments=[\n",
" FXStraddle(strike=\"atm_delta\", **option_args),\n",
" FXRiskReversal(strike=(\"-25d\", \"25d\"), **option_args),\n",
" FXRiskReversal(strike=(\"-10d\", \"10d\"), **option_args),\n",
" FXBrokerFly(strike=((\"-25d\", \"25d\"), \"atm_delta\"), **option_args),\n",
" FXBrokerFly(strike=((\"-10d\", \"10d\"), \"atm_delta\"), **option_args),\n",
" ],\n",
" s=[5.493, -0.157, -0.289, 0.071, 0.238],\n",
" fx=fxf,\n",
" id=\"smile\",\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "6205d317-42ab-41fa-9fc2-a1dd6cbd4a37",
"metadata": {},
"outputs": [],
"source": [
"fxc = FXCall(**option_args, notional=100e6, strike =1.07, premium=982144.59) # <-- mid-market premium giving zero NPV"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5cfde62c-8f0f-4c09-abd7-251dc617300f",
"metadata": {},
"outputs": [],
"source": [
"fxc.delta(solver=solver).loc[(\"fx\", \"fx\", \"eurusd\")]"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5215023d-bb18-45fd-abc6-e9d77da2e99d",
"metadata": {},
"outputs": [],
"source": [
"fxc.gamma(solver=solver).loc[(\"usd\", \"usd\", \"fx\", \"fx\", \"eurusd\"), (\"fx\", \"fx\", \"eurusd\")]"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "892be093-60d4-4054-a7e2-eb1da01a377b",
"metadata": {},
"outputs": [],
"source": [
"fxr.update({\"eurusd\": 1.0761})\n",
"pre_solver.iterate()\n",
"solver.iterate()\n",
"fxc.npv(solver=solver)"
]
},
{
"cell_type": "markdown",
"id": "2466685c-5b21-421e-a2db-3c78b7c46733",
"metadata": {},
"source": [
"### Sticky delta"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "86a640c0-fb02-41ab-855b-d8dd745fb5a8",
"metadata": {},
"outputs": [],
"source": [
"fxc.analytic_greeks(solver=solver)[\"delta_sticky\"]"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "7672a3fd-69ba-4e8e-907b-bd60a989a079",
"metadata": {},
"outputs": [],
"source": [
"fxc.analytic_greeks(solver=solver)[\"delta\"]"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "947fc271-0d9a-4050-9c1c-a0268b8625d0",
"metadata": {},
"outputs": [],
"source": [
"option_args = dict(\n",
" pair=\"eurusd\",\n",
" expiry=dt(2024, 5, 28), \n",
" calendar=\"tgt|fed\", \n",
" delta_type=\"forward\",\n",
" curves=[None, \"eurusd\", None, \"usd\"], \n",
" vol=\"smile\",\n",
")\n",
"fxc = FXCall(**option_args, notional=100e6, strike =1.07, premium=982144.59) # <-- mid-market premium giving zero NPV\n",
"fxc.analytic_greeks(solver=solver)[\"delta_sticky\"]"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "51e3b16d-d6b3-4183-bda5-374859727d73",
"metadata": {},
"outputs": [],
"source": [
"fxc.analytic_greeks(solver=solver)[\"delta\"]"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.4"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
================================================
FILE: notebooks/coding_2/Instruments.ipynb
================================================
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "07490ad9-c75c-403e-83d5-9f808360b49e",
"metadata": {},
"outputs": [],
"source": [
"from rateslib import FixedRateBond, dt, Bill, IndexFixedRateBond"
]
},
{
"cell_type": "markdown",
"id": "e8200863-fdbf-499f-82f0-88298eced48f",
"metadata": {},
"source": [
"# Bond analogue methods"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "42cba7c3-f7e0-43df-8b3b-0d9a8c5f2367",
"metadata": {},
"outputs": [],
"source": [
"bond = FixedRateBond (\n",
" effective=dt(2022, 1, 1) ,\n",
" termination=dt(2023, 1, 1) ,\n",
" fixed_rate=5.0,\n",
" spec =\"uk_gb\",\n",
")\n",
"bond.accrued(dt(2022, 4, 15))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "1725b831-3372-4100-a24e-2dc1a6b0b4d9",
"metadata": {},
"outputs": [],
"source": [
"bond = FixedRateBond (\n",
" effective=dt(2022, 1, 1) ,\n",
" termination=dt(2023, 1, 1) ,\n",
" fixed_rate=5.0,\n",
" spec =\"ca_gb\",\n",
")\n",
"bond.accrued(dt(2022, 4, 15))"
]
},
{
"cell_type": "markdown",
"id": "3cdcce1b-f71a-4104-82c8-f6cc60063409",
"metadata": {},
"source": [
"### YTM iteration"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3c943458-8b8b-4a67-9ea6-f6e31dbc8e2b",
"metadata": {},
"outputs": [],
"source": [
"bond = FixedRateBond (\n",
" effective=dt(2000 , 1, 1) , termination =dt(2010 , 1, 1) ,\n",
" fixed_rate=2.5 , spec=\"us_gb\"\n",
")\n",
"bond.ytm(95.0, settlement=dt(2000, 7, 1))\n",
"# ( -3.0000 , 2.0000 , 12.0000) - Initial interval requires 4 function evaluations\n",
"# (2.0000 , 3.2858 , 12.0000) - Second interval requires 1 function evaluation\n",
"# (2.0000 , 3.1063 , 3.2858) - Third interval requires 1 function evaluation\n",
"# (3.1063 , 3.1120 , 3.2858) - Fourth interval requires 1 function evaluation"
]
},
{
"cell_type": "markdown",
"id": "7abe6651-eb34-466f-9b5f-745da0b8dcb2",
"metadata": {},
"source": [
"# Bills"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "1c5d9f53-aea1-4307-836d-5338cff04346",
"metadata": {},
"outputs": [],
"source": [
"bill = Bill(\n",
" effective=dt(2023, 5, 17),\n",
" termination=dt(2023, 9, 26),\n",
" spec=\"us_gbb\"\n",
")\n",
"bill.ytm(99.75, settlement=dt(2023 , 9, 7))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5a24a850-47c8-41fd-b6a3-b5f7fd3806d7",
"metadata": {},
"outputs": [],
"source": [
"bond = FixedRateBond (\n",
" effective=dt(2023, 3, 26),\n",
" termination=dt(2023, 9, 26),\n",
" fixed_rate=0.0,\n",
" spec=\"us_gb\",\n",
")\n",
"bond.ytm(99.75, settlement=dt(2023, 9, 7))"
]
},
{
"cell_type": "markdown",
"id": "778325e0-6d64-4215-8218-1484fce9e643",
"metadata": {},
"source": [
"# Inflation Linked"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b4b0c470-fc27-47cd-a6ca-c8c839b12958",
"metadata": {},
"outputs": [],
"source": [
"ukt = FixedRateBond (\n",
" spec =\"uk_gb\",\n",
" effective =dt (2022 , 2, 1) ,\n",
" termination =\"2y\",\n",
" fixed_rate =2.5 ,\n",
")\n",
"ukt.price(ytm=3.0, settlement=dt(2023 , 10, 1))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5afffec3-c117-44bf-b4e5-7a027391cde9",
"metadata": {},
"outputs": [],
"source": [
"ukti = IndexFixedRateBond (\n",
" spec=\"uk_gbi\",\n",
" effective=dt(2022, 2, 1) ,\n",
" termination=\"2y\",\n",
" fixed_rate=2.5,\n",
" index_base=100.0,\n",
")\n",
"ukti.price(ytm=3.0, settlement=dt(2023 , 10, 1))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "13397bf0-a8e8-413f-a4e4-d1af737fa97e",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "e0e9a2c5-b929-4582-8ca5-40b38ca57562",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "456eef8f-0b15-4a51-a584-869c6d8c29d9",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.0"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
================================================
FILE: notebooks/coding_2/InterpolationAndSplines.ipynb
================================================
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "ac6cd685-e9ba-4813-ac98-c533012f10ed",
"metadata": {},
"outputs": [],
"source": [
"from rateslib import *\n",
"from rateslib.splines import evaluate"
]
},
{
"cell_type": "markdown",
"id": "e974b0e9-27b1-4df8-84e5-26805a44c22f",
"metadata": {},
"source": [
"# Splines and AD"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "aa34c83c-2f5a-42d8-9493-a2d5c638abdd",
"metadata": {},
"outputs": [],
"source": [
"pps = PPSplineDual(\n",
" k=3,\n",
" t=[0,0,0,4,4,4]\n",
")\n",
"pps.csolve(\n",
" tau=[1, 2, 3],\n",
" y=[\n",
" Dual(2.0, [\"y1\"], []),\n",
" Dual(1.0, [\"y2\"], []),\n",
" Dual(2.6, [\"y3\"], []),\n",
" ],\n",
" left_n=0,\n",
" right_n=0,\n",
" allow_lsq=False\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "f2626baa-e0f9-4161-98c4-c209636f9f34",
"metadata": {},
"outputs": [],
"source": [
"pps.ppev_single(3.5)"
]
},
{
"cell_type": "markdown",
"id": "12b89c6b-7f90-4c17-b373-c0e88709f2e8",
"metadata": {},
"source": [
"# Application to curves"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c733ce29-72df-4807-825a-4a3268d0a133",
"metadata": {},
"outputs": [],
"source": [
"spline = PPSplineF64(\n",
" k=4,\n",
" t=[_.timestamp() for _ in [\n",
" dt(2022, 1, 1), dt(2022, 1, 1), dt(2022, 1, 1), dt(2022, 1, 1),\n",
" dt(2023, 1, 1),\n",
" dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1)\n",
" ]]\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "29c3f337-4cbc-46e3-8ff5-e8e4b6cfc5a9",
"metadata": {},
"outputs": [],
"source": [
"spline.bsplmatrix(\n",
" tau=[_.timestamp() for _ in [\n",
" dt(2022, 1, 1), dt(2022, 1, 1), dt(2023, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1)\n",
" ]],\n",
" left_n=2,\n",
" right_n=2\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "17c307c9-b798-47ee-910e-55cf57becc14",
"metadata": {},
"outputs": [],
"source": [
"spline.csolve(\n",
" tau=[_.timestamp() for _ in [\n",
" dt(2022, 1, 1), dt(2022, 1, 1), dt(2023, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1)\n",
" ]],\n",
" y=[0.0, 1.5, 1.85, 1.80, 0.0],\n",
" left_n=2,\n",
" right_n=2,\n",
" allow_lsq=False,\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "aaa86c7a-d11d-45d6-815b-88c3220a55bb",
"metadata": {},
"outputs": [],
"source": [
"spline.c"
]
},
{
"cell_type": "markdown",
"id": "18c744d3-51a9-473b-99e3-1bb4c0b133a8",
"metadata": {},
"source": [
"# Log-spline to DFs"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "9b59245c-cf58-4a61-8603-ca751f0093cd",
"metadata": {},
"outputs": [],
"source": [
"from math import log, exp\n",
"from datetime import timedelta\n",
"\n",
"log_spline = PPSplineF64(\n",
" k=4,\n",
" t=[_.timestamp() for _ in [\n",
" dt(2022, 1, 1), dt(2022, 1, 1), dt(2022, 1, 1), dt(2022, 1, 1),\n",
" dt(2023, 1, 1),\n",
" dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1)\n",
" ]]\n",
")\n",
"log_spline.csolve(\n",
" tau=[_.timestamp() for _ in [\n",
" dt(2022,1,1), dt(2022,1,1), dt(2023,1,1), dt(2024,1,1), dt(2024,1,1)\n",
" ]], \n",
" y=[0, log(1.0), log(0.983), log(0.964), 0],\n",
" left_n=2,\n",
" right_n=2,\n",
" allow_lsq=False,\n",
")\n",
"log_spline.c"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "eb7d47f8-e4db-4815-bf0c-a1b2f6e27a15",
"metadata": {},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt\n",
"x = [_.timestamp() for _ in [\n",
" dt(2022, 1, 1) + timedelta(days=i) for i in range(720)]]\n",
"fix, ax = plt.subplots(1,1)\n",
"ax.plot(x, [exp(log_spline.ppev_single(_)) for _ in x])\n",
" "
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.0"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
================================================
FILE: notebooks/coding_2/Legs.ipynb
================================================
{
"cells": [
{
"cell_type": "markdown",
"id": "8ced5feb-616f-469c-8b1a-68bd7c9ef252",
"metadata": {},
"source": [
"### The chapter on Legs contains no code listings"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.4"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
================================================
FILE: notebooks/coding_2/Periods.ipynb
================================================
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "49c1059d-3472-4797-a9a2-ae7efbc9ba1d",
"metadata": {},
"outputs": [],
"source": [
"from rateslib import Curve, FloatPeriod, dt, defaults"
]
},
{
"cell_type": "markdown",
"id": "ce44b389-41a3-4a48-8dad-9d9601eddc8e",
"metadata": {},
"source": [
"# Expression of fixings risk in fixings table"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "7390a465-ddd6-424f-a417-98a4a6e5e310",
"metadata": {},
"outputs": [],
"source": [
"curve = Curve ({dt(2022, 1, 1): 1.0 , dt(2025, 1, 1): 0.94},\n",
" id=\"euribor3m\", calendar=\"tgt\", convention=\"act360\"\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "2d803cf3-ec6b-415f-97d9-d7d70fa511b3",
"metadata": {},
"outputs": [],
"source": [
"imm_fp = FloatPeriod (\n",
" start=dt(2023, 3, 15),\n",
" end=dt(2023, 6, 21), # <--- IMM start and end dates\n",
" payment=dt(2023, 6, 21),\n",
" frequency=\"q\",\n",
" convention=\"act360\",\n",
" calendar=\"tgt\",\n",
" fixing_method=\"ibor\",\n",
" method_param=2,\n",
" notional=-1e6 # <-- Notional for period is -1mm\n",
" )\n",
"imm_fp.fixings_table(curve)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b6cbb243-dbc1-4d95-8a5d-9227e582a542",
"metadata": {},
"outputs": [],
"source": [
"curve2 = Curve ({dt(2022, 1, 1): 1.0 , dt(2025, 1, 1): 0.94} ,\n",
" id=\"euribor1m\", calendar=\"tgt\", convention=\"act360\"\n",
")\n",
"\n",
"stub_fp = FloatPeriod (\n",
" start=dt(2022, 3, 14),\n",
" end=dt(2022, 5, 14), # <--- 2M stub tenor\n",
" payment =dt(2022, 5, 14),\n",
" frequency=\"q\",\n",
" convention=\"act360\",\n",
" calendar=\"tgt\",\n",
" fixing_method=\"ibor\",\n",
" method_param=2,\n",
" notional=-1e6 ,\n",
" stub=True,\n",
")\n",
"stub_fp.fixings_table({\"1m\": curve2 , \"3m\": curve}, disc_curve=curve2)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "784ed642-54b7-4864-89cc-4d4d7d5c4805",
"metadata": {},
"outputs": [],
"source": [
"defaults.curve_caching = False"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ce13bfea-17a0-436c-aaab-ae9dfee8d2b8",
"metadata": {},
"outputs": [],
"source": [
"curve = Curve ({ dt(2022, 1, 4): 1.0, dt(2023, 1, 4): 0.98}, calendar=\"ldn\")\n",
"float_period = FloatPeriod(start=dt(2022, 1, 4), end=dt(2023, 1, 4),\n",
" payment=dt(2023, 1, 4) ,frequency =\"A\",\n",
" fixing_method=\"rfr_lookback\", method_param=0)\n",
"\n",
"%timeit float_period.fixings_table(curve)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "46902d71-7080-48d5-83d0-5ef250329709",
"metadata": {},
"outputs": [],
"source": [
"%timeit float_period.fixings_table(curve, approximate=True)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.0"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
================================================
FILE: notebooks/coding_2/Scheduling.ipynb
================================================
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "350b18e6-9448-4c28-9c45-444dabe50160",
"metadata": {},
"outputs": [],
"source": [
"from rateslib import *"
]
},
{
"cell_type": "markdown",
"id": "a454c714-2127-4a86-ad73-4f924210aee1",
"metadata": {},
"source": [
"# Regular Unadjusted Schedules"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "a6a10173-9141-40f8-91f1-bb4c06b5a3be",
"metadata": {},
"outputs": [],
"source": [
"from rateslib.scheduling import _generate_regular_schedule_unadjusted\n",
"\n",
"dates = list (_generate_regular_schedule_unadjusted (\n",
" ueffective=dt(2023 , 3, 15),\n",
" utermination=dt(2023 , 9, 20),\n",
" frequency=\"M\", \n",
" roll=\"imm\",\n",
"))\n",
"\n",
"dates"
]
},
{
"cell_type": "markdown",
"id": "f8e77e1c-6af9-4885-aaea-1de5b77ccee5",
"metadata": {},
"source": [
"# Stub and Roll Inference"
]
},
{
"cell_type": "markdown",
"id": "75166f2f-e9bd-4de1-b78d-b43f0a931e7c",
"metadata": {},
"source": [
"### Get a Roll"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d2905999-2165-480f-882c-a8b33e4aa105",
"metadata": {},
"outputs": [],
"source": [
"from rateslib . scheduling import _get_unadjusted_roll\n",
"\n",
"_get_unadjusted_roll (\n",
" ueffective =dt (2022 ,3 ,15) , utermination =dt (2023 ,3 ,15) , eom = True\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "7b582faa-05f3-40ea-9964-8677ed1dd250",
"metadata": {},
"outputs": [],
"source": [
"_get_unadjusted_roll (\n",
" ueffective =dt (2022 ,2 ,28) , utermination =dt (2023 ,2 ,28) , eom = False\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ba2b8488-8e88-4ca2-a24b-db73abf65ca9",
"metadata": {},
"outputs": [],
"source": [
"_get_unadjusted_roll (\n",
" ueffective =dt (2022 ,2 ,28) , utermination =dt (2023 ,2 ,28) , eom = True\n",
")"
]
},
{
"cell_type": "markdown",
"id": "b054fb9d-5a88-4d05-bd1b-2e0c7cc34e22",
"metadata": {},
"source": [
"### Validate for a regular unadjusted swap"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3860386d-a9ce-4afd-a1ce-b4c959b20f26",
"metadata": {},
"outputs": [],
"source": [
"from rateslib . scheduling import _check_unadjusted_regular_swap\n",
"\n",
"_check_unadjusted_regular_swap(\n",
" ueffective=dt(2022, 2, 28),\n",
" utermination=dt(2023, 2, 28),\n",
" frequency=\"M\",\n",
" eom=False,\n",
" roll=NoInput(0),\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "2c83a2d2-fc06-44ae-bf81-680d15be987b",
"metadata": {},
"outputs": [],
"source": [
"_check_unadjusted_regular_swap (\n",
" ueffective=dt (2022 , 2, 28) ,\n",
" utermination=dt (2023 , 2, 28) ,\n",
" frequency=\"M\",\n",
" eom=True,\n",
" roll=NoInput(0),\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "6b90f1c2-b9d9-4655-9936-999ffd613ddd",
"metadata": {},
"outputs": [],
"source": [
"_check_unadjusted_regular_swap (\n",
" ueffective=dt(2022 , 3, 16) ,\n",
" utermination=dt(2022 , 9, 21) ,\n",
" frequency=\"M\",\n",
" eom=False ,\n",
" roll=NoInput(0),\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d766c8b5-9102-4853-8b05-a23d76c0c893",
"metadata": {},
"outputs": [],
"source": [
"_check_unadjusted_regular_swap (\n",
" ueffective=dt(2022 , 3, 16) ,\n",
" utermination=dt(2022 , 9, 21) ,\n",
" frequency=\"M\",\n",
" eom=False ,\n",
" roll=\"imm\",\n",
")"
]
},
{
"cell_type": "markdown",
"id": "cace2c03-ce4c-46a3-a384-524752253ae3",
"metadata": {},
"source": [
"### Get a stub"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "f7aac92e-1ab9-4f38-942c-10e37faa2ee1",
"metadata": {},
"outputs": [],
"source": [
"from rateslib.scheduling import _get_unadjusted_short_stub_date\n",
"\n",
"kws = dict (\n",
" ueffective =dt (2022 , 6, 15),\n",
" utermination =dt (2023 , 2, 28), # <-- End of Fenruary\n",
" frequency =\"M\",\n",
")\n",
"\n",
"_get_unadjusted_short_stub_date (**kws , eom=False , roll=NoInput(0) ,stub_side=\"FRONT\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ba070165-f691-432b-8819-76943a1f22a1",
"metadata": {},
"outputs": [],
"source": [
"_get_unadjusted_short_stub_date(**kws, eom=True, roll=NoInput(0), stub_side=\"FRONT\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "711b2df6-dc4c-46da-b167-62bd861465a3",
"metadata": {},
"outputs": [],
"source": [
"_get_unadjusted_short_stub_date(**kws, eom=True, roll=29, stub_side=\"FRONT\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b89e7485-2667-424c-9288-9758151c0b22",
"metadata": {},
"outputs": [],
"source": [
"from rateslib . scheduling import _get_unadjusted_stub_date\n",
"\n",
"_get_unadjusted_stub_date(**kws, eom=False, roll=NoInput(0), stub=\"LONGFRONT\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d5e1af0b-855f-425d-8a50-0ce085e7612b",
"metadata": {},
"outputs": [],
"source": [
"_get_unadjusted_stub_date(**kws, eom=True, roll=NoInput(0), stub=\"LONGFRONT\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b1514504-3a81-43b9-9ed5-d5f92bb28c55",
"metadata": {},
"outputs": [],
"source": [
"_get_unadjusted_stub_date(**kws, eom=False, roll=29, stub=\"LONGFRONT\")"
]
},
{
"cell_type": "markdown",
"id": "271c4d0d-ba1a-49fc-8ec6-eb355c46ed91",
"metadata": {},
"source": [
"### Validate for a regular swap account for business days"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "983a9f5e-4500-4ffc-955f-25a7811ded3a",
"metadata": {},
"outputs": [],
"source": [
"from rateslib.scheduling import _check_regular_swap\n",
"\n",
"_check_regular_swap( \n",
" effective=dt(2022, 6, 6), \n",
" termination=dt(2022, 12, 5),\n",
" frequency=\"Q\",\n",
" eom=False,\n",
" roll=NoInput(0),\n",
" modifier =\"MF\",\n",
" calendar=get_calendar(\"bus\"),\n",
")"
]
},
{
"cell_type": "markdown",
"id": "18abf86d-81f8-4fd1-bb4c-5a8e0d06bb8c",
"metadata": {},
"source": [
"# Schedule Building"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "562b95aa-550c-4654-bb4a-010ea7f39875",
"metadata": {},
"outputs": [],
"source": [
"sch = Schedule (\n",
" effective =\"1Y\",\n",
" termination =\"1Y\",\n",
" frequency =\"S\",\n",
" calendar =\"tgt\",\n",
" payment_lag =1,\n",
" eval_date=dt (2023 , 8, 17) ,\n",
" eval_mode=\"swaps_align\", \n",
")\n",
"print(sch)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "72ab63a2-fe4a-49bd-8baa-7ed046ad2b4f",
"metadata": {},
"outputs": [],
"source": [
"sch = Schedule (\n",
" effective =\"1Y\",\n",
" termination =\"1Y\",\n",
" frequency =\"S\",\n",
" calendar =\"tgt\",\n",
" payment_lag =1,\n",
" eval_date =dt (2023 , 8, 17) ,\n",
" eval_mode=\"swaptions_align\", \n",
")\n",
"print(sch)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.4"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
================================================
FILE: pyproject.toml
================================================
# pyproject.toml
[build-system]
requires = ["maturin>=1.0,<2.0"]
build-backend = "maturin"
[tool.maturin]
module-name = "rateslib.rs"
python-source = "python"
bindings = "pyo3"
compatibility = "linux"
features = ["pyo3/extension-module"]
# rustc --print target-list
# https://doc.rust-lang.org/rustc/platform-support.html
[project]
name = "rateslib"
version = "2.7.1"
description = "A fixed income library for trading interest rates"
readme = "README.md"
authors = [{ name = "J H M Darbyshire"}]
license-files = ["LICEN[CS]E", "COMMERCIAL_LICENCE", "COMMERCIAL_LICENCE_ADDENDUM1"]
keywords = ["interest rate", "derivatives", "swaps", "bonds", "fixed income"]
dependencies = [
"numpy>=1.21.5,<3.0",
"matplotlib>=3.5.1,<4.0",
"pandas>=1.4.1,<4.0",
]
requires-python = ">=3.10"
classifiers = [
"Programming Language :: Rust",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
]
[dependency-groups]
test = [
# "pandas>=3.0,<4.0",
"pytest>=9.0,<10.0",
"pytest-env>=1.0,<2.0",
"coverage>=7.6.1,<8.0",
]
lint = [
"ruff>=0.6.3,<1.0",
]
typing = [
"mypy>=1.13,<1.20",
"pandas-stubs>2.0,<4.0",
]
docs = [
"sphinx>=9.0,<10.0; python_version >= '3.11'",
"sphinx-automodapi>=0.16.0,<1.0",
"sphinxcontrib-googleanalytics>=0.4,<1.0",
"sphinx-tabs>=3.4,<4.0",
"pydata-sphinx-theme>=0.15.4,<1.0",
"nbsphinx>=0.9.5,<1.0",
]
gui = [
"jupyterlab>=4.0,<5.0",
"pickleshare>=0.7.5,<1.0",
]
[tool.pytest.ini_options]
# pythonpath = [".", "python/rateslib"]
minversion = "8.0"
addopts = [
"--ignore-glob=*_ignore.py",
] # use -s to show print capture, use -q for quiet, use -v for verbose
testpaths = [
"python/tests",
]
filterwarnings = [
"ignore::DeprecationWarning",
"ignore::PendingDeprecationWarning"
]
[tool.pytest_env]
MPLBACKEND = "Agg"
[tool.setuptools]
packages = ["rateslib"]
[project.urls]
Homepage = "https://github.com/attack68/rateslib"
[tool.ruff]
exclude = [
".git",
".github",
"docs",
"notebooks",
"target",
"venv9",
"venv11",
"scratch*.py",
"__pycache__",
"docs/source/conf.py",
"old",
"build",
"dist",
"bench",
"benchmarks",
]
# Same as Black.
line-length = 100
indent-width = 4
# Assume Python 3.12
target-version = "py310"
[tool.ruff.format]
quote-style = "double"
indent-style = "space"
docstring-code-format = false
[tool.ruff.lint]
select = [
# "ANN", # flake8-annotations -- Superceded by the use of mypy
# "COM", # flake8-commas -- conflicts with ruff format
"E", # pycodestyle
"W",
"F", # Pyflakes
"UP", # pyupgrade
"B", # flake8-bugbear
"SIM", # flake8-simplify
"C4", # flake8-comprehensions
"S", # flake8-bandit
"PIE", # flake8-pie
"A", # flake8-builtins
"Q", # flake8-quotes
"PT", # flake8-pytest-style
"C90", # mccabe complexity -- Requires work
"I", # isort
"N", # pep8 naming
# "RUF", # -- Requires work
# "D", Pydocs -- requires work
]
ignore = [
"A005", # json and typing module name shadowing is allowed
"PT011", "PT030", "PT031", # -- Requires work inputting match statements
"PIE790", # unnecessary pass
"C408", # unnecessary dict call
"N806", "N815", "N803", "N802",
"SIM116", # use a dict instead of successive ifs: off due to performance degradation.
"SIM108", # ternary operators: off due to code coverage degradation.
"B008", # function calls in argument defaults, e.g. NoInput(0)
# "B006", # mutable data structures for argument defaults, e.g. []
"B904", # raising within except clauses
"B028", # no explicit stack level
"E702", # semi-colons for multiple line statements
]
[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["E402", "N801"]
"local_types.py" = ["E501", "E402"]
"python/tests/*" = ["F401", "B", "N", "S", "ANN", "D"]
"rust/*" = ["D"]
[tool.ruff.lint.mccabe]
# Flag errors (`C901`) whenever the complexity level exceeds 5.
max-complexity = 14
[tool.mypy]
files = ["python/"]
exclude = [
"python/tests",
# "/periods/ir_volatility.py",
]
strict = true
#packages = [
# "rateslib"
#]
[tool.coverage.run]
omit = [
"/local_types.py",
# "python/tests/*"
]
================================================
FILE: python/rateslib/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
__docformat__ = "restructuredtext"
# Let users know if they're missing any of our hard dependencies
_hard_dependencies = ("pandas", "matplotlib", "numpy")
_missing_dependencies: list[str] = []
for _dependency in _hard_dependencies:
try:
__import__(_dependency)
except ImportError as _e: # pragma: no cover
raise ImportError(f"`rateslib` requires installation of {_dependency}: {_e}")
from rateslib.verify import VERSION, Licence
__version__ = VERSION
licence = Licence()
from datetime import datetime as dt
from rateslib.data.loader import Fixings
from rateslib.default import Defaults
from rateslib.rs import CalendarManager
defaults = Defaults()
fixings = Fixings()
calendars = CalendarManager()
from contextlib import ContextDecorator
class default_context(ContextDecorator):
"""
Context manager to temporarily set options in the `with` statement context.
You need to invoke as ``option_context(pat, val, [(pat, val), ...])``.
Examples
--------
>>> with option_context('convention', "act360", 'frequency', "S"):
... pass
"""
def __init__(self, *args) -> None: # type: ignore[no-untyped-def]
if len(args) % 2 != 0 or len(args) < 2:
raise ValueError("Need to invoke as option_context(pat, val, [(pat, val), ...]).")
self.ops = list(zip(args[::2], args[1::2], strict=False))
def __enter__(self) -> None:
self.undo = [(pat, getattr(defaults, pat, None)) for pat, _ in self.ops]
for pat, val in self.ops:
setattr(defaults, pat, val)
def __exit__(self, *args) -> None: # type: ignore[no-untyped-def]
if self.undo:
for pat, val in self.undo:
setattr(defaults, pat, val)
from rateslib.curves import (
CompositeCurve,
Curve,
LineCurve,
MultiCsaCurve,
ProxyCurve,
index_left,
index_value,
)
from rateslib.curves.academic import (
NelsonSiegelCurve,
NelsonSiegelSvenssonCurve,
SmithWilsonCurve,
)
from rateslib.data.fixings import (
FloatRateIndex,
FloatRateSeries,
FXFixing,
FXIndex,
IBORFixing,
IBORStubFixing,
IndexFixing,
RFRFixing,
)
from rateslib.dual import ADOrder, Dual, Dual2, Variable, dual_exp, dual_log, dual_solve, gradient
from rateslib.enums import FloatFixingMethod, NoInput
from rateslib.fx import FXForwards, FXRates
from rateslib.instruments import (
CDS,
FRA,
IIRS,
IRS,
NDF,
NDXCS,
SBS,
XCS,
ZCIS,
ZCS,
Bill,
BillCalcMode,
BondCalcMode,
BondFuture,
Fee,
FixedRateBond,
FloatRateNote,
Fly,
FXBrokerFly,
FXCall,
FXForward,
FXPut,
FXRiskReversal,
FXStraddle,
FXStrangle,
FXSwap,
FXVolValue,
IndexFixedRateBond,
IRSCall,
IRSPut,
IRSRiskReversal,
IRSStraddle,
IRSStrangle,
IRVolValue,
Loan,
Portfolio,
Spread,
STIRFuture,
Value,
YoYIS,
)
from rateslib.legs import (
Amortization,
CreditPremiumLeg,
CreditProtectionLeg,
CustomLeg,
FixedLeg,
FloatLeg,
ZeroFixedLeg,
ZeroFloatLeg,
)
from rateslib.periods import (
Cashflow,
CreditPremiumPeriod,
CreditProtectionPeriod,
FixedPeriod,
FloatPeriod,
FXCallPeriod,
FXPutPeriod,
IRSCallPeriod,
IRSPutPeriod,
ZeroFixedPeriod,
ZeroFloatPeriod,
)
from rateslib.scheduling import (
Adjuster,
Cal,
Convention,
Frequency,
Imm,
NamedCal,
RollDay,
Schedule,
StubInference,
UnionCal,
add_tenor,
dcf,
get_calendar,
get_imm,
next_imm,
)
from rateslib.serialization import from_json
from rateslib.solver import Solver
from rateslib.splines import (
PPSplineDual,
PPSplineDual2,
PPSplineF64,
bspldnev_single,
bsplev_single,
)
from rateslib.volatility import (
FXDeltaVolSmile,
FXDeltaVolSurface,
FXSabrSmile,
FXSabrSurface,
IRSabrCube,
IRSabrSmile,
IRSplineCube,
IRSplineSmile,
)
# module level doc-string
__doc__ = """
RatesLib - An efficient and interconnected fixed income library for Python
==========================================================================
**rateslib** is a Python package providing fast, flexible, and accurate
fixed income instrument configuration and calculation.
It aims to be the fundamental high-level building block for practical analysis of
fixed income securities, derivatives, FX representation and curve construction
in Python.
""" # noqa: A001
__all__ = [
"dt",
"defaults",
"fixings",
"calendars",
"licence",
"from_json",
# enums.py
"NoInput",
"FloatFixingMethod",
# dual.py
"ADOrder",
"Dual",
"Dual2",
"Variable",
"dual_log",
"dual_exp",
"dual_solve",
"gradient",
# splines.py
"bsplev_single",
"bspldnev_single",
"PPSplineF64",
"PPSplineDual",
"PPSplineDual2",
# scheduling.py
"get_calendar",
"get_imm",
"next_imm",
"add_tenor",
"dcf",
"Cal",
"UnionCal",
"NamedCal",
"Schedule",
"Frequency",
"RollDay",
"Adjuster",
"StubInference",
"Convention",
"Imm",
# curves.py
"Curve",
"LineCurve",
"MultiCsaCurve",
"CompositeCurve",
"ProxyCurve",
"index_left",
"index_value",
# academic curves
"NelsonSiegelCurve",
"NelsonSiegelSvenssonCurve",
"SmithWilsonCurve",
# fixings.py
"FXFixing",
"IBORFixing",
"IBORStubFixing",
"IndexFixing",
"RFRFixing",
"FXIndex",
"FloatRateIndex",
"FloatRateSeries",
# volatility/fx
"FXDeltaVolSmile",
"FXDeltaVolSurface",
"FXSabrSmile",
"FXSabrSurface",
# volatility/ir
"IRSabrSmile",
"IRSabrCube",
"IRSplineSmile",
"IRSplineCube",
# solver.py
"Solver",
# fx.py
"FXRates",
"FXForwards",
# periods.py,
"FixedPeriod",
"FloatPeriod",
"ZeroFixedPeriod",
"ZeroFloatPeriod",
"Cashflow",
"FXCallPeriod",
"FXPutPeriod",
"IRSCallPeriod",
"IRSPutPeriod",
"CreditPremiumPeriod",
"CreditProtectionPeriod",
# legs.py
"Amortization",
"FixedLeg",
"FloatLeg",
"ZeroFloatLeg",
"ZeroFixedLeg",
"CustomLeg",
"CreditPremiumLeg",
"CreditProtectionLeg",
# instruments.py
"FixedRateBond",
"IndexFixedRateBond",
"FloatRateNote",
"BondFuture",
"BondCalcMode",
"CDS",
"FRA",
"Value",
"FXVolValue",
"IRVolValue",
"Bill",
"Fee",
"Loan",
"BillCalcMode",
"IRS",
"NDF",
"STIRFuture",
"IIRS",
"ZCS",
"ZCIS",
"YoYIS",
"SBS",
"FXSwap",
"FXForward",
"XCS",
"NDXCS",
"Spread",
"Fly",
"Portfolio",
"FXCall",
"FXPut",
"FXRiskReversal",
"FXStraddle",
"FXStrangle",
"FXBrokerFly",
"IRSCall",
"IRSPut",
"IRSRiskReversal",
"IRSStraddle",
"IRSStrangle",
]
================================================
FILE: python/rateslib/_spec_loader.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import os
from typing import TYPE_CHECKING
import pandas as pd
from packaging import version
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
)
DEVELOPMENT = os.environ.get("RATESLIB_DEVELOPMENT", "False")
# This is output from a development version and hard coded before a release for performance.
INSTRUMENT_SPECS: dict[str, dict[str, Any]] = {
"test": {
"frequency": "m",
"stub": "longfront",
"eom": False,
"modifier": "p",
"calendar": "nyc,tgt,ldn",
"payment_lag": 4,
"currency": "tes",
"convention": "yearsmonths",
"leg2_frequency": "m",
"leg2_stub": "longback",
"leg2_roll": 1,
"leg2_eom": False,
"leg2_modifier": "mp",
"leg2_calendar": "nyc,tgt,ldn",
"leg2_payment_lag": 3,
"leg2_convention": "one",
},
"eurusd_call": {
"modifier": "mf",
"calendar": "tgt|fed",
"payment_lag": 2,
"pair": "eurusd",
"delivery_lag": 2,
},
"us_ig_cds": {
"frequency": "q",
"stub": "shortfront",
"roll": 20,
"eom": False,
"modifier": "fex",
"calendar": "nyc",
"payment_lag": 0,
"currency": "usd",
"convention": "act360",
"fixed_rate": 1.0,
},
"inr_ndirs": {
"frequency": "s",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "mum",
"payment_lag": 0,
"currency": "usd",
"convention": "act365f",
"pair": "usdinr",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
},
"inrusd_ndxcs": {
"frequency": "s",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "mum|fed",
"payment_lag": 2,
"currency": "usd",
"convention": "act365f",
"fixed": True,
"pair": "usdinr",
"leg2_convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
},
"mxn_irs": {
"frequency": "28d",
"stub": "shortfront",
"eom": False,
"modifier": "f",
"calendar": "mex",
"payment_lag": 2,
"currency": "mxn",
"convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
},
"usd_irs": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "nyc",
"payment_lag": 2,
"currency": "usd",
"convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
},
"usd_irs_lt_2y": {
"frequency": "a",
"stub": "shortfront",
"eom": True,
"modifier": "mf",
"calendar": "nyc",
"payment_lag": 2,
"currency": "usd",
"convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
},
"gbp_irs": {
"frequency": "a",
"stub": "shortfront",
"eom": True,
"modifier": "mf",
"calendar": "ldn",
"payment_lag": 0,
"currency": "gbp",
"convention": "act365f",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
},
"eur_irs": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "tgt",
"payment_lag": 1,
"currency": "eur",
"convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
},
"sek_irs": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "stk",
"payment_lag": 1,
"currency": "sek",
"convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
},
"nok_irs": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "osl",
"payment_lag": 2,
"currency": "nok",
"convention": "act365f",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
},
"chf_irs": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "zur",
"payment_lag": 2,
"currency": "chf",
"convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
},
"cad_irs": {
"frequency": "s",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "tro",
"payment_lag": 1,
"currency": "cad",
"convention": "act365f",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
},
"cad_irs_le_1y": {
"frequency": "a",
"stub": "shortfront",
"eom": True,
"modifier": "mf",
"calendar": "tro",
"payment_lag": 1,
"currency": "cad",
"convention": "act365f",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
},
"jpy_irs": {
"frequency": "a",
"stub": "shortfront",
"eom": True,
"modifier": "mf",
"calendar": "tyo",
"payment_lag": 2,
"currency": "jpy",
"convention": "act365f",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
},
"nzd_irs3": {
"frequency": "s",
"stub": "shortfront",
"eom": True,
"modifier": "mf",
"calendar": "wlg",
"payment_lag": 0,
"currency": "nzd",
"convention": "act365f",
"leg2_frequency": "q",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(0)",
},
"nzd_irs6": {
"frequency": "s",
"stub": "shortfront",
"eom": True,
"modifier": "mf",
"calendar": "wlg",
"payment_lag": 0,
"currency": "nzd",
"convention": "act365f",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(0)",
},
"nzd_irs": {
"frequency": "a",
"stub": "shortfront",
"eom": True,
"modifier": "mf",
"calendar": "wlg",
"payment_lag": 2,
"currency": "nzd",
"convention": "act365f",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
},
"aud_irs6": {
"frequency": "s",
"stub": "shortfront",
"eom": True,
"modifier": "mf",
"calendar": "syd",
"payment_lag": 0,
"currency": "aud",
"convention": "act365f",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(0)",
},
"aud_irs3": {
"frequency": "q",
"stub": "shortfront",
"eom": True,
"modifier": "mf",
"calendar": "syd",
"payment_lag": 0,
"currency": "aud",
"convention": "act365f",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(0)",
},
"aud_irs3_gt_3y": {
"frequency": "s",
"stub": "shortfront",
"eom": True,
"modifier": "mf",
"calendar": "syd",
"payment_lag": 0,
"currency": "aud",
"convention": "act365f",
"leg2_frequency": "q",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(0)",
},
"aud_irs": {
"frequency": "a",
"stub": "shortfront",
"eom": True,
"modifier": "mf",
"calendar": "syd",
"payment_lag": 2,
"currency": "aud",
"convention": "act365f",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
},
"eur_irs6": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "tgt",
"payment_lag": 0,
"currency": "eur",
"convention": "30e360",
"leg2_frequency": "s",
"leg2_convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(2)",
},
"eur_irs3": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "tgt",
"payment_lag": 0,
"currency": "eur",
"convention": "30e360",
"leg2_frequency": "q",
"leg2_convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(2)",
},
"eur_irs1": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "tgt",
"payment_lag": 0,
"currency": "eur",
"convention": "30e360",
"leg2_frequency": "m",
"leg2_convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(2)",
},
"sek_irs3": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "stk",
"payment_lag": 0,
"currency": "sek",
"convention": "30e360",
"leg2_frequency": "q",
"leg2_convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(2)",
},
"nok_irs3": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "osl",
"payment_lag": 0,
"currency": "nok",
"convention": "30e360",
"leg2_frequency": "q",
"leg2_convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(2)",
},
"nok_irs6": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "osl",
"payment_lag": 0,
"currency": "nok",
"convention": "30e360",
"leg2_frequency": "s",
"leg2_convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(2)",
},
"eurusd_xcs": {
"frequency": "q",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "tgt,nyc",
"payment_lag": 2,
"currency": "eur",
"convention": "act360",
"spread_compound_method": "none_simple",
"fixing_method": "rfr_payment_delay",
"payment_lag_exchange": 0,
"fixed": False,
"pair": "eurusd",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
"leg2_fixed": False,
"leg2_mtm": True,
},
"gbpusd_xcs": {
"frequency": "q",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "ldn,nyc",
"payment_lag": 2,
"currency": "gbp",
"convention": "act365f",
"spread_compound_method": "none_simple",
"fixing_method": "rfr_payment_delay",
"payment_lag_exchange": 0,
"fixed": False,
"pair": "gbpusd",
"leg2_convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
"leg2_fixed": False,
"leg2_mtm": True,
},
"eurgbp_xcs": {
"frequency": "q",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "tgt,ldn",
"payment_lag": 2,
"currency": "eur",
"convention": "act360",
"spread_compound_method": "none_simple",
"fixing_method": "rfr_payment_delay",
"payment_lag_exchange": 0,
"fixed": False,
"pair": "eurgbp",
"leg2_convention": "act365f",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
"leg2_fixed": False,
"leg2_mtm": True,
},
"gbpeur_xcs": {
"frequency": "q",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "tgt,ldn",
"payment_lag": 2,
"currency": "gbp",
"convention": "act365f",
"spread_compound_method": "none_simple",
"fixing_method": "rfr_payment_delay",
"payment_lag_exchange": 0,
"fixed": False,
"pair": "eurgbp",
"leg2_convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
"leg2_fixed": False,
"leg2_mtm": True,
},
"jpyusd_xcs": {
"frequency": "q",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "nyc,tyo",
"payment_lag": 2,
"currency": "jpy",
"convention": "act365f",
"spread_compound_method": "none_simple",
"fixing_method": "rfr_payment_delay",
"payment_lag_exchange": 0,
"fixed": False,
"pair": "usdjpy",
"leg2_convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
"leg2_fixed": False,
"leg2_mtm": True,
},
"audusd_xcs3": {
"frequency": "q",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "nyc,syd",
"payment_lag": 2,
"currency": "aud",
"convention": "act365f",
"spread_compound_method": "none_simple",
"fixing_method": "ibor(0)",
"payment_lag_exchange": 0,
"fixed": False,
"pair": "audusd",
"leg2_convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
"leg2_fixed": False,
"leg2_mtm": True,
},
"audusd_xcs": {
"frequency": "q",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "nyc,syd",
"payment_lag": 2,
"currency": "aud",
"convention": "act365f",
"spread_compound_method": "none_simple",
"fixing_method": "rfr_payment_delay",
"payment_lag_exchange": 0,
"fixed": False,
"pair": "audusd",
"leg2_convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
"leg2_fixed": False,
"leg2_mtm": True,
},
"nzdusd_xcs3": {
"frequency": "q",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "nyc,wlg",
"payment_lag": 2,
"currency": "nzd",
"convention": "act365f",
"spread_compound_method": "none_simple",
"fixing_method": "ibor(0)",
"payment_lag_exchange": 0,
"fixed": False,
"pair": "nzdusd",
"leg2_convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
"leg2_fixed": False,
"leg2_mtm": True,
},
"nzdaud_xcs3": {
"frequency": "q",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "nyc,wlg,syd",
"payment_lag": 2,
"currency": "nzd",
"convention": "act365f",
"spread_compound_method": "none_simple",
"fixing_method": "ibor(0)",
"payment_lag_exchange": 0,
"fixed": False,
"pair": "audnzd",
"leg2_convention": "act365f",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(0)",
"leg2_fixed": False,
"leg2_mtm": True,
},
"eur_zcis": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "tgt",
"payment_lag": 0,
"currency": "eur",
"convention": "1+",
"leg2_index_method": "monthly",
"leg2_index_lag": 3,
},
"gbp_zcis": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "ldn",
"payment_lag": 0,
"currency": "gbp",
"convention": "1+",
"leg2_index_method": "monthly",
"leg2_index_lag": 2,
},
"usd_zcis": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "nyc",
"payment_lag": 0,
"currency": "usd",
"convention": "1+",
"leg2_index_method": "daily",
"leg2_index_lag": 3,
},
"gbp_zcs": {
"frequency": "a",
"stub": "shortfront",
"eom": True,
"modifier": "mf",
"calendar": "ldn",
"payment_lag": 0,
"currency": "gbp",
"convention": "act365f",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
},
"sek_iirs": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "none",
"calendar": "stk",
"payment_lag": 0,
"currency": "sek",
"convention": "actacticma",
"index_method": "daily",
"index_lag": 3,
"leg2_frequency": "q",
"leg2_convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(2)",
},
"eur_sbs36": {
"frequency": "q",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "tgt",
"payment_lag": 0,
"currency": "eur",
"convention": "act360",
"spread_compound_method": "none_simple",
"fixing_method": "ibor(2)",
"leg2_frequency": "s",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(2)",
},
"nok_sbs36": {
"frequency": "q",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "osl",
"payment_lag": 0,
"currency": "nok",
"convention": "act360",
"spread_compound_method": "none_simple",
"fixing_method": "ibor(2)",
"leg2_frequency": "s",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(2)",
},
"aud_sbs36": {
"frequency": "q",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "syd",
"payment_lag": 0,
"currency": "aud",
"convention": "act365f",
"spread_compound_method": "none_simple",
"fixing_method": "ibor(0)",
"leg2_frequency": "s",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(0)",
},
"aud_sbs31": {
"frequency": "q",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "syd",
"payment_lag": 0,
"currency": "aud",
"convention": "act365f",
"spread_compound_method": "none_simple",
"fixing_method": "ibor(0)",
"leg2_frequency": "m",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(0)",
},
"nzd_sbs36": {
"frequency": "q",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "wlg",
"payment_lag": 0,
"currency": "nzd",
"convention": "act365f",
"spread_compound_method": "none_simple",
"fixing_method": "ibor(0)",
"leg2_frequency": "s",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(0)",
},
"nzd_sbs31": {
"frequency": "q",
"stub": "shortfront",
"eom": False,
"modifier": "mf",
"calendar": "wlg",
"payment_lag": 0,
"currency": "nzd",
"convention": "act365f",
"spread_compound_method": "none_simple",
"fixing_method": "ibor(0)",
"leg2_frequency": "m",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(0)",
},
"us_gb": {
"frequency": "s",
"stub": "shortfront",
"eom": True,
"modifier": "none",
"calendar": "nyc",
"payment_lag": 0,
"currency": "usd",
"convention": "actacticma",
"payment_lag_exchange": 0,
"settle": 1,
"ex_div": "-1b",
"calc_mode": "us_gb",
},
"us_gbi": {
"frequency": "s",
"stub": "shortfront",
"eom": True,
"modifier": "none",
"calendar": "nyc",
"payment_lag": 0,
"currency": "usd",
"convention": "actacticma",
"payment_lag_exchange": 0,
"index_method": "daily",
"index_lag": 3,
"settle": 1,
"ex_div": "-1b",
"calc_mode": "us_gb",
},
"us_corp": {
"frequency": "s",
"stub": "shortfront",
"eom": True,
"modifier": "none",
"calendar": "nyc",
"payment_lag": 0,
"currency": "usd",
"convention": "30u360",
"payment_lag_exchange": 0,
"settle": 1,
"ex_div": "-1b",
"calc_mode": "us_corp",
},
"us_muni": {
"frequency": "s",
"stub": "shortfront",
"eom": True,
"modifier": "none",
"calendar": "nyc",
"payment_lag": 0,
"currency": "usd",
"convention": "30u360",
"payment_lag_exchange": 0,
"settle": 1,
"ex_div": "-1b",
"calc_mode": "us_muni",
},
"us_gb_tsy": {
"frequency": "s",
"stub": "shortfront",
"eom": True,
"modifier": "none",
"calendar": "nyc",
"payment_lag": 0,
"currency": "usd",
"convention": "actacticma",
"payment_lag_exchange": 0,
"settle": 1,
"ex_div": "-1b",
"calc_mode": "us_gb_tsy",
},
"uk_gb": {
"frequency": "s",
"stub": "longfront",
"eom": False,
"modifier": "none",
"calendar": "ldn",
"payment_lag": 0,
"currency": "gbp",
"convention": "actacticma",
"payment_lag_exchange": 0,
"settle": 1,
"ex_div": "-7b",
"calc_mode": "uk_gb",
},
"au_gb": {
"frequency": "s",
"stub": "longfront",
"eom": False,
"modifier": "none",
"calendar": "syd",
"payment_lag": 0,
"currency": "aud",
"convention": "actacticma",
"payment_lag_exchange": 0,
"settle": 2,
"ex_div": "-8d",
"calc_mode": "au_gb",
},
"nz_gb": {
"frequency": "s",
"stub": "shortfront",
"eom": False,
"modifier": "none",
"calendar": "wlg",
"payment_lag": 0,
"currency": "nzd",
"convention": "actacticma",
"payment_lag_exchange": 0,
"settle": 1,
"ex_div": "-8b",
"calc_mode": "nz_gb",
},
"cn_gb": {
"frequency": "s",
"stub": "shortfront",
"eom": False,
"modifier": "none",
"calendar": "bjs",
"payment_lag": 0,
"currency": "cny",
"convention": "actacticma",
"payment_lag_exchange": 0,
"settle": 1,
"ex_div": "-1b",
"calc_mode": "cn_gb",
},
"de_gb": {
"frequency": "a",
"stub": "longfront",
"eom": False,
"modifier": "none",
"calendar": "tgt",
"payment_lag": 0,
"currency": "eur",
"convention": "actacticma",
"payment_lag_exchange": 0,
"settle": 2,
"ex_div": "-1b",
"calc_mode": "de_gb",
},
"fr_gb": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "none",
"calendar": "tgt",
"payment_lag": 0,
"currency": "eur",
"convention": "actacticma",
"payment_lag_exchange": 0,
"settle": 2,
"ex_div": "-1b",
"calc_mode": "fr_gb",
},
"nl_gb": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "none",
"calendar": "tgt",
"payment_lag": 0,
"currency": "eur",
"convention": "actacticma",
"payment_lag_exchange": 0,
"settle": 2,
"ex_div": "-1b",
"calc_mode": "nl_gb",
},
"it_gb": {
"frequency": "s",
"stub": "shortfront",
"eom": False,
"modifier": "none",
"calendar": "tgt",
"payment_lag": 0,
"currency": "eur",
"convention": "actacticma",
"payment_lag_exchange": 0,
"settle": 2,
"ex_div": "-1b",
"calc_mode": "it_gb",
},
"ch_gb": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "none",
"calendar": "zur",
"payment_lag": 0,
"currency": "chf",
"convention": "30e360",
"payment_lag_exchange": 0,
"settle": 1,
"ex_div": "-1b",
"calc_mode": "ch_gb",
},
"se_gb": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "none",
"calendar": "stk",
"payment_lag": 0,
"currency": "sek",
"convention": "actacticma",
"payment_lag_exchange": 0,
"settle": 2,
"ex_div": "-5b",
"calc_mode": "se_gb",
},
"no_gb": {
"frequency": "a",
"stub": "shortfront",
"eom": False,
"modifier": "none",
"calendar": "osl",
"payment_lag": 0,
"currency": "nok",
"convention": "actacticma_stub365f",
"payment_lag_exchange": 0,
"settle": 1,
"ex_div": "-1b",
"calc_mode": "no_gb",
},
"ca_gb": {
"frequency": "s",
"stub": "shortfront",
"eom": False,
"modifier": "none",
"calendar": "tro",
"payment_lag": 0,
"currency": "cad",
"convention": "actacticma_stub365f",
"payment_lag_exchange": 0,
"settle": 1,
"ex_div": "-1b",
"calc_mode": "ca_gb",
},
"ca_gbi": {
"frequency": "s",
"stub": "shortfront",
"eom": False,
"modifier": "none",
"calendar": "tro",
"payment_lag": 0,
"currency": "cad",
"convention": "actacticma_stub365f",
"payment_lag_exchange": 0,
"index_method": "daily",
"index_lag": 3,
"settle": 1,
"ex_div": "-1b",
"calc_mode": "ca_gb",
},
"us_gbb": {
"eom": True,
"modifier": "none",
"calendar": "nyc",
"payment_lag": 0,
"currency": "usd",
"convention": "act360",
"payment_lag_exchange": 0,
"settle": 1,
"ex_div": "0b",
"calc_mode": "us_gbb",
},
"se_gbb": {
"eom": False,
"modifier": "none",
"calendar": "stk",
"payment_lag": 0,
"currency": "sek",
"convention": "act360",
"payment_lag_exchange": 0,
"settle": 2,
"ex_div": "0b",
"calc_mode": "se_gbb",
},
"no_gbb": {
"eom": False,
"modifier": "none",
"calendar": "osl",
"payment_lag": 0,
"currency": "nok",
"convention": "act365f",
"payment_lag_exchange": 0,
"settle": 2,
"ex_div": "0b",
"calc_mode": "no_gbb",
},
"uk_gbb": {
"eom": True,
"modifier": "none",
"calendar": "ldn",
"payment_lag": 0,
"currency": "gbp",
"convention": "act365f",
"payment_lag_exchange": 0,
"settle": 1,
"ex_div": "0b",
"calc_mode": "uk_gbb",
},
"uk_gbi": {
"frequency": "s",
"stub": "shortfront",
"eom": False,
"modifier": "none",
"calendar": "ldn",
"payment_lag": 0,
"currency": "gbp",
"convention": "actacticma",
"payment_lag_exchange": 0,
"index_method": "daily",
"index_lag": 3,
"settle": 1,
"ex_div": "-7b",
"calc_mode": "uk_gb",
},
"sek_fra3": {
"termination": "3m",
"frequency": "q",
"eom": False,
"modifier": "mf",
"calendar": "stk",
"payment_lag": 0,
"currency": "sek",
"convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(2)",
},
"eur_fra3": {
"termination": "3m",
"frequency": "q",
"eom": False,
"modifier": "mf",
"calendar": "tgt",
"payment_lag": 0,
"currency": "eur",
"convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(2)",
},
"eur_fra6": {
"termination": "6m",
"frequency": "s",
"eom": False,
"modifier": "mf",
"calendar": "tgt",
"payment_lag": 0,
"currency": "eur",
"convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(2)",
},
"eur_fra1": {
"termination": "1m",
"frequency": "m",
"eom": False,
"modifier": "mf",
"calendar": "tgt",
"payment_lag": 0,
"currency": "eur",
"convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(2)",
},
"nok_fra3": {
"termination": "3m",
"frequency": "q",
"eom": False,
"modifier": "mf",
"calendar": "osl",
"payment_lag": 0,
"currency": "nok",
"convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(2)",
},
"nok_fra6": {
"termination": "6m",
"frequency": "s",
"eom": False,
"modifier": "mf",
"calendar": "osl",
"payment_lag": 0,
"currency": "nok",
"convention": "act360",
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(2)",
},
"usd_frn5": {
"frequency": "q",
"eom": False,
"modifier": "mf",
"calendar": "nyc",
"payment_lag": 0,
"currency": "usd",
"convention": "act360",
"spread_compound_method": "none_simple",
"fixing_method": "rfr_observation_shift(5)",
"settle": 1,
"ex_div": "1b",
},
"usd_stir": {
"frequency": "q",
"roll": "imm",
"eom": False,
"modifier": "mf",
"calendar": "nyc",
"payment_lag": 0,
"currency": "usd",
"convention": "actacticma",
"nominal": 1000000.0,
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
"leg2_fixing_series": "usd_rfr",
},
"usd_stir1": {
"frequency": "m",
"roll": "som",
"eom": False,
"modifier": "mf",
"calendar": "nyc",
"payment_lag": 0,
"currency": "usd",
"convention": "actacticma",
"nominal": 5000400.0,
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay_avg",
"leg2_fixing_series": "usd_rfr",
},
"eur_stir": {
"frequency": "q",
"roll": "imm",
"eom": False,
"modifier": "mf",
"calendar": "tgt",
"payment_lag": 0,
"currency": "eur",
"convention": "actacticma",
"nominal": 1000000.0,
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
"leg2_fixing_series": "eur_rfr",
},
"eur_stir1": {
"frequency": "m",
"roll": "som",
"eom": False,
"modifier": "mf",
"calendar": "tgt",
"payment_lag": 0,
"currency": "eur",
"convention": "actacticma",
"nominal": 3000000.0,
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay_avg",
"leg2_fixing_series": "eur_rfr",
},
"eur_stir3": {
"frequency": "q",
"roll": "imm",
"eom": False,
"modifier": "mf",
"calendar": "tgt",
"payment_lag": 0,
"currency": "eur",
"convention": "actacticma",
"nominal": 1000000.0,
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "ibor(2)",
"leg2_fixing_series": "eur_ibor",
},
"gbp_stir": {
"frequency": "q",
"roll": "imm",
"eom": False,
"modifier": "mf",
"calendar": "ldn",
"payment_lag": 0,
"currency": "gbp",
"convention": "actacticma",
"nominal": 1000000.0,
"leg2_spread_compound_method": "none_simple",
"leg2_fixing_method": "rfr_payment_delay",
"leg2_fixing_series": "gbp_rfr",
},
"uk_gb_2y": {
"calendar": "ldn",
"currency": "gbp",
"calc_mode": "ice_gbp",
"nominal": 100000.0,
"coupon": 3.0,
},
"uk_gb_5y": {
"calendar": "ldn",
"currency": "gbp",
"calc_mode": "ice_gbp",
"nominal": 100000.0,
"coupon": 4.0,
},
"uk_gb_10y": {
"calendar": "ldn",
"currency": "gbp",
"calc_mode": "ice_gbp",
"nominal": 100000.0,
"coupon": 4.0,
},
"uk_gb_30y": {
"calendar": "ldn",
"currency": "gbp",
"calc_mode": "ice_gbp",
"nominal": 100000.0,
"coupon": 4.0,
},
"us_gb_2y": {
"calendar": "fed",
"currency": "usd",
"calc_mode": "ust_short",
"nominal": 200000.0,
"coupon": 6.0,
},
"us_gb_3y": {
"calendar": "fed",
"currency": "usd",
"calc_mode": "ust_short",
"nominal": 200000.0,
"coupon": 6.0,
},
"us_gb_5y": {
"calendar": "fed",
"currency": "usd",
"calc_mode": "ust_short",
"nominal": 100000.0,
"coupon": 6.0,
},
"us_gb_10y": {
"calendar": "fed",
"currency": "usd",
"calc_mode": "ust_long",
"nominal": 100000.0,
"coupon": 6.0,
},
"us_gb_30y": {
"calendar": "fed",
"currency": "usd",
"calc_mode": "ust_long",
"nominal": 100000.0,
"coupon": 6.0,
},
"de_gb_2y": {
"calendar": "tgt",
"currency": "eur",
"calc_mode": "eurex_eur",
"nominal": 100000.0,
"coupon": 6.0,
},
"de_gb_5y": {
"calendar": "tgt",
"currency": "eur",
"calc_mode": "eurex_eur",
"nominal": 100000.0,
"coupon": 6.0,
},
"de_gb_10y": {
"calendar": "tgt",
"currency": "eur",
"calc_mode": "eurex_eur",
"nominal": 100000.0,
"coupon": 6.0,
},
"de_gb_30y": {
"calendar": "tgt",
"currency": "eur",
"calc_mode": "eurex_eur",
"nominal": 100000.0,
"coupon": 4.0,
},
"fr_gb_5y": {
"calendar": "tgt",
"currency": "eur",
"calc_mode": "eurex_eur",
"nominal": 100000.0,
"coupon": 6.0,
},
"fr_gb_10y": {
"calendar": "tgt",
"currency": "eur",
"calc_mode": "eurex_eur",
"nominal": 100000.0,
"coupon": 6.0,
},
"sp_gb_10y": {
"calendar": "tgt",
"currency": "eur",
"calc_mode": "eurex_eur",
"nominal": 100000.0,
"coupon": 6.0,
},
"ch_gb_10y": {
"calendar": "zur",
"currency": "chf",
"calc_mode": "eurex_chf",
"nominal": 100000.0,
"coupon": 6.0,
},
}
if DEVELOPMENT == "True":
# DEVELOPMENT mode is used to load and create instrument specs from a CSV file.
# This is loaded by default and slower to parse than directly creating a dict
# So when packaging output the INSTRUMENT_SPEC dict and paste into the non-development
# section.
if version.parse(pd.__version__) < version.parse("3.0.0"):
raise RuntimeError(
"Development of instrument `spec` loading from CSV should be handled by pandas >= 3.0."
"To avoid development mode set DEVELOPMENT=False."
)
path = "data/__instrument_spec.csv"
abspath = os.path.dirname(os.path.abspath(__file__))
target = os.path.join(abspath, path)
df2 = pd.read_csv(target, header=[0, 1, 2, 3], index_col=[0])
for column in df2.columns:
df2[column] = df2[column].astype(column[3]) # type: ignore[call-overload]
df2_legs = df2.loc[:, (slice(None), ["leg1", "leg2"])]
INSTRUMENT_SPECS = {}
for spec in df2_legs.index:
leg1 = df2_legs.loc[spec].dropna().droplevel([0, 3]).loc["leg1"].to_dict()
try:
leg2 = df2_legs.loc[spec].dropna().droplevel([0, 3]).loc["leg2"].to_dict()
except KeyError:
leg2 = {}
INSTRUMENT_SPECS.update({spec: {**leg1, **{f"leg2_{k}": v for k, v in leg2.items()}}})
# extra dtype conversion mappings for keys
def _map_str_float_int(v: Any) -> Any:
try:
return int(float(v))
except (ValueError, TypeError):
return v
_maps = {
"roll": _map_str_float_int,
"leg2_roll": _map_str_float_int,
}
for _, v in INSTRUMENT_SPECS.items():
for k2, v2 in v.items():
if k2 in _maps:
v[k2] = _maps[k2](v2)
================================================
FILE: python/rateslib/curves/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from rateslib.curves.curves import (
CompositeCurve,
CreditImpliedCurve,
Curve,
LineCurve,
MultiCsaCurve,
ProxyCurve,
RolledCurve,
ShiftedCurve,
TranslatedCurve,
_BaseCurve,
_WithMutability,
_WithOperations,
index_value,
)
from rateslib.curves.interpolation import index_left
from rateslib.curves.utils import (
_CurveInterpolator,
_CurveMeta,
_CurveNodes,
_CurveSpline,
_CurveType,
_ProxyCurveInterpolator,
average_rate,
)
__all__ = (
"Curve",
"LineCurve",
"CompositeCurve",
"MultiCsaCurve",
"ProxyCurve",
"CreditImpliedCurve",
"RolledCurve",
"ShiftedCurve",
"TranslatedCurve",
"_BaseCurve",
"_WithOperations",
"_WithMutability",
"average_rate",
"index_left",
"index_value",
"_CurveMeta",
"_CurveType",
"_CurveSpline",
"_CurveInterpolator",
"_CurveNodes",
"_ProxyCurveInterpolator",
)
================================================
FILE: python/rateslib/curves/_parsers.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import warnings
from collections.abc import Sequence
from typing import TYPE_CHECKING, TypeVar
import rateslib.errors as err
from rateslib import defaults
from rateslib.curves import MultiCsaCurve, ProxyCurve
from rateslib.curves.utils import _CurveType
from rateslib.enums.generics import Err, NoInput, Ok
if TYPE_CHECKING:
from rateslib.local_types import (
CurveInput,
CurveInput_,
CurveOption,
CurveOption_,
CurveOrId,
Curves_,
Curves_DiscTuple,
Curves_Tuple,
Result,
Solver,
_BaseCurve,
_BaseCurve_,
)
def _map_curve_or_id_from_solver_(curve: CurveOrId, solver: Solver) -> _BaseCurve:
"""
Maps a "Curve | str" to a "Curve" via a Solver mapping.
If a Curve, runs a check against whether that Curve is associated with the given Solver,
and perform an action based on `defaults.curve_not_in_solver`
"""
if isinstance(curve, str):
return solver._get_pre_curve(curve)
elif type(curve) is ProxyCurve or type(curve) is MultiCsaCurve:
# TODO: (mid) consider also adding CompositeCurves as exceptions under the same rule
# Proxy curves and MultiCsaCurves can exist outside of Solvers but be constructed
# directly from an FXForwards object tied to a Solver using only a Solver's
# dependent curves and AD variables.
return curve
else:
try:
# it is a safeguard to load curves from solvers when a solver is
# provided and multiple curves might have the same id
__: _BaseCurve = solver._get_pre_curve(curve.id)
if id(__) != id(curve): # Python id() is a memory id, not a string label id.
raise ValueError(
"A curve has been supplied, as part of ``curves``, which has the same "
f"`id` ('{curve.id}'),\nas one of the curves available as part of the "
"Solver's collection but is not the same object.\n"
"This is ambiguous and cannot price.\n"
"Either refactor the arguments as follows:\n"
"1) remove the conflicting curve: [curves=[..], solver=] -> "
"[curves=None, solver=]\n"
"2) change the `id` of the supplied curve and ensure the rateslib.defaults "
"option 'curve_not_in_solver' is set to 'ignore'.\n"
" This will remove the ability to accurately price risk metrics.",
)
return __
except AttributeError:
raise AttributeError(
"`curve` has no attribute `id`, likely it not a valid object, got: "
f"{curve}.\nSince a solver is provided have you missed labelling the `curves` "
f"of the instrument or supplying `curves` directly?",
)
except KeyError:
if defaults.curve_not_in_solver == "ignore":
return curve
elif defaults.curve_not_in_solver == "warn":
warnings.warn("`curve` not found in `solver`.", UserWarning)
return curve
else:
raise ValueError("`curve` must be in `solver`.")
def _map_curve_from_solver_(curve: CurveInput, solver: Solver) -> CurveOption:
"""
Maps a "Curve | str | dict[str, Curve | str]" to a "Curve | dict[str, Curve]" via a Solver.
If curve input involves strings get objects directly from solver curves mapping.
This is the explicit variety which does not handle NoInput.
"""
if isinstance(curve, dict):
mapped_dict: dict[str, _BaseCurve] = {
k: _map_curve_or_id_from_solver_(v, solver) for k, v in curve.items()
}
return mapped_dict
else:
return _map_curve_or_id_from_solver_(curve, solver)
def _map_curve_from_solver(curve: CurveInput_, solver: Solver) -> CurveOption_:
"""
Maps a "Curve | str | dict[str, Curve | str] | NoInput" to a
"Curve | dict[str, Curve] | NoInput" via a Solver.
This is the inexplicit variety which handles NoInput.
"""
if isinstance(curve, NoInput) or curve is None:
return NoInput(0)
else:
return _map_curve_from_solver_(curve, solver)
def _validate_curve_not_str(curve: CurveOrId) -> _BaseCurve:
if isinstance(curve, str):
raise ValueError("`curves` must contain Curve, not str, if `solver` not given.")
return curve
def _validate_no_str_in_curve_input(curve: CurveInput_) -> CurveOption_:
"""
If a Solver is not available then raise an Exception if a CurveInput contains string Id.
"""
if isinstance(curve, dict):
return {k: _validate_curve_not_str(v) for k, v in curve.items()}
elif isinstance(curve, NoInput) or curve is None:
return NoInput(0)
else:
return _validate_curve_not_str(curve)
def _get_curves_maybe_from_solver(
curves_attr: Curves_,
solver: Solver | NoInput,
curves: Curves_,
) -> Curves_DiscTuple:
"""
Attempt to resolve curves as a variety of input types to a 4-tuple consisting of:
(leg1 forecasting, leg1 discounting, leg2 forecasting, leg2 discounting)
Parameters
----------
curves_attr : Curves
This is an external set of Curves which is used as a substitute for pricing. These might
be taken from an Instrument at initialisation, for example.
solver: Solver
Solver containing the Curves mapping
curves: Curves
A possible override option to allow curves to be specified directly, even if they exist
as an attribute on the Instrument.
Returns
-------
4-Tuple of Curve, dict[str, Curve], NoInput
"""
if isinstance(curves, NoInput) and isinstance(curves_attr, NoInput):
# no data is available so consistently return a 4-tuple of no data
return (NoInput(0), NoInput(0), NoInput(0), NoInput(0))
elif isinstance(curves, NoInput):
# set the `curves` input as that which is set as attribute at instrument init.
curves = curves_attr
# refactor curves into a list
if isinstance(curves, str) or not isinstance(curves, Sequence): # Sequence can be str!
# convert isolated value input to list
curves_as_list: list[
_BaseCurve
| dict[str, str | _BaseCurve]
| dict[str, str]
| dict[str, _BaseCurve]
| NoInput
| str
] = [curves]
else:
curves_as_list = list(curves)
# parse curves_as_list
if isinstance(solver, NoInput):
curves_parsed: tuple[CurveOption_, ...] = tuple(
_validate_no_str_in_curve_input(curve) for curve in curves_as_list
)
else:
try:
curves_parsed = tuple(_map_curve_from_solver(curve, solver) for curve in curves_as_list)
except KeyError as e:
raise ValueError(
"`curves` must contain str curve `id` s existing in `solver` "
"(or its associated `pre_solvers`).\n"
f"The sought id was: '{e.args[0]}'.\n"
f"The available ids are {list(solver.pre_curves.keys())}.",
)
curves_tuple = _make_4_tuple_of_curve(curves_parsed)
return _validate_disc_curves_are_not_dict(curves_tuple)
def _make_4_tuple_of_curve(curves: tuple[CurveOption_, ...]) -> Curves_Tuple:
"""Convert user sequence input to a 4-Tuple."""
n = len(curves)
if n == 1:
curves *= 4
elif n == 2:
curves *= 2
elif n == 3:
curves += (curves[1],)
elif n > 4:
raise ValueError("Can only supply a maximum of 4 `curves`.")
return curves # type: ignore[return-value]
def _validate_curve_is_not_dict(curve: CurveOption_) -> _BaseCurve_:
if isinstance(curve, dict):
raise ValueError("`disc_curve` cannot be supplied as, or inferred from, a dict of Curves.")
return curve
def _validate_disc_curves_are_not_dict(curves_tuple: Curves_Tuple) -> Curves_DiscTuple:
return (
curves_tuple[0],
_validate_curve_is_not_dict(curves_tuple[1]),
curves_tuple[2],
_validate_curve_is_not_dict(curves_tuple[3]),
)
def _validate_curve_not_no_input(curve: _BaseCurve_) -> _BaseCurve:
if isinstance(curve, NoInput):
raise ValueError("`curve` must be supplied. Got NoInput or None.")
return curve
T = TypeVar("T")
def _validate_obj_not_no_input(obj: T | NoInput, name: str) -> T:
if isinstance(obj, NoInput):
raise ValueError(f"`{name}` must be supplied. Got NoInput or None.")
return obj
def _disc_maybe_from_curve(curve: CurveOption_, disc_curve: _BaseCurve_) -> _BaseCurve_:
"""Return a discount curve, pointed as the `curve` if not provided and if suitable Type."""
if isinstance(disc_curve, NoInput):
if isinstance(curve, dict):
raise ValueError("`disc_curve` cannot be inferred from a dictionary of curves.")
elif isinstance(curve, NoInput):
return NoInput(0)
elif curve._base_type == _CurveType.values:
raise ValueError("`disc_curve` cannot be inferred from a non-DF based curve.")
_: _BaseCurve | NoInput = curve
else:
_ = disc_curve
return _
def _disc_required_maybe_from_curve(curve: CurveOption_, disc_curve: CurveOption_) -> _BaseCurve:
"""Return a discount curve, pointed as the `curve` if not provided and if suitable Type."""
if isinstance(disc_curve, dict):
raise NotImplementedError("`disc_curve` cannot currently be inferred from a dict.")
_: _BaseCurve_ = _disc_maybe_from_curve(curve, disc_curve)
if isinstance(_, NoInput):
raise TypeError(
"`curves` have not been supplied correctly. "
"A `disc_curve` is required to perform function."
)
return _
def _try_disc_required_maybe_from_curve(
curve: CurveOption_, disc_curve: CurveOption_
) -> Result[_BaseCurve]:
"""Return a discount curve, pointed as the `curve` if not provided and if suitable Type."""
if isinstance(disc_curve, dict):
return Err(NotImplementedError(err.NI_NO_DISC_FROM_DICT))
if isinstance(disc_curve, NoInput):
if isinstance(curve, dict):
return Err(NotImplementedError(err.NI_NO_DISC_FROM_DICT))
elif isinstance(curve, NoInput):
return Err(ValueError(err.VE_NEEDS_DISC_CURVE))
elif curve._base_type == _CurveType.values:
return Err(ValueError(err.VE_NO_DISC_FROM_VALUES))
return Ok(curve)
if disc_curve._base_type == _CurveType.values:
return Err(ValueError(err.VE_NO_DISC_FROM_VALUES))
return Ok(disc_curve)
def _maybe_set_ad_order(
curve: CurveOption_, order: int | dict[str, int | None] | None
) -> int | dict[str, int | None] | None:
"""method is used internally to set AD order and then later revert the curve to its original"""
if isinstance(curve, NoInput) or order is None:
return None # do nothing
else:
if isinstance(curve, dict):
# method will return a dict of orders if a dict of curves is provided as input
if isinstance(order, dict):
return {
k: _maybe_set_ad_order(v, order[k]) # type: ignore[misc]
for k, v in curve.items()
}
else:
return {
k: _maybe_set_ad_order(v, order) # type: ignore[misc]
for k, v in curve.items()
}
else:
try:
original_order = curve.ad
curve._set_ad_order(order) # type: ignore[arg-type]
except AttributeError:
# Curve has no method (possibly a custom curve and not a subclass of _BaseCurve)
return None
return original_order
================================================
FILE: python/rateslib/curves/academic/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from rateslib.curves.academic.ns import NelsonSiegelCurve
from rateslib.curves.academic.nss import NelsonSiegelSvenssonCurve
from rateslib.curves.academic.sw import SmithWilsonCurve
__all__ = ["NelsonSiegelCurve", "NelsonSiegelSvenssonCurve", "SmithWilsonCurve"]
================================================
FILE: python/rateslib/curves/academic/ns.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from uuid import uuid4
import numpy as np
from rateslib import defaults
from rateslib.curves import _BaseCurve, _CurveMeta, _CurveNodes, _CurveType, _WithMutability
from rateslib.dual import Dual, Dual2, dual_exp, set_order_convert
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.mutability import _clear_cache_post, _new_state_post
from rateslib.scheduling import Convention, dcf, get_calendar
from rateslib.scheduling.convention import _get_convention
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
CalInput,
DualTypes,
Variable,
datetime,
float_,
int_,
str_,
)
class NelsonSiegelCurve(_WithMutability, _BaseCurve):
r"""
A Nelson-Siegel curve defined by discount factors.
The continuously compounded rate to maturity, :math:`r(T)`, is given by the following
equation of **four** parameters, :math:`[\beta_0, \beta_1, \beta_2, \lambda]`
.. math::
r(T) = \begin{bmatrix} \beta_0 & \beta_1 & \beta_2 \end{bmatrix} \begin{bmatrix} 1 \\ \lambda (1- e^{-T/ \lambda}) / T \\ \lambda (1- e^{-T/ \lambda})/ T - e^{-T/ \lambda} \end{bmatrix}
The **discount factors** on that curve equaling:
.. math::
v(T) = e^{-T r(T)}
*T* is determined as the day count fraction between the start of the curve and the maturity
under the given the ``convention`` and ``calendar``.
.. role:: red
.. role:: green
Parameters
----------
dates: 2-tuple of datetime, :red:`required`
The dates defining the eval date and final date of the *Curve*.
parameters: 4-tuple of Dual, Dual2, Variable, float, :red:`required`
The parameters associated with the *Curve*. In order these are
:math:`[\beta_0, \beta_1, \beta_2, \lambda]`.
id : str, :green:`optional (set randomly)`
The unique identifier to distinguish between curves in a multicurve framework.
convention : Convention, str, :green:`optional (set as ActActISDA)`
The convention of the curve for determining rates. Please see
:meth:`dcf()` for all available options.
modifier : str, :green:`optional (set by 'defaults')`
The modification rule, in {"F", "MF", "P", "MP"}, for determining rates when input as
a tenor, e.g. "3M".
calendar : calendar, str, :green:`optional (set as 'all')`
The holiday calendar object to use. If str, looks up named calendar from
static data. Used for determining rates.
ad : int in {0, 1, 2}, :green:`optional`
Sets the automatic differentiation order. Defines whether to convert node
values to float, :class:`~rateslib.dual.Dual` or
:class:`~rateslib.dual.Dual2`. It is advised against
using this setting directly. It is mainly used internally.
index_base: float, :green:`optional`
The initial index value at the initial node date of the curve. Used for
forecasting future index values.
index_lag : int, :green:`optional (set by 'defaults')`
Number of months of by which the index lags the date. For example if the initial
curve node date is 1st Sep 2021 based on the inflation index published
17th June 2023 then the lag is 3 months. Best practice is to use 0 months.
collateral : str, :green:`optional (set as None)`
A currency identifier to denote the collateral currency against which the discount factors
for this *Curve* are measured.
credit_discretization : int, :green:`optional (set by 'defaults')`
A parameter for numerically solving the integral for credit protection legs and default
events. Expressed in calendar days. Only used by *Curves* functioning as *hazard Curves*.
credit_recovery_rate : Variable | float, :green:`optional (set by 'defaults')`
A parameter used in pricing credit protection legs and default events.
""" # noqa: E501
# ABC properties
_ini_solve = 0
_base_type = _CurveType.dfs
_id = None # type: ignore[assignment]
_meta = None # type: ignore[assignment]
_nodes = None # type: ignore[assignment]
_ad = None # type: ignore[assignment]
_interpolator = None # type: ignore[assignment]
_n = 4
@_new_state_post
def __init__(
self,
dates: tuple[datetime, datetime],
parameters: tuple[DualTypes, DualTypes, DualTypes, DualTypes],
id: str_ = NoInput(0), # noqa: A002
*,
convention: Convention | str | NoInput = NoInput(0),
modifier: str | NoInput = NoInput(0),
calendar: CalInput = NoInput(0),
ad: int = 0,
index_base: Variable | float_ = NoInput(0),
index_lag: int | NoInput = NoInput(0),
collateral: str_ = NoInput(0),
credit_discretization: int_ = NoInput(0),
credit_recovery_rate: Variable | float_ = NoInput(0),
):
self._nodes = _CurveNodes({dates[0]: 0.0, dates[1]: 0.0})
self._params = parameters
self._meta = _CurveMeta(
_calendar=get_calendar(calendar),
_convention=_get_convention(_drb(Convention.ActActISDA, convention)),
_modifier=_drb(defaults.modifier, modifier).upper(),
_index_base=index_base,
_index_lag=_drb(defaults.index_lag_curve, index_lag),
_collateral=_drb(None, collateral),
_credit_discretization=_drb(
defaults.cds_protection_discretization, credit_discretization
),
_credit_recovery_rate=_drb(defaults.cds_recovery_rate, credit_recovery_rate),
)
self._id = _drb(uuid4().hex[:5], id) # 1 in a million clash
self._set_ad_order(order=ad) # will also clear and initialise the cache
@property
def params(self) -> tuple[DualTypes, DualTypes, DualTypes, DualTypes]:
r"""
The parameters associated with the *Curve*.
In order these are :math:`[\beta_0, \beta_1, \beta_2, \lambda]`.
"""
return self._params
def __getitem__(self, date: datetime) -> DualTypes:
if defaults.curve_caching and date in self._cache:
return self._cache[date]
if date < self.nodes.initial:
return 0.0
elif date == self.nodes.initial:
return 1.0
b0, b1, b2, l0 = self._params
T = dcf(
self.nodes.initial, date, convention=self.meta.convention, calendar=self.meta.calendar
)
a1 = l0 * (1 - dual_exp(-T / l0)) / T
a2 = a1 - dual_exp(-T / l0)
r = b0 + a1 * b1 + a2 * b2
return self._cached_value(date, dual_exp(-T * r))
# Solver mutability methods
def _get_node_vector(self) -> np.ndarray[tuple[int, ...], np.dtype[Any]]:
return np.array(self._params)
def _get_node_vars(self) -> tuple[str, ...]:
return tuple(f"{self._id}{i}" for i in range(self._ini_solve, self._n))
@_new_state_post
@_clear_cache_post
def _set_node_vector(self, vector: list[DualTypes], ad: int) -> None:
if ad == 0:
self._params = tuple(_dual_float(_) for _ in vector) # type: ignore[assignment]
elif ad == 1:
self._params = tuple( # type: ignore[assignment]
Dual(_dual_float(_), [f"{self._id}{i}"], []) for i, _ in enumerate(vector)
)
else: # ad == 2
self._params = tuple( # type: ignore[assignment]
Dual2(_dual_float(_), [f"{self._id}{i}"], [], []) for i, _ in enumerate(vector)
)
@_clear_cache_post
def _set_ad_order(self, order: int) -> None:
if self.ad == order:
return None
elif order not in [0, 1, 2]:
raise ValueError("`order` can only be in {0, 1, 2} for auto diff calcs.")
self._ad = order
self._params = tuple( # type: ignore[assignment]
set_order_convert(_, order, [f"{self._id}{i}"]) for i, _ in enumerate(self.params)
)
================================================
FILE: python/rateslib/curves/academic/nss.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from uuid import uuid4
import numpy as np
from rateslib import defaults
from rateslib.curves import _BaseCurve, _CurveMeta, _CurveNodes, _CurveType, _WithMutability
from rateslib.dual import Dual, Dual2, dual_exp, set_order_convert
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.mutability import _clear_cache_post, _new_state_post
from rateslib.scheduling import Convention, dcf, get_calendar
from rateslib.scheduling.convention import _get_convention
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
CalInput,
DualTypes,
Variable,
datetime,
float_,
int_,
str_,
)
class NelsonSiegelSvenssonCurve(_WithMutability, _BaseCurve):
r"""
A Nelson-Siegel-Svensson curve defined by discount factors.
The continuously compounded rate to maturity, :math:`r(T)`, is given by the following
equation of **six** parameters, :math:`[\beta_0, \beta_1, \beta_2, \lambda_0, \beta_3, \lambda_1]`
.. math::
r(T) = \begin{bmatrix} \beta_0 & \beta_1 & \beta_2 & \beta_3 \end{bmatrix} \begin{bmatrix} 1 \\ \lambda_0 (1- e^{-T/ \lambda_0}) / T \\ \lambda_0 (1- e^{-T/ \lambda_0})/ T - e^{-T/ \lambda_0} \\ \lambda_1 (1- e^{-T/ \lambda_1})/ T - e^{-T/ \lambda_1} \end{bmatrix}
The **discount factors** on that curve equaling:
.. math::
v(T) = e^{-T r(T)}
*T* is determined as the day count fraction between the start of the curve and the maturity
under the given the ``convention`` and ``calendar``.
.. role:: red
.. role:: green
Parameters
----------
dates: 2-tuple of datetime, :red:`required`
The dates defining the eval date and final date of the *Curve*.
parameters: 6-tuple of Dual, Dual2, Variable, float, :red:`required`
The parameters associated with the *Curve*. In order these are
:math:`[\beta_0, \beta_1, \beta_2, \lambda_0, \beta_3, \lambda_1]`.
id : str, :green:`optional (set randomly)`
The unique identifier to distinguish between curves in a multicurve framework.
convention : Convention, str, :green:`optional (set as ActActISDA)`
The convention of the curve for determining rates. Please see
:meth:`dcf()` for all available options.
modifier : str, :green:`optional (set by 'defaults')`
The modification rule, in {"F", "MF", "P", "MP"}, for determining rates when input as
a tenor, e.g. "3M".
calendar : calendar, str, :green:`optional (set as 'all')`
The holiday calendar object to use. If str, looks up named calendar from
static data. Used for determining rates.
ad : int in {0, 1, 2}, :green:`optional`
Sets the automatic differentiation order. Defines whether to convert node
values to float, :class:`~rateslib.dual.Dual` or
:class:`~rateslib.dual.Dual2`. It is advised against
using this setting directly. It is mainly used internally.
index_base: float, :green:`optional`
The initial index value at the initial node date of the curve. Used for
forecasting future index values.
index_lag : int, :green:`optional (set by 'defaults')`
Number of months of by which the index lags the date. For example if the initial
curve node date is 1st Sep 2021 based on the inflation index published
17th June 2023 then the lag is 3 months. Best practice is to use 0 months.
collateral : str, :green:`optional (set as None)`
A currency identifier to denote the collateral currency against which the discount factors
for this *Curve* are measured.
credit_discretization : int, :green:`optional (set by 'defaults')`
A parameter for numerically solving the integral for credit protection legs and default
events. Expressed in calendar days. Only used by *Curves* functioning as *hazard Curves*.
credit_recovery_rate : Variable | float, :green:`optional (set by 'defaults')`
A parameter used in pricing credit protection legs and default events.
""" # noqa: E501
# ABC properties
_ini_solve = 0
_base_type = _CurveType.dfs
_id = None # type: ignore[assignment]
_meta = None # type: ignore[assignment]
_nodes = None # type: ignore[assignment]
_ad = None # type: ignore[assignment]
_interpolator = None # type: ignore[assignment]
_n = 6
@_new_state_post
def __init__(
self,
dates: tuple[datetime, datetime],
parameters: tuple[DualTypes, DualTypes, DualTypes, DualTypes, DualTypes, DualTypes],
id: str_ = NoInput(0), # noqa: A002
*,
convention: Convention | str | NoInput = NoInput(0),
modifier: str | NoInput = NoInput(0),
calendar: CalInput = NoInput(0),
ad: int = 0,
index_base: Variable | float_ = NoInput(0),
index_lag: int | NoInput = NoInput(0),
collateral: str_ = NoInput(0),
credit_discretization: int_ = NoInput(0),
credit_recovery_rate: Variable | float_ = NoInput(0),
):
self._nodes = _CurveNodes({dates[0]: 0.0, dates[1]: 0.0})
self._params = parameters
self._meta = _CurveMeta(
_calendar=get_calendar(calendar),
_convention=_get_convention(_drb(Convention.ActActISDA, convention)),
_modifier=_drb(defaults.modifier, modifier).upper(),
_index_base=index_base,
_index_lag=_drb(defaults.index_lag_curve, index_lag),
_collateral=_drb(None, collateral),
_credit_discretization=_drb(
defaults.cds_protection_discretization, credit_discretization
),
_credit_recovery_rate=_drb(defaults.cds_recovery_rate, credit_recovery_rate),
)
self._id = _drb(uuid4().hex[:5], id) # 1 in a million clash
self._set_ad_order(order=ad) # will also clear and initialise the cache
@property
def params(self) -> tuple[DualTypes, DualTypes, DualTypes, DualTypes, DualTypes, DualTypes]:
r"""
The parameters associated with the *Curve*.
In order these are :math:`[\beta_0, \beta_1, \beta_2, \lambda_0, \beta_3, \lambda_1]`.
"""
return self._params
def __getitem__(self, date: datetime) -> DualTypes:
if defaults.curve_caching and date in self._cache:
return self._cache[date]
if date < self.nodes.initial:
return 0.0
elif date == self.nodes.initial:
return 1.0
b0, b1, b2, l0, b3, l1 = self._params
T = dcf(
self.nodes.initial, date, convention=self.meta.convention, calendar=self.meta.calendar
)
a1 = l0 * (1 - dual_exp(-T / l0)) / T
a2 = a1 - dual_exp(-T / l0)
x1 = l1 * (1 - dual_exp(-T / l1)) / T
x2 = x1 - dual_exp(-T / l1)
r = b0 + a1 * b1 + a2 * b2 + x2 * b3
return self._cached_value(date, dual_exp(-T * r))
# Solver mutability methods
def _get_node_vector(self) -> np.ndarray[tuple[int, ...], np.dtype[Any]]:
return np.array(self._params)
def _get_node_vars(self) -> tuple[str, ...]:
return tuple(f"{self._id}{i}" for i in range(self._ini_solve, self._n))
@_new_state_post
@_clear_cache_post
def _set_node_vector(self, vector: list[DualTypes], ad: int) -> None:
if ad == 0:
self._params = tuple(_dual_float(_) for _ in vector) # type: ignore[assignment]
elif ad == 1:
self._params = tuple( # type: ignore[assignment]
Dual(_dual_float(_), [f"{self._id}{i}"], []) for i, _ in enumerate(vector)
)
else: # ad == 2
self._params = tuple( # type: ignore[assignment]
Dual2(_dual_float(_), [f"{self._id}{i}"], [], []) for i, _ in enumerate(vector)
)
@_clear_cache_post
def _set_ad_order(self, order: int) -> None:
if self.ad == order:
return None
elif order not in [0, 1, 2]:
raise ValueError("`order` can only be in {0, 1, 2} for auto diff calcs.")
self._ad = order
self._params = tuple( # type: ignore[assignment]
set_order_convert(_, order, [f"{self._id}{i}"]) for i, _ in enumerate(self.params)
)
================================================
FILE: python/rateslib/curves/academic/sw.py
================================================
#############################################################
# COPYRIGHT 2022 Siffrorna Technology Limited
# This code may not be copied, modified, used or distributed
# except with the express permission and licence to
# do so, provided by the copyright holder.
# See: https://rateslib.com/py/en/latest/i_licence.html
#############################################################
from __future__ import annotations
from datetime import timezone
from functools import cached_property
from typing import TYPE_CHECKING
from uuid import uuid4
import numpy as np
from rateslib import defaults
from rateslib.curves import (
_BaseCurve,
_CurveMeta,
_CurveNodes,
_CurveType,
_WithMutability,
)
from rateslib.dual import dual_exp, dual_log
from rateslib.enums.generics import NoInput, _drb
from rateslib.mutability import _new_state_post
from rateslib.scheduling import Convention, get_calendar
from rateslib.scheduling.convention import _get_convention
if TYPE_CHECKING:
from numpy import float64 as Nf64 # noqa: N812
from numpy import object_ as Nobject # noqa: N812
from numpy.typing import NDArray
from rateslib.local_types import ( # pragma: no cover
CalInput,
DualTypes,
Variable,
datetime,
float_,
int_,
str_,
)
UTC = timezone.utc
class _NullInterpolator:
def _csolve(self, curve_type: _CurveType, nodes: _CurveNodes, ad: int) -> None:
pass
def _dual_sinh(x: DualTypes) -> DualTypes:
return (dual_exp(x) - dual_exp(-x)) * 0.5
class SmithWilsonCurve(_WithMutability, _BaseCurve):
r"""
A Smith-Wilson style *Curve* defined by discount factors.
The discount factors on this curve are defined by:
.. math::
v(t) = e^{-wt} + \mathbf{W}[t, \mathbf{u}] \mathbf{\hat{b}}
where,
.. math::
W(t, u) &= e^{-w(t+u)} \left ( \alpha \min(t, u) - e^{\alpha max(t, u)} sinh(\alpha min(t, u)) \right ) \\
w &= \ln ( 1 + UFR)
and :math:`\alpha` and :math:`UFR` are parameters controlling convergence to some rate in the
long term, and :math:`\mathbf{\hat{b}}` are calibration parameters. All 'time' quantities are
derived under an effective '*Act/365.25*' day count convention.
.. role:: red
.. role:: green
Parameters
----------
nodes: dict[datetime, float]
The parameters of the *Curve*. The value associated with the *initial node date* is
treated as :math:`\alpha`. All subsequent key-value pairs define the (Mx1) vectors
:math:`\mathbf{u}` and :math:`\mathbf{\hat{b}}` respectively.
ufr: float, :red:`required`
The rates that is denoted by the *'ultimate forward rate'*.
solve_alpha: bool, :green:`optional (set as False)`
Define whether :math:`\alpha` is to be treated as a parameter in the solver process
simultaneously with :math:`\mathbf{\hat{b}}`.
id : str, :green:`optional (set randomly)`
The unique identifier to distinguish between curves in a multicurve framework.
convention : Convention, str, :green:`optional (set as Act365_25)`
The convention of the curve for determining rates. Please see
:meth:`dcf()` for all available options.
modifier : str, :green:`optional (set by 'defaults')`
The modification rule, in {"F", "MF", "P", "MP"}, for determining rates when input as
a tenor, e.g. "3M".
calendar : calendar, str, :green:`optional (set as 'all')`
The holiday calendar object to use. If str, looks up named calendar from
static data. Used for determining rates.
ad : int in {0, 1, 2}, :green:`optional`
Sets the automatic differentiation order. Defines whether to convert node
values to float, :class:`~rateslib.dual.Dual` or
:class:`~rateslib.dual.Dual2`. It is advised against
using this setting directly. It is mainly used internally.
index_base: float, :green:`optional`
The initial index value at the initial node date of the curve. Used for
forecasting future index values.
index_lag : int, :green:`optional (set by 'defaults')`
Number of months of by which the index lags the date. For example if the initial
curve node date is 1st Sep 2021 based on the inflation index published
17th June 2023 then the lag is 3 months. Best practice is to use 0 months.
collateral : str, :green:`optional (set as None)`
A currency identifier to denote the collateral currency against which the discount factors
for this *Curve* are measured.
credit_discretization : int, :green:`optional (set by 'defaults')`
A parameter for numerically solving the integral for credit protection legs and default
events. Expressed in calendar days. Only used by *Curves* functioning as *hazard Curves*.
credit_recovery_rate : Variable | float, :green:`optional (set by 'defaults')`
A parameter used in pricing credit protection legs and default events.
Notes
-----
**EIOPA's Approach**
The Smith-Wilson *Curve* as `defined by EIOPA `__
is a *Curve* designed with the following properties:
- A matrix-type formulation to solve calibration parameters using linear algebra.
- An *'ultra-forward-rate (UFR)'* and convergence parameter :math:`\alpha` to control
the curve beyond points at which there might be priced market instruments.
The official version of the Smith-Wilson discount factor function is:
.. math::
v(t) = e^{-wt} + \mathbf{W}[t, \mathbf{u}]\mathbf{C} \mathbf{b}
In this equation a set of *N* bonds (likely coupon bearing) are selected from the market and
the vector :math:`\mathbf{u}`, of length *M*, contains ordered times to each cashflow of any
bond. The *(MxN)* matrix :math:`\mathbf{C}_{i,j}` structures individual cashflows attributable
to each bond, *j*, at cashflow date, :math:`u_i`. And :math:`\mathbf{b}`, the calibration
parameters, must have length *N*.
The Smith-Wilson concept is to use that same equation replacing, *t*, with each :math:`u_i`,
and then multiplying each cashflow of any bond by those relevant discount factors to return the
market price, :math:`\mathbf{p}`, i.e.
.. math::
\mathbf{p} = \mathbf{C^T} v[\mathbf{u}] = \mathbf{C^T} e^{-w\mathbf{u}} + \mathbf{C^T W[u,u] C b}
After this is rearranged it yields,
.. math::
\mathbf{b} = \left ( \mathbf{C^T W[u,u] C} \right )^{-1} ( \mathbf{p} - \mathbf{C^T} e^{-w \mathbf{u}} )
which is transformable into the equations recognisable in the EIOPA document using their same
substitutions,
.. math::
\mathbf{b} &= \left ( \mathbf{Q^T H[u,u] Q} \right )^{-1} ( \mathbf{p} - \mathbf{q} ) \\
\mathbf{d} &= e^{-w \mathbf{u}} \\
\mathbf{Q} &= \mathbf{d_\Delta C} \\
\mathbf{W[u,u]} &= \mathbf{d_\Delta H[u,u] d_\Delta} \\
\mathbf{q} &= \mathbf{C^T d} \\
**Rateslib's Approach**
*Rateslib* makes two key changes. Firstly it recognises that for an unchanged :math:`\mathbf{u}`
vector, i.e. the cashflow dates remain the same, and unchanged discount factors at those dates,
i.e. unchanged :math:`v[\mathbf{u}]` the system can be equivalently formulated
in terms of zero coupon bonds, so that:
.. math::
\underbrace{\mathbf{C b}}_{(M \times N) (N \times 1)} = \underbrace{\mathbf{I \hat{b}}}_{(M \times M) (M \times 1)}
Since the market prices of the bonds are known and the discount factors of these synthesised
zero coupon bonds are not known apriori this transformation does not allow the linear
algebraic solution (EIOPA's approach) to remain viable. That leads to the second change.
*Rateslib* does not bootstrap or algebraically solve *Curves*. It uses a global solver.
This is why the above change is permissible because even under the
reformulation it will still converge on *a* solution for :math:`\mathbf{\hat{b}}` which
reprices the bonds.
**Implication**
The general rules for *Curve* solving remain applicable; if M > N then the system is
underspecified and may result in spurious behavior. If M = N and maturities are all
appropriately chosen the solution is exact and unique.
Because *rateslib* treats *Curve* parameterization and *Instrument* calibration as two
separate processes there is increased flexibility in both aspects. The calibrating bonds
do not necessarily have to match the *nodes* of the Smith-Wilson *Curve*. Under EIOPA's
approach this is obviously not possible because the framework of equations relies on
setting up the appropriate cashflow matrix and array of cashflow dates.
.. note::
*Rateslib* will not determine the matrices :math:`\mathbf{W[u,u], H[u,u], Q, C}` etc.
becuase its methods does not require them
Examples
--------
The `standard EIOPA example `__
happens to include a 20x20 cashflow matrix, each bond valued at par with increasing coupon
rates, implying increasing YTM.
.. image:: ../_static/eiopa_c.png
:align: center
:alt: EIOPA Example of Smith-Wilson Curve
:height: 304
:width: 597
Because this is a square matrix and satisfies the criteria above the *rateslib* solution
will match EIOPA's.
.. ipython:: python
:suppress:
from rateslib import FixedRateBond, Solver, SmithWilsonCurve, dt
.. ipython:: python
sw = SmithWilsonCurve(
nodes={
dt(2000, 1, 1): 0.12376, # <-- alpha value used in EIOPA file
**{dt(2000+i, 1, 1): 0.1 for i in range(1, 21)}
},
solve_alpha=False,
ufr= 4.2,
id="academic_curve",
)
coupons = [0.2, 0.225, 0.3, 0.425, 0.55, 0.7, 0.85, 1.0, 1.15, 1.275, 1.4, 1.475, 1.575, 1.65, 1.7, 1.75, 1.8, 1.825, 1.85, 1.875]
bonds = [
FixedRateBond(
effective=dt(2000, 1, 1),
termination=f"{i}Y", # <- 1Y to 20Y
fixed_rate=coupons[i-1], # <- Coupons as specified
calendar="all",
ex_div=1,
convention="actacticma",
frequency="A",
curves="academic_curve",
metric="dirty_price"
)
for i in range(1, 21)
]
prices = [100.0] * 20 # <- All bonds priced to par
Solver(curves=[sw], instruments=bonds, s=prices)
We can plot the resultant curves, which can be compared directly with the EIOPA file.
.. ipython:: python
sw.plot("Z")
sw.plot("1b")
.. plot::
from rateslib import SmithWilsonCurve, Solver, dt, FixedRateBond
import matplotlib.pyplot as plt
sw = SmithWilsonCurve(
nodes={
dt(2000, 1, 1): 0.12376,
**{dt(2000+i, 1, 1): 0.1 for i in range(1, 21)}
},
solve_alpha=False,
ufr= 4.2,
id="academic_curve",
)
coupons = [0.2, 0.225, 0.3, 0.425, 0.55, 0.7, 0.85, 1.0, 1.15, 1.275, 1.4, 1.475, 1.575, 1.65, 1.7, 1.75, 1.8, 1.825, 1.85, 1.875]
bonds = [
FixedRateBond(
effective=dt(2000, 1, 1),
termination=f"{i}Y", # <- 1Y to 20Y
fixed_rate=coupons[i-1], # <- Coupons as specified
calendar="all",
ex_div=1,
convention="actacticma",
frequency="A",
curves="academic_curve",
metric="dirty_price"
)
for i in range(1, 21)
]
prices = [100.0] * 20 # <- All bonds priced to par
Solver(curves=[sw], instruments=bonds, s=prices)
fig1, ax1, lines = sw.plot("z")
del fig1, ax1
plt.close()
fig, ax, _ = sw.plot("1b")
ax.plot(lines[0]._x, lines[0]._y)
plt.show()
plt.close()
""" # noqa: E501
# ABC properties
_ini_solve = 0
_base_type: _CurveType = _CurveType.dfs
_id: str = None # type: ignore[assignment]
_ad: int = None # type: ignore[assignment]
_meta: _CurveMeta = None # type: ignore[assignment]
_nodes: _CurveNodes = None # type: ignore[assignment]
_interpolator = _NullInterpolator() # type: ignore[assignment]
@_new_state_post
def __init__(
self,
nodes: dict[datetime, DualTypes],
ufr: DualTypes,
solve_alpha: bool = False,
id: str_ = NoInput(0), # noqa: A002
*,
convention: Convention | str | NoInput = NoInput(0),
modifier: str | NoInput = NoInput(0),
calendar: CalInput = NoInput(0),
ad: int = 0,
index_base: Variable | float_ = NoInput(0),
index_lag: int | NoInput = NoInput(0),
collateral: str_ = NoInput(0),
credit_discretization: int_ = NoInput(0),
credit_recovery_rate: Variable | float_ = NoInput(0),
):
self._nodes = _CurveNodes(_nodes=nodes)
if not solve_alpha:
self._ini_solve = 1
self._ufr = ufr
self._meta = _CurveMeta(
_calendar=get_calendar(calendar),
_convention=_get_convention(_drb(Convention.Act365_25, convention)),
_modifier=_drb(defaults.modifier, modifier).upper(),
_index_base=index_base,
_index_lag=_drb(defaults.index_lag_curve, index_lag),
_collateral=_drb(None, collateral),
_credit_discretization=_drb(
defaults.cds_protection_discretization, credit_discretization
),
_credit_recovery_rate=_drb(defaults.cds_recovery_rate, credit_recovery_rate),
)
self._id = _drb(uuid4().hex[:5], id) # 1 in a million clash
self._set_ad_order(order=ad) # will also clear and initialise the cache
@property
def alpha(self) -> DualTypes:
r"""The :math:`\alpha` value of the *Curve*."""
return self.nodes.nodes[self.nodes.initial]
@property
def b(self) -> NDArray[Nobject]:
r"""The :math:`\mathbf{\hat{b}}` parameters value of the *Curve*."""
return np.array(self.nodes.values[1:])
@property
def ufr(self) -> DualTypes:
"""The UFR value of the *Curve*."""
return self._ufr
@property
def k(self) -> DualTypes:
r"""
The :math:`\kappa` value as defined in the EIOPA documentation.
Under EIOPA:
.. math::
\kappa = \frac{ 1 + \alpha \mathbf{u^T Q b} }{ sinh[\alpha \mathbf{u^T}] \mathbf{Q b} }
"""
Q = np.diag([dual_exp(-self.w * _) for _ in self.u]) # Q is d_delta
numerator: DualTypes = 1 + self.alpha * np.matmul(
np.matmul(self.u[None, :], Q), self.b[:, None]
)
denominator: DualTypes = np.matmul(
np.matmul(np.array([_dual_sinh(self.alpha * _) for _ in self.u])[None, :], Q),
self.b[:, None],
)
return numerator / denominator
@cached_property
def w(self) -> DualTypes:
"""The :math:`w` value of the *Curve* derived from the UFR."""
return dual_log(1 + self.ufr / 100.0)
@cached_property
def u(self) -> NDArray[Nf64]:
r"""The :math:`\mathbf{u}` vector of the *Curve* derived from the node dates."""
# 31557600 = 365.25 days * 86400 seconds per day
return (np.array(self.nodes.posix_keys[1:]) - self.nodes.posix_keys[0]) / 31557600.0
def __getitem__(self, date: datetime) -> DualTypes:
if defaults.curve_caching and date in self._cache:
return self._cache[date]
if date < self.nodes.initial:
return 0.0
elif date == self.nodes.initial:
return 1.0
# 31557600 = 365.25 days * 86400 seconds per day
t = (date.replace(tzinfo=UTC).timestamp() - self.nodes.posix_keys[0]) / 31557600.0
a = self.alpha
w = self.w
v = dual_exp(-t * w)
mins = [min(t, _) for _ in self.u]
maxs = [max(t, _) for _ in self.u]
ww = np.array(
[
dual_exp(-u * w) * (a * min_ - dual_exp(-a * max_) * _dual_sinh(a * min_))
for (u, min_, max_) in zip(self.u, mins, maxs, strict=False)
]
)
v += np.inner(self.b, ww) * v
return self._cached_value(date, v)
================================================
FILE: python/rateslib/curves/curves.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import json
import pickle
import warnings
from abc import ABC, abstractmethod
from calendar import monthrange
from dataclasses import replace
from datetime import datetime, timedelta, timezone
from math import comb, prod
from typing import TYPE_CHECKING, TypeAlias
from uuid import uuid4
import numpy as np
from pandas import Series
import rateslib.errors as err
from rateslib import defaults, fixings
from rateslib.curves.interpolation import InterpolationFunction
from rateslib.curves.utils import (
_CreditImpliedType,
_CurveInterpolator,
_CurveMeta,
_CurveNodes,
_CurveType,
_ProxyCurveInterpolator,
average_rate,
)
from rateslib.data.loader import FixingMissingDataError, FixingRangeError
from rateslib.default import PlotOutput, plot
from rateslib.dual import Dual, Dual2, Variable, dual_exp, dual_log, set_order_convert
from rateslib.dual.utils import _dual_float, _get_order_of
from rateslib.enums.generics import Err, NoInput, Ok, _drb
from rateslib.enums.parameters import IndexMethod, _get_index_method
from rateslib.mutability import (
_clear_cache_post,
_new_state_post,
_no_interior_validation,
_validate_states,
_WithCache,
_WithState,
)
from rateslib.scheduling import Adjuster, Convention, add_tenor, dcf, get_calendar
from rateslib.scheduling.convention import _get_convention
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
CalInput,
CurveOption_,
FXForwards,
Number,
Result,
datetime_,
float_,
int_,
str_,
)
UTC = timezone.utc
DualTypes: TypeAlias = (
"Dual | Dual2 | Variable | float" # required for non-cyclic import on _WithCache
)
class _WithOperations:
"""Provides automatic implementation of the curve operations required on a
:class:`~rateslib.curves._BaseCurve`."""
# Operations
@_validate_states
def shift(
self,
spread: DualTypes,
id: str_ = NoInput(0), # noqa: A002
) -> ShiftedCurve:
"""
Create a :class:`~rateslib.curves.ShiftedCurve`: moving *Self* vertically in rate space.
For examples see the documentation for :class:`~rateslib.curves.ShiftedCurve`.
Parameters
----------
spread : float, Dual, Dual2, Variable
The number of basis points added to the existing curve.
id : str, optional
Set the id of the returned curve.
Returns
-------
ShiftedCurve
"""
_: _BaseCurve = self # type: ignore[assignment]
return ShiftedCurve(curve=_, shift=spread, id=id)
@_validate_states
def translate(self, start: datetime, id: str_ = NoInput(0)) -> TranslatedCurve: # noqa: A002
"""
Create a :class:`~rateslib.curves.TranslatedCurve`: maintaining an identical rate space,
but moving the initial node date forwards in time.
For examples see the documentation for :class:`~rateslib.curves.TranslatedCurve`.
Parameters
----------
start : datetime
The new initial node date for the curve. Must be after the original initial node date.
id : str, optional
Set the id of the returned curve.
Returns
-------
TranslatedCurve
""" # noqa: E501
_: _BaseCurve = self # type: ignore[assignment]
return TranslatedCurve(curve=_, start=start, id=id)
@_validate_states
def roll(self, tenor: datetime | str | int, id: str_ = NoInput(0)) -> RolledCurve: # noqa: A002
"""
Create a :class:`~rateslib.curves.RolledCurve`: translating the rate space of *Self* in
time.
For examples see the documentation for :class:`~rateslib.curves.RolledCurve`.
Parameters
----------
tenor : datetime, str or int
The measure of time by which to translate the curve through time.
id : str, optional
Set the id of the returned curve.
Returns
-------
RolledCurve
""" # noqa: E501
_: _BaseCurve = self # type: ignore[assignment]
if isinstance(tenor, str):
tenor_: datetime | int = add_tenor(_._nodes.initial, tenor, "NONE", NoInput(0))
else:
tenor_ = tenor
if isinstance(tenor_, int):
roll_days: int = tenor_
else:
roll_days = (tenor_ - _._nodes.initial).days
return RolledCurve(curve=_, roll_days=roll_days, id=id)
class _BaseCurve(_WithState, _WithCache[datetime, DualTypes], _WithOperations, ABC):
"""
An ABC defining the base methods of a *Curve*.
Provided that the abstract base properties and methods of this class are implemented any
custom curve can be used within *rateslib*. Often the default implementations for some of
these, via ``super()`` are sufficient. The required base methods are:
- ``_meta``: returns a :class:`~rateslib.curves._CurveMeta` class.
- ``_interpolator``: returns a :class:`~rateslib.curves._CurveInterpolator` class.
- ``_nodes``: returns a :class:`~rateslib.curves._CurveNodes` class.
- ``_id``: returns a str representing the *Curve* id.
- ``_ad``: returns an integer in {0, 1, 2} indicating the automatic differentiation state.
- ``_base_type``: returns a :class:`~rateslib.curves._CurveType`.
- ``__getitem__(date)``: returns a float, :class:`~rateslib.dual.Dual`,
:class:`~rateslib.dual.Dual2`, or :class:`~rateslib.dual.Variable` given an input date.
- ``_set_ad_order(ad)``: mutates the node values of the *Curve* to adopt new automatic
differentiation states for facilitating other features, such as
:class:`~rateslib.solver.Solver` calibration and risk sensitivity calculation.
To automatically provide some of the operations the class
:class:`~rateslib.curves._WithOperations` can, and is likely to always be, inherited, without
the need for any additional implementation. In certain cases the `_base_type` will prevent
some methods from calculating and will raise `TypeError`.
To allow custom user curves to be calibrated by the :class:`~rateslib.solver.Solver` framework
the :class:`~rateslib.curves._WithMutability` class can be inherited. This requires two
additional implementation to allow a :class:`~rateslib.solver.Solver` to interact directly with
it:
- ``_get_node_vector()``: returns a NumPy array of the ordered node values consumed.
- ``_get_node_vars()``: returns a tuple of ordered string variable names associated with
each node of the *Curve*.
- ``_set_node_vector(array)``: accepts a NumPy array of the ordered node values and sets
these directly for the *Curve*.
.. rubric:: Examples
A demonstration of using this class to build a user custom *Curve* is presented at
`Cookbook: Building Custom Curves with _BaseCurve (e.g. Nelson-Siegel) <../z_basecurve.html>`_
"""
# Required properties
@property
@abstractmethod
def _meta(self) -> _CurveMeta:
return _CurveMeta(
_calendar=get_calendar(NoInput(0)),
_collateral=None,
_convention=_get_convention(defaults.convention),
_credit_discretization=defaults.cds_protection_discretization,
_credit_recovery_rate=defaults.cds_recovery_rate,
_index_base=NoInput(0),
_index_lag=defaults.index_lag_curve,
_modifier=defaults.modifier,
)
@property
@abstractmethod
def _interpolator(self) -> _CurveInterpolator:
# create a default CurveInterpolator that is functionless
# this is a placeholder obj that cannot be used for interpolation
return _CurveInterpolator(
local="log_linear",
t=NoInput(0),
endpoints=("natural", "natural"),
node_dates=[],
convention=defaults.convention.lower(),
curve_type=_CurveType.dfs,
)
@property
@abstractmethod
def _nodes(self) -> _CurveNodes: ...
@property
@abstractmethod
def _id(self) -> str:
return uuid4().hex[:5]
@property
@abstractmethod
def _ad(self) -> int: ...
@property
@abstractmethod
def _base_type(self) -> _CurveType: ...
# Required methods
@abstractmethod
def __getitem__(self, date: datetime) -> DualTypes:
"""
The get item method for any *Curve* type will allow the inheritance of the below
methods.
"""
if defaults.curve_caching and date in self._cache:
return self._cache[date]
if date < self.nodes.initial:
return 0.0
if self.interpolator.spline is None or date < self.interpolator.spline.t[0]:
val = self.interpolator.local_func(date, self)
else:
date_posix = date.replace(tzinfo=UTC).timestamp()
if date > self.interpolator.spline.t[-1]:
warnings.warn(
"Evaluating points on a curve beyond the endpoint of the basic "
"spline interval is undefined.\n"
f"date: {date.strftime('%Y-%m-%d')}, spline end: "
f"{self.interpolator.spline.t[-1].strftime('%Y-%m-%d')}, curve id: "
f"'{self.id}'\n"
"This often occurs when a curve is constructed with a final node date "
"that aligns with the maturity of an instrument with a payment lag.\nIn the "
"case that the instrument has a payment lag (e.g. a SOFR swap or ESTR swap or "
"bond terminating on a non-business day) then a cashflow will occur after the "
"maturity of the instrument.\nThe solution is to ensure that the final node "
"date of the curve is changed to be beyond that expected payment date.",
UserWarning,
)
if self._base_type == _CurveType.dfs:
val = dual_exp(self.interpolator.spline.spline.ppev_single(date_posix)) # type: ignore[union-attr]
else: # self._base_type == _CurveType.values:
val = self.interpolator.spline.spline.ppev_single(date_posix) # type: ignore[union-attr]
return self._cached_value(date, val)
@abstractmethod
def _set_ad_order(self, order: int) -> None: ...
# Properties
@property
def ad(self) -> int:
"""Int in {0,1,2} describing the AD order associated with the *Curve*."""
return self._ad
@property
def meta(self) -> _CurveMeta:
"""An instance of :class:`~rateslib.curves._CurveMeta`."""
return self._meta
@property
def id(self) -> str:
"""A str identifier to name the *Curve* used in
:class:`~rateslib.solver.Solver` mappings."""
return self._id
@property
def nodes(self) -> _CurveNodes:
"""An instance of :class:`~rateslib.curves._CurveNodes`."""
return self._nodes
@property
def _n(self) -> int:
"""The number of pricing parameters of the *Curve*."""
return self.nodes.n
@property
def interpolator(self) -> _CurveInterpolator:
"""An instance of :class:`~rateslib.curves._CurveInterpolator`."""
return self._interpolator
# Rate Calculation
def rate(
self,
effective: datetime,
termination: datetime | str | NoInput = NoInput(0),
modifier: str | NoInput = NoInput(1),
float_spread: float | NoInput = NoInput(0),
spread_compound_method: str | NoInput = NoInput(0),
) -> DualTypes | None:
"""
Calculate the rate on the `Curve` using DFs.
If rates are sought for dates prior to the initial node of the curve `None`
will be returned.
Parameters
----------
effective : datetime
The start date of the period for which to calculate the rate.
termination : datetime or str
The end date of the period for which to calculate the rate.
modifier : str, optional
The day rule if determining the termination from tenor. If `False` is
determined from the `Curve` modifier.
float_spread : float, optional
A float spread can be added to the rate in certain cases.
spread_compound_method : str in {"none_simple", "isda_compounding"}
The method if adding a float spread.
If *"none_simple"* is used this results in an exact calculation.
If *"isda_compounding"* or *"isda_flat_compounding"* is used this results
in an approximation.
Returns
-------
Dual, Dual2 or float
Notes
-----
Calculating rates from a curve implies that the conventions attached to the
specific index, e.g. USD SOFR, or GBP SONIA, are applicable and these should
be set at initialisation of the ``Curve``. Thus, the convention used to
calculate the ``rate`` is taken from the ``Curve`` from which ``rate``
is called.
``modifier`` is only used if a tenor is given as the termination.
Major indexes, such as legacy IBORs, and modern RFRs typically use a
``convention`` which is either `"Act365F"` or `"Act360"`. These conventions
do not need additional parameters, such as the `termination` of a leg,
the `frequency` or a leg or whether it is a `stub` to calculate a DCF.
**Adding Floating Spreads**
An optimised method for adding floating spreads to a curve rate is provided.
This is quite restrictive and mainly used internally to facilitate other parts
of the library.
- When ``spread_compound_method`` is *"none_simple"* the spread is a simple
linear addition.
- When using *"isda_compounding"* or *"isda_flat_compounding"* the curve is
assumed to be comprised of RFR
rates and an approximation is used to derive to total rate.
Examples
--------
.. ipython:: python
curve_act365f = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2022, 2, 1): 0.98,
dt(2022, 3, 1): 0.978,
},
convention='Act365F'
)
curve_act365f.rate(dt(2022, 2, 1), dt(2022, 3, 1))
Using a different convention will result in a different rate:
.. ipython:: python
curve_act360 = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2022, 2, 1): 0.98,
dt(2022, 3, 1): 0.978,
},
convention='Act360'
)
curve_act360.rate(dt(2022, 2, 1), dt(2022, 3, 1))
"""
try:
_: DualTypes = self._rate_with_raise(
effective, termination, modifier, float_spread, spread_compound_method
)
except ZeroDivisionError as e:
if "effective:" not in str(e):
return None # TODO (low): is this an unreachable line?
raise e
except ValueError as e:
if "`effective` date for rate period is before" in str(e):
return None
raise e
return _
def _rate_with_raise(
self,
effective: datetime,
termination: datetime | str | NoInput,
modifier: str | NoInput = NoInput(1),
float_spread: float | NoInput = NoInput(0),
spread_compound_method: str | NoInput = NoInput(0),
) -> DualTypes:
if self._base_type == _CurveType.dfs:
return self._rate_with_raise_dfs(
effective, termination, modifier, float_spread, spread_compound_method
)
else: # is _CurveType.values
return self._rate_with_raise_values(
effective, termination, modifier, float_spread, spread_compound_method
)
def _rate_with_raise_values(
self,
effective: datetime,
*args: Any,
**kwargs: Any,
) -> DualTypes:
if effective < self.nodes.initial: # Alternative solution to PR 172.
raise ValueError(
"`effective` date for rate period is before the initial node date of the Curve.\n"
"If you are trying to calculate a rate for an historical FloatPeriod have you "
"neglected to supply appropriate `fixings`?\n"
"See Documentation > Cookbook > Working with Fixings."
)
return self.__getitem__(effective)
def _rate_with_raise_dfs(
self,
effective: datetime,
termination: datetime | str | NoInput,
modifier: str | NoInput = NoInput(1),
float_spread: float | NoInput = NoInput(0),
spread_compound_method: str | NoInput = NoInput(0),
) -> DualTypes:
modifier_ = _drb(self.meta.modifier, modifier)
if effective < self.nodes.initial: # Alternative solution to PR 172.
raise ValueError(
"`effective` date for rate period is before the initial node date of the Curve.\n"
"If you are trying to calculate a rate for an historical FloatPeriod have you "
"neglected to supply appropriate `fixings`?\n"
"See Documentation > Cookbook > Working with Fixings."
)
if isinstance(termination, str):
termination = add_tenor(effective, termination, modifier_, self.meta.calendar)
elif isinstance(termination, NoInput):
raise ValueError("`termination` must be supplied for rate of DF based Curve.")
if termination == effective:
raise ZeroDivisionError(f"effective: {effective}, termination: {termination}")
df_ratio = self.__getitem__(effective) / self.__getitem__(termination)
n_ = df_ratio - 1.0
d_ = dcf(effective, termination, self.meta.convention, calendar=self.meta.calendar)
_: DualTypes = n_ / d_ * 100
if not isinstance(float_spread, NoInput) and abs(float_spread) > 1e-9:
if spread_compound_method == "none_simple":
return _ + float_spread / 100
elif spread_compound_method == "isda_compounding":
# this provides an approximated rate
r_bar, d, n = average_rate(effective, termination, self.meta.convention, _, d_)
_ = ((1 + (r_bar + float_spread / 100) / 100 * d) ** n - 1) / (n * d)
return 100 * _
elif spread_compound_method == "isda_flat_compounding":
# this provides an approximated rate
r_bar, d, n = average_rate(effective, termination, self.meta.convention, _, d_)
rd = r_bar / 100 * d
_ = (
(r_bar + float_spread / 100)
/ n
* (comb(int(n), 1) + comb(int(n), 2) * rd + comb(int(n), 3) * rd**2)
)
return _
else:
raise ValueError(
"Must supply a valid `spread_compound_method`, when `float_spread` "
" is not `None`.",
)
return _
# Index Calculations
def _try_index_value(
self, index_date: datetime, index_lag: int, index_method: IndexMethod = IndexMethod.Curve
) -> Result[DualTypes]:
if self._base_type == _CurveType.values:
return Err(TypeError("A 'values' type Curve cannot be used to forecast index values."))
if isinstance(self.meta.index_base, NoInput):
return Err(
ValueError(
"Curve must be initialised with an `index_base` value to derive `index_value`."
)
)
lag_months = index_lag - self.meta.index_lag
if index_method == IndexMethod.Curve:
if lag_months != 0:
return Err(
ValueError(
"'curve' interpolation can only be used with `index_value` when the Curve "
"`index_lag` matches the input `index_lag`."
)
)
# use traditional discount factor from Index base to determine index value.
if index_date < self.nodes.initial:
warnings.warn(
"The date queried on the Curve for an `index_value` is prior to the "
"initial node on the Curve.\nThis is returned as zero and likely "
f"causes downstream calculation error.\ndate queried: {index_date}"
"Either providing `index_fixings` to the object or extend the Curve backwards.",
UserWarning,
)
return Ok(0.0)
# return zero for index dates in the past
# the proper way for instruments to deal with this is to supply i_fixings
elif index_date == self.nodes.initial:
return Ok(self.meta.index_base)
else:
return Ok(self.meta.index_base * 1.0 / self.__getitem__(index_date))
elif index_method == IndexMethod.Monthly:
index_date_ = add_tenor(index_date, f"{lag_months * -1}M", "none", NoInput(0), 1)
return self._try_index_value(
index_date=index_date_,
index_lag=self.meta.index_lag,
index_method=IndexMethod.Curve,
)
elif index_method == IndexMethod.Daily:
n = monthrange(index_date.year, index_date.month)[1]
date_som = datetime(index_date.year, index_date.month, 1)
date_sonm = add_tenor(index_date, "1M", "none", NoInput(0), 1)
m1 = self._try_index_value(
index_date=date_som, index_lag=index_lag, index_method=IndexMethod.Monthly
)
m2 = self._try_index_value(
index_date=date_sonm, index_lag=index_lag, index_method=IndexMethod.Monthly
)
if m1.is_err:
return m1
if m2.is_err:
return m2
m1_, m2_ = m1.unwrap(), m2.unwrap()
return Ok(m1_ + (index_date.day - 1) / n * (m2_ - m1_))
else:
return Err( # pragma: no cover
ValueError(
"`interpolation` for `index_value` must be in {'curve', 'daily', 'monthly'}."
)
)
def index_value(
self,
index_date: datetime,
index_lag: int,
index_method: IndexMethod | str = IndexMethod.Curve,
) -> DualTypes:
"""
Calculate the accrued value of the index from the ``index_base``.
This method will raise if performed on a *'values'* type *Curve*.
Parameters
----------
index_date : datetime
The reference date for which the index value will be returned.
index_lag : int
The number of months by which to lag the index when determining the value.
index_method : IndexMethod or str in {"curve", "monthly", "daily"}
The interpolation method for returning the index value. Monthly returns the index value
for the start of the month and daily returns a value based on the
interpolation between nodes (which is recommended *"linear_index*) for
:class:`InflationCurve`.
Returns
-------
None, float, Dual, Dual2
Notes
------
The interpolation methods function as follows:
- **"curve"**: will raise if the requested ``index_lag`` does not match the lag attributed
to the *Curve*. In the case the ``index_lag`` matches, then the *index value* for any
date is derived via the implied interpolation for the discount factors of the *Curve*.
.. math::
I_v(m) = \\frac{I_b}{v(m)}
- **"monthly"**: For any date, *m*, uses the *"curve"* method having adjusted *m* in two
ways. Firstly it deducts a number of months equal to :math:`L - L_c`, where *L* is
the given ``index_lag`` and :math:`L_c` is the *index lag* of the *Curve*. And the day
of the month is set to 1.
.. math::
&I^{monthly}_v(m) = I_v(m_adj) \\\\
&\\text{where,} \\\\
&m_adj = Date(Year(m), Month(m) - L + L_c, 1) \\\\
- **"daily"**: For any date, *m*, with a given ``index_lag`` performs calendar day
interpolation on surrounding *"monthly"* values.
.. math::
&I^{daily}_v(m) = I^{monthly}_v(m) + \\frac{Day(m) - 1}{n} \\left ( I^{monthly}_v(m_+) - I^{monthly}_v(m) \\right ) \\\\
&\\text{where,} \\\\
&m_+ = \\text{Any date in the month following, }m
&n = \\text{Calendar days in, } Month(m)
Examples
--------
The SWESTR rate, for reference value date 6th Sep 2021, was published as
2.375% and the RFR index for that date was 100.73350964. Below we calculate
the value that was published for the RFR index on 7th Sep 2021 by the Riksbank.
.. ipython:: python
:suppress:
from rateslib import Curve, dt
.. ipython:: python
index_curve = Curve(
nodes={
dt(2021, 9, 6): 1.0,
dt(2021, 9, 7): 1 / (1 + 2.375/36000)
},
index_base=100.73350964,
convention="Act360",
index_lag=0,
)
index_curve.rate(dt(2021, 9, 6), "1d")
index_curve.index_value(dt(2021, 9, 7), 0)
""" # noqa: E501
return self._try_index_value(
index_date=index_date,
index_lag=index_lag,
index_method=_get_index_method(index_method),
).unwrap()
# Rate Plotting
def plot(
self,
tenor: str,
right: datetime | str | NoInput = NoInput(0),
left: datetime | str | NoInput = NoInput(0),
comparators: list[_BaseCurve] | NoInput = NoInput(0),
difference: bool = False,
labels: list[str] | NoInput = NoInput(0),
) -> PlotOutput:
"""
Plot given forward tenor rates from the curve. See notes.
Parameters
----------
tenor : str
The tenor of the forward rates to plot, e.g. "1D", "3M".
right : datetime or str, optional
The right bound of the graph. If given as str should be a tenor format
defining a point measured from the initial node date of the curve.
Defaults to the final node of the curve minus the ``tenor``.
left : datetime or str, optional
The left bound of the graph. If given as str should be a tenor format
defining a point measured from the initial node date of the curve.
Defaults to the initial node of the curve.
comparators: list[Curve]
A list of curves which to include on the same plot as comparators.
difference : bool
Whether to plot as comparator minus base curve or outright curve levels in
plot. Default is `False`.
labels : list[str]
A list of strings associated with the plot and comparators. Must be same
length as number of plots.
Returns
-------
(fig, ax, line) : Matplotlib.Figure, Matplotplib.Axes, Matplotlib.Lines2D
Notes
------
This function plots single-period, **simple interest** curve rates, which are defined as:
.. math::
1 + r d = \\frac{v_{start}}{v_{end}}
where *d* is the day count fraction determined using the ``convention`` associated
with the *Curve*.
This function does **not** plot swap rates,
which is impossible since the *Curve* object contains no information regarding the
parameters of the *'swap'* (e.g. its *frequency* or its *convention* etc.).
If ``tenors`` longer than one year are sought results may start to deviate from those
one might expect. See `Issue 246 `_.
"""
comparators_: list[_BaseCurve] = _drb([], comparators)
labels = _drb([], labels)
upper_tenor = tenor.upper()
x, y = self._plot_rates(upper_tenor, left, right)
y_ = [y] if not difference else []
for _, comparator in enumerate(comparators_):
if difference:
y_.append(
[
self._plot_diff(_x, tenor, _y, comparator)
for _x, _y in zip(x, y, strict=False)
]
)
else:
pm_ = comparator._plot_modifier(tenor)
if upper_tenor == "Z":
y_.append([comparator._plot_zero_rate(_x) for _x in x])
else:
y_.append([comparator._plot_rate(_x, tenor, pm_) for _x in x])
return plot([x] * len(y_), y_, labels)
def _plot_diff(
self, date: datetime, tenor: str, rate: DualTypes | None, comparator: _BaseCurve
) -> DualTypes | None: # pragma: no cover
if rate is None:
return None
if tenor == "Z" or tenor == "z":
rate2 = comparator._plot_zero_rate(date)
else:
rate2 = comparator._plot_rate(date, tenor, comparator._plot_modifier(tenor))
if rate2 is None:
return None
return rate2 - rate
def _plot_modifier(self, upper_tenor: str) -> str:
"""If tenor is in days do not allow modified for plot purposes"""
if "B" in upper_tenor or "D" in upper_tenor or "W" in upper_tenor:
if "F" in self.meta.modifier:
return "F"
elif "P" in self.meta.modifier: # pragma: no cover
return "P"
return self.meta.modifier
def _plot_rates(
self,
upper_tenor: str,
left: datetime | str | NoInput,
right: datetime | str | NoInput,
) -> tuple[list[datetime], list[DualTypes | None]]:
if isinstance(left, NoInput):
left_: datetime = self.nodes.initial
elif isinstance(left, str):
left_ = add_tenor(self.nodes.initial, left, "F", self.meta.calendar)
elif isinstance(left, datetime):
left_ = left
else:
raise ValueError("`left` must be supplied as datetime or tenor string.")
if isinstance(right, NoInput):
if upper_tenor == "Z":
# then plotting zero rates just use the last date
right_: datetime = self.nodes.final
else:
# pre-adjust the end date to enforce business date.
right_ = add_tenor(
self.meta.calendar.adjust(self.nodes.final, Adjuster.Previous()),
"-" + upper_tenor,
"P",
self.meta.calendar,
)
elif isinstance(right, str):
right_ = add_tenor(self.nodes.initial, right, "P", NoInput(0))
elif isinstance(right, datetime):
right_ = right
else:
raise ValueError("`right` must be supplied as datetime or tenor string.")
dates = self.meta.calendar.cal_date_range(start=left_, end=right_)
if upper_tenor == "Z":
rates = [self._plot_zero_rate(_) for _ in dates]
else:
rates = [
self._plot_rate(_, upper_tenor, self._plot_modifier(upper_tenor)) for _ in dates
]
return dates, rates
def _plot_rate(
self,
effective: datetime,
termination: str,
modifier: str,
) -> DualTypes | None:
try:
rate = self.rate(effective, termination, modifier)
except ValueError:
return None
return rate
def _plot_zero_rate(
self,
maturity: datetime,
) -> DualTypes | None:
"""plotting a continuously compounded zero rate is done using the ActActISDA convention"""
if self._base_type != _CurveType.dfs:
raise ValueError(
"To plot continuously compounded zero rates ('Z') the Curve `_base_type` must be "
f"discount factor based. Got: '{self._base_type}'."
)
if maturity <= self.nodes.initial:
return None
else:
t = dcf(self.nodes.initial, maturity, Convention.ActActISDA)
return (dual_log(self[maturity]) / -t) * 100.0
# Index Plotting
def plot_index(
self,
right: datetime | str | NoInput = NoInput(0),
left: datetime | str | NoInput = NoInput(0),
comparators: list[_BaseCurve] | NoInput = NoInput(0),
difference: bool = False,
labels: list[str] | NoInput = NoInput(0),
interpolation: str = "curve",
) -> PlotOutput:
"""
Plot given index values on a *Curve*.
Parameters
----------
right : datetime or str, optional
The right bound of the graph. If given as str should be a tenor format
defining a point measured from the initial node date of the curve.
Defaults to the final node of the curve minus the ``tenor``.
left : datetime or str, optional
The left bound of the graph. If given as str should be a tenor format
defining a point measured from the initial node date of the curve.
Defaults to the initial node of the curve.
comparators: list[Curve]
A list of curves which to include on the same plot as comparators.
difference : bool
Whether to plot as comparator minus base curve or outright curve levels in
plot. Default is `False`.
labels : list[str]
A list of strings associated with the plot and comparators. Must be same
length as number of plots.
interpolation : str in {"curve", "daily", "monthly"}
The type of index interpolation method to use.
Returns
-------
(fig, ax, line) : Matplotlib.Figure, Matplotplib.Axes, Matplotlib.Lines2D
"""
comparators = _drb([], comparators)
labels = _drb([], labels)
if left is NoInput.blank:
left_: datetime = self.nodes.initial
elif isinstance(left, str):
left_ = add_tenor(self.nodes.initial, left, "NONE", NoInput(0))
elif isinstance(left, datetime):
left_ = left
else:
raise ValueError("`left` must be supplied as datetime or tenor string.")
if right is NoInput.blank:
right_: datetime = self.nodes.final
elif isinstance(right, str):
right_ = add_tenor(self.nodes.initial, right, "NONE", NoInput(0))
elif isinstance(right, datetime):
right_ = right
else:
raise ValueError("`right` must be supplied as datetime or tenor string.")
points: int = (right_ - left_).days + 1
x = [left_ + timedelta(days=i) for i in range(points)]
rates = [self.index_value(_, self.meta.index_lag, interpolation) for _ in x]
if not difference:
y = [rates]
if not isinstance(comparators, NoInput) and len(comparators) > 0:
for comparator in comparators:
y.append([comparator.index_value(_, self.meta.index_lag) for _ in x])
elif difference and (isinstance(comparators, NoInput) or len(comparators) == 0):
raise ValueError("If `difference` is True must supply at least one `comparators`.")
else:
y = []
for comparator in comparators:
diff = [
comparator.index_value(_, self.meta.index_lag, interpolation) - rates[i]
for i, _ in enumerate(x)
]
y.append(diff)
return plot([x] * len(y), y, labels)
# Dunder operators
def __eq__(self, other: Any) -> bool:
"""Test two curves are identical"""
if type(self) is not type(other):
return False
attrs = [attr for attr in dir(self) if attr[:1] != "_"]
for attr in attrs:
if callable(getattr(self, attr, None)):
continue
elif getattr(self, attr, None) != getattr(other, attr, None):
return False
return True
def __repr__(self) -> str:
return f""
def copy(self) -> _BaseCurve:
"""
Create an identical copy of the curve object.
Returns
-------
Self
"""
ret: _BaseCurve = pickle.loads(pickle.dumps(self, -1)) # noqa: S301
return ret
# from rateslib.serialization import from_json
# return from_json(self.to_json())
class ShiftedCurve(_BaseCurve):
"""
Create a new :class:`~rateslib.curves._BaseCurve` type by compositing an input with
another flat curve of a set number of basis points.
Parameters
----------
curve: _BaseCurve
Any *BaseCurve* type.
shift: float | Variable
The amount by which to shift the curve.
id: str, optional
Identifier used for :class:`~rateslib.solver.Solver` mappings.
Notes
-----
For **values** based curves this will add the ``shift`` to every output *rate* generated
by ``curve``.
For **discount factor** based curves this will add the ``shift`` as a geometric 1-day average
rate to the input ``curve``, in accordance with *rateslib*'s definition of curve metric spaces.
This implies that the *shape* of the ``curve`` is preserved but it undergoes a vertical
translation in rate space. This class works by wrapping a
:class:`~rateslib.curves.CompositeCurve` and designing the spread curve according to these
definitions.
The **ad order** will be the maximum order of ``curve`` and ``spread``. The usual `TypeError`
will be raised if mixing of :class:`~rateslib.dual.Dual` and :class:`~rateslib.dual.Dual2`
is attempted.
Examples
--------
.. ipython:: python
:suppress:
from rateslib.curves import Curve
.. ipython:: python
curve = Curve(
nodes = {
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475
},
t = [
dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1), dt(2027, 1, 1), dt(2027, 1, 1), dt(2027, 1, 1),
],
)
shifted_curve = curve.shift(25)
curve.plot("1d", comparators=[shifted_curve], labels=["orig", "shift"])
.. plot::
from rateslib.curves import *
import matplotlib.pyplot as plt
from datetime import datetime as dt
curve = Curve(
nodes = {
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475
},
t = [
dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1), dt(2027, 1, 1), dt(2027, 1, 1), dt(2027, 1, 1),
],
)
spread_curve = curve.shift(25)
fig, ax, line = curve.plot("1d", comparators=[spread_curve], labels=["orig", "shift"])
plt.show()
plt.close()
"""
_obj: _BaseCurve
def __init__(
self,
curve: _BaseCurve,
shift: DualTypes,
id: str_ = NoInput(0), # noqa: A002
) -> None:
start, end = curve._nodes.initial, curve._nodes.final
if curve._base_type == _CurveType.dfs:
dcf_ = dcf(start, end, curve.meta.convention, calendar=curve.meta.calendar)
_, d, n = average_rate(start, end, curve.meta.convention, 0.0, dcf_)
shifted: _BaseCurve = Curve(
nodes={start: 1.0, end: 1.0 / (1 + d * shift / 10000) ** n},
convention=curve.meta.convention,
calendar=curve.meta.calendar,
modifier=curve.meta.modifier,
interpolation="log_linear",
index_base=curve.meta.index_base,
index_lag=curve.meta.index_lag,
ad=_get_order_of(shift),
)
else: # base type is values: LineCurve
shifted = LineCurve(
nodes={start: shift / 100.0, end: shift / 100.0},
convention=curve.meta.convention,
calendar=curve.meta.calendar,
modifier=curve.meta.modifier,
interpolation="flat_backward",
ad=_get_order_of(shift),
)
id_ = _drb(curve.id + "_shift_" + f"{_dual_float(shift):.1f}", id)
if shifted._ad + curve._ad == 3:
raise TypeError(
"Cannot create a ShiftedCurve with mixed AD orders.\n"
f"`curve` has AD order: {curve.ad}\n"
f"`shift` has AD order: {shifted.ad}"
)
self._obj = CompositeCurve(curves=[curve, shifted], id=id_, _no_validation=True)
def __getitem__(self, date: datetime) -> DualTypes:
return self.obj.__getitem__(date)
def _set_ad_order(self, ad: int) -> None:
return self.obj._set_ad_order(ad)
@property
def obj(self) -> _BaseCurve:
"""The wrapped :class:`~rateslib.curves.CompositeCurve` that performs calculations."""
return self._obj
@property
def _ad(self) -> int:
return self.obj.ad
@property
def _meta(self) -> _CurveMeta:
return self.obj.meta
@property
def _id(self) -> str:
return self.obj.id
@property
def _nodes(self) -> _CurveNodes:
return self.obj.nodes
@property
def _interpolator(self) -> _CurveInterpolator:
return self.obj.interpolator
@property
def _base_type(self) -> _CurveType:
return self.obj._base_type
class TranslatedCurve(_BaseCurve):
"""
Create a new :class:`~rateslib.curves._BaseCurve` type by maintaining the rate space of an
input curve but shifting the initial node date forwards in time.
A class which wraps the underlying curve and returns rates and/or discount factors which are
impacted by a change to initial node date. This is mostly used by discount factor (DF) based
curves whose DFs are adjusted to have a value of 1.0 on the requested start date.
Parameters
----------
curve: _BaseCurve
Any *BaseCurve* type.
start: datetime
The new initial node date for the curve. Must be after the initial node date of the input
``curve``.
id: str, optional
Identifier used for :class:`~rateslib.solver.Solver` mappings.
Examples
---------
.. ipython:: python
curve = Curve(
nodes = {
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475
},
t = [
dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1), dt(2027, 1, 1), dt(2027, 1, 1), dt(2027, 1, 1),
],
)
translated_curve = curve.translate(dt(2022, 12, 1))
# Discount factors
curve[dt(2022, 12, 1)]
translated_curve[dt(2022, 12, 1)]
curve.plot(
"1d",
comparators=[translated_curve],
labels=["orig", "translated"],
left=dt(2022, 12, 1),
)
.. plot::
from rateslib.curves import *
import matplotlib.pyplot as plt
from datetime import datetime as dt
curve = Curve(
nodes = {
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475
},
t = [
dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1), dt(2027, 1, 1), dt(2027, 1, 1), dt(2027, 1, 1),
],
interpolation="log_linear",
)
translated_curve = curve.translate(dt(2022, 12, 1))
fig, ax, line = curve.plot("1d", comparators=[translated_curve], labels=["orig", "translated"], left=dt(2022, 12, 1))
plt.show()
plt.close()
""" # noqa: E501
_obj: _BaseCurve
# abcs
_id: str = None # type: ignore[assignment]
_nodes: _CurveNodes = None # type: ignore[assignment]
def __init__(
self,
curve: _BaseCurve,
start: datetime,
id: str_ = NoInput(0), # noqa: A002
) -> None:
if start < curve.nodes.initial:
raise ValueError("Cannot translate into the past.")
self._id = _drb(curve.id + "_translated_" + f"{start.strftime('yy_mm_dd')}", id)
self._nodes = _CurveNodes(_nodes={start: 0.0, curve.nodes.final: 0.0})
self._obj = curve
def __getitem__(self, date: datetime) -> DualTypes:
if date < self.nodes.initial:
return 0.0
elif self._base_type == _CurveType.dfs:
return self.obj.__getitem__(date) / self.obj.__getitem__(self.nodes.initial)
else: # _CurveType.values
return self.obj.__getitem__(date)
def _set_ad_order(self, ad: int) -> None:
return self.obj._set_ad_order(ad)
@property
def obj(self) -> _BaseCurve:
"""The wrapped :class:`~rateslib.curves._BaseCurve` object that performs calculations."""
return self._obj
@property
def _ad(self) -> int:
return self.obj.ad
@property
def _interpolator(self) -> _CurveInterpolator:
return self.obj.interpolator
@property
def _meta(self) -> _CurveMeta:
if self._base_type == _CurveType.dfs and not isinstance(self.obj.meta.index_base, NoInput):
return replace(
self.obj.meta,
_index_base=self.obj.index_value(self.nodes.initial, self.obj.meta.index_lag), # type: ignore[arg-type]
)
else:
return self.obj.meta
@property
def _base_type(self) -> _CurveType:
return self.obj._base_type
class RolledCurve(_BaseCurve):
"""
Create a new :class:`~rateslib.curves._BaseCurve` type by translating the rate space of an
input curve horizontally in time.
A class which wraps the underlying curve and returns rates which are rolled in time,
measured by a set number of calendar days.
Parameters
----------
curve: _BaseCurve
Any *BaseCurve* type.
roll_days: int
The number of calendar days by which to translate the curve's rate space.
id: str, optional
Identifier used for :class:`~rateslib.solver.Solver` mappings.
Notes
-----
A positive number of ``roll_days`` will shift the ``curve`` rate space to the right.
This is the traditional direction for measuring *roll down* on a trade strategy.
The gap between the initial node date and the roll date (if ``roll_days`` is positive) is
determined by forward filling the first rate on a **values** based curve, or forward filling
the first overnight rate on a **discount factor** based curve.
Examples
---------
.. ipython:: python
curve = Curve(
nodes = {
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475
},
t = [
dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1), dt(2027, 1, 1), dt(2027, 1, 1), dt(2027, 1, 1),
],
)
rolled_curve = curve.roll("6m")
rolled_curve2 = curve.roll("-6m")
curve.plot(
"1d",
comparators=[rolled_curve, rolled_curve2],
labels=["orig", "6m roll", "-6m roll"],
right=dt(2026, 6, 30),
)
.. plot::
from rateslib.curves import *
import matplotlib.pyplot as plt
from datetime import datetime as dt
curve = Curve(
nodes = {
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475
},
t = [
dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1), dt(2027, 1, 1), dt(2027, 1, 1), dt(2027, 1, 1),
],
)
rolled_curve = curve.roll("6m")
rolled_curve2 = curve.roll("-6m")
fig, ax, line = curve.plot("1d", comparators=[rolled_curve, rolled_curve2], labels=["orig", "6m roll", "-6m roll"], right=dt(2026,6,30))
plt.show()
plt.close()
""" # noqa: E501
_obj: _BaseCurve
_roll_days: int
# abcs
_id: str = None # type: ignore[assignment]
def __init__(
self,
curve: _BaseCurve,
roll_days: int,
id: str_ = NoInput(0), # noqa: A002
) -> None:
self._roll_days = roll_days
self._id = _drb(curve.id + "_rolled_" + f"{roll_days}", id)
self._obj = curve
def __getitem__(self, date: datetime) -> DualTypes:
if date < self.nodes.initial:
return 0.0
boundary = self.nodes.initial + timedelta(days=self._roll_days)
if self._base_type == _CurveType.dfs:
if self._roll_days <= 0:
# boundary is irrelevant
scalar_date = self.obj.nodes.initial + timedelta(days=-self._roll_days)
return self.obj.__getitem__(
date - timedelta(days=self._roll_days)
) / self.obj.__getitem__(scalar_date)
else:
next_day = add_tenor(self.nodes.initial, "1b", "F", self.obj.meta.calendar)
on_rate = self.obj._rate_with_raise(self.nodes.initial, next_day)
dcf_ = dcf(
self.nodes.initial,
next_day,
self.obj.meta.convention,
calendar=self.obj.meta.calendar,
)
r_, d_, n_ = average_rate(
self.nodes.initial, next_day, self.obj.meta.convention, on_rate, dcf_
)
if self.nodes.initial <= date < boundary:
# must project forward
return 1.0 / (1 + r_ * d_ / 100.0) ** (date - self.nodes.initial).days
else: # boundary <= date:
scalar = (1.0 + d_ * r_ / 100) ** self._roll_days
return self.obj.__getitem__(date - timedelta(days=self._roll_days)) / scalar
else: # _CurveType.values
if self.nodes.initial <= date < boundary:
return self.obj.__getitem__(self.nodes.initial)
else: # boundary <= date:
return self.obj.__getitem__(date - timedelta(days=self._roll_days))
def _set_ad_order(self, order: int) -> None:
return self.obj._set_ad_order(order)
@property
def obj(self) -> _BaseCurve:
"""The wrapped :class:`~rateslib.curves._BaseCurve` object that performs calculations."""
return self._obj
@property
def roll_days(self) -> int:
"""The number of calendar days by which rates are rolled on the underlying curve."""
return self._roll_days
@property
def _ad(self) -> int:
return self.obj.ad
@property
def _interpolator(self) -> _CurveInterpolator:
return self.obj.interpolator
@property
def _meta(self) -> _CurveMeta:
return self.obj.meta
@property
def _nodes(self) -> _CurveNodes:
return self.obj.nodes
@property
def _base_type(self) -> _CurveType:
return self.obj._base_type
class _WithMutability:
"""
This class is designed as a mixin for the methods for *Curve Pricing Objects*, i.e.
the :class:`~rateslib.curves.Curve` and :class:`~rateslib.curves.LineCurve`.
It permits initialization, configuration of ``nodes`` and ``meta`` and
mutability when interacting with a :class:`~rateslib.solver.Solver`, when
getting and setting nodes, as well as user update methods, spline interpolation solving and
state validation.
"""
_ini_solve: int
_base_type: _CurveType
_nodes: _CurveNodes
_interpolator: _CurveInterpolator
_ad: int
_meta: _CurveMeta
_id: str
@_new_state_post
def __init__( # type: ignore[no-untyped-def]
self,
nodes: dict[datetime, DualTypes],
*,
interpolation: str | InterpolationFunction | NoInput = NoInput(0),
t: list[datetime] | NoInput = NoInput(0),
endpoints: str | tuple[str, str] | NoInput = NoInput(0),
id: str | NoInput = NoInput(0), # noqa: A002
convention: Convention | str | NoInput = NoInput(0),
modifier: str | NoInput = NoInput(0),
calendar: CalInput = NoInput(0),
ad: int = 0,
index_base: Variable | float_ = NoInput(0),
index_lag: int | NoInput = NoInput(0),
collateral: str_ = NoInput(0),
credit_discretization: int_ = NoInput(0),
credit_recovery_rate: Variable | float_ = NoInput(0),
**kwargs,
) -> None:
self._id = _drb(uuid4().hex[:5], id) # 1 in a million clash
# Parameters for the rate/values derivation
self._meta = _CurveMeta(
_calendar=get_calendar(calendar),
_convention=_get_convention(_drb(defaults.convention, convention)),
_modifier=_drb(defaults.modifier, modifier).upper(),
_index_base=index_base,
_index_lag=_drb(defaults.index_lag_curve, index_lag),
_collateral=_drb(None, collateral),
_credit_discretization=_drb(
defaults.cds_protection_discretization, credit_discretization
),
_credit_recovery_rate=_drb(defaults.cds_recovery_rate, credit_recovery_rate),
)
self._nodes = _CurveNodes(nodes)
temp: str | tuple[str, str] = _drb(defaults.endpoints, endpoints)
if isinstance(temp, str):
endpoints_: tuple[str, str] = (temp.lower(), temp.lower())
else:
endpoints_ = (temp[0].lower(), temp[1].lower())
self._interpolator = _CurveInterpolator(
local=interpolation,
t=t,
endpoints=endpoints_,
node_dates=self._nodes.keys,
convention=self._meta.convention,
curve_type=self._base_type,
)
self._set_ad_order(order=ad) # will also clear and initialise the cache
@_clear_cache_post
def _set_ad_order(self, order: int) -> None:
"""
Change the node values to float, Dual or Dual2 based on input parameter.
"""
if order == getattr(self, "ad", None):
return None
elif order not in [0, 1, 2]:
raise ValueError("`order` can only be in {0, 1, 2} for auto diff calcs.")
self._ad = order
nodes_: dict[datetime, DualTypes] = {
k: set_order_convert(v, order, [f"{self._id}{i}"])
for i, (k, v) in enumerate(self._nodes.nodes.items())
}
self._nodes = _CurveNodes(nodes_)
self._interpolator._csolve(self._base_type, self._nodes, self._ad)
# Solver interaction
def _get_node_vector(self) -> np.ndarray[tuple[int, ...], np.dtype[Any]]:
"""Get a 1d array of variables associated with nodes of this object updated by Solver"""
return np.array(list(self._nodes.nodes.values())[self._ini_solve :])
def _get_node_vars(self) -> tuple[str, ...]:
"""Get the variable names of elements updated by a Solver"""
return tuple(f"{self._id}{i}" for i in range(self._ini_solve, self._nodes.n))
# Mutation
@_new_state_post
@_clear_cache_post
def csolve(self) -> None:
"""
Solves **and sets** the coefficients, ``c``, of the :class:`PPSpline`.
Returns
-------
None
Notes
-----
Only impacts curves which have a knot sequence, ``t``, and a ``PPSpline``.
Only solves if ``c`` not given at curve initialisation.
Uses the ``spline_endpoints`` attribute on the class to determine the solving
method.
"""
self._interpolator._csolve(self._base_type, self._nodes, self._ad)
@_new_state_post
@_clear_cache_post
def update(
self,
nodes: dict[datetime, DualTypes] | NoInput = NoInput(0),
) -> None:
"""
Update a curves nodes with new, manually input values.
For arguments see :class:`~rateslib.curves.curves.Curve`. Any value not given will not
change the underlying *Curve*.
Parameters
----------
nodes: dict[datetime, DualTypes], optional
New nodes to assign to the curve.
Returns
-------
None
Notes
-----
.. warning::
*Rateslib* is an object-oriented library that uses complex associations. Although
Python may not object to directly mutating attributes of a *Curve* instance, this
should be avoided in *rateslib*. Only use official ``update`` methods to mutate the
values of an existing *Curve* instance.
This class is labelled as a **mutable on update** object.
"""
if not isinstance(nodes, NoInput):
self._nodes = _CurveNodes(nodes)
self._interpolator._csolve(self._base_type, self._nodes, self._ad)
@_new_state_post
@_clear_cache_post
def update_node(self, key: datetime, value: DualTypes) -> None:
"""
Update a single node value on the *Curve*.
Parameters
----------
key: datetime
The node date to update. Must exist in ``nodes``.
value: float, Dual, Dual2, Variable
Value to update on the *Curve*.
Returns
-------
None
Notes
-----
.. warning::
*Rateslib* is an object-oriented library that uses complex associations. Although
Python may not object to directly mutating attributes of a *Curve* instance, this
should be avoided in *rateslib*. Only use official ``update`` methods to mutate the
values of an existing *Curve* instance.
This class is labelled as a **mutable on update** object.
"""
if key not in self._nodes.nodes:
raise KeyError("`key` is not in *Curve* ``nodes``.")
nodes_ = self._nodes.nodes.copy()
nodes_[key] = value
self._nodes = _CurveNodes(nodes_)
self._interpolator._csolve(self._base_type, self._nodes, self._ad)
@_new_state_post
@_clear_cache_post
def update_meta(self, key: datetime, value: Any) -> None:
"""
Update a single meta value on the *Curve*.
Parameters
----------
key: datetime
The meta descriptor to update. Must be a documented attribute of
:class:`~rateslib.curves.utils._CurveMeta`.
value: Any
Value to update on the *Curve*.
Returns
-------
None
"""
_key = f"_{key}"
self._meta = replace(self._meta, **{_key: value})
@_new_state_post
@_clear_cache_post
def _set_node_vector(self, vector: list[DualTypes], ad: int) -> None:
"""Used to update curve values during a Solver iteration. ``ad`` in {1, 2}."""
self._set_node_vector_direct(vector, ad)
def _set_node_vector_direct(self, vector: list[DualTypes], ad: int) -> None:
nodes_ = self._nodes.nodes.copy()
if ad == 0:
if self._ini_solve == 1 and self._nodes.n > 0:
nodes_[self._nodes.initial] = _dual_float(nodes_[self._nodes.initial])
for i, k in enumerate(self._nodes.keys[self._ini_solve :]):
nodes_[k] = _dual_float(vector[i])
else:
DualType: type[Dual | Dual2] = Dual if ad == 1 else Dual2
DualArgs: tuple[list[float]] | tuple[list[float], list[float]] = (
([],) if ad == 1 else ([], [])
)
base_obj = DualType(0.0, [f"{self._id}{i}" for i in range(self._nodes.n)], *DualArgs)
ident: np.ndarray[tuple[int, ...], np.dtype[np.float64]] = np.eye(
self._nodes.n, dtype=np.float64
)
if self._ini_solve == 1:
# then the first node on the Curve is not updated but
# set it as a dual type with consistent vars.
nodes_[self._nodes.initial] = DualType.vars_from(
base_obj, # type: ignore[arg-type]
_dual_float(nodes_[self._nodes.initial]),
base_obj.vars,
ident[0, :].tolist(),
*DualArgs[1:],
)
for i, k in enumerate(self._nodes.keys[self._ini_solve :]):
nodes_[k] = DualType.vars_from(
base_obj, # type: ignore[arg-type]
_dual_float(vector[i]),
base_obj.vars,
ident[i + self._ini_solve, :].tolist(),
*DualArgs[1:],
)
self._ad = ad
self._nodes = _CurveNodes(nodes_)
self._interpolator._csolve(self._base_type, self._nodes, self._ad)
# Serialization
@classmethod
def _from_json(cls, loaded_json: dict[str, Any]) -> _BaseCurve:
"""
Reconstitute a curve from JSON.
Parameters
----------
curve : str
The JSON string representation of the curve.
Returns
-------
Curve or LineCurve
"""
from rateslib.serialization import from_json
meta = from_json(loaded_json["meta"])
interpolator = from_json(loaded_json["interpolator"])
nodes = from_json(loaded_json["nodes"])
spl = interpolator.spline
if interpolator.local_name == "spline":
t = NoInput(0)
else:
t = NoInput(0) if spl is None else spl.t
_: _BaseCurve = cls( # type: ignore[assignment]
nodes=nodes.nodes,
interpolation=interpolator.local_name,
t=t,
endpoints=spl.endpoints if spl is not None else NoInput(0),
id=loaded_json["id"],
convention=meta.convention,
modifier=meta.modifier,
calendar=meta.calendar,
ad=loaded_json["ad"],
index_base=meta.index_base,
index_lag=meta.index_lag,
collateral=meta.collateral,
credit_discretization=meta.credit_discretization,
credit_recovery_rate=meta.credit_recovery_rate,
)
return _
def to_json(self) -> str:
"""
Serialize this object to JSON format.
The object can be deserialized using the :meth:`~rateslib.serialization.from_json` method.
Returns
-------
str
Notes
-----
Some *Curves* will **not** be serializable, for example those that possess user defined
interpolation functions.
"""
obj = dict(
PyNative={
f"{type(self).__name__}": dict(
meta=self._meta.to_json(),
interpolator=self._interpolator.to_json(),
id=self._id,
ad=self._ad,
nodes=self._nodes.to_json(),
)
}
)
return json.dumps(obj)
class Curve(_WithMutability, _BaseCurve):
"""
A :class:`~rateslib.curves._BaseCurve` with DF parametrisation at given node dates with
interpolation.
Parameters
----------
nodes : dict[datetime: float]
Parameters of the curve denoted by a node date and a corresponding
DF at that point.
interpolation : str or callable
The interpolation used in the non-spline section of the curve. That is the part
of the curve between the first node in ``nodes`` and the first knot in ``t``.
If a callable, this allows a user-defined interpolation scheme, and this must
have the signature ``method(date, curve)``, where ``date`` is the datetime
whose DF will be returned and ``curve`` is passed as ``self``.
t : list[datetime], optional
The knot locations for the B-spline log-cubic interpolation section of the
curve. If *None* all interpolation will be done by the local method specified in
``interpolation``.
endpoints : 2-tuple of str, optional
The left and then right endpoint constraint for the spline solution. Valid values are
in {"natural", "not_a_knot"}.
id : str, optional, set by Default
The unique identifier to distinguish between curves in a multicurve framework.
convention : str, optional, set by Default
The convention of the curve for determining rates. Please see
:meth:`dcf()` for all available options.
modifier : str, optional
The modification rule, in {"F", "MF", "P", "MP"}, for determining rates when input as
a tenor, e.g. "3M".
calendar : Cal, UnionCal, NamedCal, str, optional
The holiday calendar object to use. If str, looks up named calendar from
static data. Used for determining rates.
ad : int in {0, 1, 2}, optional
Sets the automatic differentiation order. Defines whether to convert node
values to float, :class:`~rateslib.dual.Dual` or
:class:`~rateslib.dual.Dual2`. It is advised against
using this setting directly. It is mainly used internally.
index_base: float, optional
The initial index value at the initial node date of the curve. Used for
forecasting future index values.
index_lag : int, optional
Number of months of by which the index lags the date. For example if the initial
curve node date is 1st Sep 2021 based on the inflation index published
17th June 2023 then the lag is 3 months. Best practice is to use 0 months.
collateral : str
A currency identifier to denote the collateral currency against which the discount factors
for this *Curve* are measured.
credit_discretization : int
A parameter for numerically solving the integral for credit protection legs and default
events. Expressed in calendar days. Only used by *Curves* functioning as *hazard Curves*.
credit_recovery_rate : Variable | float
A parameter used in pricing credit protection legs and default events.
Notes
-----
This curve type is **discount factor (DF)** based and is parametrised by a set of
(date, DF) pairs set as ``nodes``. The initial node date of the curve is defined
to be today and should **always** have a DF of precisely 1.0. The initial DF
will **not** be affected by a :class:`~rateslib.solver.Solver`.
Intermediate DFs are determined through ``interpolation``. If local interpolation
is adopted a DF for an arbitrary date is dependent only on its immediately
neighbouring nodes via the interpolation routine. Available options are:
- *"log_linear"* (default for this curve type)
- *"linear_index"*
And also the following which are not recommended for this curve type:
- *"linear"*,
- *"linear_zero_rate"*,
- *"flat_forward"*,
- *"flat_backward"*,
**Spline Interpolation**
Global interpolation in the form of a **log-cubic** spline is also configurable
with the parameters ``t``, and ``endpoints``. Setting an ``interpolation`` of *"spline"*
is syntactic sugar for automatically determining the most obvious
knot sequence ``t`` to use all specified *node dates*. See
:ref:`splines` for instruction of knot sequence calibration.
If the knot sequence is provided directly then any dates prior to the first knot date in ``t``
will be determined through the local interpolation method. This allows for
**mixed interpolation**, permitting the most common form of a stepped curve followed by a
smooth curve at some boundary.
For defining rates by a given tenor, the ``modifier`` and ``calendar`` arguments
will be used. For correct scaling of the rate a ``convention`` is attached to the
curve, which is usually one of "Act360" or "Act365F".
Examples
--------
.. ipython:: python
nodes={
dt(2022,1,1): 1.0, # <- initial DF should always be 1.0
dt(2023,1,1): 0.99,
dt(2024,1,1): 0.979,
dt(2025,1,1): 0.967,
dt(2026,1,1): 0.956,
dt(2027,1,1): 0.946,
}
curve1 = Curve(nodes=nodes, interpolation="log_linear")
curve2 = Curve(nodes=nodes, interpolation="spline")
curve1.plot("1d", comparators=[curve2], labels=["log_linear", "log_cubic_spline"])
.. plot::
from rateslib.curves import *
import matplotlib.pyplot as plt
from datetime import datetime as dt
import numpy as np
nodes={
dt(2022,1,1): 1.0, # <- initial DF should always be 1.0
dt(2023,1,1): 0.99,
dt(2024,1,1): 0.979,
dt(2025,1,1): 0.967,
dt(2026,1,1): 0.956,
dt(2027,1,1): 0.946,
}
curve1 = Curve(nodes=nodes, interpolation="log_linear")
curve2 = Curve(nodes=nodes, interpolation="spline")
fig, ax, line = curve1.plot("1d", comparators=[curve2], labels=["log_linear", "log_cubic_spline"])
plt.show()
plt.close()
""" # noqa: E501
_ini_solve: int = 1 # Curve is assumed to have initial DF node at 1.0 as constraint
# abcs - set by init
_base_type: _CurveType = _CurveType.dfs
_id: str = None # type: ignore[assignment]
_ad: int = None # type: ignore[assignment]
_meta: _CurveMeta = None # type: ignore[assignment]
_nodes: _CurveNodes = None # type: ignore[assignment]
_interpolator: _CurveInterpolator = None # type: ignore[assignment]
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
def __getitem__(self, date: datetime) -> DualTypes:
return super().__getitem__(date)
# Licence: Creative Commons - Attribution-NonCommercial-NoDerivatives 4.0 International
# Commercial use of this code, and/or copying and redistribution is prohibited.
# Contact rateslib at gmail.com if this code is observed outside its intended sphere.
class LineCurve(_WithMutability, _BaseCurve):
"""
A :class:`~rateslib.curves._BaseCurve` with value parametrisation at given node dates with
interpolation.
Parameters
----------
nodes : dict[datetime: float]
Parameters of the curve denoted by a node date and a corresponding
value at that point.
interpolation : str in {"log_linear", "linear"} or callable
The interpolation used in the non-spline section of the curve. That is the part
of the curve between the first node in ``nodes`` and the first knot in ``t``.
If a callable, this allows a user-defined interpolation scheme, and this must
have the signature ``method(date, nodes)``, where ``date`` is the datetime
whose DF will be returned and ``nodes`` is as above and is passed to the
callable.
t : list[datetime], optional
The knot locations for the B-spline cubic interpolation section of the
curve. If *None* all interpolation will be done by the method specified in
``interpolation``.
endpoints : str or list, optional
The left and right endpoint constraint for the spline solution. Valid values are
in {"natural", "not_a_knot"}. If a list, supply the left endpoint then the
right endpoint.
id : str, optional, set by Default
The unique identifier to distinguish between curves in a multi-curve framework.
convention : str, optional, set by Default
The convention of the curve for determining rates. Please see
:meth:`dcf()` for all available options.
convention : str, optional, set by Default
The convention of the curve for determining rates. Please see
:meth:`dcf()` for all available options.
modifier : str, optional
The modification rule, in {"F", "MF", "P", "MP"}, for determining rates when input as
a tenor, e.g. "3M".
calendar : Cal, UnionCal, NamedCal, str, optional
The holiday calendar object to use. If str, looks up named calendar from
static data. Used for determining rates.
ad : int in {0, 1, 2}, optional
Sets the automatic differentiation order. Defines whether to convert node
values to float, :class:`Dual` or :class:`Dual2`. It is advised against
using this setting directly. It is mainly used internally.
Notes
-----
The arguments ``index_base``, ``index_lag``, and ``collateral`` available on
:class:`~rateslib.curves.Curve` are not used by, or relevant for, a :class:`LineCurve`.
This curve type is **value** based and it is parametrised by a set of
(date, value) pairs set as ``nodes``. The initial node date of the curve is defined
to be today, and can take a general value. The initial value
will be affected by a :class:`~rateslib.solver.Solver`.
.. note::
This curve type can only ever be used for **forecasting** rates and projecting
cashflow calculations. It cannot be used to discount cashflows becuase it is
not DF based and there is no mathematical one-to-one conversion available to
imply DFs.
Intermediate values are determined through ``interpolation``. If local interpolation
is adopted a value for an arbitrary date is dependent only on its immediately
neighbouring nodes via the interpolation routine. Available options are:
- *"linear"* (default for this curve type)
- *"log_linear"* (useful for values that exponential, e.g. stock indexes or GDP)
- *"spline"*
- *"flat_forward"*, (useful for replicating a DF based log-linear type curve)
- *"flat_backward"*,
And also the following which are not recommended for this curve type:
- *"linear_index"*
- *"linear_zero_rate"*,
**Spline Interpolation**
Global interpolation in the form of a **cubic** spline is also configurable
with the parameters ``t``, and ``endpoints``. Setting an ``interpolation`` of *"spline"*
is syntactic sugar for automatically determining the most obvious
knot sequence ``t`` to use all specified *node dates*. See
:ref:`splines` for instruction of knot sequence calibration.
If the knot sequence is provided directly then any dates prior to the first knot date in ``t``
will be determined through the local interpolation method. This allows for
**mixed interpolation**.
This curve type cannot return arbitrary tenor rates. It will only return a single
value which is applicable to that date. It is recommended to review
:ref:`RFR and IBOR Indexing` to ensure indexing is done in a
way that is consistent with internal instrument configuration.
Examples
--------
.. ipython:: python
nodes = {
dt(2022,1,1): 0.975, # <- initial value is general
dt(2023,1,1): 1.10,
dt(2024,1,1): 1.22,
dt(2025,1,1): 1.14,
dt(2026,1,1): 1.03,
dt(2027,1,1): 1.03,
}
line_curve1 = LineCurve(nodes=nodes, interpolation="linear")
line_curve2 = LineCurve(nodes=nodes, interpolation="spline")
line_curve1.plot("1d", comparators=[line_curve2], labels=["linear", "cubic spline"])
.. plot::
from rateslib.curves import *
import matplotlib.pyplot as plt
from datetime import datetime as dt
import numpy as np
nodes = {
dt(2022,1,1): 0.975, # <- initial value is general
dt(2023,1,1): 1.10,
dt(2024,1,1): 1.22,
dt(2025,1,1): 1.14,
dt(2026,1,1): 1.03,
dt(2027,1,1): 1.03,
}
line_curve1 = LineCurve(nodes=nodes, interpolation="linear")
line_curve2 = LineCurve(nodes=nodes, interpolation="spline")
fig, ax, line = line_curve1.plot("1d", comparators=[line_curve2], labels=["linear", "cubic spline"])
plt.show()
plt.close()
""" # noqa: E501
_ini_solve = 0 # No constraint placed on initial node in Solver
# abcs - set by init
_base_type: _CurveType = _CurveType.values
_id: str = None # type: ignore[assignment]
_ad: int = None # type: ignore[assignment]
_meta: _CurveMeta = None # type: ignore[assignment]
_nodes: _CurveNodes = None # type: ignore[assignment]
_interpolator: _CurveInterpolator = None # type: ignore[assignment]
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
def __getitem__(self, date: datetime) -> DualTypes:
return super().__getitem__(date)
class CompositeCurve(_BaseCurve):
"""
A dynamic composition of a sequence of other :class:`~rateslib.curves._BaseCurve`.
.. note::
Can only composite curves of the same type: :class:`Curve`
or :class:`LineCurve`. Other curve parameters such as ``modifier``, ``calendar``
and ``convention`` must also match.
Parameters
----------
curves : sequence of :class:`Curve` or sequence of :class:`LineCurve`
The curves to be composited.
id : str, optional, set by Default
The unique identifier to distinguish between curves in a multi-curve framework.
Examples
--------
Composite two :class:`LineCurve` s. Here, simulating the effect of adding
quarter-end turns to a cubic spline interpolator, which is otherwise difficult to
mathematically derive.
.. ipython:: python
:suppress:
from datetime import datetime as dt
.. ipython:: python
from rateslib.curves import LineCurve, CompositeCurve
line_curve1 = LineCurve(
nodes={
dt(2022, 1, 1): 2.5,
dt(2023, 1, 1): 3.5,
dt(2024, 1, 1): 3.0,
},
t=[dt(2022, 1, 1), dt(2022, 1, 1), dt(2022, 1, 1), dt(2022, 1, 1),
dt(2023, 1, 1),
dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1)],
)
line_curve2 = LineCurve(
nodes={
dt(2022, 1, 1): 0,
dt(2022, 3, 31): -0.2,
dt(2022, 4, 1): 0,
dt(2022, 6, 30): -0.2,
dt(2022, 7, 1): 0,
dt(2022, 9, 30): -0.2,
dt(2022, 10, 1): 0,
dt(2022, 12, 31): -0.2,
dt(2023, 1, 1): 0,
dt(2023, 3, 31): -0.2,
dt(2023, 4, 1): 0,
dt(2023, 6, 30): -0.2,
dt(2023, 7, 1): 0,
dt(2023, 9, 30): -0.2,
},
interpolation="flat_forward",
)
curve = CompositeCurve([line_curve1, line_curve2])
curve.plot("1d")
.. plot::
from rateslib.curves import LineCurve, CompositeCurve
import matplotlib.pyplot as plt
from datetime import datetime as dt
line_curve1 = LineCurve(
nodes={
dt(2022, 1, 1): 2.5,
dt(2023, 1, 1): 3.5,
dt(2024, 1, 1): 3.0,
},
t=[dt(2022, 1, 1), dt(2022, 1, 1), dt(2022, 1, 1), dt(2022, 1, 1),
dt(2023, 1, 1),
dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1)],
)
line_curve2 = LineCurve(
nodes={
dt(2022, 1, 1): 0,
dt(2022, 3, 31): -0.2,
dt(2022, 4, 1): 0,
dt(2022, 6, 30): -0.2,
dt(2022, 7, 1): 0,
dt(2022, 9, 30): -0.2,
dt(2022, 10, 1): 0,
dt(2022, 12, 31): -0.2,
dt(2023, 1, 1): 0,
dt(2023, 3, 31): -0.2,
dt(2023, 4, 1): 0,
dt(2023, 6, 30): -0.2,
dt(2023, 7, 1): 0,
dt(2023, 9, 30): -0.2,
},
interpolation="flat_forward",
)
curve = CompositeCurve([line_curve1, line_curve2])
fig, ax, line = curve.plot("1D")
plt.show()
We can also composite DF based curves by using a fast approximation or an
exact match.
.. ipython:: python
from rateslib.curves import Curve, CompositeCurve
curve1 = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.98,
dt(2024, 1, 1): 0.965,
dt(2025, 1, 1): 0.955
},
t=[dt(2023, 1, 1), dt(2023, 1, 1), dt(2023, 1, 1), dt(2023, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1), dt(2025, 1, 1), dt(2025, 1, 1), dt(2025, 1, 1)],
)
curve2 =Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2022, 6, 30): 1.0,
dt(2022, 7, 1): 0.999992,
dt(2022, 12, 31): 0.999992,
dt(2023, 1, 1): 0.999984,
dt(2023, 6, 30): 0.999984,
dt(2023, 7, 1): 0.999976,
dt(2023, 12, 31): 0.999976,
dt(2024, 1, 1): 0.999968,
dt(2024, 6, 30): 0.999968,
dt(2024, 7, 1): 0.999960,
dt(2025, 1, 1): 0.999960,
},
)
curve = CompositeCurve([curve1, curve2])
curve.plot("1D", comparators=[curve1, curve2], labels=["Composite", "C1", "C2"])
.. plot::
from rateslib.curves import Curve, CompositeCurve
import matplotlib.pyplot as plt
from datetime import datetime as dt
curve1 = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.98,
dt(2024, 1, 1): 0.965,
dt(2025, 1, 1): 0.955
},
t=[dt(2023, 1, 1), dt(2023, 1, 1), dt(2023, 1, 1), dt(2023, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1), dt(2025, 1, 1), dt(2025, 1, 1), dt(2025, 1, 1)],
)
curve2 =Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2022, 6, 30): 1.0,
dt(2022, 7, 1): 0.999992,
dt(2022, 12, 31): 0.999992,
dt(2023, 1, 1): 0.999984,
dt(2023, 6, 30): 0.999984,
dt(2023, 7, 1): 0.999976,
dt(2023, 12, 31): 0.999976,
dt(2024, 1, 1): 0.999968,
dt(2024, 6, 30): 0.999968,
dt(2024, 7, 1): 0.999960,
dt(2025, 1, 1): 0.999960,
},
)
curve = CompositeCurve([curve1, curve2])
fig, ax, line = curve.plot("1D", comparators=[curve1, curve2], labels=["Composite", "C1", "C2"])
plt.show()
""" # noqa: E501
_mutable_by_association = True
_do_not_validate = False
_composite_scalars: list[float | Dual | Dual2 | Variable]
# abcs - set by init
_base_type: _CurveType = None # type: ignore[assignment]
_id: str = None # type: ignore[assignment]
_ad: int = None # type: ignore[assignment]
_meta: _CurveMeta = None # type: ignore[assignment]
_nodes: _CurveNodes = None # type: ignore[assignment]
_interpolator: _CurveInterpolator = None # type: ignore[assignment]
@_new_state_post
@_clear_cache_post
def __init__(
self,
curves: list[_BaseCurve] | tuple[_BaseCurve, ...],
id: str_ = NoInput(0), # noqa: A002
_no_validation: bool = False,
) -> None:
self._id = _drb(super()._id, id)
self.curves = tuple(curves)
nodes_proxy: dict[datetime, DualTypes] = dict.fromkeys(self.curves[0].nodes.keys, 0.0)
self._nodes = _CurveNodes(nodes_proxy)
self._base_type = curves[0]._base_type
self._meta = replace(self.curves[0].meta)
if _no_validation:
pass
else:
_validate_composited_curve_collection(self, self.curves, False)
self._composite_scalars = [1.0] * len(self.curves)
self._ad = max(_._ad for _ in self.curves)
@property
@_validate_states # this ensures that the _meta attribute is updated if the curve state changes
def meta(self) -> _CurveMeta:
return self._meta
@_validate_states
@_no_interior_validation
def __getitem__(self, date: datetime) -> DualTypes:
if defaults.curve_caching and date in self._cache:
return self._cache[date]
if self._base_type == _CurveType.dfs:
# will return a composited discount factor
if date == self.nodes.initial:
# this value is 1.0, but by multiplying capture AD versus initial nodes.
ret: DualTypes = prod(crv[date] for crv in self.curves)
return ret
elif date < self.nodes.initial:
return 0.0 # Any DF in the past is set to zero consistent with behaviour on `Curve`
dcf_ = dcf(
start=self.nodes.initial,
end=date,
convention=self.meta.convention,
calendar=self.meta.calendar,
)
_, d, n = average_rate(self.nodes.initial, date, self.meta.convention, 0.0, dcf_)
total_rate: Number = 0.0
for scalar, curve in zip(self._composite_scalars, self.curves, strict=False):
avg_rate = ((1.0 / curve[date]) ** (1.0 / n) - 1) / d
total_rate += avg_rate * scalar # type: ignore[assignment]
ret = 1.0 / (1 + total_rate * d) ** n
return self._cached_value(date, ret)
else: # self._base_type == _CurveType.values:
# will return a composited rate
_ = 0.0
for scalar, curve in zip(self._composite_scalars, self.curves, strict=False):
_ += curve[date] * scalar
return self._cached_value(date, _)
# Solver interaction
@_clear_cache_post
def _set_ad_order(self, order: int) -> None:
"""
Change the node values on each curve to float, Dual or Dual2 based on input parameter.
"""
if order not in [0, 1, 2]:
raise ValueError("`order` can only be in {0, 1, 2} for auto diff calcs.")
self._ad = order
for curve in self.curves:
curve._set_ad_order(order)
# Mutation
def _validate_state(self) -> None:
if self._do_not_validate:
return None
if self._state != self._get_composited_state():
# re-reference meta preserving own collateral status
self._meta = replace(self.curves[0].meta, _collateral=self._meta.collateral)
# If any of the associated curves have been mutated then the cache is invalidated
self._clear_cache()
self._set_new_state()
def _get_composited_state(self) -> int:
_: int = hash(sum(curve._state for curve in self.curves))
return _
class MultiCsaCurve(_BaseCurve):
"""
A dynamic composition of a sequence of other :class:`~rateslib.curves._BaseCurve`.
.. note::
Can only combine curves of the type: :class:`Curve`. Other curve parameters such as
``modifier``, and ``convention`` must also match.
.. warning::
Intrinsic *MultiCsaCurves*, by definition, are not natively AD safe, due to having
discontinuities and no available derivatives in certain cases. See
:ref:`discontinuous MultiCsaCurves `.
Parameters
----------
curves : sequence of :class:`Curve`
The curves to be composited.
id : str, optional, set by Default
The unique identifier to distinguish between curves in a multi-curve framework.
multi_csa_min_step: int, optional
The minimum calculation step between subsequent DF evaluations to determine a multi-CSA
curve term DF. Higher numbers make faster calculations but are less accurate. Should be
in [1, max_step].
multi_csa_max_step: int, optional
The minimum calculation step between subsequent DF evaluations to determine a multi-CSA
curve term DF. Higher numbers make faster calculations but are less accurate. Should be
in [min_step, 1825].
Notes
-----
A *MultiCsaCurve* uses a different calculation methodology than a *CompositeCurve* for
determining the *rate* by selecting the curve within the collection with the highest rate.
"""
_mutable_by_association = True
_do_not_validate = False
# abcs - set by init
_base_type: _CurveType = None # type: ignore[assignment]
_id: str = None # type: ignore[assignment]
_ad: int = None # type: ignore[assignment]
_meta: _CurveMeta = None # type: ignore[assignment]
_nodes: _CurveNodes = None # type: ignore[assignment]
_interpolator: _CurveInterpolator = None # type: ignore[assignment]
@property
@_validate_states # this ensures that the _meta attribute is updated if the curve state changes
def meta(self) -> _CurveMeta:
return self._meta
@_new_state_post
@_clear_cache_post
def __init__(
self,
curves: list[_BaseCurve] | tuple[_BaseCurve, ...],
id: str | NoInput = NoInput(0), # noqa: A002
) -> None:
self._id = _drb(super()._id, id)
self.curves = tuple(curves)
nodes_proxy: dict[datetime, DualTypes] = dict.fromkeys(self.curves[0].nodes.keys, 0.0)
self._nodes = _CurveNodes(nodes_proxy)
self._base_type = curves[0]._base_type
self._meta = replace(self.curves[0].meta)
_validate_composited_curve_collection(self, self.curves, True)
self._ad = max(_._ad for _ in self.curves)
@_validate_states
@_no_interior_validation
def __getitem__(self, date: datetime) -> DualTypes:
# TODO: changing the multi_csa_step size should force a cache clear. This is a mutation.
# will return a composited discount factor
if defaults.curve_caching and date in self._cache:
return self._cache[date]
if date == self.nodes.initial:
# this value is 1.0, but by multiplying capture AD versus initial nodes.
ret: DualTypes = prod(crv[date] for crv in self.curves)
return ret
elif date < self.nodes.initial:
return 0.0 # Any DF in the past is set to zero consistent with behaviour on `Curve`
def _get_step(step: int) -> int:
mins = defaults.multi_csa_min_step
maxs = defaults.multi_csa_max_step
return min(max(step, mins), maxs)
# method uses the step and picks the highest (cheapest rate) in each step
d1 = self.nodes.initial
d2 = d1 + timedelta(days=_get_step(defaults.multi_csa_steps[0]))
v: DualTypes = self.__getitem__(d1)
v_i_1_j: list[DualTypes] = [curve[d1] for curve in self.curves]
v_i_j: list[DualTypes] = [0.0 for curve in self.curves]
k: int = 1
while d2 < date:
if defaults.curve_caching and d2 in self._cache:
v = self._cache[d2]
v_i_1_j = [curve[d2] for curve in self.curves]
else:
min_ratio: DualTypes = 1e5
for j, curve in enumerate(self.curves):
v_i_j[j] = curve[d2]
ratio_ = v_i_j[j] / v_i_1_j[j]
min_ratio = ratio_ if ratio_ < min_ratio else min_ratio
v_i_1_j[j] = v_i_j[j]
v *= min_ratio
self._cached_value(d2, v)
try:
step = _get_step(defaults.multi_csa_steps[k])
except IndexError:
step = defaults.multi_csa_max_step
d1, d2, k = d2, d2 + timedelta(days=step), k + 1
# finish the loop on the correct date
if date == d1:
return self._cached_value(date, v)
else:
min_ratio = 1e5
for j, curve in enumerate(self.curves):
ratio_ = curve[date] / v_i_1_j[j]
min_ratio = ratio_ if ratio_ < min_ratio else min_ratio
v *= min_ratio
return self._cached_value(date, v)
# Solver interaction
@_clear_cache_post
def _set_ad_order(self, order: int) -> None:
"""
Change the node values on each curve to float, Dual or Dual2 based on input parameter.
"""
if order not in [0, 1, 2]:
raise ValueError("`order` can only be in {0, 1, 2} for auto diff calcs.")
self._ad = order
for curve in self.curves:
curve._set_ad_order(order)
# Mutation
def _validate_state(self) -> None:
if self._do_not_validate:
return None
if self._state != self._get_composited_state():
# re-reference meta preserving own collateral status
self._meta = replace(self.curves[0].meta, _collateral=self._meta.collateral)
# If any of the associated curves have been mutated then the cache is invalidated
self._clear_cache()
self._set_new_state()
def _get_composited_state(self) -> int:
_: int = hash(sum(curve._state for curve in self.curves))
return _
def _validate_composited_curve_collection(
obj: _BaseCurve, curves: tuple[_BaseCurve, ...], force_dfs: bool
) -> None:
"""Perform checks to ensure CompositeCurve can exist"""
_base_type = curves[0]._base_type
if force_dfs and _base_type != _CurveType.dfs:
raise TypeError(f"{type(obj).__name__} must use discount factors, i.e have _CurveType.dfs.")
if not all(_._base_type == _base_type for _ in curves):
# then at least one curve is value based and one is DF based
raise TypeError(f"{type(obj).__name__} can only contain curves of the same type.")
ini_dates = [_.nodes.initial for _ in curves]
if not all(_ == ini_dates[0] for _ in ini_dates[1:]):
raise ValueError(f"`curves` must share the same initial node date, got {ini_dates}")
# if type(self) is not MultiCsaCurve: # for multi_csa DF curve do not check calendars
# self._check_meta_attribute("calendar")
if _base_type == _CurveType.dfs:
_check_meta_attribute(curves, "modifier")
_check_meta_attribute(curves, "convention")
_check_meta_attribute(curves, "calendar")
# self._check_meta_attribute("collateral") # not used due to inconsistent labelling
_ad = [_._ad for _ in curves]
if 1 in _ad and 2 in _ad:
raise TypeError(
f"{type(obj).__name__} cannot composite curves of AD order 1 and 2.\n"
"Either downcast curves using `curve._set_ad_order(1)`.\n"
"Or upcast curves using `curve._set_ad_order(2)`.\n"
)
def _check_meta_attribute(curves: tuple[_BaseCurve, ...], attr: str) -> None:
"""Ensure attributes are the same across curve collection"""
attrs = [getattr(_.meta, attr, None) for _ in curves]
if not all(_ == attrs[0] for _ in attrs[1:]):
raise ValueError(
f"Cannot composite curves with different attributes, got for "
f"'{attr}': {[getattr(_.meta, attr, None) for _ in curves]},",
)
class ProxyCurve(_BaseCurve):
"""
A :class:`~rateslib.curves._BaseCurve` which returns dynamic DFs from an
:class:`~rateslib.fx.FXForwards` object and FX parity.
Parameters
----------
cashflow : str
The currency in which cashflows are represented (3-digit code).
collateral : str
The currency of the CSA against which cashflows are collateralised (3-digit
code).
fx_forwards : FXForwards
The :class:`~rateslib.fx.FXForwards` object which contains the relating
FX information and the available :class:`~rateslib.curves.Curve` s.
id : str, optional, set by Default
The unique identifier to distinguish between curves in a multi-curve framework.
Notes
-----
The DFs returned are calculated via the chaining method and the below formula,
relating the DF curve in the local collateral currency and FX forward rates.
.. math::
w_{dom:for,i} = \\frac{f_{DOMFOR,i}}{F_{DOMFOR,0}} v_{for:for,i}
The returned curve contains contrived methods to calculate this dynamically and
efficiently from the combination of curves and FX rates that are available within
the given :class:`FXForwards` instance.
"""
_mutable_by_association = True
_do_not_validate = False
# abcs
_base_type: _CurveType = None # type: ignore[assignment]
_interpolator: _ProxyCurveInterpolator = None # type: ignore[assignment]
_nodes: _CurveNodes = None # type: ignore[assignment]
_meta: _CurveMeta = None # type: ignore[assignment]
_id: str = None # type: ignore[assignment]
@property
def _ad(self) -> int:
return self.interpolator.fx_forwards._ad
@property
def interpolator(self) -> _ProxyCurveInterpolator: # type: ignore[override]
"""An instance of :class:`~rateslib.curves.utils._ProxyCurveInterpolator`."""
return self._interpolator
@property
@_validate_states # this ensures that the _meta attribute is updated if the curve state changes
def meta(self) -> _CurveMeta:
return self._meta
@_new_state_post
@_clear_cache_post
def __init__(
self,
cashflow: str,
collateral: str,
fx_forwards: FXForwards,
id: str_ = NoInput(0), # noqa: A002
):
self._interpolator = _ProxyCurveInterpolator(
_fx_forwards=fx_forwards, _cash=cashflow.lower(), _collateral=collateral.lower()
)
self._id = _drb(super()._id, id)
self._base_type = fx_forwards.fx_curves[self.interpolator.cash_pair]._base_type
self._meta = replace(
self.interpolator.fx_forwards.fx_curves[self.interpolator.cash_pair].meta,
_collateral=collateral.lower(),
)
# CurveNodes attached for date attribution
self._nodes = _CurveNodes(
{
fx_forwards.immediate: 0.0,
fx_forwards.fx_curves[self.interpolator.cash_pair].nodes.final: 0.0,
}
)
@_validate_states
@_no_interior_validation
def __getitem__(self, date: datetime) -> DualTypes:
_1: DualTypes = self.interpolator.fx_forwards.rate(self.interpolator.pair, date)
_2: DualTypes = self.interpolator.fx_forwards.fx_rates_immediate._fx_array_el(
self.interpolator.cash_index, self.interpolator.collateral_index
)
_3: DualTypes = self.interpolator.fx_forwards.fx_curves[self.interpolator.collateral_pair][
date
]
return _1 / _2 * _3
def _set_ad_order(self, order: int) -> None:
return self.interpolator.fx_forwards._set_ad_order(order)
def _validate_state(self) -> None:
"""Used by 'mutable by association' objects to evaluate if their own record of
associated objects states matches the current state of those objects.
Mutable by update objects have no concept of state validation, they simply maintain
a *state* id.
"""
self.interpolator.fx_forwards._validate_state() # validate the state of sub-object
if self._state != self._get_composited_state():
# re-reference meta preserving own collateral status
self._meta = replace(
self.interpolator.fx_forwards.fx_curves[self.interpolator.cash_pair].meta,
_collateral=self._meta.collateral,
)
# If any of the associated curves have been mutated then the cache is invalidated
self._clear_cache()
self._set_new_state()
def _get_composited_state(self) -> int:
return self.interpolator.fx_forwards._state
class CreditImpliedCurve(_BaseCurve):
"""
Imply a :class:`~rateslib.curves._BaseCurve` from credit components.
.. warning::
This class is in **beta** status as of v2.1.0
Parameters
----------
risk_free: _BaseCurve, optional
The known risk free curve. If not given will be the implied curve.
credit: _BaseCurve, optional
The known credit curve. If not given will be the implied curve.
hazard: _BaseCurve, optional
The known hazard curve. If not given will be the implied curve.
Notes
-----
A *risk free*, *credit* or *hazard* curve will be implied from the other known, provided
curves.
This class is a wrapper for a :class:`~rateslib.curves.CompositeCurve` where the two known
curves are added and multiplied by the appropriate recovery rate, obtained from the
:class:`~rateslib.curves._CurveMeta` (either from the
``hazard`` curve or the ``credit`` curve in that order of precedence) to derive the third.
In traditional papers, such as *Duffie and Singleton (1999)*, the *credit* DF is expressed
relative to a *risk free* and *hazard* process. I.e.
.. math::
exp \\left ( \\int_0^T -r_f(t) - (1-R)\\lambda(t) .dt \\right ) = exp \\left ( \\int_0^T -r_c(t) .dt \\right )
where :math:`r_f` is the instantaneous risk free rate, :math:`r_c` the instantaneous credit rate
and :math:`\\lambda` the hazard intensity process.
In an approximation *rateslib* converts these to discrete overnight rate equivalents and implies
the curves as follows under rate vector addition:
- **Credit curve rates**: :math:`r_f(t) + (1-R)\\lambda(t)`
- **Hazard curve rates**: :math:`\\frac{r_c(t) - r_f(t)}{1-R}`
- **Risk free rates**: :math:`r_c(t) - (1-R)\\lambda(t)`
Example
-------
Given the following **risk free** curve and **hazard** curve, a **credit** curve is implied.
.. ipython:: python
from rateslib.curves import CreditImpliedCurve
risk_free = Curve(
nodes={dt(2000, 1, 1): 1.0, dt(2000, 9, 1): 0.98, dt(2001, 4, 1): 0.95, dt(2002, 1, 1): 0.92},
interpolation="spline",
)
hazard = Curve(
nodes={dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98, dt(2002, 1, 1): 0.95},
credit_recovery_rate=0.25,
)
credit = CreditImpliedCurve(risk_free=risk_free, hazard=hazard)
risk_free.plot("1b", comparators=[hazard, credit], labels=["risk free", "hazard", "credit"])
.. plot::
from rateslib.curves import *
import matplotlib.pyplot as plt
from datetime import datetime as dt
risk_free = Curve({dt(2000, 1, 1): 1.0, dt(2000, 9, 1): 0.98, dt(2001, 4, 1): 0.95, dt(2002, 1, 1): 0.92}, interpolation="spline")
hazard = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98, dt(2002, 1, 1): 0.95}, credit_recovery_rate=0.25)
credit = CreditImpliedCurve(risk_free=risk_free, hazard=hazard)
fig, ax, line = risk_free.plot("1b", comparators=[hazard, credit], labels=["risk free", "hazard", "credit"])
plt.show()
plt.close()
These associations are dynamic so changes to any of the curves will naturally update the
:class:`~rateslib.curves.CreditImpliedCurve`.
.. ipython:: python
hazard.update_meta("credit_recovery_rate", 0.90)
risk_free.plot("1b", comparators=[hazard, credit], labels=["risk free", "hazard", "credit"])
.. plot::
from rateslib.curves import *
import matplotlib.pyplot as plt
from datetime import datetime as dt
risk_free = Curve({dt(2000, 1, 1): 1.0, dt(2000, 9, 1): 0.98, dt(2001, 4, 1): 0.95, dt(2002, 1, 1): 0.92}, interpolation="spline")
hazard = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98, dt(2002, 1, 1): 0.95}, credit_recovery_rate=0.25)
credit = CreditImpliedCurve(risk_free=risk_free, hazard=hazard)
hazard.update_meta("credit_recovery_rate", 0.90)
fig, ax, line = risk_free.plot("1b", comparators=[hazard, credit], labels=["risk free", "hazard", "credit"])
plt.show()
plt.close()
""" # noqa: E501
_mutable_by_association = True
_do_not_validate = False
_obj: CompositeCurve
# abcs
_meta: _CurveMeta = None # type: ignore[assignment]
_interpolator: _CurveInterpolator = None # type: ignore[assignment]
@property
def _base_type(self) -> _CurveType:
return self.obj._base_type
@property
def _id(self) -> str:
return self.obj.id
@property
def _ad(self) -> int:
return self.obj.ad
@_new_state_post
@_clear_cache_post
def __init__(
self,
risk_free: Curve | NoInput = NoInput(0),
credit: Curve | NoInput = NoInput(0),
hazard: Curve | NoInput = NoInput(0),
id: str_ = NoInput(0), # noqa: A002
) -> None:
if sum([isinstance(_, NoInput) for _ in [risk_free, credit, hazard]]) != 1:
raise ValueError(
"One, and only one, curve must be NoInput in order to be a CreditImpliedCurve."
)
elif not isinstance(hazard, NoInput) and not isinstance(credit, NoInput):
self._implied = _CreditImpliedType.risk_free
self._obj = CompositeCurve(curves=[hazard, credit], id=id)
elif not isinstance(hazard, NoInput) and not isinstance(risk_free, NoInput):
self._implied = _CreditImpliedType.credit
self._obj = CompositeCurve(curves=[hazard, risk_free], id=id)
else: # not isinstance(credit, NoInput) and not isinstance(risk_free, NoInput):
self._implied = _CreditImpliedType.hazard
self._obj = CompositeCurve(curves=[credit, risk_free], id=id) # type: ignore[list-item]
self._meta = replace(self._obj.meta)
@_validate_states
@_no_interior_validation
def __getitem__(self, date: datetime) -> DualTypes:
self.obj._composite_scalars = self._composite_scalars()
return self.obj.__getitem__(date)
def _set_ad_order(self, order: int) -> None:
return self.obj._set_ad_order(order)
@property
def obj(self) -> CompositeCurve:
"""The wrapped :class:`~rateslib.curves.CompositeCurve` for making calculations."""
return self._obj
@property
@_validate_states # this ensures that the _meta attribute is updated if the curve state changes
def meta(self) -> _CurveMeta:
"""An instance of :class:`~rateslib.curves._CurveMeta`."""
return self._meta
@property
def _nodes(self) -> _CurveNodes:
return self.obj.nodes
def _composite_scalars(self) -> list[float | Dual | Dual2 | Variable]:
lr = 1.0 - self.meta.credit_recovery_rate
if self._implied == _CreditImpliedType.credit:
return [lr, 1.0]
elif self._implied == _CreditImpliedType.hazard:
return [1.0 / lr, -1.0 / lr]
else:
return [-lr, 1.0]
def _get_composited_state(self) -> int:
# return the state of the CompositeCurve
return self._obj._state
def _validate_state(self) -> None:
"""Used by 'mutable by association' objects to evaluate if their own record of
associated objects states matches the current state of those objects.
Mutable by update objects have no concept of state validation, they simply maintain
a *state* id.
"""
if self._do_not_validate:
return None
self.obj._validate_state() # validate the obj state in case one its sub components changed
if self._state != self._get_composited_state():
self._clear_cache() # CreditImpliedCurve has no cache but future proofing here
self._set_new_state()
self._meta = replace(
self._obj.meta,
_collateral=self._meta.collateral,
_credit_recovery_rate=self._obj._meta.credit_recovery_rate,
_credit_discretization=self._obj._meta.credit_discretization,
)
self._obj._composite_scalars = self._composite_scalars()
def index_value(
index_lag: int,
index_method: str | IndexMethod,
index_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
index_date: datetime_ = NoInput(0),
index_curve: CurveOption_ = NoInput(0),
) -> DualTypes:
"""
Determine an index value from a reference date using combinations of known fixings and
forecast from a *Curve*.
Parameters
----------
index_lag: int
The number of months by which the reference ``index_date`` should be lagged to derive a
value.
index_method: str in {"curve", "daily", "monthly"}
The method used to derive and interpolate index values.
index_fixings: float, Dual, Dual2, Variable, Series[DualTypes], str, optional
A specific index value which is returned directly, or if given as a Series applies the
appropriate ``index_method`` to determine a value. May also forecast from *Curve* if
necessary. See notes.
index_date: datetime, optional
The reference index date for which the index value is sought. Not required if
``index_fixings`` is returned directly.
index_curve: Curve, optional
The forecast curve from which to derive index values under the appropriate ``index_method``.
If using *'curve'*, then curve calculations are used directly.
Returns
-------
DualTypes
Notes
-----
A *Series* **must** be given with a unique, monotonic increasing index. This will **not** be
validated.
When using the *'daily'* or *'monthly'* type ``index_methods`` index values **must** be
assigned to **the first of the month** to which the publication is relevant.
The below image is a snippet taken from the UK DMO *'Formulae for Calculating Gilt Prices
and Yield'*. It outlines the calculation of an *index value* for a reference date using their
3 month lag and *'daily'* indexing method.
.. image:: _static/ukdmo_rpi_ex.png
:alt: Index value calculations
:align: center
:width: 291
This calculation is replicated in *rateslib* in the following way:
.. ipython:: python
from rateslib import index_value
from pandas import Series
rpi_series = Series(
[172.2, 173.1, 174.2, 174.4],
index=[dt(2001, 3, 1), dt(2001, 4, 1), dt(2001, 5, 1), dt(2001, 6, 1)]
)
index_value(
index_lag=3,
index_method="daily",
index_fixings=rpi_series,
index_date=dt(2001, 7, 20)
)
"""
index_method_ = _get_index_method(index_method)
iv_result = _try_index_value(
index_lag=index_lag,
index_method=index_method_,
index_fixings=index_fixings,
index_date=index_date,
index_curve=index_curve,
)
return iv_result.unwrap()
def _try_index_value(
index_lag: int,
index_method: IndexMethod,
index_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
index_date: datetime_ = NoInput(0),
index_curve: CurveOption_ = NoInput(0),
) -> Result[DualTypes]:
if isinstance(index_fixings, int | float | Dual | Dual2 | Variable):
# i_fixings is a given value, probably aligned with an ``index_base``: return directly
return Ok(index_fixings)
if isinstance(index_curve, dict):
return Err(
NotImplementedError(
"`index_curve` cannot currently be supplied as dict. Use a Curve type or "
"NoInput(0)."
)
)
if isinstance(index_date, NoInput):
return Err(
ValueError(
"Must supply an `index_date` from which to forecast if `index_fixings` is "
"not a value."
)
)
if isinstance(index_fixings, NoInput | None):
# forecast from curve if available
if isinstance(index_curve, NoInput):
return Err(
ValueError(
"`index_value` must be forecast from a `index_curve` but no such argument "
"was provided."
)
)
return index_curve._try_index_value(
index_date=index_date,
index_lag=index_lag,
index_method=index_method,
)
elif isinstance(index_fixings, str):
try:
fixings_series = fixings.__getitem__(index_fixings)
except Exception as e:
return Err(e)
if isinstance(index_curve, NoInput):
return _index_value_from_series_no_curve(
index_lag=index_lag,
index_method=index_method,
index_fixings=fixings_series[1],
index_date=index_date,
index_fixings_boundary=fixings_series[2],
)
else:
return _index_value_from_mixed_series_and_curve(
index_lag=index_lag,
index_method=index_method,
index_fixings=fixings_series[1],
index_date=index_date,
index_curve=index_curve,
)
elif isinstance(index_fixings, Series):
if isinstance(index_curve, NoInput):
return _index_value_from_series_no_curve(
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings,
index_date=index_date,
)
else:
return _index_value_from_mixed_series_and_curve(
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings,
index_date=index_date,
index_curve=index_curve,
)
else:
return Err(
TypeError(
"`index_fixings` must be of type: Str, Series, DualTypes or NoInput.\n"
f"{type(index_fixings)} was given."
)
)
def _index_value_from_mixed_series_and_curve(
index_lag: int,
index_method: IndexMethod,
index_fixings: Series[DualTypes], # type: ignore[type-var]
index_date: datetime,
index_curve: _BaseCurve,
) -> Result[DualTypes]:
"""
Iterate through possibilities assuming a Curve and fixings as series exists.
For returning a value from the Series the ``index_lag`` must be zero.
If the lag is not zero then a Curve method will be used instead which will omit the Series.
"""
if index_method == IndexMethod.Curve:
if index_date in index_fixings.index:
# simplest case returns Series value if all checks pass.
if index_lag == 0:
return Ok(index_fixings.loc[index_date])
else:
return Err(
ValueError(
"`index_lag` must be zero when using a 'curve' `index_method`.\n"
f"`index_date`: {index_date}, is in Series but got "
f"`index_lag`: {index_lag}."
)
)
elif len(index_fixings.index) == 0:
# recall with the curve
return index_curve._try_index_value(
index_date=index_date, index_lag=index_lag, index_method=index_method
)
elif index_lag == 0 and (index_fixings.index[0] < index_date < index_fixings.index[-1]):
# index date is within the Series index range but not found and the index lag is
# zero so this should be available
return Err(
ValueError(
f"The Series given for `index_fixings` requires, but does not contain, "
f"the value for date: {index_date}.\n"
"For inflation indexes using 'monthly' or 'daily' `index_method` the "
"values associated for a month should be assigned "
"to the first day of that month."
)
)
else:
return index_curve._try_index_value(
index_date=index_date, index_lag=index_lag, index_method=index_method
)
elif index_method == IndexMethod.Monthly:
date_ = add_tenor(index_date, f"-{index_lag}M", "none", NoInput(0), 1)
# a monthly value can only be derived from one source.
# make separate determinations to avoid the issue of mis-matching index lags
value_from_fixings = _try_index_value(
index_lag=0,
index_method=IndexMethod.Curve,
index_fixings=index_fixings,
index_date=date_,
index_curve=NoInput(0),
)
if value_from_fixings.is_ok:
return value_from_fixings
else:
value_from_curve = _try_index_value(
index_lag=index_lag,
index_method=IndexMethod.Monthly,
index_fixings=NoInput(0),
index_date=index_date,
index_curve=index_curve,
)
return value_from_curve
else: # i_method == IndexMethod.Daily:
n = monthrange(index_date.year, index_date.month)[1]
date_som = datetime(index_date.year, index_date.month, 1)
date_sonm = add_tenor(index_date, "1M", "none", NoInput(0), 1)
m1 = _try_index_value(
index_lag=index_lag,
index_method=IndexMethod.Monthly,
index_fixings=index_fixings,
index_date=date_som,
index_curve=index_curve,
)
if index_date == date_som:
return m1
m2 = _try_index_value(
index_lag=index_lag,
index_method=IndexMethod.Monthly,
index_fixings=index_fixings,
index_date=date_sonm,
index_curve=index_curve,
)
if m2.is_err or m1.is_err:
return Err(
ValueError(
"The `index_value` could not be determined.\nThe period may be 'future' based "
"and there is no `index_fixing` available, or an `index_curve` has not be "
"able to forecast it."
)
)
# this line cannot be hit when a curve returns DualTypes and not a NoInput
# will raise a warning when the curve returns 0.0
m1_, m2_ = m1.unwrap(), m2.unwrap()
return Ok(m1_ + (index_date.day - 1) / n * (m2_ - m1_))
def _index_value_from_series_no_curve(
index_lag: int,
index_method: IndexMethod,
index_fixings: Series[DualTypes], # type: ignore[type-var]
index_date: datetime,
index_fixings_boundary: tuple[datetime, datetime] | None = None,
) -> Result[DualTypes]:
"""
Derive a value from a Series only, detecting cases where the errors might be raised.
"""
fixings_series = index_fixings
if index_method == IndexMethod.Curve:
if index_lag != 0:
return Err(ValueError(err.VE_INDEX_LAG_MUST_BE_ZERO.format(index_date, index_lag)))
if len(fixings_series.index) == 0:
return Err(ValueError(err.VE_EMPTY_SERIES))
if index_fixings_boundary is not None:
left, right = index_fixings_boundary
if index_date < left or index_date > right:
return Err(FixingRangeError(index_date, index_fixings_boundary))
else:
right = fixings_series.index[-1]
if index_date > right:
return Err(FixingRangeError(index_date, (datetime(1, 1, 1), right)))
left = fixings_series.index[0]
if index_date < left:
return Err(FixingRangeError(index_date, (left, right)))
if index_date in fixings_series.index:
# simplest case returns Series value if all checks pass.
return Ok(fixings_series.loc[index_date])
# date falls inside the dates of the Series but does not exist.
return Err(FixingMissingDataError(index_date, (left, right)))
elif index_method == IndexMethod.Monthly:
date_ = add_tenor(index_date, f"-{index_lag}M", "none", NoInput(0), 1)
return _index_value_from_series_no_curve(
index_lag=0,
index_method=IndexMethod.Curve,
index_fixings=index_fixings,
index_date=date_,
index_fixings_boundary=index_fixings_boundary,
)
else: # i_method == IndexMethod.Daily:
n = monthrange(index_date.year, index_date.month)[1]
date_som = datetime(index_date.year, index_date.month, 1)
date_sonm = add_tenor(index_date, "1M", "none", NoInput(0), 1)
m1 = _index_value_from_series_no_curve(
index_lag=index_lag,
index_method=IndexMethod.Monthly,
index_fixings=index_fixings,
index_date=date_som,
index_fixings_boundary=index_fixings_boundary,
)
if index_date == date_som:
return m1
m2 = _index_value_from_series_no_curve(
index_lag=index_lag,
index_method=IndexMethod.Monthly,
index_fixings=index_fixings,
index_date=date_sonm,
index_fixings_boundary=index_fixings_boundary,
)
if m1.is_err:
return m1
if m2.is_err:
return m2
m1_, m2_ = m1.unwrap(), m2.unwrap()
return Ok(m1_ + (index_date.day - 1) / n * (m2_ - m1_))
# Licence: Creative Commons - Attribution-NonCommercial-NoDerivatives 4.0 International
# Commercial use of this code, and/or copying and redistribution is prohibited.
# Contact rateslib at gmail.com if this code is observed outside its intended sphere.
================================================
FILE: python/rateslib/curves/interpolation.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from datetime import timezone
from math import floor
from typing import TYPE_CHECKING, Protocol
from rateslib.dual import dual_exp, dual_log
from rateslib.rs import index_left_f64
from rateslib.scheduling import Convention, dcf
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
DualTypes,
Sequence,
_BaseCurve,
datetime,
)
UTC = timezone.utc
class InterpolationFunction(Protocol):
# Callable type for Interpolation Functions
def __call__(self, date: datetime, curve: _BaseCurve) -> DualTypes: ...
def _linear(date: datetime, curve: _BaseCurve) -> DualTypes:
x, x_1, x_2, i = _get_posix(date, curve)
node_values = list(curve.nodes.nodes.values())
y_1, y_2 = node_values[i], node_values[i + 1]
return y_1 + (y_2 - y_1) * (x - x_1) / (x_2 - x_1)
def _linear_bus(date: datetime, curve: _BaseCurve) -> DualTypes:
i = index_left(curve.nodes.keys, curve.nodes.n, date)
x_1, x_2 = curve.nodes.keys[i], curve.nodes.keys[i + 1]
d_n = dcf(x_1, x_2, "bus252", calendar=curve.meta.calendar)
d_m = dcf(x_1, date, "bus252", calendar=curve.meta.calendar)
node_values = list(curve.nodes.nodes.values())
y_1, y_2 = node_values[i], node_values[i + 1]
return y_1 + (y_2 - y_1) * d_m / d_n
def _log_linear(date: datetime, curve: _BaseCurve) -> DualTypes:
x, x_1, x_2, i = _get_posix(date, curve)
node_values = list(curve.nodes.nodes.values())
y_1, y_2 = dual_log(node_values[i]), dual_log(node_values[i + 1])
return dual_exp(y_1 + (y_2 - y_1) * (x - x_1) / (x_2 - x_1))
def _log_linear_bus(date: datetime, curve: _BaseCurve) -> DualTypes:
i = index_left(curve.nodes.keys, curve.nodes.n, date)
x_1, x_2 = curve.nodes.keys[i], curve.nodes.keys[i + 1]
d_n = dcf(x_1, x_2, "bus252", calendar=curve.meta.calendar)
d_m = dcf(x_1, date, "bus252", calendar=curve.meta.calendar)
node_values = list(curve.nodes.nodes.values())
y_1, y_2 = dual_log(node_values[i]), dual_log(node_values[i + 1])
return dual_exp(y_1 + (y_2 - y_1) * d_m / d_n)
def _flat_forward(date: datetime, curve: _BaseCurve) -> DualTypes:
x, x_1, x_2, i = _get_posix(date, curve)
node_values = list(curve.nodes.nodes.values())
y_1, y_2 = node_values[i], node_values[i + 1]
if x >= x_2:
return y_2
return y_1
def _flat_backward(date: datetime, curve: _BaseCurve) -> DualTypes:
x, x_1, x_2, i = _get_posix(date, curve)
node_values = list(curve.nodes.nodes.values())
y_1, y_2 = node_values[i], node_values[i + 1]
if x <= x_1:
return y_1
return y_2
def _linear_zero_rate(date: datetime, curve: _BaseCurve) -> DualTypes:
# base time on DCF, which depends on the curve convention.
i = index_left(curve.nodes.keys, curve.nodes.n, date)
nvs = list(curve.nodes.nodes.values())
nds = curve.nodes.keys
d_2 = dcf(nds[0], nds[i + 1], curve.meta.convention, calendar=curve.meta.calendar)
r_2 = -dual_log(nvs[i + 1]) / dcf(
nds[0], nds[i + 1], curve.meta.convention, calendar=curve.meta.calendar
)
if i == 0:
# first period must use flat backwards zero rate
d_m = dcf(nds[0], date, curve.meta.convention, calendar=curve.meta.calendar)
r_m = r_2
else:
d_1 = dcf(nds[0], nds[i], curve.meta.convention, calendar=curve.meta.calendar)
r_1 = -dual_log(nvs[i]) / d_1
d_m = dcf(nds[0], date, curve.meta.convention, calendar=curve.meta.calendar)
r_m = r_1 + (r_2 - r_1) * (d_m - d_1) / (d_2 - d_1)
return dual_exp(-r_m * d_m)
def _linear_index(date: datetime, curve: _BaseCurve) -> DualTypes:
x, x_1, x_2, i = _get_posix(date, curve)
node_values = list(curve.nodes.nodes.values())
y_1, y_2 = node_values[i], node_values[i + 1]
return (1 / y_1 + (1 / y_2 - 1 / y_1) * (x - x_1) / (x_2 - x_1)) ** -1.0
def _runtime_error(date: datetime, curve: _BaseCurve) -> DualTypes:
"""Spline interpolation is performed by a PPSpline over the whole nodes domain."""
raise RuntimeError( # pragma: no cover
"An `interpolation` mode of 'spline' should never call this function.\n"
"The configured knot sequence `t` for the PPSpline should cover the entire `nodes` domain."
)
INTERPOLATION: dict[tuple[str, Convention | None], InterpolationFunction] = {
("linear", None): _linear, # default linear interpolation for all Convention types
("linear", Convention.Bus252): _linear_bus, # overload for Bus252 type
("log_linear", None): _log_linear,
("log_linear", Convention.Bus252): _log_linear_bus,
("linear_zero_rate", None): _linear_zero_rate,
("linear_index", None): _linear_index,
("flat_forward", None): _flat_forward,
("flat_backward", None): _flat_backward,
("spline", None): _runtime_error,
}
def _get_posix(date: datetime, curve: _BaseCurve) -> tuple[float, float, float, int]:
"""
Convert a datetime and curve_nodes to posix timestamps and return the index_left.
"""
date_posix: float = date.replace(tzinfo=UTC).timestamp()
l_index = index_left_f64(curve.nodes.posix_keys, date_posix, None)
node_left_posix, node_right_posix = (
curve.nodes.posix_keys[l_index],
curve.nodes.posix_keys[l_index + 1],
)
return date_posix, node_left_posix, node_right_posix, l_index
def index_left(
list_input: Sequence[Any],
list_length: int,
value: Any,
left_count: int = 0,
) -> int:
"""
Return the interval index of a value from an ordered input list on the left side.
Parameters
----------
input : list
Ordered list (lowest to highest) containing datatypes the same as value.
length : int
The length of ``input``.
value : Any
The value for which to determine the list index of.
left_count : int
The counter to pass recursively to determine the output. Users should not
directly specify, it is used in internal calculation only.
Returns
-------
int : The left index of the interval within which value is found (or extrapolated
from)
Notes
-----
Uses a binary search method which operates with time :math:`O(log_2 n)`.
Examples
--------
.. ipython:: python
from rateslib.curves import index_left
Out of domain values return the left-side index of the closest matching interval.
100 is attributed to the interval (1, 2].
.. ipython:: python
list = [0, 1, 2]
index_left(list, 3, 100)
-100 is attributed to the interval (0, 1].
.. ipython:: python
index_left(list, 3, -100)
Interior values return the left-side index of the interval.
1.45 is attributed to the interval (1, 2].
.. ipython:: python
index_left(list, 3, 1.45)
1 is attributed to the interval (0, 1].
.. ipython:: python
index_left(list, 3, 1)
"""
if list_length == 1:
raise ValueError("`index_left` designed for intervals. Cannot index list of length 1.")
if list_length == 2:
return left_count
split: int = floor((list_length - 1) / 2)
if list_length == 3 and value == list_input[split]:
return left_count
if value <= list_input[split]:
return index_left(list_input[: split + 1], split + 1, value, left_count)
else:
return index_left(list_input[split:], list_length - split, value, left_count + split)
# # ALTERNATIVE index_left: exhaustive search which is inferior to binary search
# def index_left_exhaustive(list_input, value, left_count=0):
# if left_count == 0:
# if value > list_input[-1]:
# return len(list_input)-2
# if value <= list_input[0]:
# return 0
#
# if list_input[0] < value <= list_input[1]:
# return left_count
# else:
# return index_left_exhaustive(list_input[1:], value, left_count + 1)
================================================
FILE: python/rateslib/curves/rs.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from collections.abc import Callable
from datetime import datetime
from typing import TYPE_CHECKING, Any
from uuid import uuid4
from rateslib import defaults
from rateslib.default import _make_py_json
from rateslib.dual.utils import _get_adorder
from rateslib.enums.generics import NoInput, _drb
from rateslib.rs import (
ADOrder,
FlatBackwardInterpolator,
FlatForwardInterpolator,
LinearInterpolator,
LinearZeroRateInterpolator,
LogLinearInterpolator,
Modifier,
NullInterpolator,
_get_modifier_str,
)
from rateslib.rs import Curve as CurveObj # noqa: F401
from rateslib.scheduling import get_calendar
from rateslib.scheduling.convention import _get_convention
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CalInput,
CurveInterpolator,
DualTypes,
Number,
)
class CurveRs:
def __init__(
self,
nodes: dict[datetime, Number],
*,
interpolation: str
| Callable[[datetime, dict[datetime, DualTypes]], DualTypes]
| NoInput = NoInput(0),
id: str | NoInput = NoInput(0), # noqa: A002
convention: str | NoInput = NoInput(0),
modifier: str | NoInput = NoInput(0),
calendar: CalInput = NoInput(0),
ad: int = 0,
index_base: float | NoInput = NoInput(0),
):
self._py_interpolator: Callable[[datetime, dict[datetime, DualTypes]], DualTypes] | None = (
interpolation if callable(interpolation) else None
)
self.obj = CurveObj(
nodes=nodes,
interpolator=self._validate_interpolator(interpolation),
ad=_get_adorder(ad),
id=_drb(uuid4().hex[:5] + "_", id), # 1 in a million clash
convention=_get_convention(_drb(defaults.convention, convention)),
modifier=Modifier.ModF,
calendar=get_calendar(calendar),
index_base=_drb(None, index_base),
)
@property
def id(self) -> str:
return self.obj.id
@property
def convention(self) -> str:
return str(self.obj.convention)
@property
def modifier(self) -> str:
return _get_modifier_str(self.obj.modifier)
@property
def interpolation(self) -> str:
return self.obj.interpolation
@property
def nodes(self) -> dict[datetime, Number]:
return self.obj.nodes
@property
def ad(self) -> int:
_ = self.obj.ad
if _ == ADOrder.One:
return 1
elif _ == ADOrder.Two:
return 2
return 0
def _set_ad_order(self, ad: int) -> None:
self.obj.set_ad_order(_get_adorder(ad))
@staticmethod
def _validate_interpolator(
interpolation: str | Callable[[datetime, dict[datetime, DualTypes]], DualTypes] | NoInput,
) -> CurveInterpolator:
if interpolation is NoInput.blank:
return _get_interpolator(defaults.interpolation["Curve"])
elif isinstance(interpolation, str):
return _get_interpolator(interpolation)
else:
return NullInterpolator()
def to_json(self) -> str:
return _make_py_json(self.obj.to_json(), "CurveRs")
@classmethod
def __init_from_obj__(cls, obj: CurveObj) -> CurveRs:
new = cls(
nodes={datetime(2000, 1, 1): 1.0},
interpolation="linear",
id="_",
ad=0,
index_base=NoInput(0),
)
new.obj = obj
return new
def __eq__(self, other: Any) -> bool:
if not isinstance(other, CurveRs):
return False
return self.obj.__eq__(other.obj)
def __getitem__(self, value: datetime) -> Number:
return self.obj[value]
def _get_interpolator(name: str) -> CurveInterpolator:
name_ = name.lower()
if name_ == "log_linear":
return LogLinearInterpolator()
elif name_ == "linear":
return LinearInterpolator()
elif name_ == "linear_zero_rate":
return LinearZeroRateInterpolator()
elif name_ == "flat_forward":
return FlatForwardInterpolator()
elif name_ == "flat_backward":
return FlatBackwardInterpolator()
else:
raise ValueError("Interpolator `name` is invalid.")
================================================
FILE: python/rateslib/curves/utils.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import json
from dataclasses import dataclass
from datetime import datetime, timezone
from enum import Enum
from functools import cached_property
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.curves.interpolation import INTERPOLATION, InterpolationFunction
from rateslib.dual import dual_log, set_order_convert
from rateslib.dual.utils import _to_number
from rateslib.enums.generics import NoInput
from rateslib.scheduling import Convention
from rateslib.scheduling.convention import _get_convention
from rateslib.splines import PPSplineDual, PPSplineDual2, PPSplineF64
if TYPE_CHECKING:
from rateslib.local_types import (
Any,
CalTypes,
DualTypes,
FXForwards,
Number,
Variable,
float_,
str_,
) # pragma: no cover
UTC = timezone.utc
class _CurveType(Enum):
"""
Enumerable type to define the difference between a discount factor (DF) based and
values based :class:`~rateslib.curves._BaseCurve`.
"""
dfs = 0
values = 1
class _CreditImpliedType(Enum):
"""
Enumerable type to define which calculation is performed on a
:class:`~rateslib.curves.CreditImpliedCurve`.
"""
credit = 0
hazard = 1
risk_free = 2
@dataclass
class _CurveMeta:
"""
A container of meta data associated with a
:class:`~rateslib.curves._BaseCurve` used to make calculations.
"""
_calendar: CalTypes
_convention: Convention
_modifier: str
_index_base: float_ | Variable
_index_lag: int
_collateral: str | None
_credit_discretization: int
_credit_recovery_rate: float | Variable
@property
def calendar(self) -> CalTypes:
"""Settlement calendar used to determine fixing dates and tenor end dates."""
return self._calendar
@property
def convention(self) -> Convention:
"""Day count convention for determining rates and interpolation."""
return self._convention
@property
def modifier(self) -> str:
"""Modification rule for adjusting non-business tenor end dates."""
return self._modifier
@property
def index_base(self) -> Variable | float_:
"""The index value associated with the initial node date of the *Curve*."""
return self._index_base
@property
def index_lag(self) -> int:
"""The number of months by which curve nodes are lagged to determine index values."""
return self._index_lag
@property
def collateral(self) -> str | None:
"""The currency(ies) identified as being the collateral choice for DFs associated with
the *Curve*."""
return self._collateral
@property
def credit_discretization(self) -> int:
"""A parameter for numerically solving the integral for a *Credit Protection Period*."""
return self._credit_discretization
@property
def credit_recovery_rate(self) -> float | Variable:
"""The recovery rate applied to *Credit Protection Period* cashflows."""
return self._credit_recovery_rate
def to_json(self) -> str:
"""
Serialize this object to JSON format.
The object can be deserialized using the :meth:`~rateslib.serialization.from_json` method.
Returns
-------
str
"""
from rateslib.serialization.utils import _obj_to_json
obj = dict(
PyNative=dict(
_CurveMeta=dict(
calendar=self.calendar.to_json(),
convention=self.convention.to_json(),
modifier=self.modifier,
index_base=_obj_to_json(self.index_base),
index_lag=self.index_lag,
collateral=self.collateral,
credit_discretization=self.credit_discretization,
credit_recovery_rate=_obj_to_json(self.credit_recovery_rate),
)
)
)
return json.dumps(obj)
@classmethod
def _from_json(cls, loaded_json: dict[str, Any]) -> _CurveMeta:
from rateslib.serialization import from_json
return _CurveMeta(
_convention=from_json(loaded_json["convention"]),
_modifier=loaded_json["modifier"],
_index_lag=loaded_json["index_lag"],
_collateral=loaded_json["collateral"],
_index_base=from_json(loaded_json["index_base"]),
_calendar=from_json(loaded_json["calendar"]),
_credit_discretization=loaded_json["credit_discretization"],
_credit_recovery_rate=from_json(loaded_json["credit_recovery_rate"]),
)
class _CurveSpline:
"""
A container for data relating to interpolating :class:`~rateslib.curves._CurveNodes` using
a cubic PPSpline.
"""
_t: list[datetime]
_spline: PPSplineF64 | PPSplineDual | PPSplineDual2 | None
_endpoints: tuple[str, str]
def __init__(self, t: list[datetime], endpoints: tuple[str, str]) -> None:
self._t = t
self._endpoints = endpoints
self._spline = None # will be set in later in csolve
if len(self._t) < 10 and "not_a_knot" in self.endpoints:
raise ValueError(
"`endpoints` cannot be 'not_a_knot' with only 1 interior breakpoint",
)
@property
def t(self) -> list[datetime]:
"""The knot sequence of the PPSpline."""
return self._t
@cached_property
def t_posix(self) -> list[float]:
"""The knot sequence of the PPSpline converted to float unix timestamps."""
return [_.replace(tzinfo=UTC).timestamp() for _ in self.t]
@property
def spline(self) -> PPSplineF64 | PPSplineDual | PPSplineDual2 | None:
"""An instance of :class:`~rateslib.splines.PPSplineF64`,
:class:`~rateslib.splines.PPSplineDual` or :class:`~rateslib.splines.PPSplineDual2`.
"""
return self._spline
@property
def endpoints(self) -> tuple[str, str]:
"""The endpoints method used to determine the spline coefficients."""
return self._endpoints
# All calling methods should clear the cache and/or set new state after `_csolve`
def _csolve(self, curve_type: _CurveType, nodes: _CurveNodes, ad: int) -> None:
t_posix = self.t_posix.copy()
tau_posix = [k.replace(tzinfo=UTC).timestamp() for k in nodes.keys if k >= self.t[0]]
if curve_type == _CurveType.dfs:
# then use log
y = [dual_log(v) for k, v in nodes.nodes.items() if k >= self.t[0]]
else:
# use values directly
y = [_to_number(v) for k, v in nodes.nodes.items() if k >= self.t[0]]
# Left side constraint
if self.endpoints[0].lower() == "natural":
tau_posix.insert(0, t_posix[0])
y.insert(0, set_order_convert(0.0, ad, None))
left_n = 2
elif self.endpoints[0].lower() == "not_a_knot":
t_posix.pop(4)
left_n = 0
else:
raise NotImplementedError(
f"Endpoint method '{self.endpoints[0]}' not implemented.",
)
# Right side constraint
if self.endpoints[1].lower() == "natural":
tau_posix.append(self.t_posix[-1])
y.append(set_order_convert(0, ad, None))
right_n = 2
elif self.endpoints[1].lower() == "not_a_knot":
t_posix.pop(-5)
right_n = 0
else:
raise NotImplementedError(
f"Endpoint method '{self.endpoints[0]}' not implemented.",
)
# Get the Spline class by data types
if ad == 0:
self._spline = PPSplineF64(4, t_posix, None)
elif ad == 1:
self._spline = PPSplineDual(4, t_posix, None)
else:
self._spline = PPSplineDual2(4, t_posix, None)
self._spline.csolve(tau_posix, y, left_n, right_n, False) # type: ignore[arg-type]
def to_json(self) -> str:
"""
Serialize this object to JSON format.
The object can be deserialized using the :meth:`~rateslib.serialization.from_json` method.
Returns
-------
str
"""
obj = dict(
PyNative=dict(
_CurveSpline=dict(
t=[_.strftime("%Y-%m-%d") for _ in self.t],
endpoints=self.endpoints,
)
)
)
return json.dumps(obj)
@classmethod
def _from_json(cls, loaded_json: dict[str, Any]) -> _CurveSpline:
return _CurveSpline(
t=[datetime.strptime(_, "%Y-%m-%d") for _ in loaded_json["t"]],
endpoints=tuple(loaded_json["endpoints"]),
)
def __eq__(self, other: Any) -> bool:
"""CurveSplines are considered equal if their knot sequence and endpoints are equivalent.
For the same nodes this will resolve to give the same spline coefficients.
"""
if not isinstance(other, _CurveSpline):
return False
else:
return all(iter([self.t == other.t, self.endpoints == other.endpoints]))
class _CurveInterpolator:
"""
A container for data relating to interpolating :class:`~rateslib.curves._CurveNodes`.
"""
_local_name: str
_local_func: InterpolationFunction
_convention: Convention
_spline: _CurveSpline | None
def __init__(
self,
local: str_ | InterpolationFunction,
t: list[datetime] | NoInput,
endpoints: tuple[str, str],
node_dates: list[datetime],
convention: Convention | str,
curve_type: _CurveType,
) -> None:
if not isinstance(t, NoInput) and local == "spline":
raise ValueError(
"When defining 'spline' interpolation, the argument `t` will be "
"automatically generated.\n"
f"It should not be specified directly. Got: {t}"
)
self._convention = _get_convention(convention)
if isinstance(local, NoInput):
local = defaults.interpolation[curve_type.name]
if isinstance(local, str):
self._local_name = local.lower()
if self.local_name == "spline":
# then refactor t
t = (
[node_dates[0], node_dates[0], node_dates[0]]
+ node_dates
+ [node_dates[-1], node_dates[-1], node_dates[-1]]
)
if (self._local_name, self.convention) in INTERPOLATION:
self._local_func = INTERPOLATION[(self.local_name, self.convention)]
else:
try:
self._local_func = INTERPOLATION[(self.local_name, None)]
except KeyError:
raise ValueError(
f"Curve interpolation: '{self.local_name}' not available.\n"
f"Consult the documentation for available methods."
)
else:
self._local_name = "user_defined_callable"
self._local_func = local
if isinstance(t, NoInput):
self._spline = None
else:
self._spline = _CurveSpline(t, endpoints)
@property
def local(self) -> str | InterpolationFunction:
"""The local interpolation name or function, if user defined."""
if self.local_name == "user_defined_callable":
return self.local_func
return self.local_name
@property
def local_name(self) -> str:
"""The str name of the local interpolation function."""
return self._local_name
@property
def local_func(self) -> InterpolationFunction:
"""The callable used for local interpolation"""
return self._local_func
@property
def spline(self) -> _CurveSpline | None:
"""The :class:`~rateslib.curves.utils._CurveSpline` used for PPSpline interpolation."""
return self._spline
@property
def convention(self) -> Convention:
"""The day count convention used to adjust interpolation functions."""
return self._convention
# All calling methods should clear the cache and/or set new state after `_csolve`
def _csolve(self, curve_type: _CurveType, nodes: _CurveNodes, ad: int) -> None:
if self.spline is None:
return None
self.spline._csolve(curve_type, nodes, ad)
def __eq__(self, other: Any) -> bool:
if (
not isinstance(other, _CurveInterpolator)
or self.local_name == "user_defined_callable"
and self.local_func != other.local_func
):
return False
return all(iter([self.local_name == other.local_name, self.spline == other.spline]))
def to_json(self) -> str:
"""
Serialize this object to JSON format.
The object can be deserialized using the :meth:`~rateslib.serialization.from_json` method.
Returns
-------
str
"""
from rateslib.serialization.utils import _obj_to_json
obj = dict(
PyNative=dict(
_CurveInterpolator=dict(
local=self.local_name,
spline=_obj_to_json(self.spline),
convention=_obj_to_json(self.convention),
)
)
)
return json.dumps(obj)
@classmethod
def _from_json(cls, loaded_json: dict[str, Any]) -> _CurveInterpolator:
from rateslib.serialization import from_json
spl = from_json(loaded_json["spline"])
if loaded_json["local"] == "spline":
t = NoInput(0)
node_dates = spl.t[3:-3]
else:
t = NoInput(0) if spl is None else spl.t
node_dates = NoInput(0)
return _CurveInterpolator(
local=loaded_json["local"],
t=t,
endpoints=NoInput(0) if spl is None else spl.endpoints, # type: ignore[arg-type]
node_dates=node_dates,
convention=from_json(loaded_json["convention"]),
curve_type=NoInput(0), # type: ignore[arg-type]
)
@dataclass(frozen=True)
class _ProxyCurveInterpolator:
"""
A container for data relating to interpolating the DFs of a
:class:`~rateslib.curves.ProxyCurve`.
"""
_fx_forwards: FXForwards
_cash: str
_collateral: str
@property
def fx_forwards(self) -> FXForwards:
"""The :class:`~rateslib.fx.FXForwards` object containing :class:`~rateslib.fx.FXRates`
and :class:`~rateslib.curves.Curve` objects."""
return self._fx_forwards
@property
def cash(self) -> str:
"""The currency of the cashflows."""
return self._cash
@property
def collateral(self) -> str:
"""The currency of the collateral assuming PAI."""
return self._collateral
@property
def pair(self) -> str:
"""A pair of currencies representing the cashflow and collateral."""
return self.cash + self.collateral
@property
def cash_index(self) -> int:
"""The index of the cash currency in the :class:`~rateslib.fx.FXForwards` object."""
return self.fx_forwards.currencies[self.cash]
@property
def collateral_index(self) -> int:
"""The index of the collateral currency in the :class:`~rateslib.fx.FXForwards` object."""
return self.fx_forwards.currencies[self.collateral]
@property
def cash_pair(self) -> str:
"""A pair constructed from the cash currency"""
return self.cash + self.cash
@property
def collateral_pair(self) -> str:
"""A pair constructed from the collateral currency"""
return self.collateral + self.collateral
@dataclass(frozen=True)
class _CurveNodes:
"""
An immutable container for the pricing parameters of a :class:`~rateslib.curves._BaseCurve`.
"""
_nodes: dict[datetime, DualTypes]
def __post_init__(self) -> None:
for idx in range(1, self.n):
if self.keys[idx - 1] >= self.keys[idx]:
raise ValueError(
"Curve node dates are not sorted or contain duplicates.\n"
"To sort directly use: `dict(sorted(nodes.items()))`",
)
@property
def nodes(self) -> dict[datetime, DualTypes]:
"""The initial nodes dict passed for construction of this class."""
return self._nodes
@cached_property
def keys(self) -> list[datetime]:
"""A list of datetime keys in ``nodes``."""
return list(self._nodes.keys())
@cached_property
def values(self) -> list[DualTypes]:
"""A list of values in ``nodes``."""
return list(self._nodes.values())
@property
def n(self) -> int:
"""Number of parameters contained in ``nodes``."""
return len(self.keys)
@cached_property
def posix_keys(self) -> list[float]:
"""A list of the ``keys`` converted to unix timestamps."""
return [_.replace(tzinfo=UTC).timestamp() for _ in self.keys]
@property
def initial(self) -> datetime:
"""The first node key associated with the *Curve* nodes."""
return self.keys[0]
@property
def final(self) -> datetime:
"""The last node key associated with the *Curve* nodes."""
return self.keys[-1]
def to_json(self) -> str:
"""
Serialize this object to JSON format.
The object can be deserialized using the :meth:`~rateslib.serialization.from_json` method.
Returns
-------
str
"""
obj = dict(
PyNative=dict(
_CurveNodes=dict(
_nodes={dt.strftime("%Y-%m-%d"): v.real for dt, v in self._nodes.items()},
)
)
)
return json.dumps(obj)
@classmethod
def _from_json(cls, loaded_json: dict[str, Any]) -> _CurveNodes:
return _CurveNodes(
_nodes={datetime.strptime(d, "%Y-%m-%d"): v for d, v in loaded_json["_nodes"].items()}
)
def average_rate(
effective: datetime,
termination: datetime,
convention: Convention | str,
rate: DualTypes,
dcf: float,
) -> tuple[Number, float, float]:
"""
Return the geometric, 1-day, average simple rate for a given simple period rate.
This is used for approximations usually in combination with floating periods.
Parameters
----------
effective : datetime
The effective date of the rate.
termination : datetime
The termination date of the rate.
convention : str
The day count convention of the curve rate.
rate : float, Dual, Dual2
The simple period rate to decompose to average, in percentage terms, e.g. 4.00 = 4% rate.
dcf : float
The day count fraction of the period used to determine daily DCF.
Returns
-------
tuple : The simple rate, the 1-day DCF, and the number of relevant days for the convention
Notes
-----
This method operates in one of two modes to determine the value, :math:`\\bar{r}`.
- Calendar day basis, where :math:`\\tilde{n}` is calendar days in period:
.. math::
1+\\tilde{n}\\bar{d}r = (1 + \\bar{d}\\bar{r})^{\\tilde{n}}
- Business day basis (if ``convention`` is *'bus252'*), where :math:`n` is business days
in period. *n* is approximated by a 252 business days per year rule and does not
calculate the exact number of business days from any specific holiday calendar.
.. math::
1+n\\bar{d}r = (1 + \\bar{d}\\bar{r})^{n}
:math:`\\bar{d}`, the 1-day DCF is estimated from a ``convention``. For certain conventions,
e.g. *'act360'* and *'act365f'* this is explicit and exact, but for others, such as *'30360'*,
this function will likely be lesser used and less accurate.
"""
convention_ = _get_convention(convention)
if convention_ == Convention.Bus252:
# business days are used
n: float = dcf * 252.0
d = 1.0 / 252.0
else: # calendar day mode
n = (termination - effective).days
d = dcf / n
_: Number = ((1 + n * d * rate / 100) ** (1 / n) - 1) / d
return _ * 100, d, n
================================================
FILE: python/rateslib/data/__instrument_spec.csv
================================================
kind,meta,meta,meta,meta,meta,meta,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,base_derivative,fixed,float,float,float,float,fixed,float,float,float,float,float,exchange,exchange,exchange,exchange,exchange,exchange,exchange,exchange,xcs,xcs,xcs,index,index,index,index,index,index,index,index,bond,bond,bond,stir,cds,bondfuture,fx,fx
leg,meta,meta,meta,meta,meta,meta,leg1,leg1,leg1,leg1,leg1,leg1,leg1,leg1,leg1,leg1,leg1,leg1,leg1,leg1,leg1,leg2,leg2,leg2,leg2,leg2,leg2,leg2,leg2,leg2,leg2,leg2,leg2,leg2,leg2,leg2,leg1,leg1,leg1,leg1,leg1,leg2,leg2,leg2,leg2,leg2,leg2,leg1,leg1,leg2,leg2,leg1,leg2,leg1,leg2,leg1,leg2,leg2,leg1,leg1,leg1,leg1,leg2,leg2,leg2,leg2,leg1,leg1,leg1,leg1,leg1,leg1,leg1,leg1
kwarg,currency,instrument,sub_type,bloomberg_ticker,eval,description,effective,termination,frequency,stub,front_stub,back_stub,roll,eom,modifier,calendar,payment_lag,notional,currency,amortization,convention,effective,termination,frequency,stub,front_stub,back_stub,roll,eom,modifier,calendar,payment_lag,notional,currency,amortization,convention,fixed_rate,float_spread,spread_compound_method,rate_fixings,fixing_method,fixed_rate,float_spread,spread_compound_method,rate_fixings,fixing_method,fixing_series,initial_exchange,final_exchange,initial_exchange,final_exchange,fx_fixings,fx_fixings,payment_lag_exchange,payment_lag_exchange,fixed,fixed,mtm,index_method,index_fixings,index_base,index_lag,index_method,index_fixings,index_base,index_lag,settle,ex_div,calc_mode,nominal,premium_accrued,coupon,pair,delivery_lag
dtype,str,str,str,str,str,str,str,str,str,str,str,str,str,boolean,str,str,Int64,float,str,float,str,str,str,str,str,str,str,str,boolean,str,str,Int64,float,str,float,str,float,float,str,str,str,float,float,str,str,str,str,boolean,boolean,boolean,boolean,str,str,Int64,Int64,boolean,boolean,boolean,str,str,float,Int64,str,str,float,Int64,Int64,str,str,float,boolean,float,str,Int64
test,TES,none,none,none,none,A test column,,,m,longfront,,,,FALSE,p,"nyc,tgt,ldn",4,,tes,,yearsmonths,,,m,longback,,,1,FALSE,mp,"nyc,tgt,ldn",3,,,,one,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
eurusd_call,eurusd,fx_call,,,2b,Currency call option,,,,,,,,,mf,tgt|fed,2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,eurusd,2
us_ig_cds,usd,cds,,,,,,,q,shortfront,,,20,FALSE,fex,nyc,0,,usd,,act360,,,,,,,,,,,,,,,,1,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
inr_ndirs,inr,irs,,irswni,1b,NDIRS vs IN000/N Index,,,s,shortfront,,,,FALSE,mf,mum,0,,usd,,act365f,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay,,,,,,,,,,,,,,,,,,,,,,,,,,,usdinr,
inrusd_ndxcs,inrusd_ndxcs,ndxcs,,IRUSON,2b,NDXCS Fixed/Float vs SOFR,,,s,shortfront,,,,FALSE,mf,mum|fed,2,,usd,,act365f,,,,,,,,,,,,,,,act360,,,,,,,,none_simple,,rfr_payment_delay,,,,,,,,,,TRUE,,,,,,,,,,,,,,,,,usdinr,
mxn_irs,mxn,irs,,MPSWF,2b,F-TIIE OIS,,,28d,shortfront,,,,FALSE,f,mex,2,,mxn,,act360,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay,,,,,,,,,,,,,,,,,,,,,,,,,,,,
usd_irs,usd,irs,,usosfr,2b,SOFR IRS conventions,,,a,shortfront,,,,FALSE,mf,nyc,2,,usd,,act360,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay,,,,,,,,,,,,,,,,,,,,,,,,,,,,
usd_irs_lt_2y,usd,irs,,usosfr,2b,SOFR IRS conventions,,,a,shortfront,,,,TRUE,mf,nyc,2,,usd,,act360,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay,,,,,,,,,,,,,,,,,,,,,,,,,,,,
gbp_irs,gbp,irs,,bpsws,0b,SONIA IRS conventions,,,a,shortfront,,,,TRUE,mf,ldn,0,,gbp,,act365f,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay,,,,,,,,,,,,,,,,,,,,,,,,,,,,
eur_irs,eur,irs,,eeswe,2b,ESTR IRS conventions,,,a,shortfront,,,,FALSE,mf,tgt,1,,eur,,act360,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay,,,,,,,,,,,,,,,,,,,,,,,,,,,,
sek_irs,sek,irs,,sksws,2b,SWESTR IRS conventions,,,a,shortfront,,,,FALSE,mf,stk,1,,sek,,act360,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay,,,,,,,,,,,,,,,,,,,,,,,,,,,,
nok_irs,nok,irs,,nks,2b,,,,a,shortfront,,,,FALSE,mf,osl,2,,nok,,act365f,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay,,,,,,,,,,,,,,,,,,,,,,,,,,,,
chf_irs,chf,irs,,sfsnt,2b,,,,a,shortfront,,,,FALSE,mf,zur,2,,chf,,act360,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay,,,,,,,,,,,,,,,,,,,,,,,,,,,,
cad_irs,cad,irs,,cdso,1b,,,,s,shortfront,,,,FALSE,mf,tro,1,,cad,,act365f,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay,,,,,,,,,,,,,,,,,,,,,,,,,,,,
cad_irs_le_1y,cad,irs,,cdso,1b,,,,a,shortfront,,,,TRUE,mf,tro,1,,cad,,act365f,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay,,,,,,,,,,,,,,,,,,,,,,,,,,,,
jpy_irs,jpy,irs,,jyso,2b,,,,a,shortfront,,,,TRUE,mf,tyo,2,,jpy,,act365f,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay,,,,,,,,,,,,,,,,,,,,,,,,,,,,
nzd_irs3,nzd,irs,,ndswap,2b,,,,s,shortfront,,,,TRUE,mf,wlg,0,,nzd,,act365f,,,q,,,,,,,,,,,,,,,,,,,,none_simple,,ibor(0),,,,,,,,,,,,,,,,,,,,,,,,,,,,
nzd_irs6,nzd,irs,,,2b,,,,s,shortfront,,,,TRUE,mf,wlg,0,,nzd,,act365f,,,,,,,,,,,,,,,,,,,,,,,none_simple,,ibor(0),,,,,,,,,,,,,,,,,,,,,,,,,,,,
nzd_irs,nzd,irs,,,2b,,,,a,shortfront,,,,TRUE,mf,wlg,2,,nzd,,act365f,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay,,,,,,,,,,,,,,,,,,,,,,,,,,,,
aud_irs6,aud,irs,,adsw,1b,,,,s,shortfront,,,,TRUE,mf,syd,0,,aud,,act365f,,,,,,,,,,,,,,,,,,,,,,,none_simple,,ibor(0),,,,,,,,,,,,,,,,,,,,,,,,,,,,
aud_irs3,aud,irs,,adsw_q,1b,,,,q,shortfront,,,,TRUE,mf,syd,0,,aud,,act365f,,,,,,,,,,,,,,,,,,,,,,,none_simple,,ibor(0),,,,,,,,,,,,,,,,,,,,,,,,,,,,
aud_irs3_gt_3y,aud,irs,,,2b,,,,s,shortfront,,,,TRUE,mf,syd,0,,aud,,act365f,,,q,,,,,,,,,,,,,,,,,,,,none_simple,,ibor(0),,,,,,,,,,,,,,,,,,,,,,,,,,,,
aud_irs,aud,irs,,adso,1b,,,,a,shortfront,,,,TRUE,mf,syd,2,,aud,,act365f,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay,,,,,,,,,,,,,,,,,,,,,,,,,,,,
eur_irs6,eur,irs,,eusa,2b,,,,a,shortfront,,,,FALSE,mf,tgt,0,,eur,,30e360,,,s,,,,,,,,,,,,act360,,,,,,,,none_simple,,ibor(2),,,,,,,,,,,,,,,,,,,,,,,,,,,,
eur_irs3,eur,irs,,eusw_v3,2b,,,,a,shortfront,,,,FALSE,mf,tgt,0,,eur,,30e360,,,q,,,,,,,,,,,,act360,,,,,,,,none_simple,,ibor(2),,,,,,,,,,,,,,,,,,,,,,,,,,,,
eur_irs1,eur,irs,,,2b,,,,a,shortfront,,,,FALSE,mf,tgt,0,,eur,,30e360,,,m,,,,,,,,,,,,act360,,,,,,,,none_simple,,ibor(2),,,,,,,,,,,,,,,,,,,,,,,,,,,,
sek_irs3,sek,irs,,sksw,2b,,,,a,shortfront,,,,FALSE,mf,stk,0,,sek,,30e360,,,q,,,,,,,,,,,,act360,,,,,,,,none_simple,,ibor(2),,,,,,,,,,,,,,,,,,,,,,,,,,,,
nok_irs3,nok,irs,,nksw_v3,2b,,,,a,shortfront,,,,FALSE,mf,osl,0,,nok,,30e360,,,q,,,,,,,,,,,,act360,,,,,,,,none_simple,,ibor(2),,,,,,,,,,,,,,,,,,,,,,,,,,,,
nok_irs6,nok,irs,,nksw,2b,,,,a,shortfront,,,,FALSE,mf,osl,0,,nok,,30e360,,,s,,,,,,,,,,,,act360,,,,,,,,none_simple,,ibor(2),,,,,,,,,,,,,,,,,,,,,,,,,,,,
eurusd_xcs,eur/usd,xcs,,euxoqq,2b,,,,q,shortfront,,,,FALSE,mf,"tgt,nyc",2,,eur,,act360,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay,,,none_simple,,rfr_payment_delay,,,,,,,,0,,FALSE,FALSE,TRUE,,,,,,,,,,,,,,,eurusd,
gbpusd_xcs,gbp/usd,xcs,,bpxoqq,2b,,,,q,shortfront,,,,FALSE,mf,"ldn,nyc",2,,gbp,,act365f,,,,,,,,,,,,,,,act360,,,none_simple,,rfr_payment_delay,,,none_simple,,rfr_payment_delay,,,,,,,,0,,FALSE,FALSE,TRUE,,,,,,,,,,,,,,,gbpusd,
eurgbp_xcs,eur/gbp,xcs,,ebxoqq,2b,,,,q,shortfront,,,,FALSE,mf,"tgt,ldn",2,,eur,,act360,,,,,,,,,,,,,,,act365f,,,none_simple,,rfr_payment_delay,,,none_simple,,rfr_payment_delay,,,,,,,,0,,FALSE,FALSE,TRUE,,,,,,,,,,,,,,,eurgbp,
gbpeur_xcs,gbp/eur,xcs,,ebxoqq,2b,,,,q,shortfront,,,,FALSE,mf,"tgt,ldn",2,,gbp,,act365f,,,,,,,,,,,,,,,act360,,,none_simple,,rfr_payment_delay,,,none_simple,,rfr_payment_delay,,,,,,,,0,,FALSE,FALSE,TRUE,,,,,,,,,,,,,,,eurgbp,
jpyusd_xcs,jpy/usd,xcs,,jybss,2b,,,,q,shortfront,,,,FALSE,mf,"nyc,tyo",2,,jpy,,act365f,,,,,,,,,,,,,,,act360,,,none_simple,,rfr_payment_delay,,,none_simple,,rfr_payment_delay,,,,,,,,0,,FALSE,FALSE,TRUE,,,,,,,,,,,,,,,usdjpy,
audusd_xcs3,aud/usd,xcs,,,,,,,q,shortfront,,,,FALSE,mf,"nyc,syd",2,,aud,,act365f,,,,,,,,,,,,,,,act360,,,none_simple,,ibor(0),,,none_simple,,rfr_payment_delay,,,,,,,,0,,FALSE,FALSE,TRUE,,,,,,,,,,,,,,,audusd,
audusd_xcs,aud/usd,xcs,,,,,,,q,shortfront,,,,FALSE,mf,"nyc,syd",2,,aud,,act365f,,,,,,,,,,,,,,,act360,,,none_simple,,rfr_payment_delay,,,none_simple,,rfr_payment_delay,,,,,,,,0,,FALSE,FALSE,TRUE,,,,,,,,,,,,,,,audusd,
nzdusd_xcs3,nzd/usd,xcs,,,,,,,q,shortfront,,,,FALSE,mf,"nyc,wlg",2,,nzd,,act365f,,,,,,,,,,,,,,,act360,,,none_simple,,ibor(0),,,none_simple,,rfr_payment_delay,,,,,,,,0,,FALSE,FALSE,TRUE,,,,,,,,,,,,,,,nzdusd,
nzdaud_xcs3,nzd/aud,xcs,,,,,,,q,shortfront,,,,FALSE,mf,"nyc,wlg,syd",2,,nzd,,act365f,,,,,,,,,,,,,,,act365f,,,none_simple,,ibor(0),,,none_simple,,ibor(0),,,,,,,,0,,FALSE,FALSE,TRUE,,,,,,,,,,,,,,,audnzd,
eur_zcis,eur,zcis,,euswi,2b,,,,a,shortfront,,,,FALSE,mf,tgt,0,,eur,,1+,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,monthly,,,3,,,,,,,,
gbp_zcis,gbp,zcis,,bpswit,0b,,,,a,shortfront,,,,FALSE,mf,ldn,0,,gbp,,1+,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,monthly,,,2,,,,,,,,
usd_zcis,usd,zcis,,usswit,2b,,,,a,shortfront,,,,FALSE,mf,nyc,0,,usd,,1+,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,daily,,,3,,,,,,,,
gbp_zcs,gbp,zcs,,,0b,,,,a,shortfront,,,,TRUE,mf,ldn,0,,gbp,,act365f,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay,,,,,,,,,,,,,,,,,,,,,,,,,,,,
sek_iirs,sek_iirs,iirs,,,2b,,,,a,shortfront,,,,FALSE,none,stk,0,,sek,,actacticma,,,q,,,,,,,,,,,,act360,,,,,,,,none_simple,,ibor(2),,,,,,,,,,,,,daily,,,3,,,,,,,,,,,,
eur_sbs36,eur,sbs,,,2b,,,,q,shortfront,,,,FALSE,mf,tgt,0,,eur,,act360,,,s,,,,,,,,,,,,,,,none_simple,,ibor(2),,,none_simple,,ibor(2),,,,,,,,,,,,,,,,,,,,,,,,,,,,
nok_sbs36,nok,sbs,,,2b,,,,q,shortfront,,,,FALSE,mf,osl,0,,nok,,act360,,,s,,,,,,,,,,,,,,,none_simple,,ibor(2),,,none_simple,,ibor(2),,,,,,,,,,,,,,,,,,,,,,,,,,,,
aud_sbs36,aud,sbs,,,2b,,,,q,shortfront,,,,FALSE,mf,syd,0,,aud,,act365f,,,s,,,,,,,,,,,,,,,none_simple,,ibor(0),,,none_simple,,ibor(0),,,,,,,,,,,,,,,,,,,,,,,,,,,,
aud_sbs31,aud,sbs,,,2b,,,,q,shortfront,,,,FALSE,mf,syd,0,,aud,,act365f,,,m,,,,,,,,,,,,,,,none_simple,,ibor(0),,,none_simple,,ibor(0),,,,,,,,,,,,,,,,,,,,,,,,,,,,
nzd_sbs36,nzd,sbs,,,2b,,,,q,shortfront,,,,FALSE,mf,wlg,0,,nzd,,act365f,,,s,,,,,,,,,,,,,,,none_simple,,ibor(0),,,none_simple,,ibor(0),,,,,,,,,,,,,,,,,,,,,,,,,,,,
nzd_sbs31,nzd,sbs,,,2b,,,,q,shortfront,,,,FALSE,mf,wlg,0,,nzd,,act365f,,,m,,,,,,,,,,,,,,,none_simple,,ibor(0),,,none_simple,,ibor(0),,,,,,,,,,,,,,,,,,,,,,,,,,,,
us_gb,usd,frb,,,,,,,s,shortfront,,,,TRUE,none,nyc,0,,usd,,actacticma,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,1,-1b,us_gb,,,,,
us_gbi,usd,ifrb,,,,,,,s,shortfront,,,,TRUE,none,nyc,0,,usd,,actacticma,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,daily,,,3,,,,,1,-1b,us_gb,,,,,
us_corp,usd,frb,,,,,,,s,shortfront,,,,TRUE,none,nyc,0,,usd,,30u360,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,1,-1b,us_corp,,,,,
us_muni,usd,frb,,,,,,,s,shortfront,,,,TRUE,none,nyc,0,,usd,,30u360,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,1,-1b,us_muni,,,,,
us_gb_tsy,usd,frb,,,,,,,s,shortfront,,,,TRUE,none,nyc,0,,usd,,actacticma,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,1,-1b,us_gb_tsy,,,,,
uk_gb,gbp,frb,,,,,,,s,longfront,,,,FALSE,none,ldn,0,,gbp,,actacticma,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,1,-7b,uk_gb,,,,,
au_gb,aud,frb,,,,,,,s,longfront,,,,FALSE,none,syd,0,,aud,,actacticma,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,2,-8d,au_gb,,,,,
nz_gb,nzd,frb,,,,,,,s,shortfront,,,,FALSE,none,wlg,0,,nzd,,actacticma,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,1,-8b,nz_gb,,,,,
cn_gb,cny,frb,,,,,,,s,shortfront,,,,FALSE,none,bjs,0,,cny,,actacticma,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,1,-1b,cn_gb,,,,,
de_gb,eur,frb,,,,,,,a,longfront,,,,FALSE,none,tgt,0,,eur,,actacticma,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,2,-1b,de_gb,,,,,
fr_gb,eur,frb,,,,,,,a,shortfront,,,,FALSE,none,tgt,0,,eur,,actacticma,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,2,-1b,fr_gb,,,,,
nl_gb,eur,frb,,,,,,,a,shortfront,,,,FALSE,none,tgt,0,,eur,,actacticma,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,2,-1b,nl_gb,,,,,
it_gb,eur,frb,,,,,,,s,shortfront,,,,FALSE,none,tgt,0,,eur,,actacticma,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,2,-1b,it_gb,,,,,
ch_gb,chf,frb,,,,,,,a,shortfront,,,,FALSE,none,zur,0,,chf,,30e360,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,1,-1b,ch_gb,,,,,
se_gb,sek,frb,,,,,,,a,shortfront,,,,FALSE,none,stk,0,,sek,,actacticma,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,2,-5b,se_gb,,,,,
no_gb,nok,frb,,,,,,,a,shortfront,,,,FALSE,none,osl,0,,nok,,actacticma_stub365f,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,1,-1b,no_gb,,,,,
ca_gb,cad,frb,,,,,,,s,shortfront,,,,FALSE,none,tro,0,,cad,,actacticma_stub365f,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,1,-1b,ca_gb,,,,,
ca_gbi,cad,ifrb,,,,,,,s,shortfront,,,,FALSE,none,tro,0,,cad,,actacticma_stub365f,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,daily,,,3,,,,,1,-1b,ca_gb,,,,,
us_gbb,usd,bill,,,,,,,,,,,,TRUE,none,nyc,0,,usd,,act360,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,1,0b,us_gbb,,,,,
se_gbb,sek,bill,,,,,,,,,,,,FALSE,none,stk,0,,sek,,act360,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,2,0b,se_gbb,,,,,
no_gbb,nok,bill,,,,,,,,,,,,FALSE,none,osl,0,,nok,,act365f,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,2,0b,no_gbb,,,,,
uk_gbb,gbp,bill,,,,,,,,,,,,TRUE,none,ldn,0,,gbp,,act365f,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,,,,,,,,,1,0b,uk_gbb,,,,,
uk_gbi,gbp,ifrb,,,,,,,s,shortfront,,,,FALSE,none,ldn,0,,gbp,,actacticma,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,0,,,,,daily,,,3,,,,,1,-7b,uk_gb,,,,,
sek_fra3,sek,fra,,,,,,3m,q,,,,,FALSE,mf,stk,0,,sek,,act360,,,,,,,,,,,,,,,,,,,,,,,none_simple,,ibor(2),,,,,,,,,,,,,,,,,,,,,,,,,,,,
eur_fra3,eur,fra,,,,,,3m,q,,,,,FALSE,mf,tgt,0,,eur,,act360,,,,,,,,,,,,,,,,,,,,,,,none_simple,,ibor(2),,,,,,,,,,,,,,,,,,,,,,,,,,,,
eur_fra6,eur,fra,,,,,,6m,s,,,,,FALSE,mf,tgt,0,,eur,,act360,,,,,,,,,,,,,,,,,,,,,,,none_simple,,ibor(2),,,,,,,,,,,,,,,,,,,,,,,,,,,,
eur_fra1,eur,fra,,,,,,1m,m,,,,,FALSE,mf,tgt,0,,eur,,act360,,,,,,,,,,,,,,,,,,,,,,,none_simple,,ibor(2),,,,,,,,,,,,,,,,,,,,,,,,,,,,
nok_fra3,nok,fra,,,,,,3m,q,,,,,FALSE,mf,osl,0,,nok,,act360,,,,,,,,,,,,,,,,,,,,,,,none_simple,,ibor(2),,,,,,,,,,,,,,,,,,,,,,,,,,,,
nok_fra6,nok,fra,,,,,,6m,s,,,,,FALSE,mf,osl,0,,nok,,act360,,,,,,,,,,,,,,,,,,,,,,,none_simple,,ibor(2),,,,,,,,,,,,,,,,,,,,,,,,,,,,
usd_frn5,usd,frn,,,,,,,q,,,,,FALSE,mf,nyc,0,,usd,,act360,,,,,,,,,,,,,,,,,,none_simple,,rfr_observation_shift(5),,,,,,,,,,,,,,,,,,,,,,,,,,1,1b,,,,,,
usd_stir,usd,stir,,SFR,3m SOFR Futures convention,,,,q,,,,imm,FALSE,mf,nyc,0,,usd,,actacticma,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay,usd_rfr,,,,,,,,,,,,,,,,,,,,,,,1000000,,,,
usd_stir1,usd,stir,,SF1,1m Avergaed SOFR Futures,,,,m,,,,som,FALSE,mf,nyc,0,,usd,,actacticma,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay_avg,usd_rfr,,,,,,,,,,,,,,,,,,,,,,,5000400,,,,
eur_stir,eur,stir,,KTR,3m ESTR Futures,,,,q,,,,imm,FALSE,mf,tgt,0,,eur,,actacticma,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay,eur_rfr,,,,,,,,,,,,,,,,,,,,,,,1000000,,,,
eur_stir1,eur,stir,,,1m Averaged ESTR futures,,,,m,,,,som,FALSE,mf,tgt,0,,eur,,actacticma,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay_avg,eur_rfr,,,,,,,,,,,,,,,,,,,,,,,3000000,,,,
eur_stir3,eur,stir,,ER,Euribor 3m Futures,,,,q,,,,imm,FALSE,mf,tgt,0,,eur,,actacticma,,,,,,,,,,,,,,,,,,,,,,,none_simple,,ibor(2),eur_ibor,,,,,,,,,,,,,,,,,,,,,,,1000000,,,,
gbp_stir,gbp,stir,,SFI,SONIA 3m Futures,,,,q,,,,imm,FALSE,mf,ldn,0,,gbp,,actacticma,,,,,,,,,,,,,,,,,,,,,,,none_simple,,rfr_payment_delay,gbp_rfr,,,,,,,,,,,,,,,,,,,,,,,1000000,,,,
uk_gb_2y,gbp,bf,,G,Gilt future,,,,,,,,,,,ldn,,,gbp,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,ice_gbp,100000,,3,,
uk_gb_5y,gbp,bf,,G,Gilt future,,,,,,,,,,,ldn,,,gbp,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,ice_gbp,100000,,4,,
uk_gb_10y,gbp,bf,,G,Gilt future,,,,,,,,,,,ldn,,,gbp,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,ice_gbp,100000,,4,,
uk_gb_30y,gbp,bf,,G,Gilt future,,,,,,,,,,,ldn,,,gbp,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,ice_gbp,100000,,4,,
us_gb_2y,usd,bf,,US,US treasury futures,,,,,,,,,,,fed,,,usd,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,ust_short,200000,,6,,
us_gb_3y,usd,bf,,US,US treasury futures,,,,,,,,,,,fed,,,usd,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,ust_short,200000,,6,,
us_gb_5y,usd,bf,,US,US treasury futures,,,,,,,,,,,fed,,,usd,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,ust_short,100000,,6,,
us_gb_10y,usd,bf,,US,US treasury futures,,,,,,,,,,,fed,,,usd,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,ust_long,100000,,6,,
us_gb_30y,usd,bf,,US,US treasury futures,,,,,,,,,,,fed,,,usd,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,ust_long,100000,,6,,
de_gb_2y,eur,bf,,DE,Eurex Germany Futures,,,,,,,,,,,tgt,,,eur,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,eurex_eur,100000,,6,,
de_gb_5y,eur,bf,,DE,Eurex Germany Futures,,,,,,,,,,,tgt,,,eur,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,eurex_eur,100000,,6,,
de_gb_10y,eur,bf,,DE,Eurex Germany Futures,,,,,,,,,,,tgt,,,eur,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,eurex_eur,100000,,6,,
de_gb_30y,eur,bf,,DE,Eurex Germany Futures,,,,,,,,,,,tgt,,,eur,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,eurex_eur,100000,,4,,
fr_gb_5y,eur,bf,,FR,Eurex France Futures,,,,,,,,,,,tgt,,,eur,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,eurex_eur,100000,,6,,
fr_gb_10y,eur,bf,,FR,Eurex France Futures,,,,,,,,,,,tgt,,,eur,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,eurex_eur,100000,,6,,
sp_gb_10y,eur,bf,,SP,Eurex Spain Futures,,,,,,,,,,,tgt,,,eur,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,eurex_eur,100000,,6,,
ch_gb_10y,chf,bf,,CH,Eurex CHF Futures,,,,,,,,,,,zur,,,chf,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,eurex_chf,100000,,6,,
================================================
FILE: python/rateslib/data/fixings.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import warnings
from abc import ABCMeta, abstractmethod
from datetime import datetime, timedelta
from enum import Enum
from functools import cached_property
from math import prod
from typing import TYPE_CHECKING
import numpy as np
import rateslib.errors as err
from pandas import Series, isna
from rateslib import defaults, fixings
from rateslib.curves.curves import _BaseCurve, _index_value_from_series_no_curve
from rateslib.curves.interpolation import index_left
from rateslib.curves.utils import _CurveType
from rateslib.data.loader import FixingMissingForecasterError, FixingRangeError
from rateslib.dual import Dual, Dual2, Variable
from rateslib.enums.generics import Err, NoInput, Ok, _drb, _validate_obj_not_no_input
from rateslib.enums.parameters import (
FloatFixingMethod,
SpreadCompoundMethod,
SwaptionSettlementMethod,
_get_float_fixing_method,
_get_index_method,
_get_spread_compound_method,
_get_swaption_settlement_method,
)
from rateslib.rs import Adjuster
from rateslib.scheduling.adjuster import _get_adjuster
from rateslib.scheduling.calendars import get_calendar
from rateslib.scheduling.convention import Convention, _get_convention
from rateslib.scheduling.dcfs import dcf
from rateslib.scheduling.frequency import _get_frequency, _get_tenor_from_frequency, add_tenor
from rateslib.scheduling.schedule import Schedule, _get_stub_inference
from rateslib.utils.calendars import _get_first_bus_day
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
IRS,
Any,
Arr1dF64,
Arr1dObj,
Cal,
CalInput,
CalTypes,
CurveOption_,
CurvesT_,
DualTypes,
DualTypes_,
Frequency,
FXForwards,
FXForwards_,
FXIndex_,
IndexMethod,
LegFixings,
NamedCal,
PeriodFixings,
Result,
StubInference,
UnionCal,
_BaseCurve_,
bool_,
datetime_,
int_,
str_,
)
class _BaseFixing(metaclass=ABCMeta):
"""
Abstract base class for core financial fixing implementation.
Parameters
----------
date: datetime
The date of relevance for the financial fixing, e.g. the publication date for an
*IBORFixing* or the reference date for an *IndexFixing*.
value: float, Dual, Dual2, Variable, optional
The initial value for the fixing to adopt. Most commonly this is not given and it is
determined from a timeseries.
identifier: str, optional
The string name of the timeseries to be loaded by the *Fixings* object.
"""
_identifier: str_
_value: DualTypes_
_state: int
_date: datetime
def __init__(
self,
*,
date: datetime,
value: DualTypes_ = NoInput(0),
identifier: str_ = NoInput(0),
) -> None:
self._identifier = identifier if isinstance(identifier, NoInput) else identifier.upper()
self._value = value
self._state = 0
self._date = date
def reset(self, state: int_ = NoInput(0)) -> None:
"""
Sets the ``value`` attribute to :class:`~rateslib.enums.generics.NoInput`, which allows it
to be redetermined from a timeseries.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import fixings, dt, NoInput, FXFixing
from pandas import Series
.. ipython:: python
fx_fixing1 = FXFixing(publication=dt(2021, 1, 1), fx_index="eurusd", identifier="A")
fx_fixing2 = FXFixing(publication=dt(2021, 1, 1), fx_index="gbpusd", identifier="B")
fixings.add("A_eurusd", Series(index=[dt(2021, 1, 1)], data=[1.1]), state=100)
fixings.add("B_gbpusd", Series(index=[dt(2021, 1, 1)], data=[1.4]), state=200)
# data is populated from the available Series
fx_fixing1.value
fx_fixing2.value
# fixings are reset according to the data state
fx_fixing1.reset(state=100)
fx_fixing2.reset(state=100)
# only the private data for fixing1 is removed because of its link to the data state
fx_fixing1._value
fx_fixing2._value
.. role:: green
Parameters
----------
state: int, :green:`optional`
If given only fixings whose state matches this value will be reset. If no state is
given then the value will be reset.
Returns
-------
None
"""
if isinstance(state, NoInput) or self._state == state:
self._value = NoInput(0)
self._state = 0
@property
def value(self) -> DualTypes_:
"""
The fixing value.
If this value is :class:`rateslib.enums.generics.NoInput`, then each request will attempt a
lookup from a timeseries to obtain a new fixing value.
Once this value is determined it is restated indefinitely, unless :meth:`_BaseFixing.reset`
is called.
"""
if not isinstance(self._value, NoInput):
return self._value
else:
if isinstance(self._identifier, NoInput):
return NoInput(0)
else:
state, timeseries, bounds = fixings.__getitem__(self._identifier)
if state == self._state:
return NoInput(0)
else:
self._state = state
v = self._lookup_and_calculate(timeseries, bounds)
self._value = v
return v
@property
def date(self) -> datetime:
"""The date of relevance for the fixing, e.g. the publication date of an IBORFixing."""
return self._date
@property
def identifier(self) -> str_:
"""The string name of the timeseries to be loaded by the *Fixings* object."""
return self._identifier
@abstractmethod
def _lookup_and_calculate(
self,
timeseries: Series[DualTypes], # type: ignore[type-var]
bounds: tuple[datetime, datetime] | None,
) -> DualTypes_:
pass
def __repr__(self) -> str:
return f""
class IndexFixing(_BaseFixing):
"""
An index fixing value for settlement of indexed cashflows.
Parameters
----------
index_lag: int
The number months by which the reference date is lagged to derive an index value.
index_method: IndexMethod
The method used for calculating the index value. See
:class:`~rateslib.enums.parameters.IndexMethod`.
date: datetime
The date of relevance for the index fixing, which is its **reference value** date.
value: float, Dual, Dual2, Variable, optional
The initial value for the fixing to adopt. Most commonly this is not given and it is
determined from a timeseries of published FX rates.
identifier: str, optional
The string name of the timeseries to be loaded by the *Fixings* object.
Examples
--------
.. ipython:: python
:suppress:
from rateslib.data.fixings import IndexFixing
from rateslib.enums.parameters import IndexMethod
from rateslib import fixings, dt
from pandas import Series
.. ipython:: python
fixings.add("UK-CPI", Series(index=[dt(2000, 1, 1), dt(2000, 2, 1)], data=[100, 110.0]))
index_fix = IndexFixing(date=dt(2000, 4, 15), identifier="UK-CPI", index_lag=3, index_method=IndexMethod.Daily)
index_fix.value
.. ipython:: python
:suppress:
fixings.pop("UK-CPI")
""" # noqa: E501
_index_lag: int
_index_method: IndexMethod
def __init__(
self,
*,
index_lag: int,
index_method: IndexMethod | str,
date: datetime,
value: DualTypes_ = NoInput(0),
identifier: str_ = NoInput(0),
) -> None:
super().__init__(date=date, value=value, identifier=identifier)
self._index_lag = index_lag
self._index_method = _get_index_method(index_method)
@property
def index_method(self) -> IndexMethod:
"""The :class:`~rateslib.enums.parameters.IndexMethod` used for calculating the
index value."""
return self._index_method
@property
def index_lag(self) -> int:
"""The number months by which the reference date is lagged to derive an index value."""
return self._index_lag
def _lookup_and_calculate(
self,
timeseries: Series[DualTypes], # type: ignore[type-var]
bounds: tuple[datetime, datetime] | None,
) -> DualTypes_:
return self._lookup(
index_lag=self.index_lag,
index_method=self.index_method,
date=self.date,
timeseries=timeseries,
bounds=bounds,
)
@classmethod
def _lookup(
cls,
index_lag: int,
index_method: IndexMethod,
timeseries: Series[DualTypes], # type: ignore[type-var]
date: datetime,
bounds: tuple[datetime, datetime] | None = None,
) -> DualTypes_:
result = _index_value_from_series_no_curve(
index_lag=index_lag,
index_method=index_method,
index_fixings=timeseries,
index_date=date,
index_fixings_boundary=bounds,
)
if isinstance(result, Err):
if isinstance(result._exception, FixingRangeError):
return NoInput(0)
result.unwrap()
else:
return result.unwrap()
class FXIndex:
"""
Define the parameters of a specific FX pair and fixing index.
This object acts as a container to store market conventions for different FX pairs.
This allows the determination of dates under different methodologies, e.g. ISDA MTM fixings or
spot settlement dates.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.data.fixings import FXIndex
.. ipython:: python
fxi = FXIndex(
pair="eurusd",
calendar="tgt|fed", # <- Spot FX measures settlement dates according to this calendar
settle=2,
isda_mtm_calendar="ldn", # <- MTM XCS FX fixing dates are determined according to this calendar
isda_mtm_settle=-2,
)
fxi.delivery(dt(2025, 7, 3))
fxi.isda_fixing_date(dt(2025, 7, 3))
.. role:: red
.. role:: green
Parameters
----------
pair: str, :red:`required`
The currency pair of the FX fixing. 6-digit iso code.
calendar: Calendar, str, :red:`required`
The calendar associated with the FX settlement date determination.
settle: Adjuster, int, str :green:`optional (set by 'defaults')`
The delivery lag applied to any FX quotation to adjust 'today' to a delivery date, under
the given ``calendar``. If int is assumed to be settleable business days.
isda_mtm_calendar: Calendar, str, :green:`optional`
The calendar associated with the MTM fixing date determination.
isda_mtm_settle: Adjuster, str, int, :green:`optional`,
The adjustment applied to determine the MTM fixing date.
allow_cross: bool, :green:`optional (set as True)`
This allows sub-division of the fixing into its *majors* as defined by WMR
benchmark methodology. For an example of using a *cross* see the documentation for
an :class:`FXFixing`.
""" # noqa: E501
def __init__(
self,
pair: str,
calendar: CalTypes | str,
settle: Adjuster | str | int,
isda_mtm_calendar: CalInput = NoInput(0),
isda_mtm_settle: Adjuster | str | int_ = NoInput(0),
allow_cross: bool_ = NoInput(0),
) -> None:
self._pair: str = pair.lower()
self._calendar: CalTypes = get_calendar(calendar)
self._settle: Adjuster = _get_adjuster(settle)
self._allow_cross: bool = _drb(True, allow_cross)
if isinstance(isda_mtm_calendar, NoInput):
self._isda_mtm_calendar: CalTypes | NoInput = NoInput(0)
else:
self._isda_mtm_calendar = get_calendar(isda_mtm_calendar)
if isinstance(isda_mtm_settle, NoInput):
self._isda_mtm_settle: Adjuster | NoInput = NoInput(0)
else:
self._isda_mtm_settle = _get_adjuster(isda_mtm_settle)
def __repr__(self) -> str:
return f""
@property
def pair(self) -> str:
"""The currency pair of the FX fixing."""
return self._pair
@property
def calendar(self) -> CalTypes:
"""The calendar associated with the settlement delivery date determination."""
return self._calendar
@property
def settle(self) -> Adjuster:
"""
The :class:`~rateslib.scheduling.Adjuster` associated with determining
the settlement delivery date.
"""
return self._settle
@property
def isda_mtm_calendar(self) -> CalTypes | NoInput:
"""The calendar associated with the MTM fixing date determination."""
return self._isda_mtm_calendar
@property
def isda_mtm_settle(self) -> Adjuster | NoInput:
"""
The :class:`~rateslib.scheduling.Adjuster` associated with the MTM fixing
date determination.
"""
return self._isda_mtm_settle
def isda_fixing_date(self, delivery: datetime) -> datetime:
"""
Return the MTM FX fixing date under ISDA conventions.
Parameters
----------
delivery: datetime
The delivery date of the notional exchange.
Returns
-------
datetime
Notes
-----
If ``isda`` attributes are not fully qualified on the object then uses the ``reverse``
method to reverse engineer the FX quotation date as a proxy.
"""
if isinstance(self.isda_mtm_calendar, NoInput) or isinstance(self.isda_mtm_settle, NoInput):
# Fallback method for determining fixing date when ISDA fixing details not available.
# This may be due to instruments that only use for non-deliverability as a feature
# but do not technically have a published fixing, i.e. a physically settled
# FXForward or an FXOption.
# In these cases do the best to estimate a respectable date.
alternatives: list[datetime] = []
counter: int = 0
while len(alternatives) == 0:
alternatives = self.publications(delivery + timedelta(days=counter))
counter += 1
return _get_first_bus_day(alternatives, self.calendar)
else:
return self.isda_mtm_settle.adjust(delivery, self.isda_mtm_calendar)
def delivery(self, date: datetime) -> datetime:
"""
Return the settlement delivery date associated with the publication date.
Parameters
----------
date: datetime
The publication date of the quotation.
Returns
-------
datetime
"""
return self.settle.adjust(date, self.calendar)
def publications(self, delivery: datetime) -> list[datetime]:
"""
Return the potential publication dates that result in a given settlement delivery date.
Parameters
----------
delivery: datetime
The settlement delivery date of the publication.
Returns
-------
list[datetime]
"""
return self.settle.reverse(delivery, self.calendar)
@property
def allow_cross(self) -> bool:
"""Whether to allow FXFixings which sub-divide into USD or EUR crosses."""
return self._allow_cross
class _FXFixingMajor(_BaseFixing):
"""
An FX fixing value for cross currency settlement.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.data.fixings import _FXFixingMajor, FXIndex
from rateslib import fixings, dt
from pandas import Series
.. ipython:: python
fixings.add("Custom_CADSEK", Series(index=[dt(1999, 12, 29)], data=[8.7]))
fxfix = _FXFixingMajor(
delivery=dt(2000, 1, 4),
fx_index=FXIndex(
pair="cadsek",
calendar="tro,stk|fed",
settle=2,
isda_mtm_calendar="tro,stk,ldn,nyc",
isda_mtm_settle=-2,
),
identifier="Custom"
)
fxfix.publication # <-- derived from isda attributes
fxfix.value # <-- should be 8.7
.. ipython:: python
:suppress:
fixings.pop("Custom_CADSEK")
.. role:: red
.. role:: green
Parameters
----------
fx_index: FXIndex, str, :red:`required`
The :class:`~rateslib.data.fixings.FXIndex` defining the FX pair and its conventions.
publication: datetime, :green:`optional`
The publication date of the fixing. If not given, must provide ``delivery`` in order to
derive the *publication date*.
delivery: datetime, :green:`optional`
The settlement delivery date of the cashflow. Can be used to derive the *publication date*.
If not given is derived from the ``publication``.
value: float, Dual, Dual2, Variable, optional
The initial value for the fixing to adopt. Most commonly this is not given and it is
determined from a timeseries of published FX rates.
identifier: str, optional
The string name of the series to be loaded by the *Fixings* object. Will be
appended with "_{pair}" to derive the full timeseries key.
Notes
------
The *FXFixingMajor* is a class designed to lookup and return FX fixings directly from a
Series in either the FX pair directly, or its inverse. This function depends upon what is
populated to the datastore. That is, if *'GBPMXN'* is an available dataseries then *'MXNGBP'*
would also be calculable as the inverse of *'GBPMXN'*.
When forecasting the fixing from an :class:`~rateslib.fx.FXForwards` object, the rate pair
will be looked up directly according to the ``delivery`` date.
The use of the name **major**, does not imply that only *FX majors* can be used by this class.
I.e. that it is only suitable for *'EURUSD'* and *'EURSEK'*, for example. Rather, the name
*major* implies that this object treats the given FX pair as a major and does not perform any
type of **cross**. This is, in fact, a sub-component of the more featureful
:class:`~rateslib.data.fixings.FXFixing` class which adheres to the ``allow_cross`` argument
on the :class:`~rateslib.data.fixings.FXIndex` in order to automatically handle different
types of required behaviour.
"""
def __init__(
self,
fx_index: FXIndex | str,
publication: datetime_ = NoInput(0),
delivery: datetime_ = NoInput(0),
value: DualTypes_ = NoInput(0),
identifier: str_ = NoInput(0),
) -> None:
self._fx_index = _get_fx_index(fx_index)
del fx_index
if isinstance(delivery, NoInput) and isinstance(publication, NoInput):
raise ValueError(
"At least one date; a `delivery` or `publication` is required to derive the "
"`date` used for the FX fixing."
)
elif isinstance(publication, NoInput) and isinstance(delivery, datetime):
# then derive it under conventions
date_ = self.fx_index.isda_fixing_date(delivery)
self._delivery = delivery
self._publication = date_
elif isinstance(publication, datetime) and isinstance(delivery, NoInput):
date_ = publication
self._publication = date_
self._delivery = self.fx_index.delivery(date_)
elif isinstance(publication, datetime) and isinstance(delivery, datetime):
date_ = publication
self._publication = date_
self._delivery = delivery
else:
raise TypeError( # pragma: no cover
"`delivery` and `publication` given as incorrect types.\n"
f"Got {type(delivery).__name__} and {type(publication).__name__}."
)
super().__init__(date=date_, value=value, identifier=identifier)
@property
def fx_index(self) -> FXIndex:
"""The :class:`FXIndex` for the FX fixing."""
return self._fx_index
def _value_from_possible_inversion(self, identifier: str) -> DualTypes_:
direct, inverted = self.pair, f"{self.pair[3:6]}{self.pair[0:3]}"
try:
state, timeseries, bounds = fixings.__getitem__(identifier + "_" + direct)
exponent = 1.0
except ValueError as e:
try:
state, timeseries, bounds = fixings.__getitem__(identifier + "_" + inverted)
exponent = -1.0
except ValueError:
raise e
if state == self._state:
return NoInput(0)
else:
self._state = state
v = self._lookup_and_calculate(timeseries, bounds)
if isinstance(v, NoInput):
return NoInput(0)
self._value = v**exponent
return self._value
@property
def publication(self) -> datetime:
"""The publication date of the fixing as specified directly, or implied from
the :class:`~rateslib.data.fixings.FXIndex`."""
return self._publication
@property
def delivery(self) -> datetime:
"""The settlement delivery date of the fixing as specified directly, or implied
from the :class:`~rateslib.data.fixings.FXIndex`."""
return self._delivery
@property
def value(self) -> DualTypes_:
if not isinstance(self._value, NoInput):
return self._value
else:
if isinstance(self._identifier, NoInput):
return NoInput(0)
else:
return self._value_from_possible_inversion(identifier=self._identifier)
def _lookup_and_calculate(
self, timeseries: Series, bounds: tuple[datetime, datetime] | None
) -> DualTypes_:
return self._lookup(timeseries=timeseries, date=self.date, bounds=bounds)
@classmethod
def _lookup(
cls,
timeseries: Series[DualTypes], # type: ignore[type-var]
date: datetime,
bounds: tuple[datetime, datetime] | None = None,
) -> DualTypes_:
result = fixings.__base_lookup__(
fixing_series=timeseries,
lookup_date=date,
bounds=bounds,
)
if isinstance(result, Err):
if isinstance(result._exception, FixingRangeError):
return NoInput(0)
result.unwrap()
else:
return result.unwrap()
@property
def pair(self) -> str:
"""The currency pair related to the FX fixing."""
return self.fx_index.pair
def value_or_forecast(self, fx: FXForwards_) -> DualTypes:
"""
Return the determined value of the fixing or forecast it if not available.
Parameters
----------
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object to forecast the forward FX rate.
Returns
-------
float, Dual, Dual2, Variable
"""
if isinstance(self.value, NoInput):
fx_: FXForwards = _validate_obj_not_no_input(fx, "FXForwards")
return fx_.rate(pair=self.pair, settlement=self.delivery)
else:
return self.value
def try_value_or_forecast(self, fx: FXForwards_) -> Result[DualTypes]:
"""
Return the determined value of the fixing or forecast it if not available.
Parameters
----------
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object to forecast the forward FX rate.
Returns
-------
Result[float, Dual, Dual2, Variable]
"""
if isinstance(self.value, NoInput):
if isinstance(fx, NoInput):
return Err(ValueError("Must provide `fx` argument to forecast FXFixing."))
else:
return Ok(fx.rate(pair=self.pair, settlement=self.delivery))
else:
return Ok(self.value)
def __repr__(self) -> str:
return f""
def _clone_isda_mtm(pair: FXIndex | str, isda_index: FXIndex) -> FXIndex:
"""
Attempt to lookup the conventions of pair, but maintain the original ISDA index conventions
from the given isda_index
"""
if isinstance(pair, str):
try:
fx_index = _get_fx_index(pair) # lookup the conventions from STATIC directly
except ValueError:
fx_index = FXIndex(
pair=pair,
settle=isda_index.settle,
calendar=isda_index.calendar,
)
else:
fx_index = pair
return FXIndex(
pair=fx_index.pair,
settle=fx_index.settle,
calendar=fx_index.calendar,
isda_mtm_settle=isda_index.isda_mtm_settle,
isda_mtm_calendar=isda_index.isda_mtm_calendar,
)
def _fx_index_set_cross(pair: FXIndex, allow_cross: bool) -> FXIndex:
return FXIndex(
pair=pair.pair,
settle=pair.settle,
calendar=pair.calendar,
isda_mtm_settle=pair.isda_mtm_settle,
isda_mtm_calendar=pair.isda_mtm_calendar,
allow_cross=allow_cross,
)
class _UnitFixing(_BaseFixing):
"""
A :class:`~rateslib.data.fixings._BaseFixing` permanently adopting value 1.0.
Used as a placeholder.
"""
def __init__(
self, *, date: datetime, value: DualTypes_ = NoInput(0), identifier: str_ = NoInput(0)
) -> None:
self._value = 1.0
self._state = 0
self._date = date
self._identifier = identifier
@property
def value(self) -> DualTypes_:
"""Returns 1.0."""
return 1.0
def value_or_forecast(self, *args: Any, **kwargs: Any) -> DualTypes:
"""Returns 1.0."""
return 1.0
def __repr__(self) -> str:
return f""
def reset(self, *args: Any, **kwargs: Any) -> None:
"""Does nothing."""
pass
def _lookup_and_calculate(self, *args: Any, **kwargs: Any) -> DualTypes_:
return 1.0
_WMR_EUR_BASE = ["czk", "dkk", "huf", "nok", "pln", "ron", "sek"]
_WMR_USD_INVERTED = ["gbp", "eur", "aud", "nzd", "iep", "bwp", "sbd", "top", "wst", "xeu"]
class _WMRClassification(Enum):
"""
WMR FX Benchmarks classification. Either the currency is USD or EUR or it is a third currency
whose base is measured versus USD or EUR
"""
USD = 0
EUR = 1
BASE_USD = 2
BASE_EUR = 3
@classmethod
def classify(cls, value: str) -> _WMRClassification:
if value == "usd":
return _WMRClassification.USD
elif value == "eur":
return _WMRClassification.EUR
elif value in _WMR_EUR_BASE:
return _WMRClassification.BASE_EUR
else:
return _WMRClassification.BASE_USD
class FXFixing(_BaseFixing):
"""
An FX fixing value for cross-currency or non-deliverable settlement.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.data.fixings import FXFixing, FXIndex
from rateslib import fixings, dt, FXForwards, FXRates, Curve
from pandas import Series
.. ipython:: python
fixings.add("WMR_10AMTYO_USDJPY", Series(index=[dt(1999, 12, 29)], data=[155.00]))
fixings.add("WMR_10AMTYO_AUDUSD", Series(index=[dt(1999, 12, 29)], data=[1.260]))
fxfix = FXFixing(
delivery=dt(2000, 1, 4),
fx_index=FXIndex(
pair="audjpy",
calendar="syd,tyo|fed",
settle=2,
isda_mtm_calendar="syd,tyo,ldn",
isda_mtm_settle=-2,
allow_cross=True,
),
identifier="WMR_10AMTYO"
)
fxfix.publication # <-- derived from isda attributes
fxfix.value # <-- should be from the cross 1.26 * 155 = 195.3
.. ipython:: python
:suppress:
fixings.pop("WMR_10AMTYO_USDJPY")
fixings.pop("WMR_10AMTYO_AUDUSD")
.. role:: red
.. role:: green
Parameters
----------
fx_index: FXIndex, str, :red:`required`
The :class:`~rateslib.data.fixings.FXIndex` defining the FX pair and its conventions.
publication: datetime, :green:`optional`
The publication date of the fixing. If not given, must provide ``delivery`` in order to
derive the *publication date*.
delivery: datetime, :green:`optional`
The settlement delivery date of the cashflow. Can be used to derive the *publication date*.
If not given is derived from the ``publication``.
value: float, Dual, Dual2, Variable, optional
The initial value for the fixing to adopt. Most commonly this is not given and it is
determined from a timeseries of published FX rates.
identifier: str, optional
The string name of the series to be loaded by the *Fixings* object. Will be
appended with "_{pair}" to derive the full timeseries key.
Notes
-----
This object is designed to systematically handle FX fixings across variety of conventions
and is typically used for non-deliverable and MTM-XCS settlement.
If the :class:`~rateslib.data.fixings.FXIndex` is configured to ``allow_cross`` (which is
the general default) then it will adopt the `WMR Benchmark Methodology `__
and assume cross rates against base USD, except if the currency is one of the European
currencies defined as having a base EUR, by that methodology.
Suppose one transacted a *CADSEK mtm-XCS* with a CAD mtm *Leg*. The ISDA MTM fixing date could
be defined as being 2 business days prior to the cashflow under the Stockholm, New York, London
and Toronto calendars. Under WMR, CAD has a USD base, and SEK has a EUR base, so the
determination of this FX Fixing will be a 3-way cross: CADUSD * USDEUR * EURSEK.
WMR ignores market settlement convention and local calendars in the determination of the cross.
So a cashflow due on 8th Jan '26 will determine a publication date as 5th Jan '26 (since the
6th Jan is holiday in Stockholm). All three WMR publication will have different
settlement (delivery dates) for the publication date on the 5th Jan:
.. ipython:: python
fxfix = FXFixing(
fx_index=FXIndex(
pair="cadsek",
calendar="tro,stk|fed",
settle=2,
isda_mtm_calendar="tro,ldn,stk,nyc",
isda_mtm_settle=-2,
allow_cross=True,
),
delivery=dt(2026, 1, 8),
)
fxfix.publication # <-- is 5th Jan determnined from the isda specications
fxfix.fx_fixing1.delivery # <-- USDCAD is T+1 under "tro|fed" defined by defaults
fxfix.fx_fixing2.delivery # <-- EURUSD is T+2 under "tgt|fed" defined by defaults
fxfix.fx_fixing3.delivery # <-- EURSEK is T+2 under "tgt,stk|fed" defined by defaults
This has implications towards the forecasting of these fixing values. In order to properly
forecast the above an :class:`~rateslib.fx.FXForwards` with all four currencies is required.
.. ipython:: python
:suppress:
sek = Curve({dt(2026, 1, 1): 1.0, dt(2027, 1, 1): 0.98})
eur = Curve({dt(2026, 1, 1): 1.0, dt(2027, 1, 1): 0.981})
cad = Curve({dt(2026, 1, 1): 1.0, dt(2027, 1, 1): 0.97})
usd = Curve({dt(2026, 1, 1): 1.0, dt(2027, 1, 1): 0.965})
fxf = FXForwards(
fx_rates=[
FXRates({"usdcad": 1.38}, settlement=dt(2026, 1, 2)),
FXRates({"eurusd": 1.165, "eursek": 10.75}, settlement=dt(2026, 1, 3))
],
fx_curves={
"seksek": sek, "sekusd": sek, "eureur": eur, "eurusd": eur, "cadcad": cad, "cadusd": cad, "usdusd": usd
}
)
.. ipython:: python
fxfix.value_or_forecast(fx=fxf) # <- FXForwards:usd,cad,eur,sek
fxf.rate("cadusd", dt(2026, 1, 6)) * fxf.rate("usdeur", dt(2026, 1, 7)) * fxf.rate("eursek", dt(2026, 1, 8))
Note that this is different to the **actual** *CADSEK* forecast FX rate and this is due to
those milaligned crosses and calendars.
.. ipython:: python
fxfix = FXFixing(
fx_index=FXIndex(
pair="cadsek",
calendar="tro,stk|fed",
settle=2,
isda_mtm_calendar="tro,ldn,stk,nyc",
isda_mtm_settle=-2,
allow_cross=False, # <- Everything the same except no crossing allowed
),
delivery=dt(2026, 1, 8),
)
fxfix.value_or_forecast(fx=fxf)
fxf.rate("cadsek", dt(2026, 1, 8))
""" # noqa: E501
def __init__(
self,
fx_index: FXIndex | str,
publication: datetime_ = NoInput(0),
delivery: datetime_ = NoInput(0),
value: DualTypes_ = NoInput(0),
identifier: str_ = NoInput(0),
) -> None:
self._fx_index = _get_fx_index(fx_index)
del fx_index
if isinstance(delivery, NoInput) and isinstance(publication, NoInput):
raise ValueError(
"At least one date; a `delivery` or `publication` is required to derive the "
"`date` used for the FX fixing."
)
elif isinstance(publication, NoInput) and isinstance(delivery, datetime):
# then derive it under conventions
date_ = self.fx_index.isda_fixing_date(delivery)
self._delivery = delivery
self._publication = date_
elif isinstance(publication, datetime) and isinstance(delivery, NoInput):
date_ = publication
self._publication = date_
self._delivery = self.fx_index.delivery(date_)
elif isinstance(publication, datetime) and isinstance(delivery, datetime):
date_ = publication
self._publication = date_
self._delivery = delivery
else:
raise TypeError( # pragma: no cover
"`delivery` and `publication` given as incorrect types.\n"
f"Got {type(delivery).__name__} and {type(publication).__name__}."
)
self._identifier = identifier if isinstance(identifier, NoInput) else identifier.upper()
self._value = value
self._date = date_
if not self.allow_cross:
self._fx_fixing1: _FXFixingMajor = _FXFixingMajor(
fx_index=self.fx_index,
publication=self.publication,
delivery=self.delivery,
value=value,
identifier=identifier,
)
self._fx_fixing2: _FXFixingMajor | _UnitFixing = _UnitFixing(
date=self.publication, identifier=identifier
)
self._fx_fixing3: _FXFixingMajor | _UnitFixing = _UnitFixing(
date=self.publication, identifier=identifier
)
else:
ccy1, ccy2 = self.fx_index.pair[:3], self.fx_index.pair[3:]
match (
_WMRClassification.classify(self.pair[:3]),
_WMRClassification.classify(self.pair[3:]),
):
case (_WMRClassification.USD, _WMRClassification.USD):
raise ValueError("An FXFixing between 'usd' and 'usd' is not valid.")
case (_WMRClassification.EUR, _WMRClassification.EUR):
raise ValueError("An FXFixing between 'eur' and 'eur' is not valid.")
case (
(_WMRClassification.USD, _WMRClassification.EUR)
| (_WMRClassification.EUR, _WMRClassification.USD)
| (_WMRClassification.USD, _WMRClassification.BASE_USD)
| (_WMRClassification.BASE_USD, _WMRClassification.USD)
| (_WMRClassification.EUR, _WMRClassification.BASE_EUR)
| (_WMRClassification.BASE_EUR, _WMRClassification.EUR)
):
# then the pair is a direct major determined by WMR
self._fx_fixing1 = _FXFixingMajor(
fx_index=self.fx_index,
publication=self.publication,
delivery=self.delivery,
identifier=identifier,
)
self._fx_fixing2 = _UnitFixing(date=self.publication, identifier=identifier)
self._fx_fixing3 = _UnitFixing(date=self.publication, identifier=identifier)
case (
(_WMRClassification.USD, _WMRClassification.BASE_EUR)
| (_WMRClassification.BASE_EUR, _WMRClassification.USD)
| (_WMRClassification.BASE_EUR, _WMRClassification.BASE_EUR)
):
# then must be a 2 pair cross involving EUR
self._fx_fixing1 = _FXFixingMajor(
fx_index=_clone_isda_mtm(f"{ccy1}eur", self.fx_index),
publication=self.publication,
identifier=identifier,
)
self._fx_fixing2 = _FXFixingMajor(
fx_index=_clone_isda_mtm(f"eur{ccy2}", self.fx_index),
publication=self.publication,
identifier=identifier,
)
self._fx_fixing3 = _UnitFixing(date=self.publication, identifier=identifier)
case (
(_WMRClassification.BASE_USD, _WMRClassification.EUR)
| (_WMRClassification.EUR, _WMRClassification.BASE_USD)
| (_WMRClassification.BASE_USD, _WMRClassification.BASE_USD)
):
# then must be a 2 pair cross involving USD
self._fx_fixing1 = _FXFixingMajor(
fx_index=_clone_isda_mtm(f"{ccy1}usd", self.fx_index),
publication=self.publication,
identifier=identifier,
)
self._fx_fixing2 = _FXFixingMajor(
fx_index=_clone_isda_mtm(f"usd{ccy2}", self.fx_index),
publication=self.publication,
identifier=identifier,
)
self._fx_fixing3 = _UnitFixing(date=self.publication, identifier=identifier)
case (_WMRClassification.BASE_USD, _WMRClassification.BASE_EUR):
# then must be a 4 currency cross involving EUR and USD
self._fx_fixing1 = _FXFixingMajor(
fx_index=_clone_isda_mtm(f"{ccy1}usd", self.fx_index),
publication=self.publication,
identifier=identifier,
)
self._fx_fixing2 = _FXFixingMajor(
fx_index=_clone_isda_mtm("usdeur", self.fx_index),
publication=self.publication,
identifier=identifier,
)
self._fx_fixing3 = _FXFixingMajor(
fx_index=_clone_isda_mtm(f"eur{ccy2}", self.fx_index),
publication=self.publication,
identifier=identifier,
)
case (_WMRClassification.BASE_EUR, _WMRClassification.BASE_USD):
# then must be a 4 currency cross involving EUR and USD
self._fx_fixing1 = _FXFixingMajor(
fx_index=_clone_isda_mtm(f"{ccy1}eur", self.fx_index),
publication=self.publication,
identifier=identifier,
)
self._fx_fixing2 = _FXFixingMajor(
fx_index=_clone_isda_mtm("eurusd", self.fx_index),
publication=self.publication,
identifier=identifier,
)
self._fx_fixing3 = _FXFixingMajor(
fx_index=_clone_isda_mtm(f"usd{ccy2}", self.fx_index),
publication=self.publication,
identifier=identifier,
)
@property
def _state(self) -> int: # type: ignore[override]
return hash(self.fx_fixing1._state + self.fx_fixing2._state + self.fx_fixing3._state)
@property
def fx_fixing1(self) -> _FXFixingMajor:
"""
The first (or only) :class:`~rateslib.data.fixings._FXFixingMajor` required by the fixing.
"""
return self._fx_fixing1
@property
def fx_fixing2(self) -> _FXFixingMajor | _UnitFixing:
"""
The second :class:`~rateslib.data.fixings._FXFixingMajor` required by the fixing if crossed.
"""
return self._fx_fixing2
@property
def fx_fixing3(self) -> _FXFixingMajor | _UnitFixing:
"""
The third :class:`~rateslib.data.fixings._FXFixingMajor` required by the fixing if crossed.
"""
return self._fx_fixing3
@property
def allow_cross(self) -> bool:
"""Whether the fixing uses WMR base currencies and majors or directly looks up the given
pair."""
return self.fx_index.allow_cross
@property
def fx_index(self) -> FXIndex:
"""The :class:`FXIndex` for the FX fixing."""
return self._fx_index
@property
def publication(self) -> datetime:
"""The publication date of the fixing as specified directly, or implied from
the :class:`~rateslib.data.fixings.FXIndex`."""
return self._publication
@property
def delivery(self) -> datetime:
"""The settlement delivery date of the fixing as specified directly, or implied
from the :class:`~rateslib.data.fixings.FXIndex`."""
return self._delivery
@property
def value(self) -> DualTypes_:
if not isinstance(self._value, NoInput):
return self._value
else:
if (
isinstance(self.fx_fixing1.value, NoInput)
or isinstance(self.fx_fixing2.value, NoInput)
or isinstance(self.fx_fixing3.value, NoInput)
):
return NoInput(0)
else:
self._value = self.fx_fixing1.value * self.fx_fixing2.value * self.fx_fixing3.value
return self._value
@property
def pair(self) -> str:
"""The currency pair related to the FX fixing."""
return self.fx_index.pair
def value_or_forecast(self, fx: FXForwards_) -> DualTypes:
"""
Return the determined value of the fixing or forecast it if not available.
Parameters
----------
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object to forecast the forward FX rate.
Returns
-------
float, Dual, Dual2, Variable
"""
if isinstance(self.value, NoInput):
fx_: FXForwards = _validate_obj_not_no_input(fx, "FXForwards")
f1 = self.fx_fixing1.value_or_forecast(fx=fx_)
f2 = self.fx_fixing2.value_or_forecast(fx=fx_)
f3 = self.fx_fixing3.value_or_forecast(fx=fx_)
return f1 * f2 * f3
else:
return self.value
def try_value_or_forecast(self, fx: FXForwards_) -> Result[DualTypes]:
"""
Return the determined value of the fixing or forecast it if not available.
Parameters
----------
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object to forecast the forward FX rate.
Returns
-------
Result[float, Dual, Dual2, Variable]
"""
if isinstance(self.value, NoInput):
if isinstance(fx, NoInput):
return Err(ValueError("Must provide `fx` argument to forecast FXFixing."))
else:
return Ok(fx.rate(pair=self.pair, settlement=self.delivery))
else:
return Ok(self.value)
def _lookup_and_calculate(
self, timeseries: Series, bounds: tuple[datetime, datetime] | None
) -> DualTypes_:
raise NotImplementedError("FXFixing does not support lookup and calculation.")
@classmethod
def _lookup(
cls,
timeseries: Series[DualTypes], # type: ignore[type-var]
date: datetime,
bounds: tuple[datetime, datetime] | None = None,
) -> DualTypes_:
raise NotImplementedError("FXFixing does not support lookup.")
result = fixings.__base_lookup__(
fixing_series=timeseries,
lookup_date=date,
bounds=bounds,
)
if isinstance(result, Err):
if isinstance(result._exception, FixingRangeError):
return NoInput(0)
result.unwrap()
else:
return result.unwrap()
def __repr__(self) -> str:
_1 = self.fx_fixing1.pair
_2 = ("/" + self.fx_fixing2.pair) if not isinstance(self.fx_fixing2, _UnitFixing) else ""
_3 = ("/" + self.fx_fixing3.pair) if not isinstance(self.fx_fixing3, _UnitFixing) else ""
return f""
def reset(self, state: int_ = NoInput(0)) -> None:
if (
isinstance(state, NoInput)
or self.fx_fixing1._state == state
or self.fx_fixing2._state == state
or self.fx_fixing3._state == state
):
self._value = NoInput(0)
self._fx_fixing1.reset(state)
self._fx_fixing2.reset(state)
self._fx_fixing3.reset(state)
class IRSSeries:
"""
Define the parameters of a specific IRS series.
This object acts as a container to store local conventions for different IRS markets.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.data.fixings import IRSSeries
.. ipython:: python
irss = IRSSeries(
currency="nok",
settle=2,
calendar="osl",
convention="30e360",
leg2_convention="act360",
frequency="A",
leg2_frequency="Q",
leg2_fixing_method="Ibor(2)",
eom=False,
modifier="MF",
payment_lag=0
)
irss.settle
.. role:: red
.. role:: green
Parameters
----------
currency: str, :red:`required`
The currency of the fixing. 3-digit iso code.
settle: Adjuster, int, str :green:`optional (set by 'defaults')`
The effective date lag from the fixing date to arrive at the swap effective date,
under the given ``calendar``. If int is assumed to be settleable business days.
calendar: Calendar, str, :red:`required`
The calendar passed to the :class:`~rateslib.instruments.IRS`
convention: str, :green:`optional (set by 'defaults')`
The convention passed to the :class:`~rateslib.instruments.IRS`
leg2_convention: str, :green:`optional (set by 'defaults')`
The leg2_convention passed to the :class:`~rateslib.instruments.IRS`
frequency: str, :green:`optional (set by 'defaults')`
The frequency passed to the :class:`~rateslib.instruments.IRS`
leg2_frequency: str, :green:`optional (set by 'defaults')`
The leg2_frequency passed to the :class:`~rateslib.instruments.IRS`
leg2_fixing_method: FloatFixingMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.FloatFixingMethod` describing the determination
of the floating rate for each period.
eom : bool, :green:`optional`
The eom passed to the :class:`~rateslib.instruments.IRS`
modifier : Adjuster, str in {"NONE", "F", "MF", "P", "MP"}, :green:`optional (set by Default)`
The eom passed to the :class:`~rateslib.instruments.IRS`
payment_lag: Adjuster, int, :green:`optional`
The payment_lag passed to the :class:`~rateslib.instruments.IRS`
""" # noqa: E501
def __init__(
self,
currency: str,
settle: int | Adjuster | str,
frequency: Frequency | str,
convention: str,
calendar: Cal | UnionCal | NamedCal | str,
leg2_fixing_method: str | FloatFixingMethod,
*,
eom: bool_ = NoInput(0),
modifier: Adjuster | str_ = NoInput(0),
payment_lag: Adjuster | str | int_ = NoInput(0),
leg2_frequency: Frequency | str_ = NoInput(1),
leg2_convention: str_ = NoInput(1),
) -> None:
self._currency = currency.lower()
self._settle = _get_adjuster(settle)
self._calendar = get_calendar(calendar)
self._frequency = _get_frequency(frequency, roll=NoInput(0), calendar=self.calendar)
self._leg2_frequency = _get_frequency(
_drb(self.frequency, leg2_frequency), roll=NoInput(0), calendar=self.calendar
)
self._convention = _get_convention(convention)
self._leg2_convention = _get_convention(_drb(self.convention, leg2_convention))
self._eom: bool = _drb(defaults.eom, eom)
self._modifier = _get_adjuster(_drb(defaults.modifier, modifier))
self._payment_lag = payment_lag
self._leg2_fixing_method = _get_float_fixing_method(leg2_fixing_method)
@property
def currency(self) -> str:
"""The currency of the associated :class:`~rateslib.instruments.IRS`"""
return self._currency
@property
def settle(self) -> Adjuster:
"""The :class:`~rateslib.scheduling.Adjuster` for effective date determination of the
associated :class:`~rateslib.instruments.IRS`"""
return self._settle
@property
def calendar(self) -> Cal | NamedCal | UnionCal:
"""The calendar of the associated :class:`~rateslib.instruments.IRS`"""
return self._calendar
@property
def frequency(self) -> Frequency:
"""The :class:`~rateslib.scheduling.Frequency` of leg1 of
the associated :class:`~rateslib.instruments.IRS`"""
return self._frequency
@property
def leg2_frequency(self) -> Frequency:
"""The :class:`~rateslib.scheduling.Frequency` of leg2 of
the associated :class:`~rateslib.instruments.IRS`"""
return self._leg2_frequency
@property
def convention(self) -> Convention:
"""The :class:`~rateslib.scheduling.Convention` of leg1 of
the associated :class:`~rateslib.instruments.IRS`"""
return self._convention
@property
def leg2_convention(self) -> Convention:
"""The :class:`~rateslib.scheduling.Convention` of leg2 of
the associated :class:`~rateslib.instruments.IRS`"""
return self._leg2_convention
@property
def modifier(self) -> Adjuster:
"""The :class:`~rateslib.scheduling.Adjuster` for accrual modification
of the associated :class:`~rateslib.instruments.IRS`"""
return self._modifier
@property
def payment_lag(self) -> Adjuster | int | str_:
"""The :class:`~rateslib.scheduling.Adjuster` for payment date modification
of the associated :class:`~rateslib.instruments.IRS`"""
return self._payment_lag
@property
def eom(self) -> bool:
"""Whether the roll-day tends to EoM or not."""
return self._eom
@property
def leg2_fixing_method(self) -> FloatFixingMethod:
"""The :class:`~rateslib.enums.FloatFixingMethod` of the
:class:`~rateslib.legs.FloatLeg`."""
return self._leg2_fixing_method
def __repr__(self) -> str:
return f""
def _get_irs_series(val: IRSSeries | str) -> IRSSeries:
if isinstance(val, IRSSeries):
return val
else:
return IRSSeries(**defaults.irs_series[val.lower()])
class IRSFixing(_BaseFixing):
"""
An IRS fixing value for the determination of IR Swaptions.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.data.fixings import IRSFixing
from rateslib import fixings, dt, Curve
from pandas import Series
.. ipython:: python
fixings.add("ISDA_USD_2Y", Series(index=[dt(2000, 1, 4)], data=[2.543]))
irs_fix = IRSFixing(
publication=dt(2000, 1, 4),
irs_series="usd_irs",
tenor="2Y",
identifier="ISDA_USD_2Y",
)
irs_fix.publication
irs_fix.value # <-- determined from Series
.. ipython:: python
curve = Curve({dt(2000, 1, 4): 1.0, dt(2003, 1, 4): 0.91}, convention="Act360")
irs_fix = IRSFixing(
publication=dt(2000, 1, 11),
irs_series="usd_irs",
tenor="2Y",
identifier="ISDA_USD_2Y",
)
irs_fix.publication
irs_fix.value_or_forecast(curves=[curve, curve]) # <-- no Series index available - use Curve
.. ipython:: python
:suppress:
fixings.pop("ISDA_USD_2Y")
.. role:: red
.. role:: green
Parameters
----------
irs_series: IRSSeries, str, :red:`required`
The :class:`~rateslib.data.fixings.IRSSeries` defining the IRS conventions.
publication: datetime, :red:`required`
The publication date of the fixing.
tenor: str, :red:`required`
The standard tenor of the underlying :class:`~rateslib.instruments.IRS` of the fixing.
value: float, Dual, Dual2, Variable, :green:`optional`
The initial value for the fixing to adopt. Most commonly this is not given and it is
determined from a timeseries of published rates.
identifier: str, :green:`optional`
The string name of the series to be loaded by the *Fixings* object.
""" # noqa: E501
def __init__(
self,
irs_series: IRSSeries | str,
publication: datetime,
tenor: str | datetime,
value: DualTypes_ = NoInput(0),
identifier: str_ = NoInput(0),
) -> None:
self._publication = publication
self._tenor = tenor
self._irs_series = _get_irs_series(irs_series)
super().__init__(identifier=identifier, value=value, date=self.publication)
@property
def tenor(self) -> datetime | str:
"""The tenor of the IRSFixing"""
return self._tenor
@property
def irs_series(self) -> IRSSeries:
"""The :class:`~rateslib.enums.IRSSeries` for the fixing."""
return self._irs_series
@cached_property
def irs(self) -> IRS:
"""The :class:`~rateslib.instruments.IRS` underlying for the swaptions priced
by this *Smile*."""
from rateslib.instruments.irs import IRS
return IRS(
effective=self.effective,
termination=self.tenor,
frequency=self.irs_series.frequency,
leg2_frequency=self.irs_series.leg2_frequency,
convention=self.irs_series.convention,
leg2_convention=self.irs_series.leg2_convention,
calendar=self.irs_series.calendar,
payment_lag=self.irs_series.payment_lag,
modifier=self.irs_series.modifier,
eom=self.irs_series.eom,
notional=1e6, # default notional to a sized paid IRS
)
def annuity(
self,
settlement_method: SwaptionSettlementMethod | str,
index_curve: _BaseCurve,
rate_curve: CurveOption_,
) -> DualTypes:
r"""
Return the annuity value used in the determination of the cashflow settlement, scaled to
match 1mm notional per bp.
.. role:: red
.. role:: green
Parameters
----------
settlement_method: SwaptionSettlementMethod, str, :red:`required`
The :class:`~rateslib.enums.SwaptionSettlementMethod` defining the settlement method.
index_curve: _BaseCurve, :green:`optional`
The price alignment index (PAI) curve, colloquially known as the discount factor
curve for the *IRS* that determines the PV. Required for certain methods.
rate_curve: _BaseCurve or dict of such, :green:`optional`
The curve used to forecast the floating leg of the
underlying :class:`~rateslib.instruments.IRS`.
Returns
-------
float, Dual, Dual2
Notes
-----
This method branches based on the SwaptionSettlementMethod:
- **Physical**: only the ``index_curve`` need be provided. In the case of physical
settlement this curve is the discount factor curve used to discount the resultant
:class:`~rateslib.instruments.IRS`, which is likely to be cleared and hence should
typically be a single currency RFR curve, e.g. SOFR or ESTR.
- **CashParTenor**: the annuity factor is derived from the *IRSFixing* value itself, using
the formula:
.. math::
A_R = \sum_{i=1}^N \frac{1/f}{(1 + R / f)^{i}}
- **CashCollateralized**: very similar to the *Physical* settlement, only the
``index_curve`` needs to be provided to derive the annuity. In practice, this *Curve*
should be constructed according the ISDA cash collateralized method using the published
rates at each tenor for the collateralization, e.g. SOFR swaps or ESTR swaps.
.. math::
A_R = \sum_{i=1}^N d_i v_i
"""
settlement_method_ = _get_swaption_settlement_method(settlement_method)
del settlement_method
if settlement_method_ == SwaptionSettlementMethod.Physical:
a_r: DualTypes = self.irs.leg1.analytic_delta( # type: ignore[assignment]
disc_curve=index_curve, forward=self.effective, local=False
)
elif settlement_method_ == SwaptionSettlementMethod.CashParTenor:
R = self.value_or_forecast(
curves=dict( # type: ignore[arg-type]
rate_curve=NoInput(0),
disc_curve=index_curve,
leg2_rate_curve=rate_curve,
leg2_disc_curve=index_curve,
)
)
a_r, f = 0.0, self.irs.leg1.schedule.frequency_obj.periods_per_annum()
for i, _period in enumerate(self.irs.leg1._regular_periods):
a_r += (1 / f) * (1 + R / (f * 100.0)) ** (-i - 1) * 100.0
else: # settlement_method == SwaptionSettlementMethod.CashCollaterized:
a_r = self.irs.leg1.analytic_delta( # type: ignore[assignment]
disc_curve=index_curve,
forward=self.effective,
local=False,
)
return a_r
@property
def publication(self) -> datetime:
"""The publication date of the fixing."""
return self._publication
@cached_property
def effective(self) -> datetime:
"""The effective date of the underlying :class:`~rateslib.instruments.IRS`."""
return self.irs_series.calendar.adjust(self.publication, self.irs_series.settle)
@cached_property
def termination(self) -> datetime:
"""The termination date of the underlying :class:`~rateslib.instruments.IRS`."""
if isinstance(self.tenor, datetime):
return self.tenor
else:
schedule = Schedule(
effective=self.effective,
termination=self.tenor,
frequency=self.irs_series.frequency,
calendar=self.irs_series.calendar,
modifier=self.irs_series.modifier,
eom=self.irs_series.eom,
)
return schedule.aschedule[-1]
def value_or_forecast(self, curves: CurvesT_) -> DualTypes:
"""
Return the determined value of the fixing or forecast it if not available.
Parameters
----------
curves: optional
Curves in the pricing format required by :class:`~rateslib.instruments.IRS`.
Returns
-------
float, Dual, Dual2, Variable
"""
if isinstance(self.value, NoInput):
rate = self.irs.rate(curves=curves)
return rate
else:
return self.value
def try_value_or_forecast(self, curves: CurvesT_) -> Result[DualTypes]:
"""
Return the determined value of the fixing or forecast it if not available.
Parameters
----------
curves: _Curves,
Pricing objects. See **Pricing** on :class:`~rateslib.instruments.IRS`
for details of allowed inputs.
Returns
-------
Result[float, Dual, Dual2, Variable]
"""
if isinstance(self.value, NoInput):
try:
return Ok(self.irs.rate(curves=curves))
except Exception as e:
return Err(e)
else:
return Ok(self.value)
def _lookup_and_calculate(
self,
timeseries: Series[DualTypes], # type: ignore[type-var]
bounds: tuple[datetime, datetime] | None,
) -> DualTypes_:
return self._lookup(timeseries=timeseries, bounds=bounds, date=self.date)
@classmethod
def _lookup(
cls,
timeseries: Series[DualTypes], # type: ignore[type-var]
date: datetime,
bounds: tuple[datetime, datetime] | None = None,
) -> DualTypes_:
result = fixings.__base_lookup__(
fixing_series=timeseries,
lookup_date=date,
bounds=bounds,
)
if isinstance(result, Err):
if isinstance(result._exception, FixingRangeError):
return NoInput(0)
result.unwrap()
else:
return result.unwrap()
def __repr__(self) -> str:
return f""
def _maybe_get_fx_index(val: FXIndex | str_) -> FXIndex_:
if isinstance(val, NoInput):
return NoInput(0)
else:
return _get_fx_index(val)
def _get_fx_index(val: FXIndex | str) -> FXIndex:
if isinstance(val, FXIndex):
return val
else:
pair = val.lower()
try:
return FXIndex(**defaults.fx_index[pair])
except KeyError:
try:
reverse_fxi = FXIndex(**defaults.fx_index[f"{pair[3:]}{pair[:3]}"])
return FXIndex(
pair=pair,
calendar=reverse_fxi.calendar,
settle=reverse_fxi.settle,
isda_mtm_calendar=reverse_fxi.isda_mtm_calendar,
isda_mtm_settle=reverse_fxi.isda_mtm_settle,
)
except KeyError:
raise ValueError(
f"The FXIndex: '{pair}' was not found in `defaults`.\n"
"To add a default specification for the required FXIndex, for example, use:\n"
f"> defaults.fx_index['{pair}'] = {{ \n"
" 'pair': 'usdsek',\n"
" 'calendar': 'stk|fed',\n"
" 'settle': '2B',\n"
" 'isda_mtm_settle': '-2B',\n"
" 'isda_mtm_calendar': 'stk',\n"
" 'allow_cross': True,\n"
f" }}\n"
"Alternatively, create an FXIndex directly and supply it to `pair`, "
"for example:\n> pair=FXIndex('usdsek', 'stk|fed\\, 2)"
)
class IBORFixing(_BaseFixing):
"""
A rate fixing value referencing a tenor-IBOR type calculation.
Parameters
----------
rate_index: FloatRateIndex
The parameters associated with the floating rate index.
accrual_start: datetime
The start accrual date for the period of the floating rate.
date: datetime
The date of relevance for the fixing, which is its **publication** date. This can
be determined by a ``lag`` parameter of the ``rate_index`` measured from the
``accrual_start``.
value: float, Dual, Dual2, Variable, optional
The initial value for the fixing to adopt. Most commonly this is not given and it is
determined from a timeseries of published FX rates.
identifier: str, optional
The string name of the timeseries to be loaded by the *Fixings* object.
Examples
--------
.. ipython:: python
:suppress:
from rateslib.data.fixings import IBORFixing
from rateslib.data.fixings import FloatRateIndex
from rateslib import fixings, dt
from pandas import Series
.. ipython:: python
fixings.add("EURIBOR_3M", Series(index=[dt(2000, 1, 3), dt(2000, 2, 4)], data=[1.651, 1.665]))
ibor_fix = IBORFixing(
accrual_start=dt(2000, 1, 5),
identifier="Euribor_3m",
rate_index=FloatRateIndex(frequency="Q", series="eur_ibor")
)
ibor_fix.date
ibor_fix.value
.. ipython:: python
:suppress:
fixings.pop("Euribor_3m")
""" # noqa: E501
_accrual_start: datetime
_accrual_end: datetime
_rate_index: FloatRateIndex
def __init__(
self,
*,
rate_index: FloatRateIndex,
accrual_start: datetime,
date: datetime_ = NoInput(0),
value: DualTypes_ = NoInput(0),
identifier: str_ = NoInput(0),
) -> None:
super().__init__(date=date, value=value, identifier=identifier) # type: ignore[arg-type]
self._accrual_start = accrual_start
self._rate_index = rate_index
self._date = _drb(
self.index.calendar.lag_bus_days(self.accrual_start, -self.index.lag, False),
date,
)
self._accrual_end = add_tenor(
start=self.accrual_start,
tenor=self.index.frequency,
modifier=self.index.modifier,
calendar=self.index.calendar,
)
@property
def index(self) -> FloatRateIndex:
"""The definitions for the :class:`FloatRateIndex` of the fixing."""
return self._rate_index
@property
def series(self) -> FloatRateSeries:
"""The :class:`FloatRateSeries` for defining the fixing."""
return self.index.series
@property
def accrual_start(self) -> datetime:
"""The start accrual date for the defined period of the floating rate."""
return self._accrual_start
@property
def accrual_end(self) -> datetime:
"""The end accrual date for the defined period of the floating rate."""
return self._accrual_end
def _lookup_and_calculate(
self,
timeseries: Series[DualTypes], # type: ignore[type-var]
bounds: tuple[datetime, datetime] | None,
) -> DualTypes_:
return self._lookup(timeseries=timeseries, bounds=bounds, date=self.date)
@classmethod
def _lookup(
cls,
timeseries: Series[DualTypes], # type: ignore[type-var]
date: datetime,
bounds: tuple[datetime, datetime] | None = None,
) -> DualTypes_:
result = fixings.__base_lookup__(
fixing_series=timeseries,
lookup_date=date,
bounds=bounds,
)
if isinstance(result, Err):
if isinstance(result._exception, FixingRangeError):
return NoInput(0)
result.unwrap()
else:
return result.unwrap()
class IBORStubFixing(_BaseFixing):
"""
A rate fixing value referencing an interpolated tenor-IBOR type calculation.
Parameters
----------
rate_series: FloatRateSeries
The parameters associated with the floating rate index.
accrual_start: datetime
The start accrual date for the period.
accrual_end: datetime
The end accrual date for the period..
date: datetime, optional
The date of relevance for the fixing, which is its **publication** date. This can
be determined by a ``lag`` parameter of the ``rate_series`` measured from the
``accrual_start``.
value: float, Dual, Dual2, Variable, optional
The initial value for the fixing to adopt. Most commonly this is not given and it is
determined from a timeseries of published FX rates.
identifier: str, optional
The string name of the timeseries to be loaded by the *Fixings* object. This is a
*series* identifier, e.g. "Euribor", which will be extended to derive the full
version, e.g. "Euribor_3m" based on available and necessary tenors.
Notes
-----
An interpolated tenor-IBOR type calculation depends upon two tenors being determinable from
which a rate can be linearly interpolated.
The ``rate_series`` has a ``tenors`` attribute which will be used in a first instance. If this
is empty, i.e. unspecified, then the default tenors of ['1W', '1M', '3M', '6M', '12M']
are used in place.
Examples
--------
This fixing automatically identifies it must be interpolated between the available 3M and 6M
tenors.
.. ipython:: python
:suppress:
from rateslib.data.fixings import IBORStubFixing
from rateslib.data.fixings import FloatRateSeries
from rateslib import fixings, dt
from pandas import Series
.. ipython:: python
fixings.add("EURIBOR_1M", Series(index=[dt(2000, 1, 3), dt(2000, 2, 4)], data=[1.651, 1.665]))
fixings.add("EURIBOR_2M", Series(index=[dt(2000, 1, 3), dt(2000, 2, 4)], data=[2.651, 2.665]))
fixings.add("EURIBOR_3M", Series(index=[dt(2000, 1, 3), dt(2000, 2, 4)], data=[3.651, 3.665]))
fixings.add("EURIBOR_6M", Series(index=[dt(2000, 1, 3), dt(2000, 2, 4)], data=[4.651, 4.665]))
ibor_fix = IBORStubFixing(
accrual_start=dt(2000, 1, 5),
accrual_end=dt(2000, 5, 17),
identifier="Euribor",
rate_series=FloatRateSeries(
lag=2,
modifier="MF",
calendar="tgt",
convention="act360",
eom=False,
tenors=["1M", "2M", "3M", "6M", "12M"],
)
)
ibor_fix.date
ibor_fix.value
This fixing can only be determined from a single tenor, which is quite distinct from the
12 day period length in this case. In practice this should be avoided.
.. ipython:: python
fixings.add("NIBOR_6M", Series(index=[dt(2000, 1, 3), dt(2000, 2, 4)], data=[4.651, 4.665]))
ibor_fix = IBORStubFixing(
accrual_start=dt(2000, 1, 5),
accrual_end=dt(2000, 1, 17),
identifier="Nibor",
rate_series=FloatRateSeries(
lag=2,
modifier="MF",
calendar="osl",
convention="act360",
eom=True,
tenors=["6M"],
)
)
ibor_fix.date
ibor_fix.value
ibor_fix.fixing2
.. ipython:: python
:suppress:
fixings.pop("Euribor_1m")
fixings.pop("Euribor_2m")
fixings.pop("Euribor_3m")
fixings.pop("Euribor_6m")
fixings.pop("NIBOR_6M")
""" # noqa: E501
_accrual_start: datetime
_accrual_end: datetime
_series: FloatRateSeries
_fixing1: IBORFixing | NoInput
_fixing2: IBORFixing | NoInput
def __init__(
self,
*,
rate_series: FloatRateSeries | str,
accrual_start: datetime,
accrual_end: datetime,
value: DualTypes_ = NoInput(0),
identifier: str_ = NoInput(0),
date: datetime_ = NoInput(0),
) -> None:
super().__init__(value=value, date=date, identifier=identifier) # type: ignore[arg-type]
self._accrual_start = accrual_start
self._accrual_end = accrual_end
self._series = _get_float_rate_series(rate_series)
self._date = _drb(
self.series.calendar.lag_bus_days(self.accrual_start, -self.series.lag, False),
date,
)
tenors = self._stub_tenors_from_list(
tenors=_drb(["1W", "1M", "3M", "6M", "12M"], self.series.tenors)
)
self._fixing1 = IBORFixing(
rate_index=FloatRateIndex(
series=self.series,
frequency=_get_frequency(tenors[0][0], NoInput(0), NoInput(0)),
),
accrual_start=self.accrual_start,
date=date,
value=value,
identifier=NoInput(0)
if isinstance(identifier, NoInput)
else identifier + "_" + tenors[0][0],
)
if len(tenors[0]) == 2:
self._fixing2 = IBORFixing(
rate_index=FloatRateIndex(
series=self._series,
frequency=_get_frequency(tenors[0][1], NoInput(0), NoInput(0)),
),
date=date,
accrual_start=self.accrual_start,
value=value,
identifier=NoInput(0)
if isinstance(identifier, NoInput)
else identifier + "_" + tenors[0][1],
)
else:
self._fixing2 = NoInput(0)
self._value = value
@property
def date(self) -> datetime:
"""The date of relevance for the fixing, which is its **publication** date."""
return self._date
@property
def fixing1(self) -> IBORFixing | NoInput:
"""The shorter tenor :class:`IBORFixing` making up part of the calculation."""
return self._fixing1
@property
def fixing2(self) -> IBORFixing | NoInput:
"""The longer tenor :class:`IBORFixing` making up part of the calculation."""
return self._fixing2
@property
def value(self) -> DualTypes_:
if not isinstance(self._value, NoInput):
return self._value
elif isinstance(self.fixing1, NoInput) or isinstance(self.fixing1.value, NoInput):
return NoInput(0)
else:
if isinstance(self.fixing2, NoInput):
self._value = self.fixing1.value
return self._value
elif isinstance(self.fixing2.value, NoInput):
return NoInput(0)
else:
self._value = (
self.weights[0] * self.fixing1.value + self.weights[1] * self.fixing2.value
)
return self._value
def reset(self, state: int_ = NoInput(0)) -> None:
if not isinstance(self._fixing1, NoInput):
self._fixing1.reset(state=state)
if not isinstance(self._fixing2, NoInput):
self._fixing2.reset(state=state)
self._value = NoInput(0)
@cached_property
def weights(self) -> tuple[float, float]:
"""Scalar multiplier to apply to each tenor fixing for the interpolation."""
if isinstance(self.fixing2, NoInput):
if isinstance(self.fixing1, NoInput):
raise ValueError(
"The IBORStubFixing has no individual IBORFixings to determine weights."
)
return 1.0, 0.0
else:
e1 = self.fixing1.accrual_end # type: ignore[union-attr]
e2 = self.fixing2.accrual_end
e = self.accrual_end
return (e2 - e) / (e2 - e1), (e - e1) / (e2 - e1)
@property
def series(self) -> FloatRateSeries:
"""The :class:`FloatRateSeries` for defining the fixing."""
return self._series
@property
def accrual_start(self) -> datetime:
"""The start accrual date for the defined accrual period."""
return self._accrual_start
@property
def accrual_end(self) -> datetime:
"""The end accrual date for the defined accrual period."""
return self._accrual_end
def _lookup_and_calculate(
self,
timeseries: Series[DualTypes], # type: ignore[type-var]
bounds: tuple[datetime, datetime] | None,
) -> DualTypes_:
raise RuntimeError("This method should be unused due to overloaded properties")
def _stub_tenors_from_list(self, tenors: list[str]) -> tuple[list[str], list[datetime]]:
left: tuple[str | None, datetime] = (None, datetime(1, 1, 1))
right: tuple[str | None, datetime] = (None, datetime(9999, 1, 1))
for tenor in tenors:
sample_end = add_tenor(
start=self.accrual_start,
tenor=tenor,
modifier=self.series.modifier,
calendar=self.series.calendar,
)
if sample_end <= self.accrual_end and sample_end > left[1]:
left = (tenor, sample_end)
if sample_end > self.accrual_end and sample_end < right[1]:
right = (tenor, sample_end)
break
ret: tuple[list[str], list[datetime]] = ([], [])
if left[0] is not None:
ret[0].append(left[0])
ret[1].append(left[1])
if right[0] is not None:
ret[0].append(right[0])
ret[1].append(right[1])
return ret
# def _stub_tenors_from_fixings(self) -> tuple[list[str], list[datetime]]:
# """
# Return the tenors available in the :class:`~rateslib.defaults.Fixings` object for
# determining an IBOR type stub period.
#
# Returns
# -------
# tuple of list[string tenors] and list[evaluated end dates]
# """
# from rateslib.scheduling import add_tenor
#
# left: tuple[str | None, datetime] = (None, datetime(1, 1, 1))
# right: tuple[str | None, datetime] = (None, datetime(9999, 1, 1))
#
# for tenor in [
# "1D",
# "1B",
# "2B",
# "1W",
# "2W",
# "3W",
# "4W",
# "1M",
# "2M",
# "3M",
# "4M",
# "5M",
# "6M",
# "7M",
# "8M",
# "9M",
# "10M",
# "11M",
# "12M",
# "1Y",
# ]:
# try:
# _ = fixings.__getitem__(f"{self.identifier}_{tenor}")
# except Exception: # noqa: S112
# continue
# else:
# sample_end = add_tenor(
# start=self.accrual_start,
# tenor=tenor,
# modifier=self.series.modifier,
# calendar=self.series.calendar,
# )
# if sample_end <= self.accrual_end and sample_end > left[1]:
# left = (tenor, sample_end)
# if sample_end > self.accrual_end and sample_end < right[1]:
# right = (tenor, sample_end)
# break
#
# ret: tuple[list[str], list[datetime]] = ([], [])
# if left[0] is not None:
# ret[0].append(left[0])
# ret[1].append(left[1])
# if right[0] is not None:
# ret[0].append(right[0])
# ret[1].append(right[1])
# return ret
class RFRFixing(_BaseFixing):
"""
A rate fixing value representing an RFR type calculating involving multiple RFR publications.
Parameters
----------
rate_index: FloatRateIndex
The parameters associated with the floating rate index.
accrual_start: datetime
The start accrual date for the period.
accrual_end: datetime
The end accrual date for the period.
value: float, Dual, Dual2, Variable, optional
The initial value for the fixing to adopt. Most commonly this is not given and it is
determined from a timeseries of published FX rates.
identifier: str, optional
The string name of the timeseries to be loaded by the *Fixings* object. For alignment with
internal structuring these should have the suffix "_1B", e.g. "ESTR_1B".
fixing_method: FloatFixingMethod or str
The :class:`FloatFixingMethod` object used to combine multiple RFR fixings.
spread_compound_method: SpreadCompoundMethod or str
A :class:`SpreadCompoundMethod` object used define the calculation of the addition of the
``float_spread``.
float_spread: float, DUal, Dual2, Variable
An additional amount added to the calculation to determine the final period rate.
Examples
--------
.. ipython:: python
:suppress:
from rateslib.enums.parameters import SpreadCompoundMethod, FloatFixingMethod
from rateslib.data.fixings import RFRFixing
from rateslib.data.fixings import FloatRateIndex
from rateslib import fixings, dt
from pandas import Series
The below is a fully determined *RFRFixing* with populated rates.
.. ipython:: python
fixings.add("SOFR_1B", Series(index=[
dt(2025, 1, 8), dt(2025, 1, 9), dt(2025, 1, 10), dt(2025, 1, 13), dt(2025, 1, 14)
], data=[1.1, 2.2, 3.3, 4.4, 5.5]))
rfr_fix = RFRFixing(
accrual_start=dt(2025, 1, 9),
accrual_end=dt(2025, 1, 15),
identifier="SOFR_1B",
spread_compound_method=SpreadCompoundMethod.NoneSimple,
fixing_method=FloatFixingMethod.RFRPaymentDelay(),
float_spread=0.0,
rate_index=FloatRateIndex(frequency="1B", series="usd_rfr")
)
rfr_fix.value
rfr_fix.populated
This second example is a partly undetermined period, and will result in *NoInput* for its
value but has recorded partial population of its individual RFRs.
.. ipython:: python
rfr_fix2 = RFRFixing(
accrual_start=dt(2025, 1, 9),
accrual_end=dt(2025, 1, 21),
identifier="SOFR_1B",
spread_compound_method="NoneSimple",
fixing_method="RFRPaymentDelay",
float_spread=0.0,
rate_index=FloatRateIndex(frequency="1B", series="usd_rfr")
)
rfr_fix2.value
rfr_fix2.populated
.. ipython:: python
:suppress:
fixings.pop("SOFR_1B")
"""
_populated: Series[DualTypes] # type: ignore[type-var]
_dates_obs: list[datetime] | None
_dates_dcf: list[datetime] | None
_float_spread: DualTypes
_fixing_index: FloatRateIndex
_accrual_start: datetime
_accrual_end: datetime
_fixing_method: FloatFixingMethod
_spread_compound_method: SpreadCompoundMethod
def __init__(
self,
*,
rate_index: FloatRateIndex,
accrual_start: datetime,
accrual_end: datetime,
fixing_method: FloatFixingMethod | str,
spread_compound_method: SpreadCompoundMethod | str,
float_spread: DualTypes,
value: DualTypes_ = NoInput(0),
identifier: str_ = NoInput(0),
):
self._identifier = identifier if isinstance(identifier, NoInput) else identifier.upper()
self._value = value
self._state = 0
self._float_spread = float_spread
self._spread_compound_method = _get_spread_compound_method(spread_compound_method)
self._rate_index = rate_index
self._value = value
self._accrual_start = accrual_start
self._accrual_end = accrual_end
self._fixing_method = _get_float_fixing_method(fixing_method)
self._populated = Series(index=[], data=[], dtype=float) # type: ignore[assignment]
def reset(self, state: int_ = NoInput(0)) -> None:
if isinstance(state, NoInput) or self._state == state:
self._populated = Series(index=[], data=[], dtype=float) # type: ignore[assignment]
self._value = NoInput(0)
self._state = 0
@property
def fixing_method(self) -> FloatFixingMethod:
"""The :class:`FloatFixingMethod` object used to combine multiple RFR fixings."""
return self._fixing_method
@property
def float_spread(self) -> DualTypes:
"""The spread value incorporated into the fixing calculation using the compound method."""
return self._float_spread
@property
def spread_compound_method(self) -> SpreadCompoundMethod:
"""A :class:`SpreadCompoundMethod` object used define the calculation of the addition of the
``float_spread``."""
return self._spread_compound_method
@property
def accrual_start(self) -> datetime:
"""The accrual start date for the underlying float rate period."""
return self._accrual_start
@property
def accrual_end(self) -> datetime:
"""The accrual end date for the underlying float rate period."""
return self._accrual_end
@property
def value(self) -> DualTypes_:
if not isinstance(self._value, NoInput):
return self._value
else:
if isinstance(self._identifier, NoInput):
return NoInput(0)
else:
state, timeseries, bounds = fixings.__getitem__(self._identifier)
if state == self._state:
return NoInput(0)
else:
self._state = state
v = self._lookup_and_calculate(timeseries, bounds)
self._value = v
return v
@property
def populated(self) -> Series[DualTypes]: # type: ignore[type-var]
"""The looked up fixings as part of the calculation after a ``value`` calculation."""
return self._populated
@property
def unpopulated(self) -> Series[DualTypes]: # type: ignore[type-var]
"""The fixings that are not published but are required to determine the period fixing."""
return Series(index=self.dates_obs[:-1], data=np.nan, dtype=object).drop( # type: ignore[return-value]
self.populated.index
)
def _lookup_and_calculate(
self,
timeseries: Series[DualTypes], # type: ignore[type-var]
bounds: tuple[datetime, datetime] | None,
) -> DualTypes_:
value, populated = self._lookup(
timeseries=timeseries,
fixing_method=self.fixing_method,
dates_obs=self.dates_obs,
dcfs_dcf=self.dcfs_dcf,
float_spread=self.float_spread,
spread_compound_method=self.spread_compound_method,
)
self._populated = populated
return value
@classmethod
def _lookup(
cls,
timeseries: Series[DualTypes], # type: ignore[type-var]
# bounds: tuple[datetime, datetime] | None,
# accrual_start: datetime,
# accrual_end: datetime,
fixing_method: FloatFixingMethod,
dates_obs: Arr1dObj,
# dates_dcf: list[datetime] | None,
# dcfs_obs: Arr1dF64,
dcfs_dcf: Arr1dF64,
float_spread: DualTypes,
spread_compound_method: SpreadCompoundMethod,
) -> tuple[DualTypes_, Series[DualTypes]]: # type: ignore[type-var]
fixing_rates: Series[DualTypes] = Series(index=dates_obs[:-1], data=np.nan, dtype=object) # type: ignore[type-var, assignment]
# populate Series with values
fixing_rates, populated, unpopulated = (
_RFRRate._push_rate_fixings_as_series_to_fixing_rates(
fixing_rates=fixing_rates,
rate_fixings=timeseries,
fixing_method=fixing_method,
)
)
if len(unpopulated) > 0:
return NoInput(0), populated
else:
result = _RFRRate._inefficient_calculation(
fixing_rates=fixing_rates,
fixing_dcfs=dcfs_dcf,
fixing_method=fixing_method,
spread_compound_method=spread_compound_method,
float_spread=float_spread,
)
if isinstance(result, Err):
result.unwrap() # will raise
return result.unwrap(), populated
@property
def rate_index(self) -> FloatRateIndex:
"""The :class:`FloatRateIndex` defining the parameters of the RFR interest rate index."""
return self._rate_index
@cached_property
def dates_obs(self) -> Arr1dObj:
"""A sequence of dates defining the individual **observation** rates for the period."""
start, end = self.bounds[0]
return np.array(self.rate_index.calendar.bus_date_range(start, end))
@cached_property
def dates_dcf(self) -> Arr1dObj:
"""A sequence of dates defining the individual **DCF** dates for the period."""
start, end = self.bounds[1]
return np.array(self.rate_index.calendar.bus_date_range(start, end))
@cached_property
def dcfs_obs(self) -> Arr1dF64:
"""A sequence of floats defining the individual **DCF** values associated with
the method's **observation** dates."""
return _RFRRate._get_dcf_values(
dcf_dates=self.dates_obs,
fixing_convention=self.rate_index.convention,
fixing_calendar=self.rate_index.calendar,
)
@cached_property
def dcfs_dcf(self) -> Arr1dF64:
"""A sequence of floats defining the individual **DCF** values associated with
the **DCF** dates natural to the fixing rates."""
return _RFRRate._get_dcf_values(
dcf_dates=self.dates_dcf,
fixing_convention=self.rate_index.convention,
fixing_calendar=self.rate_index.calendar,
)
@cached_property
def bounds(self) -> tuple[tuple[datetime, datetime], tuple[datetime, datetime]]:
"""The fixing method adjusted start and end dates for the **observation** dates and
the **dcf** dates."""
return self._get_date_bounds(
accrual_start=self.accrual_start,
accrual_end=self.accrual_end,
fixing_method=self.fixing_method,
fixing_calendar=self.rate_index.calendar,
)
@staticmethod
def _get_date_bounds(
accrual_start: datetime,
accrual_end: datetime,
fixing_method: FloatFixingMethod,
fixing_calendar: CalTypes,
) -> tuple[tuple[datetime, datetime], tuple[datetime, datetime]]:
"""
For each different RFR fixing method adjust the start and end date of the associated
period to return adjusted start and end dates for the fixing set as well as the
DCF set.
For all methods except 'lookback', these dates will align with each other.
For 'lookback' the observed RFRs are applied over different DCFs that do not naturally
align.
"""
# Depending upon method get the observation dates and dcf dates
if type(fixing_method) in [
FloatFixingMethod.RFRPaymentDelay,
FloatFixingMethod.RFRPaymentDelayAverage,
FloatFixingMethod.RFRLockout,
FloatFixingMethod.RFRLockoutAverage,
]:
start_obs, end_obs = accrual_start, accrual_end
start_dcf, end_dcf = accrual_start, accrual_end
elif type(fixing_method) in [
FloatFixingMethod.RFRObservationShift,
FloatFixingMethod.RFRObservationShiftAverage,
]:
start_obs = fixing_calendar.lag_bus_days(
accrual_start, -fixing_method.method_param(), settlement=False
)
end_obs = fixing_calendar.lag_bus_days(
accrual_end, -fixing_method.method_param(), settlement=False
)
start_dcf, end_dcf = start_obs, end_obs
else:
# fixing_method in [
# FloatFixingMethod.RFRLookback,
# FloatFixingMethod.RFRLookbackAverage,
# ]:
start_obs = fixing_calendar.lag_bus_days(
accrual_start, -fixing_method.method_param(), settlement=False
)
end_obs = fixing_calendar.lag_bus_days(
accrual_end, -fixing_method.method_param(), settlement=False
)
start_dcf, end_dcf = accrual_start, accrual_end
return (start_obs, end_obs), (start_dcf, end_dcf)
class FloatRateIndex:
"""
Define the parameters of a specific interest rate index.
Parameters
----------
frequency : Frequency or str
The specific tenor of the interest rate index.
series : Series or str
The general parameters applied to any tenor of this particular interest rate series.
Examples
--------
None
"""
_frequency: Frequency
_series: FloatRateSeries
def __init__(
self,
frequency: Frequency | str,
series: FloatRateSeries | str,
) -> None:
self._series = _get_float_rate_series(series)
self._frequency = _get_frequency(frequency, NoInput(0), self.calendar)
@property
def frequency(self) -> Frequency:
"""The specific tenor of the interest rate index."""
return self._frequency
@property
def series(self) -> FloatRateSeries:
"""The general parameters applied to any tenor of this particular interest rate series."""
return self._series
@property
def lag(self) -> int:
"""The lag for the determining the publication date of the interest rate index."""
return self.series.lag
@property
def calendar(self) -> CalTypes:
"""The calendar associated with the publication of the interest rate index."""
return self.series.calendar
@property
def modifier(self) -> Adjuster:
"""The :class:`Adjuster` associated with the end accrual day of the interest rate index."""
return self.series.modifier
@property
def eom(self) -> bool:
"""Whether the interest rate index adopts an end of month convention."""
return self.series.eom
@property
def convention(self) -> Convention:
"""The :class:`Convention` associated with the publication of the interest rate index."""
return self.series.convention
class FloatRateSeries:
"""
Define the general parameters of multiple tenors of an interest rate series.
.. role:: red
.. role:: green
Parameters
----------
lag: int, :red:`required`
The number of business days by which the fixing date is lagged to the accrual start date.
calendar: Calendar, str :red:`required`
The calendar associated with the floating rate's date determination.
modifier: Adjuster, str, :red:`required`
The :class:`~rateslib.scheduling.Adjuster` associated with the end accrual day of the
floating rate's date.
convention: Convention, str, :red:`required`
The day count :class:`~rateslib.scheduling.Convention` associated with the floating rate.
eom: bool, :red:`required`
Whether the interest rate index natively adopts EoM roll preference or not.
tenors: list[str], :green:`optional`
The official list of tenor indexes published by this series.
zero_float_period_stub: StubInference, str, :green:`optional (set as 'ShortBack')`
The stub inference parameter that is used to steer schedule construction when this
series is used as part of a :class:`~rateslib.legs.FloatLeg` composed of
:class:`~rateslib.periods.ZeroFloatPeriod`.
"""
_lag: int
_calendar: CalTypes
_modifier: Adjuster
_convention: Convention
_eom: bool
_zero_period_stub: StubInference
_tenors: list[str] | NoInput
def __init__(
self,
lag: int,
calendar: CalTypes | str,
modifier: Adjuster | str,
convention: Convention | str,
eom: bool,
zero_period_stub: StubInference | str_ = NoInput(0),
tenors: list[str] | NoInput = NoInput(0),
) -> None:
self._lag = lag
self._calendar = get_calendar(calendar)
self._modifier = _get_adjuster(modifier)
self._convention = _get_convention(convention)
self._eom = eom
self._tenors: list[str] = tenors
if not isinstance(self.tenors, NoInput) and len(self.tenors) == 0:
raise ValueError("`tenors` cannot be given as an empty list.")
self._zero_period_stub = _get_stub_inference(
_drb("ShortBack", zero_period_stub), NoInput(0), NoInput(0)
)
@property
def lag(self) -> int:
"""The number of business days before accrual start that the fixing is published according
to ``calendar``."""
return self._lag
@property
def calendar(self) -> CalTypes:
"""The fixing calendar for the rate series."""
return self._calendar
@property
def convention(self) -> Convention:
"""The day count :class:`~rateslib.scheduling.Convention` associated with the fixing."""
return self._convention
@property
def modifier(self) -> Adjuster:
"""The date :class:`~rateslib.scheduling.Adjuster` used for date adjustment of the tenor."""
return self._modifier
@property
def eom(self) -> bool:
"""Whether end of month date rolling is applied to date calculations for the fixing
series."""
return self._eom
@property
def zero_period_stub(self) -> StubInference:
""":class:`~rateslib.scheduling.StubInference` used when a fixing tenor does not divide
into the frequency of a compounded :class:`~rateslib.periods.ZeroFloatPeriod`."""
return self._zero_period_stub
@property
def tenors(self) -> list[str] | NoInput:
"""
A list of tenors that are published by this interest rate series.
"""
return self._tenors
class _IBORRate:
@staticmethod
def _rate(
*,
rate_curve: _BaseCurve | dict[str, _BaseCurve] | NoInput,
rate_fixings: DualTypes | Series[DualTypes] | str_, # type: ignore[type-var]
start: datetime,
end: datetime,
lag: int,
stub: bool,
float_spread: DualTypes,
rate_series: FloatRateSeries | NoInput,
frequency: Frequency,
) -> Result[DualTypes]:
rate_series_ = _maybe_get_rate_series_from_curve(
rate_curve=rate_curve,
rate_series=rate_series,
lag=lag,
)
fixing_date = rate_series_.calendar.lag_bus_days(start, -rate_series_.lag, settlement=False)
if stub:
# TODO: pass through tenor convention and modifier to the interpolated stub
return _IBORRate._rate_interpolated_stub(
rate_curve=rate_curve,
rate_fixings=rate_fixings,
fixing_date=fixing_date,
start=start,
end=end,
float_spread=float_spread,
rate_series=rate_series_,
)
else:
return _IBORRate._rate_single_tenor(
rate_curve=rate_curve,
rate_fixings=rate_fixings,
fixing_date=fixing_date,
start=start,
end=end,
frequency=frequency,
float_spread=float_spread,
)
@staticmethod
def _rate_interpolated_stub(
rate_curve: _BaseCurve | dict[str, _BaseCurve] | NoInput,
rate_fixings: DualTypes | Series[DualTypes] | str_, # type: ignore[type-var]
fixing_date: datetime,
start: datetime,
end: datetime,
float_spread: DualTypes,
rate_series: FloatRateSeries,
) -> Result[DualTypes]:
if isinstance(rate_fixings, NoInput):
# will attempt to forecast stub period from rate_curve
if isinstance(rate_curve, dict):
return _IBORRate._rate_interpolated_stub_from_curve_dict(
rate_curve=rate_curve,
fixing_date=fixing_date,
start=start,
end=end,
float_spread=float_spread,
)
else:
return _IBORRate._rate_stub_forecast_from_curve(
rate_curve=rate_curve,
fixing_date=fixing_date,
start=start,
end=end,
float_spread=float_spread,
)
else:
# will maybe find relevant fixing values in Series
return _IBORRate._rate_interpolated_stub_maybe_from_fixings(
rate_curve=rate_curve,
rate_fixings=rate_fixings,
fixing_date=fixing_date,
start=start,
end=end,
rate_series=rate_series,
float_spread=float_spread,
)
@staticmethod
def _rate_interpolated_stub_maybe_from_fixings(
rate_curve: _BaseCurve_ | dict[str, _BaseCurve],
rate_fixings: DualTypes | Series[DualTypes] | str, # type: ignore[type-var]
fixing_date: datetime,
start: datetime,
end: datetime,
float_spread: DualTypes,
rate_series: FloatRateSeries,
) -> Result[DualTypes]:
if isinstance(rate_fixings, str):
tenors, dates, fixings_ = fixings.get_stub_ibor_fixings(
value_start_date=start,
value_end_date=end,
fixing_calendar=rate_series.calendar,
fixing_modifier=rate_series.modifier,
fixing_identifier=rate_fixings,
fixing_date=fixing_date,
)
if len(tenors) == 0:
# nothing found
return _IBORRate._rate_interpolated_stub(
rate_curve=rate_curve,
rate_fixings=NoInput(0), # no fixings are found
fixing_date=fixing_date,
start=start,
end=end,
float_spread=float_spread,
rate_series=rate_series,
)
elif len(tenors) == 1:
if fixings_[0] is None:
return _IBORRate._rate_interpolated_stub(
rate_curve=rate_curve,
rate_fixings=NoInput(0), # no fixings are found
fixing_date=fixing_date,
start=start,
end=end,
float_spread=float_spread,
rate_series=rate_series,
)
return Ok(fixings_[0] + float_spread / 100.0)
else:
if fixings_[0] is None or fixings_[1] is None:
# missing data exists
return _IBORRate._rate_interpolated_stub(
rate_curve=rate_curve,
rate_fixings=NoInput(0), # no fixings are found
fixing_date=fixing_date,
start=start,
end=end,
float_spread=float_spread,
rate_series=rate_series,
)
return Ok(
_IBORRate._interpolated_stub_rate(
left_date=dates[0],
right_date=dates[1],
left_rate=fixings_[0],
right_rate=fixings_[1],
maturity_date=end,
float_spread=float_spread,
)
)
elif isinstance(rate_fixings, Series):
raise ValueError(err.VE_FIXINGS_BAD_TYPE)
else:
return Ok(rate_fixings + float_spread / 100.0)
@staticmethod
def _rate_interpolated_stub_from_curve_dict(
rate_curve: dict[str, _BaseCurve],
fixing_date: datetime,
start: datetime,
end: datetime,
float_spread: DualTypes,
) -> Result[DualTypes]:
"""
Get the rate on all available curves in dict and then determine the ones to interpolate.
"""
def _rate(c: _BaseCurve, tenor: str) -> DualTypes:
if c._base_type == _CurveType.dfs:
return c._rate_with_raise(start, tenor)
else: # values
return c._rate_with_raise(fixing_date, tenor) # tenor is not used on LineCurve
try:
values = {
add_tenor(start, k, v.meta.modifier, v.meta.calendar): _rate(v, k)
for k, v in rate_curve.items()
}
except Exception as e:
return Err(e)
values = dict(sorted(values.items()))
dates, rates = list(values.keys()), list(values.values())
if end > dates[-1]:
warnings.warn(
"Interpolated stub period has a length longer than the provided "
"IBOR curve tenors: using the longest IBOR value.",
UserWarning,
)
ret: DualTypes = rates[-1]
elif end < dates[0]:
warnings.warn(
"Interpolated stub period has a length shorter than the provided "
"IBOR curve tenors: using the shortest IBOR value.",
UserWarning,
)
ret = rates[0]
else:
i = index_left(dates, len(dates), end)
ret = rates[i] + (rates[i + 1] - rates[i]) * (
(end - dates[i]).days / (dates[i + 1] - dates[i]).days
)
return Ok(ret + float_spread / 100.0)
@staticmethod
def _rate_single_tenor(
rate_curve: _BaseCurve | dict[str, _BaseCurve] | NoInput,
rate_fixings: DualTypes | Series[DualTypes] | str_, # type: ignore[type-var]
fixing_date: datetime,
start: datetime,
end: datetime,
frequency: Frequency,
float_spread: DualTypes,
) -> Result[DualTypes]:
if isinstance(rate_fixings, NoInput):
return _IBORRate._rate_tenor_forecast_from_curve(
rate_curve=rate_curve,
fixing_date=fixing_date,
start=start,
end=end,
frequency=frequency,
float_spread=float_spread,
)
else:
return _IBORRate._rate_tenor_maybe_from_fixings(
rate_curve=rate_curve,
rate_fixings=rate_fixings,
fixing_date=fixing_date,
start=start,
end=end,
frequency=frequency,
float_spread=float_spread,
)
@staticmethod
def _rate_tenor_maybe_from_fixings(
rate_curve: _BaseCurve_ | dict[str, _BaseCurve],
rate_fixings: DualTypes | Series[DualTypes] | str, # type: ignore[type-var]
fixing_date: datetime,
start: datetime,
end: datetime,
frequency: Frequency,
float_spread: DualTypes,
) -> Result[DualTypes]:
if isinstance(rate_fixings, str | Series):
if isinstance(rate_fixings, str):
identifier = rate_fixings
_, fixings_, bounds = fixings[identifier]
else:
identifier = ""
fixings_ = rate_fixings
bounds = (rate_fixings.index.min(), rate_fixings.index.max())
if fixing_date <= bounds[1]:
try:
fixing = fixings_.loc[fixing_date]
return Ok(fixing + float_spread / 100.0)
except KeyError:
warnings.warn(
f"Fixings are provided in series: '{identifier}', but the value for "
f" date: {fixing_date} is not found.\nAttempting to forecast from "
f"the `rate_curve`.",
)
return _IBORRate._rate_tenor_forecast_from_curve(
rate_curve=rate_curve,
fixing_date=fixing_date,
start=start,
end=end,
frequency=frequency,
float_spread=float_spread,
)
else:
# is just a scalar value so return directly.
return Ok(rate_fixings + float_spread / 100.0)
@staticmethod
def _rate_tenor_forecast_from_curve(
rate_curve: _BaseCurve_ | dict[str, _BaseCurve],
fixing_date: datetime,
start: datetime,
end: datetime,
frequency: Frequency,
float_spread: DualTypes,
) -> Result[DualTypes]:
tenor = _get_tenor_from_frequency(frequency)
if isinstance(rate_curve, NoInput):
return Err(ValueError(err.VE_NEEDS_RATE_TO_FORECAST_TENOR_IBOR))
elif isinstance(rate_curve, dict):
remapped_rate_curve = {k.lower(): v for k, v in rate_curve.items()}
rate_curve_ = remapped_rate_curve[tenor.lower()]
return _IBORRate._rate_tenor_forecast_from_curve(
rate_curve=rate_curve_,
fixing_date=fixing_date,
start=start,
end=end,
frequency=frequency,
float_spread=float_spread,
)
else:
if rate_curve._base_type == _CurveType.dfs:
try:
r = rate_curve._rate_with_raise(start, tenor) + float_spread / 100.0
except Exception as e:
return Err(e)
else:
return Ok(r)
else:
try:
r = rate_curve._rate_with_raise(fixing_date, NoInput(0)) + float_spread / 100.0
except Exception as e:
return Err(e)
else:
return Ok(r)
@staticmethod
def _rate_stub_forecast_from_curve(
rate_curve: _BaseCurve_,
fixing_date: datetime,
start: datetime,
end: datetime,
float_spread: DualTypes,
) -> Result[DualTypes]:
if isinstance(rate_curve, NoInput):
return Err(ValueError(err.VE_NEEDS_RATE_TO_FORECAST_STUB_IBOR))
if rate_curve._base_type == _CurveType.dfs:
try:
r = rate_curve._rate_with_raise(start, end) + float_spread / 100.0
except Exception as e:
return Err(e)
else:
return Ok(r)
else:
try:
r = rate_curve[fixing_date] + float_spread / 100.0
except Exception as e:
return Err(e)
else:
return Ok(r)
@staticmethod
def _interpolated_stub_rate(
left_date: datetime,
right_date: datetime,
left_rate: DualTypes,
right_rate: DualTypes,
maturity_date: datetime,
float_spread: DualTypes,
) -> DualTypes:
return (
left_rate
+ (maturity_date - left_date).days
/ (right_date - left_date).days
* (right_rate - left_rate)
+ float_spread / 100.0
)
class _RFRRate:
"""
Class for maintaining methods related to calculating the period rate for an RFR compounded
period. These periods have multiple branches depending upon;
- which `fixing_method` has been selected.
- which `spread_compound_method` has been selected (if the `float_spread` is non-zero).
- whether there are any known fixings that must be populated to the calculation or unknown
fixings must be forecast by some curve.
"""
@staticmethod
def _rate(
start: datetime,
end: datetime,
rate_curve: _BaseCurve_,
rate_fixings: DualTypes | Series[DualTypes] | str_, # type: ignore[type-var]
fixing_method: FloatFixingMethod,
spread_compound_method: SpreadCompoundMethod,
float_spread: DualTypes,
rate_series: FloatRateSeries | NoInput,
) -> Result[ # type: ignore[type-var]
tuple[
DualTypes,
tuple[datetime, datetime] | None,
tuple[datetime, datetime] | None,
Arr1dObj | None,
Arr1dObj | None,
Arr1dF64 | None,
Arr1dF64 | None,
Series[DualTypes] | None,
Series[DualTypes] | None,
Series[DualTypes] | None,
]
]:
"""
To avoid repeated calculation, this function will pass back the data it calculates.
In some short-circuited calculation not all data will have been calculated and returns
None
- 0: rate
- 1: date_boundary_obs
- 2: date_boundary_dcf
- 3: dates_obs
- 4: dates_dcf
- 5: dcfs_obs
- 6: dcfs_dcf
- 7: fixing_rates
- 8: populated
- 9: unpopulated
"""
if isinstance(rate_fixings, int | float | Dual | Dual2 | Variable):
# a scalar value is assumed to have been pre-computed **including** the float spread
# otherwise this information is of no use, since a computation including a
# complicated float spread cannot be performed on just a compounded or average rate.
return Ok((rate_fixings,) + (None,) * 9)
rate_series_ = _maybe_get_rate_series_from_curve(
rate_curve=rate_curve,
rate_series=rate_series,
lag=0,
)
bounds_obs, bounds_dcf, is_matching = _RFRRate._adjust_dates(
start=start,
end=end,
fixing_method=fixing_method,
fixing_calendar=rate_series_.calendar,
)
# >>> short-circuit here before any complex calculation or date lookup is performed.
# EFFICIENT CALCULATION:
if _RFRRate._is_rfr_efficient(
rate_curve=rate_curve,
rate_fixings=rate_fixings,
float_spread=float_spread,
spread_compound_method=spread_compound_method,
fixing_method=fixing_method,
):
r_result = _RFRRate._efficient_calculation(
rate_curve=rate_curve, # type: ignore[arg-type] # is pre-checked
bounds_obs=bounds_obs,
float_spread=float_spread,
)
if isinstance(r_result, Err):
return r_result
else:
return Ok((r_result.unwrap(), bounds_obs, bounds_dcf) + (None,) * 7)
dates_obs, dates_dcf, dcfs_obs, dcfs_dcf, populated, unpopulated, fixing_rates = (
_RFRRate._get_dates_and_fixing_rates_from_fixings(
rate_series=rate_series_,
bounds_obs=bounds_obs,
bounds_dcf=bounds_dcf,
is_matching=is_matching,
rate_fixings=rate_fixings,
fixing_method=fixing_method,
)
)
# >>> short circuit and perform a semi-efficient calculation splicing fixings with DFs
# SEMI-EFFICIENT CALCULATION:
if _RFRRate._is_rfr_efficient(
rate_curve, NoInput(0), float_spread, spread_compound_method, fixing_method
):
r = _RFRRate._semi_efficient_calculation(
rate_curve=rate_curve, # type: ignore[arg-type] # guaranteed by if statement
populated=populated,
unpopulated=unpopulated,
obs_date_boundary=bounds_obs,
float_spread=float_spread,
fixing_dcfs=dcfs_dcf,
)
return Ok(
(
r,
bounds_obs,
bounds_dcf,
dates_obs,
dates_dcf,
dcfs_obs,
dcfs_dcf,
fixing_rates,
populated,
unpopulated,
)
)
update = _RFRRate._forecast_fixing_rates_from_curve(
unpopulated=unpopulated,
populated=populated,
fixing_rates=fixing_rates,
rate_curve=rate_curve,
dates_obs=dates_obs,
dcfs_obs=dcfs_obs,
)
if isinstance(update, Err):
return update
# INEFFICIENT CALCULATION having derived all individual fixings.
r_result = _RFRRate._inefficient_calculation(
fixing_rates=fixing_rates,
fixing_dcfs=dcfs_dcf,
fixing_method=fixing_method,
spread_compound_method=spread_compound_method,
float_spread=float_spread,
)
if isinstance(r_result, Err):
return r_result
else:
return Ok(
(
r_result.unwrap(),
bounds_obs,
bounds_dcf,
dates_obs,
dates_dcf,
dcfs_obs,
dcfs_dcf,
fixing_rates,
populated,
unpopulated,
)
)
@staticmethod
def _efficient_calculation(
rate_curve: _BaseCurve, # discount factors only
bounds_obs: tuple[datetime, datetime],
float_spread: DualTypes,
) -> Result[DualTypes]:
"""
Perform an efficient calculation only after the `_is_rfr_efficient` check is performed.
This calculation uses only discount factors and does not calculate individual fixing rates.
"""
try:
r = (
rate_curve._rate_with_raise(
effective=bounds_obs[0],
termination=bounds_obs[1],
# no other arguments are necessary following _is_efficient check
)
+ float_spread / 100.0
)
except Exception as e:
return Err(e)
else:
return Ok(r)
@staticmethod
def _semi_efficient_calculation(
rate_curve: _BaseCurve,
populated: Series[DualTypes], # type: ignore[type-var]
fixing_dcfs: Arr1dF64,
unpopulated: Series[DualTypes], # type: ignore[type-var]
obs_date_boundary: tuple[datetime, datetime],
float_spread: DualTypes,
) -> DualTypes:
"""
Perform an efficient calculation only after the `_is_rfr_efficient` check is performed.
This calculation combines some known fixing values with a forecast people calculated
using discount factors and not by calculating a number of individual fixing rates.
"""
populated_index = prod(
[
1.0 + d * r / 100.0
for r, d in zip(populated, fixing_dcfs[: len(populated)], strict=False)
]
)
# TODO this is not date safe, i.e. a date maybe before the curve starts and DF is zero.
if len(unpopulated) == 0: # i.e. all fixings are known without needing to forecast
unpopulated_index: DualTypes = 1.0
else:
unpopulated_index = rate_curve[unpopulated.index[0]] / rate_curve[obs_date_boundary[1]]
rate: DualTypes = ((populated_index * unpopulated_index) - 1.0) * 100.0 / fixing_dcfs.sum()
return rate + float_spread / 100.0
@staticmethod
def _inefficient_calculation(
fixing_rates: Series,
fixing_dcfs: Arr1dF64,
fixing_method: FloatFixingMethod,
spread_compound_method: SpreadCompoundMethod,
float_spread: DualTypes,
) -> Result[DualTypes]:
"""
Perform a full calculation forecasting every individual fixing rate and then compounding
or averaging each of them up in turn, combining a float spread if necessary.
"""
# overwrite with lockout rates: this is needed if rates have been forecast from curve.
if type(fixing_method) in [
FloatFixingMethod.RFRLockout,
FloatFixingMethod.RFRLockoutAverage,
]:
# overwrite fixings
method_param = fixing_method.method_param()
if method_param >= len(fixing_rates):
return Err(
ValueError(err.VE_LOCKOUT_METHOD_PARAM.format(method_param, fixing_rates))
)
for i in range(1, method_param + 1):
fixing_rates.iloc[-i] = fixing_rates.iloc[-(method_param + 1)]
if type(fixing_method) in [
FloatFixingMethod.RFRLockoutAverage,
FloatFixingMethod.RFRLookbackAverage,
FloatFixingMethod.RFRObservationShiftAverage,
FloatFixingMethod.RFRPaymentDelayAverage,
]:
return _RFRRate._calculator_rate_rfr_avg_with_spread(
float_spread=float_spread,
spread_compound_method=spread_compound_method,
rates=fixing_rates.to_numpy(),
dcf_vals=fixing_dcfs,
)
else:
return _RFRRate._calculator_rate_rfr_isda_compounded_with_spread(
float_spread=float_spread,
spread_compound_method=spread_compound_method,
rates=fixing_rates.to_numpy(),
dcf_vals=fixing_dcfs,
)
@staticmethod
def _get_dates_and_fixing_rates_from_fixings(
rate_series: FloatRateSeries,
bounds_obs: tuple[datetime, datetime],
bounds_dcf: tuple[datetime, datetime],
is_matching: bool,
rate_fixings: Series[DualTypes] | str_, # type: ignore[type-var]
fixing_method: FloatFixingMethod,
) -> tuple[ # type: ignore[type-var]
Arr1dObj,
Arr1dObj,
Arr1dF64,
Arr1dF64,
Series[DualTypes],
Series[DualTypes],
Series[DualTypes],
]:
"""
For an RFR period, construct the necessary fixing dates and DCF schedule.
Populate fixings from a Series if any values are available to yield.
Return Series objects.
"""
dates_obs, dates_dcf, fixing_rates = _RFRRate._get_obs_and_dcf_dates(
fixing_calendar=rate_series.calendar,
fixing_convention=rate_series.convention,
obs_date_boundary=bounds_obs,
dcf_date_boundary=bounds_dcf,
is_matching=is_matching,
)
dcfs_dcf = _RFRRate._get_dcf_values(
dcf_dates=dates_dcf,
fixing_convention=rate_series.convention,
fixing_calendar=rate_series.calendar,
)
if is_matching:
dcfs_obs = dcfs_dcf.copy()
else:
dcfs_obs = _RFRRate._get_dcf_values(
dcf_dates=dates_obs,
fixing_convention=rate_series.convention,
fixing_calendar=rate_series.calendar,
)
# populate Series with values
if isinstance(rate_fixings, NoInput):
populated: Series[DualTypes] = Series(index=[], data=np.nan, dtype=object) # type: ignore[type-var, assignment]
unpopulated: Series[DualTypes] = Series(index=dates_obs[:-1], data=np.nan, dtype=object) # type: ignore[type-var, assignment]
elif isinstance(rate_fixings, str | Series):
fixing_rates, populated, unpopulated = (
_RFRRate._push_rate_fixings_as_series_to_fixing_rates(
fixing_rates=fixing_rates,
rate_fixings=rate_fixings,
fixing_method=fixing_method,
)
)
else:
raise ValueError(err.VE_FIXINGS_BAD_TYPE) # unknown fixings type fixings runtime issue
return dates_obs, dates_dcf, dcfs_obs, dcfs_dcf, populated, unpopulated, fixing_rates
@staticmethod
def _forecast_fixing_rates_from_curve(
unpopulated: Series[DualTypes], # type: ignore[type-var]
populated: Series[DualTypes], # type: ignore[type-var]
fixing_rates: Series[DualTypes], # type: ignore[type-var]
rate_curve: _BaseCurve_,
dates_obs: Arr1dObj,
dcfs_obs: Arr1dF64,
) -> Result[None]:
# determine unpopulated fixings from the curve
if len(unpopulated) > 0 and isinstance(rate_curve, NoInput):
return Err(FixingMissingForecasterError()) # missing data - needs a rate_curve
unpopulated_obs_dates = dates_obs[len(populated) :]
if len(unpopulated_obs_dates) > 1:
if isinstance(rate_curve, NoInput):
return Err(ValueError(err.VE_NEEDS_RATE_TO_FORECAST_RFR))
if rate_curve._base_type == _CurveType.values:
try:
r = [
rate_curve._rate_with_raise(unpopulated_obs_dates[_], NoInput(0))
for _ in range(len(unpopulated))
]
except Exception as e:
return Err(e)
else:
v = np.array([rate_curve[_] for _ in unpopulated_obs_dates])
r = (v[:-1] / v[1:] - 1) * 100 / dcfs_obs[len(populated) :]
unpopulated = Series(
index=unpopulated.index,
data=r,
)
fixing_rates.update(unpopulated)
return Ok(None)
@staticmethod
def _push_rate_fixings_as_series_to_fixing_rates(
fixing_rates: Series[DualTypes], # type: ignore[type-var]
rate_fixings: str | Series[DualTypes], # type: ignore[type-var]
fixing_method: FloatFixingMethod,
) -> tuple[Series[DualTypes], Series[DualTypes], Series[DualTypes]]: # type: ignore[type-var]
"""
Populates an empty fixings_rates Series with values from a looked up fixings collection.
"""
if isinstance(rate_fixings, str):
fixing_series = fixings[rate_fixings][1]
else:
fixing_series = rate_fixings
if fixing_rates.index[0] > fixing_series.index[-1]:
# then no fixings in scope, so no changes
return fixing_rates, Series(index=[], data=np.nan), fixing_rates.copy() # type: ignore[return-value]
else:
fixing_rates.update(fixing_series)
# push lockout rates if they are available
if type(fixing_method) in [
FloatFixingMethod.RFRLockout,
FloatFixingMethod.RFRLockoutAverage,
]:
method_param = fixing_method.method_param()
if method_param >= len(fixing_rates):
raise ValueError(err.VE_LOCKOUT_METHOD_PARAM.format(method_param, fixing_rates))
if not isna(fixing_rates.iloc[-(1 + method_param)]): # type: ignore[arg-type]
for i in range(method_param):
fixing_rates.iloc[-(1 + i)] = fixing_rates.iloc[-(1 + method_param)]
# validate for missing and expected fixings in the fixing Series
nans = isna(fixing_rates)
populated, unpopulated = fixing_rates[~nans], fixing_rates[nans]
if (
len(unpopulated) > 0
and len(populated) > 0
and unpopulated.index[0] < populated.index[-1]
):
raise ValueError(
err.VE02_5.format( # there is at least one missing fixing data item
rate_fixings,
fixing_rates[nans].index[0].strftime("%d-%m-%Y"),
fixing_rates[~nans].index[-1].strftime("%d-%m-%Y"),
)
)
# validate for unexpected fixings provided in the fixings Series
if 0 < len(populated) < len(fixing_series[populated.index[0] : populated.index[-1]]):
# then fixing series contains an unexpected fixing.
warnings.warn(
err.W02_0.format(
rate_fixings,
populated.index[0].strftime("%d-%m-%Y"),
populated.index[-1].strftime("%d-%m-%Y"),
),
UserWarning,
)
return fixing_rates, populated, unpopulated
@staticmethod
def _adjust_dates(
start: datetime,
end: datetime,
fixing_method: FloatFixingMethod,
fixing_calendar: CalTypes,
) -> tuple[tuple[datetime, datetime], tuple[datetime, datetime], bool]:
"""
For each different RFR fixing method adjust the start and end date of the associated
period to return adjusted start and end dates for the fixing set as well as the
DCF set.
For all methods except 'lookback', these dates will align with each other.
For 'lookback' the observed RFRs are applied over different DCFs that do not naturally
align.
"""
# Depending upon method get the observation dates and dcf dates
if type(fixing_method) in [
FloatFixingMethod.RFRPaymentDelay,
FloatFixingMethod.RFRPaymentDelayAverage,
FloatFixingMethod.RFRLockout,
FloatFixingMethod.RFRLockoutAverage,
]:
start_obs, end_obs = start, end
start_dcf, end_dcf = start, end
is_matching = True
elif type(fixing_method) in [
FloatFixingMethod.RFRObservationShift,
FloatFixingMethod.RFRObservationShiftAverage,
]:
start_obs = fixing_calendar.lag_bus_days(
start, -fixing_method.method_param(), settlement=False
)
end_obs = fixing_calendar.lag_bus_days(
end, -fixing_method.method_param(), settlement=False
)
start_dcf, end_dcf = start_obs, end_obs
is_matching = True
else:
# fixing_method in [
# FloatFixingMethod.RFRLookback,
# FloatFixingMethod.RFRLookbackAverage,
# ]:
start_obs = fixing_calendar.lag_bus_days(
start, -fixing_method.method_param(), settlement=False
)
end_obs = fixing_calendar.lag_bus_days(
end, -fixing_method.method_param(), settlement=False
)
start_dcf, end_dcf = start, end
is_matching = False
return (start_obs, end_obs), (start_dcf, end_dcf), is_matching
@staticmethod
def _get_obs_and_dcf_dates(
fixing_calendar: CalTypes,
fixing_convention: Convention,
obs_date_boundary: tuple[datetime, datetime],
dcf_date_boundary: tuple[datetime, datetime],
is_matching: bool,
) -> tuple[Arr1dObj, Arr1dObj, Series[DualTypes]]: # type: ignore[type-var]
# construct empty Series for rates and DCFs
obs_dates = np.array(fixing_calendar.bus_date_range(*obs_date_boundary))
fixing_rates: Series[DualTypes] = Series(index=obs_dates[:-1], data=np.nan, dtype=object) # type: ignore[type-var, assignment]
if is_matching:
dcf_dates = obs_dates
else:
dcf_dates = np.array(fixing_calendar.bus_date_range(*dcf_date_boundary))
return obs_dates, dcf_dates, fixing_rates
@staticmethod
def _get_dcf_values(
dcf_dates: Arr1dObj,
fixing_convention: Convention,
fixing_calendar: CalTypes,
) -> Arr1dF64:
if fixing_convention == Convention.Act365F:
days = np.fromiter((_.days for _ in dcf_dates[1:] - dcf_dates[:-1]), float)
return days / 365.0
elif fixing_convention == Convention.Act360:
days = np.fromiter((_.days for _ in dcf_dates[1:] - dcf_dates[:-1]), float)
return days / 360.0
elif fixing_convention == Convention.Bus252:
return np.array([1.0 / 252.0] * (len(dcf_dates) - 1))
else:
# this is unconventional fixing convention. Should maybe be avoided altogether.
return np.array(
[
dcf(
start=dcf_dates[i],
end=dcf_dates[i + 1],
convention=fixing_convention,
calendar=fixing_calendar,
)
for i in range(len(dcf_dates) - 1)
]
)
@staticmethod
def _is_rfr_efficient(
rate_curve: _BaseCurve_,
rate_fixings: DualTypes | Series[DualTypes] | str_, # type: ignore[type-var]
float_spread: DualTypes,
spread_compound_method: SpreadCompoundMethod,
fixing_method: FloatFixingMethod,
) -> bool:
"""
Check all of the conditions to return an RFR rate directly from discount factors.
- A rate curve must be available and be based on DFs.
- There cannot be any known fixings that must be incorporated into the calculation.
- Only PaymentDelay and ObservationShift fixing methods are suitable for this calculation.
- Only NoneSimple spread compound method is suitable, or the float spread must be 0.0.
"""
return (
isinstance(rate_curve, _BaseCurve)
and rate_curve._base_type == _CurveType.dfs
and isinstance(rate_fixings, NoInput)
and type(fixing_method)
in [FloatFixingMethod.RFRPaymentDelay, FloatFixingMethod.RFRObservationShift]
and (float_spread == 0.0 or spread_compound_method == SpreadCompoundMethod.NoneSimple)
)
@staticmethod
def _calculator_rate_rfr_avg_with_spread(
float_spread: DualTypes,
spread_compound_method: SpreadCompoundMethod,
rates: Arr1dF64,
dcf_vals: Arr1dF64,
) -> Result[DualTypes]:
"""
Calculate all in rate with float spread under averaging.
Parameters
----------
rates : Series
The rates which are expected for each daily period.
dcf_vals : Series
The weightings which are used for each rate in the compounding formula.
Returns
-------
float, Dual, Dual2
"""
if spread_compound_method != SpreadCompoundMethod.NoneSimple:
return Err(ValueError(err.VE_SPREAD_METHOD_RFR.format(spread_compound_method)))
else:
_: DualTypes = (dcf_vals * rates).sum() / dcf_vals.sum() + float_spread / 100
return Ok(_)
@staticmethod
def _calculator_rate_rfr_isda_compounded_with_spread(
float_spread: DualTypes,
spread_compound_method: SpreadCompoundMethod,
rates: Arr1dObj,
dcf_vals: Arr1dF64,
) -> Result[DualTypes]:
"""
Calculate all in rates with float spread under different compounding methods.
Parameters
----------
rates : Series
The rates which are expected for each daily period.
dcf_vals : Series
The weightings which are used for each rate in the compounding formula.
Returns
-------
float, Dual, Dual2
"""
if float_spread == 0 or spread_compound_method == SpreadCompoundMethod.NoneSimple:
_: DualTypes = (
(1 + dcf_vals * rates / 100).prod() - 1
) * 100 / dcf_vals.sum() + float_spread / 100
return Ok(_)
elif spread_compound_method == SpreadCompoundMethod.ISDACompounding:
_ = (
((1 + dcf_vals * (rates / 100 + float_spread / 10000)).prod() - 1)
* 100
/ dcf_vals.sum()
)
return Ok(_)
else: # spread_compound_method == SpreadCompoundMethod.ISDAFlatCompounding:
sub_cashflows = (rates / 100 + float_spread / 10000) * dcf_vals
C_i = 0.0
for i in range(1, len(sub_cashflows)):
C_i += sub_cashflows[i - 1]
sub_cashflows[i] += C_i * rates[i] / 100 * dcf_vals[i]
_ = sub_cashflows.sum() * 100 / dcf_vals.sum()
return Ok(_)
def _get_float_rate_series(val: FloatRateSeries | str) -> FloatRateSeries:
if isinstance(val, FloatRateSeries):
return val
else:
try:
return FloatRateSeries(**defaults.float_series[val.lower()])
except KeyError:
raise ValueError(
f"The FloatRateSeries: '{val.lower()}' was not found in `defaults`.\n"
"To add a default specification for a FloatRateSeries, for example, use:\n"
f"> defaults.float_series['{val.lower()}'] = {{ \n"
" 'lag': 2,\n"
" 'calendar': 'nyc',\n"
" 'modifier': 'MF',\n"
" 'convention': 'Act360',\n"
" 'eom': False,\n"
f" }}"
)
def _get_float_rate_series_or_blank(val: FloatRateSeries | str_) -> FloatRateSeries | NoInput:
if isinstance(val, NoInput):
return val
else:
return _get_float_rate_series(val)
def _maybe_get_rate_series_from_curve(
rate_curve: CurveOption_,
rate_series: FloatRateSeries | NoInput,
lag: int,
) -> FloatRateSeries:
"""Get a rate fixing calendar and convention from a Curve or the alternatives if not given."""
if isinstance(rate_curve, NoInput):
if isinstance(rate_series, NoInput):
raise ValueError(err.VE_NEEDS_CURVE_OR_INDEX)
else:
# get params from rate_index
return rate_series
else:
if isinstance(rate_curve, dict):
cal_ = list(rate_curve.values())[0].meta.calendar
conv_ = list(rate_curve.values())[0].meta.convention
mod_ = list(rate_curve.values())[0].meta.modifier
else:
cal_ = rate_curve.meta.calendar
conv_ = rate_curve.meta.convention
mod_ = rate_curve.meta.modifier
if isinstance(rate_series, NoInput):
# get params from rate_curve
return FloatRateSeries(
lag=lag,
calendar=cal_,
convention=conv_,
modifier=mod_,
eom=False, # TODO: un hard code this
)
else:
if rate_series.convention != conv_:
raise ValueError(
err.MISMATCH_RATE_INDEX_PARAMETERS.format(
"convention", conv_, rate_series.convention
)
)
# dual parameters may be specified
# get params from rate_index
return rate_series
def _leg_fixings_to_list(rate_fixings: LegFixings, n_periods: int) -> list[PeriodFixings]:
"""Perform a conversion of 'LegRateFixings' into a list of PeriodFixings."""
if isinstance(rate_fixings, NoInput):
# NoInput is converted to a list of NoInputs
return [NoInput(0)] * n_periods
elif isinstance(rate_fixings, tuple):
# A tuple must be a 2-tuple which is converted to a first item and then multiplied.
return [rate_fixings[0]] + [rate_fixings[1]] * (n_periods - 1)
elif isinstance(rate_fixings, list):
# A list is padded with NoInputs
return rate_fixings + [NoInput(0)] * (n_periods - len(rate_fixings))
elif isinstance(rate_fixings, str | Series):
# A string or seried is multiplied
return [rate_fixings] * n_periods
else:
# A scalar value is padded with NoInputs.
return [rate_fixings] + [NoInput(0)] * (n_periods - 1) # type: ignore[return-value]
__all__ = [
"FloatRateSeries",
"FloatRateIndex",
"IRSSeries",
"FXIndex",
"RFRFixing",
"IBORFixing",
"IBORStubFixing",
"IndexFixing",
"IRSFixing",
"FXFixing",
"_FXFixingMajor",
"_UnitFixing",
"_BaseFixing",
]
================================================
FILE: python/rateslib/data/historical/aud_rfr.csv
================================================
reference_date,rate
04-01-2011,-500
05-01-2011,-500
06-01-2011,-500
07-01-2011,-500
10-01-2011,-500
11-01-2011,-500
12-01-2011,-500
13-01-2011,-500
14-01-2011,-500
17-01-2011,-500
18-01-2011,-500
19-01-2011,-500
20-01-2011,-500
21-01-2011,-500
24-01-2011,-500
25-01-2011,-500
27-01-2011,-500
28-01-2011,-500
31-01-2011,-500
01-02-2011,-500
02-02-2011,-500
03-02-2011,-500
04-02-2011,-500
07-02-2011,-500
08-02-2011,-500
09-02-2011,-500
10-02-2011,-500
11-02-2011,-500
14-02-2011,-500
15-02-2011,-500
16-02-2011,-500
17-02-2011,-500
18-02-2011,-500
21-02-2011,-500
22-02-2011,-500
23-02-2011,-500
24-02-2011,-500
25-02-2011,-500
28-02-2011,-500
01-03-2011,-500
02-03-2011,-500
03-03-2011,-500
04-03-2011,-500
07-03-2011,-500
08-03-2011,-500
09-03-2011,-500
10-03-2011,-500
11-03-2011,-500
14-03-2011,-500
15-03-2011,-500
16-03-2011,-500
17-03-2011,-500
18-03-2011,-500
21-03-2011,-500
22-03-2011,-500
23-03-2011,-500
24-03-2011,-500
25-03-2011,-500
28-03-2011,-500
29-03-2011,-500
30-03-2011,-500
31-03-2011,-500
01-04-2011,-500
04-04-2011,-500
05-04-2011,-500
06-04-2011,-500
07-04-2011,-500
08-04-2011,-500
11-04-2011,-500
12-04-2011,-500
13-04-2011,-500
14-04-2011,-500
15-04-2011,-500
18-04-2011,-500
19-04-2011,-500
20-04-2011,-500
21-04-2011,-500
27-04-2011,-500
28-04-2011,-500
29-04-2011,-500
02-05-2011,-500
03-05-2011,-500
04-05-2011,-500
05-05-2011,-500
06-05-2011,-500
09-05-2011,-500
10-05-2011,-500
11-05-2011,-500
12-05-2011,-500
13-05-2011,-500
16-05-2011,-500
17-05-2011,-500
18-05-2011,-500
19-05-2011,-500
20-05-2011,-500
23-05-2011,-500
24-05-2011,-500
25-05-2011,-500
26-05-2011,-500
27-05-2011,-500
30-05-2011,-500
31-05-2011,-500
01-06-2011,-500
02-06-2011,-500
03-06-2011,-500
06-06-2011,-500
07-06-2011,-500
08-06-2011,-500
09-06-2011,-500
10-06-2011,-500
14-06-2011,-500
15-06-2011,-500
16-06-2011,-500
17-06-2011,-500
20-06-2011,-500
21-06-2011,-500
22-06-2011,-500
23-06-2011,-500
24-06-2011,-500
27-06-2011,-500
28-06-2011,-500
29-06-2011,-500
30-06-2011,-500
01-07-2011,-500
04-07-2011,-500
05-07-2011,-500
06-07-2011,-500
07-07-2011,-500
08-07-2011,-500
11-07-2011,-500
12-07-2011,-500
13-07-2011,-500
14-07-2011,-500
15-07-2011,-500
18-07-2011,-500
19-07-2011,-500
20-07-2011,-500
21-07-2011,-500
22-07-2011,-500
25-07-2011,-500
26-07-2011,-500
27-07-2011,-500
28-07-2011,-500
29-07-2011,-500
01-08-2011,-500
02-08-2011,-500
03-08-2011,-500
04-08-2011,-500
05-08-2011,-500
08-08-2011,-500
09-08-2011,-500
10-08-2011,-500
11-08-2011,-500
12-08-2011,-500
15-08-2011,-500
16-08-2011,-500
17-08-2011,-500
18-08-2011,-500
19-08-2011,-500
22-08-2011,-500
23-08-2011,-500
24-08-2011,-500
25-08-2011,-500
26-08-2011,-500
29-08-2011,-500
30-08-2011,-500
31-08-2011,-500
01-09-2011,-500
02-09-2011,-500
05-09-2011,-500
06-09-2011,-500
07-09-2011,-500
08-09-2011,-500
09-09-2011,-500
12-09-2011,-500
13-09-2011,-500
14-09-2011,-500
15-09-2011,-500
16-09-2011,-500
19-09-2011,-500
20-09-2011,-500
21-09-2011,-500
22-09-2011,-500
23-09-2011,-500
26-09-2011,-500
27-09-2011,-500
28-09-2011,-500
29-09-2011,-500
30-09-2011,-500
03-10-2011,-500
04-10-2011,-500
05-10-2011,-500
06-10-2011,-500
07-10-2011,-500
10-10-2011,-500
11-10-2011,-500
12-10-2011,-500
13-10-2011,-500
14-10-2011,-500
17-10-2011,-500
18-10-2011,-500
19-10-2011,-500
20-10-2011,-500
21-10-2011,-500
24-10-2011,-500
25-10-2011,-500
26-10-2011,-500
27-10-2011,-500
28-10-2011,-500
31-10-2011,-500
01-11-2011,-500
02-11-2011,-500
03-11-2011,-500
04-11-2011,-500
07-11-2011,-500
08-11-2011,-500
09-11-2011,-500
10-11-2011,-500
11-11-2011,-500
14-11-2011,-500
15-11-2011,-500
16-11-2011,-500
17-11-2011,-500
18-11-2011,-500
21-11-2011,-500
22-11-2011,-500
23-11-2011,-500
24-11-2011,-500
25-11-2011,-500
28-11-2011,-500
29-11-2011,-500
30-11-2011,-500
01-12-2011,-500
02-12-2011,-500
05-12-2011,-500
06-12-2011,-500
07-12-2011,-500
08-12-2011,-500
09-12-2011,-500
12-12-2011,-500
13-12-2011,-500
14-12-2011,-500
15-12-2011,-500
16-12-2011,-500
19-12-2011,-500
20-12-2011,-500
21-12-2011,-500
22-12-2011,-500
23-12-2011,-500
28-12-2011,-500
29-12-2011,-500
30-12-2011,-500
03-01-2012,-500
04-01-2012,-500
05-01-2012,-500
06-01-2012,-500
09-01-2012,-500
10-01-2012,-500
11-01-2012,-500
12-01-2012,-500
13-01-2012,-500
16-01-2012,-500
17-01-2012,-500
18-01-2012,-500
19-01-2012,-500
20-01-2012,-500
23-01-2012,-500
24-01-2012,-500
25-01-2012,-500
27-01-2012,-500
30-01-2012,-500
31-01-2012,-500
01-02-2012,-500
02-02-2012,-500
03-02-2012,-500
06-02-2012,-500
07-02-2012,-500
08-02-2012,-500
09-02-2012,-500
10-02-2012,-500
13-02-2012,-500
14-02-2012,-500
15-02-2012,-500
16-02-2012,-500
17-02-2012,-500
20-02-2012,-500
21-02-2012,-500
22-02-2012,-500
23-02-2012,-500
24-02-2012,-500
27-02-2012,-500
28-02-2012,-500
29-02-2012,-500
01-03-2012,-500
02-03-2012,-500
05-03-2012,-500
06-03-2012,-500
07-03-2012,-500
08-03-2012,-500
09-03-2012,-500
12-03-2012,-500
13-03-2012,-500
14-03-2012,-500
15-03-2012,-500
16-03-2012,-500
19-03-2012,-500
20-03-2012,-500
21-03-2012,-500
22-03-2012,-500
23-03-2012,-500
26-03-2012,-500
27-03-2012,-500
28-03-2012,-500
29-03-2012,-500
30-03-2012,-500
02-04-2012,-500
03-04-2012,-500
04-04-2012,-500
05-04-2012,-500
10-04-2012,-500
11-04-2012,-500
12-04-2012,-500
13-04-2012,-500
16-04-2012,-500
17-04-2012,-500
18-04-2012,-500
19-04-2012,-500
20-04-2012,-500
23-04-2012,-500
24-04-2012,-500
26-04-2012,-500
27-04-2012,-500
30-04-2012,-500
01-05-2012,-500
02-05-2012,-500
03-05-2012,-500
04-05-2012,-500
07-05-2012,-500
08-05-2012,-500
09-05-2012,-500
10-05-2012,-500
11-05-2012,-500
14-05-2012,-500
15-05-2012,-500
16-05-2012,-500
17-05-2012,-500
18-05-2012,-500
21-05-2012,-500
22-05-2012,-500
23-05-2012,-500
24-05-2012,-500
25-05-2012,-500
28-05-2012,-500
29-05-2012,-500
30-05-2012,-500
31-05-2012,-500
01-06-2012,-500
04-06-2012,-500
05-06-2012,-500
06-06-2012,-500
07-06-2012,-500
08-06-2012,-500
12-06-2012,-500
13-06-2012,-500
14-06-2012,-500
15-06-2012,-500
18-06-2012,-500
19-06-2012,-500
20-06-2012,-500
21-06-2012,-500
22-06-2012,-500
25-06-2012,-500
26-06-2012,-500
27-06-2012,-500
28-06-2012,-500
29-06-2012,-500
02-07-2012,-500
03-07-2012,-500
04-07-2012,-500
05-07-2012,-500
06-07-2012,-500
09-07-2012,-500
10-07-2012,-500
11-07-2012,-500
12-07-2012,-500
13-07-2012,-500
16-07-2012,-500
17-07-2012,-500
18-07-2012,-500
19-07-2012,-500
20-07-2012,-500
23-07-2012,-500
24-07-2012,-500
25-07-2012,-500
26-07-2012,-500
27-07-2012,-500
30-07-2012,-500
31-07-2012,-500
01-08-2012,-500
02-08-2012,-500
03-08-2012,-500
06-08-2012,-500
07-08-2012,-500
08-08-2012,-500
09-08-2012,-500
10-08-2012,-500
13-08-2012,-500
14-08-2012,-500
15-08-2012,-500
16-08-2012,-500
17-08-2012,-500
20-08-2012,-500
21-08-2012,-500
22-08-2012,-500
23-08-2012,-500
24-08-2012,-500
27-08-2012,-500
28-08-2012,-500
29-08-2012,-500
30-08-2012,-500
31-08-2012,-500
03-09-2012,-500
04-09-2012,-500
05-09-2012,-500
06-09-2012,-500
07-09-2012,-500
10-09-2012,-500
11-09-2012,-500
12-09-2012,-500
13-09-2012,-500
14-09-2012,-500
17-09-2012,-500
18-09-2012,-500
19-09-2012,-500
20-09-2012,-500
21-09-2012,-500
24-09-2012,-500
25-09-2012,-500
26-09-2012,-500
27-09-2012,-500
28-09-2012,-500
01-10-2012,-500
02-10-2012,-500
03-10-2012,-500
04-10-2012,-500
05-10-2012,-500
08-10-2012,-500
09-10-2012,-500
10-10-2012,-500
11-10-2012,-500
12-10-2012,-500
15-10-2012,-500
16-10-2012,-500
17-10-2012,-500
18-10-2012,-500
19-10-2012,-500
22-10-2012,-500
23-10-2012,-500
24-10-2012,-500
25-10-2012,-500
26-10-2012,-500
29-10-2012,-500
30-10-2012,-500
31-10-2012,-500
01-11-2012,-500
02-11-2012,-500
05-11-2012,-500
06-11-2012,-500
07-11-2012,-500
08-11-2012,-500
09-11-2012,-500
12-11-2012,-500
13-11-2012,-500
14-11-2012,-500
15-11-2012,-500
16-11-2012,-500
19-11-2012,-500
20-11-2012,-500
21-11-2012,-500
22-11-2012,-500
23-11-2012,-500
26-11-2012,-500
27-11-2012,-500
28-11-2012,-500
29-11-2012,-500
30-11-2012,-500
03-12-2012,-500
04-12-2012,-500
05-12-2012,-500
06-12-2012,-500
07-12-2012,-500
10-12-2012,-500
11-12-2012,-500
12-12-2012,-500
13-12-2012,-500
14-12-2012,-500
17-12-2012,-500
18-12-2012,-500
19-12-2012,-500
20-12-2012,-500
21-12-2012,-500
24-12-2012,-500
27-12-2012,-500
28-12-2012,-500
31-12-2012,-500
02-01-2013,-500
03-01-2013,-500
04-01-2013,-500
07-01-2013,-500
08-01-2013,-500
09-01-2013,-500
10-01-2013,-500
11-01-2013,-500
14-01-2013,-500
15-01-2013,-500
16-01-2013,-500
17-01-2013,-500
18-01-2013,-500
21-01-2013,-500
22-01-2013,-500
23-01-2013,-500
24-01-2013,-500
25-01-2013,-500
29-01-2013,-500
30-01-2013,-500
31-01-2013,-500
01-02-2013,-500
04-02-2013,-500
05-02-2013,-500
06-02-2013,-500
07-02-2013,-500
08-02-2013,-500
11-02-2013,-500
12-02-2013,-500
13-02-2013,-500
14-02-2013,-500
15-02-2013,-500
18-02-2013,-500
19-02-2013,-500
20-02-2013,-500
21-02-2013,-500
22-02-2013,-500
25-02-2013,-500
26-02-2013,-500
27-02-2013,-500
28-02-2013,-500
01-03-2013,-500
04-03-2013,-500
05-03-2013,-500
06-03-2013,-500
07-03-2013,-500
08-03-2013,-500
11-03-2013,-500
12-03-2013,-500
13-03-2013,-500
14-03-2013,-500
15-03-2013,-500
18-03-2013,-500
19-03-2013,-500
20-03-2013,-500
21-03-2013,-500
22-03-2013,-500
25-03-2013,-500
26-03-2013,-500
27-03-2013,-500
28-03-2013,-500
02-04-2013,-500
03-04-2013,-500
04-04-2013,-500
05-04-2013,-500
08-04-2013,-500
09-04-2013,-500
10-04-2013,-500
11-04-2013,-500
12-04-2013,-500
15-04-2013,-500
16-04-2013,-500
17-04-2013,-500
18-04-2013,-500
19-04-2013,-500
22-04-2013,-500
23-04-2013,-500
24-04-2013,-500
26-04-2013,-500
29-04-2013,-500
30-04-2013,-500
01-05-2013,-500
02-05-2013,-500
03-05-2013,-500
06-05-2013,-500
07-05-2013,-500
08-05-2013,-500
09-05-2013,-500
10-05-2013,-500
13-05-2013,-500
14-05-2013,-500
15-05-2013,-500
16-05-2013,-500
17-05-2013,-500
20-05-2013,-500
21-05-2013,-500
22-05-2013,-500
23-05-2013,-500
24-05-2013,-500
27-05-2013,-500
28-05-2013,-500
29-05-2013,-500
30-05-2013,-500
31-05-2013,-500
03-06-2013,-500
04-06-2013,-500
05-06-2013,-500
06-06-2013,-500
07-06-2013,-500
11-06-2013,-500
12-06-2013,-500
13-06-2013,-500
14-06-2013,-500
17-06-2013,-500
18-06-2013,-500
19-06-2013,-500
20-06-2013,-500
21-06-2013,-500
24-06-2013,-500
25-06-2013,-500
26-06-2013,-500
27-06-2013,-500
28-06-2013,-500
01-07-2013,-500
02-07-2013,-500
03-07-2013,-500
04-07-2013,-500
05-07-2013,-500
08-07-2013,-500
09-07-2013,-500
10-07-2013,-500
11-07-2013,-500
12-07-2013,-500
15-07-2013,-500
16-07-2013,-500
17-07-2013,-500
18-07-2013,-500
19-07-2013,-500
22-07-2013,-500
23-07-2013,-500
24-07-2013,-500
25-07-2013,-500
26-07-2013,-500
29-07-2013,-500
30-07-2013,-500
31-07-2013,-500
01-08-2013,-500
02-08-2013,-500
05-08-2013,-500
06-08-2013,-500
07-08-2013,-500
08-08-2013,-500
09-08-2013,-500
12-08-2013,-500
13-08-2013,-500
14-08-2013,-500
15-08-2013,-500
16-08-2013,-500
19-08-2013,-500
20-08-2013,-500
21-08-2013,-500
22-08-2013,-500
23-08-2013,-500
26-08-2013,-500
27-08-2013,-500
28-08-2013,-500
29-08-2013,-500
30-08-2013,-500
02-09-2013,-500
03-09-2013,-500
04-09-2013,-500
05-09-2013,-500
06-09-2013,-500
09-09-2013,-500
10-09-2013,-500
11-09-2013,-500
12-09-2013,-500
13-09-2013,-500
16-09-2013,-500
17-09-2013,-500
18-09-2013,-500
19-09-2013,-500
20-09-2013,-500
23-09-2013,-500
24-09-2013,-500
25-09-2013,-500
26-09-2013,-500
27-09-2013,-500
30-09-2013,-500
01-10-2013,-500
02-10-2013,-500
03-10-2013,-500
04-10-2013,-500
07-10-2013,-500
08-10-2013,-500
09-10-2013,-500
10-10-2013,-500
11-10-2013,-500
14-10-2013,-500
15-10-2013,-500
16-10-2013,-500
17-10-2013,-500
18-10-2013,-500
21-10-2013,-500
22-10-2013,-500
23-10-2013,-500
24-10-2013,-500
25-10-2013,-500
28-10-2013,-500
29-10-2013,-500
30-10-2013,-500
31-10-2013,-500
01-11-2013,-500
04-11-2013,-500
05-11-2013,-500
06-11-2013,-500
07-11-2013,-500
08-11-2013,-500
11-11-2013,-500
12-11-2013,-500
13-11-2013,-500
14-11-2013,-500
15-11-2013,-500
18-11-2013,-500
19-11-2013,-500
20-11-2013,-500
21-11-2013,-500
22-11-2013,-500
25-11-2013,-500
26-11-2013,-500
27-11-2013,-500
28-11-2013,-500
29-11-2013,-500
02-12-2013,-500
03-12-2013,-500
04-12-2013,-500
05-12-2013,-500
06-12-2013,-500
09-12-2013,-500
10-12-2013,-500
11-12-2013,-500
12-12-2013,-500
13-12-2013,-500
16-12-2013,-500
17-12-2013,-500
18-12-2013,-500
19-12-2013,-500
20-12-2013,-500
23-12-2013,-500
24-12-2013,-500
27-12-2013,-500
30-12-2013,-500
31-12-2013,-500
02-01-2014,-500
03-01-2014,-500
06-01-2014,-500
07-01-2014,-500
08-01-2014,-500
09-01-2014,-500
10-01-2014,-500
13-01-2014,-500
14-01-2014,-500
15-01-2014,-500
16-01-2014,-500
17-01-2014,-500
20-01-2014,-500
21-01-2014,-500
22-01-2014,-500
23-01-2014,-500
24-01-2014,-500
28-01-2014,-500
29-01-2014,-500
30-01-2014,-500
31-01-2014,-500
03-02-2014,-500
04-02-2014,-500
05-02-2014,-500
06-02-2014,-500
07-02-2014,-500
10-02-2014,-500
11-02-2014,-500
12-02-2014,-500
13-02-2014,-500
14-02-2014,-500
17-02-2014,-500
18-02-2014,-500
19-02-2014,-500
20-02-2014,-500
21-02-2014,-500
24-02-2014,-500
25-02-2014,-500
26-02-2014,-500
27-02-2014,-500
28-02-2014,-500
03-03-2014,-500
04-03-2014,-500
05-03-2014,-500
06-03-2014,-500
07-03-2014,-500
10-03-2014,-500
11-03-2014,-500
12-03-2014,-500
13-03-2014,-500
14-03-2014,-500
17-03-2014,-500
18-03-2014,-500
19-03-2014,-500
20-03-2014,-500
21-03-2014,-500
24-03-2014,-500
25-03-2014,-500
26-03-2014,-500
27-03-2014,-500
28-03-2014,-500
31-03-2014,-500
01-04-2014,-500
02-04-2014,-500
03-04-2014,-500
04-04-2014,-500
07-04-2014,-500
08-04-2014,-500
09-04-2014,-500
10-04-2014,-500
11-04-2014,-500
14-04-2014,-500
15-04-2014,-500
16-04-2014,-500
17-04-2014,-500
22-04-2014,-500
23-04-2014,-500
24-04-2014,-500
28-04-2014,-500
29-04-2014,-500
30-04-2014,-500
01-05-2014,-500
02-05-2014,-500
05-05-2014,-500
06-05-2014,-500
07-05-2014,-500
08-05-2014,-500
09-05-2014,-500
12-05-2014,-500
13-05-2014,-500
14-05-2014,-500
15-05-2014,-500
16-05-2014,-500
19-05-2014,-500
20-05-2014,-500
21-05-2014,-500
22-05-2014,-500
23-05-2014,-500
26-05-2014,-500
27-05-2014,-500
28-05-2014,-500
29-05-2014,-500
30-05-2014,-500
02-06-2014,-500
03-06-2014,-500
04-06-2014,-500
05-06-2014,-500
06-06-2014,-500
10-06-2014,-500
11-06-2014,-500
12-06-2014,-500
13-06-2014,-500
16-06-2014,-500
17-06-2014,-500
18-06-2014,-500
19-06-2014,-500
20-06-2014,-500
23-06-2014,-500
24-06-2014,-500
25-06-2014,-500
26-06-2014,-500
27-06-2014,-500
30-06-2014,-500
01-07-2014,-500
02-07-2014,-500
03-07-2014,-500
04-07-2014,-500
07-07-2014,-500
08-07-2014,-500
09-07-2014,-500
10-07-2014,-500
11-07-2014,-500
14-07-2014,-500
15-07-2014,-500
16-07-2014,-500
17-07-2014,-500
18-07-2014,-500
21-07-2014,-500
22-07-2014,-500
23-07-2014,-500
24-07-2014,-500
25-07-2014,-500
28-07-2014,-500
29-07-2014,-500
30-07-2014,-500
31-07-2014,-500
01-08-2014,-500
04-08-2014,-500
05-08-2014,-500
06-08-2014,-500
07-08-2014,-500
08-08-2014,-500
11-08-2014,-500
12-08-2014,-500
13-08-2014,-500
14-08-2014,-500
15-08-2014,-500
18-08-2014,-500
19-08-2014,-500
20-08-2014,-500
21-08-2014,-500
22-08-2014,-500
25-08-2014,-500
26-08-2014,-500
27-08-2014,-500
28-08-2014,-500
29-08-2014,-500
01-09-2014,-500
02-09-2014,-500
03-09-2014,-500
04-09-2014,-500
05-09-2014,-500
08-09-2014,-500
09-09-2014,-500
10-09-2014,-500
11-09-2014,-500
12-09-2014,-500
15-09-2014,-500
16-09-2014,-500
17-09-2014,-500
18-09-2014,-500
19-09-2014,-500
22-09-2014,-500
23-09-2014,-500
24-09-2014,-500
25-09-2014,-500
26-09-2014,-500
29-09-2014,-500
30-09-2014,-500
01-10-2014,-500
02-10-2014,-500
03-10-2014,-500
06-10-2014,-500
07-10-2014,-500
08-10-2014,-500
09-10-2014,-500
10-10-2014,-500
13-10-2014,-500
14-10-2014,-500
15-10-2014,-500
16-10-2014,-500
17-10-2014,-500
20-10-2014,-500
21-10-2014,-500
22-10-2014,-500
23-10-2014,-500
24-10-2014,-500
27-10-2014,-500
28-10-2014,-500
29-10-2014,-500
30-10-2014,-500
31-10-2014,-500
03-11-2014,-500
04-11-2014,-500
05-11-2014,-500
06-11-2014,-500
07-11-2014,-500
10-11-2014,-500
11-11-2014,-500
12-11-2014,-500
13-11-2014,-500
14-11-2014,-500
17-11-2014,-500
18-11-2014,-500
19-11-2014,-500
20-11-2014,-500
21-11-2014,-500
24-11-2014,-500
25-11-2014,-500
26-11-2014,-500
27-11-2014,-500
28-11-2014,-500
01-12-2014,-500
02-12-2014,-500
03-12-2014,-500
04-12-2014,-500
05-12-2014,-500
08-12-2014,-500
09-12-2014,-500
10-12-2014,-500
11-12-2014,-500
12-12-2014,-500
15-12-2014,-500
16-12-2014,-500
17-12-2014,-500
18-12-2014,-500
19-12-2014,-500
22-12-2014,-500
23-12-2014,-500
24-12-2014,-500
29-12-2014,-500
30-12-2014,-500
31-12-2014,-500
02-01-2015,-500
05-01-2015,-500
06-01-2015,-500
07-01-2015,-500
08-01-2015,-500
09-01-2015,-500
12-01-2015,-500
13-01-2015,-500
14-01-2015,-500
15-01-2015,-500
16-01-2015,-500
19-01-2015,-500
20-01-2015,-500
21-01-2015,-500
22-01-2015,-500
23-01-2015,-500
27-01-2015,-500
28-01-2015,-500
29-01-2015,-500
30-01-2015,-500
02-02-2015,-500
03-02-2015,-500
04-02-2015,-500
05-02-2015,-500
06-02-2015,-500
09-02-2015,-500
10-02-2015,-500
11-02-2015,-500
12-02-2015,-500
13-02-2015,-500
16-02-2015,-500
17-02-2015,-500
18-02-2015,-500
19-02-2015,-500
20-02-2015,-500
23-02-2015,-500
24-02-2015,-500
25-02-2015,-500
26-02-2015,-500
27-02-2015,-500
02-03-2015,-500
03-03-2015,-500
04-03-2015,-500
05-03-2015,-500
06-03-2015,-500
09-03-2015,-500
10-03-2015,-500
11-03-2015,-500
12-03-2015,-500
13-03-2015,-500
16-03-2015,-500
17-03-2015,-500
18-03-2015,-500
19-03-2015,-500
20-03-2015,-500
23-03-2015,-500
24-03-2015,-500
25-03-2015,-500
26-03-2015,-500
27-03-2015,-500
30-03-2015,-500
31-03-2015,-500
01-04-2015,-500
02-04-2015,-500
07-04-2015,-500
08-04-2015,-500
09-04-2015,-500
10-04-2015,-500
13-04-2015,-500
14-04-2015,-500
15-04-2015,-500
16-04-2015,-500
17-04-2015,-500
20-04-2015,-500
21-04-2015,-500
22-04-2015,-500
23-04-2015,-500
24-04-2015,-500
27-04-2015,-500
28-04-2015,-500
29-04-2015,-500
30-04-2015,-500
01-05-2015,-500
04-05-2015,-500
05-05-2015,-500
06-05-2015,-500
07-05-2015,-500
08-05-2015,-500
11-05-2015,-500
12-05-2015,-500
13-05-2015,-500
14-05-2015,-500
15-05-2015,-500
18-05-2015,-500
19-05-2015,-500
20-05-2015,-500
21-05-2015,-500
22-05-2015,-500
25-05-2015,-500
26-05-2015,-500
27-05-2015,-500
28-05-2015,-500
29-05-2015,-500
01-06-2015,-500
02-06-2015,-500
03-06-2015,-500
04-06-2015,-500
05-06-2015,-500
09-06-2015,-500
10-06-2015,-500
11-06-2015,-500
12-06-2015,-500
15-06-2015,-500
16-06-2015,-500
17-06-2015,-500
18-06-2015,-500
19-06-2015,-500
22-06-2015,-500
23-06-2015,-500
24-06-2015,-500
25-06-2015,-500
26-06-2015,-500
29-06-2015,-500
30-06-2015,-500
01-07-2015,-500
02-07-2015,-500
03-07-2015,-500
06-07-2015,-500
07-07-2015,-500
08-07-2015,-500
09-07-2015,-500
10-07-2015,-500
13-07-2015,-500
14-07-2015,-500
15-07-2015,-500
16-07-2015,-500
17-07-2015,-500
20-07-2015,-500
21-07-2015,-500
22-07-2015,-500
23-07-2015,-500
24-07-2015,-500
27-07-2015,-500
28-07-2015,-500
29-07-2015,-500
30-07-2015,-500
31-07-2015,-500
03-08-2015,-500
04-08-2015,-500
05-08-2015,-500
06-08-2015,-500
07-08-2015,-500
10-08-2015,-500
11-08-2015,-500
12-08-2015,-500
13-08-2015,-500
14-08-2015,-500
17-08-2015,-500
18-08-2015,-500
19-08-2015,-500
20-08-2015,-500
21-08-2015,-500
24-08-2015,-500
25-08-2015,-500
26-08-2015,-500
27-08-2015,-500
28-08-2015,-500
31-08-2015,-500
01-09-2015,-500
02-09-2015,-500
03-09-2015,-500
04-09-2015,-500
07-09-2015,-500
08-09-2015,-500
09-09-2015,-500
10-09-2015,-500
11-09-2015,-500
14-09-2015,-500
15-09-2015,-500
16-09-2015,-500
17-09-2015,-500
18-09-2015,-500
21-09-2015,-500
22-09-2015,-500
23-09-2015,-500
24-09-2015,-500
25-09-2015,-500
28-09-2015,-500
29-09-2015,-500
30-09-2015,-500
01-10-2015,-500
02-10-2015,-500
05-10-2015,-500
06-10-2015,-500
07-10-2015,-500
08-10-2015,-500
09-10-2015,-500
12-10-2015,-500
13-10-2015,-500
14-10-2015,-500
15-10-2015,-500
16-10-2015,-500
19-10-2015,-500
20-10-2015,-500
21-10-2015,-500
22-10-2015,-500
23-10-2015,-500
26-10-2015,-500
27-10-2015,-500
28-10-2015,-500
29-10-2015,-500
30-10-2015,-500
02-11-2015,-500
03-11-2015,-500
04-11-2015,-500
05-11-2015,-500
06-11-2015,-500
09-11-2015,-500
10-11-2015,-500
11-11-2015,-500
12-11-2015,-500
13-11-2015,-500
16-11-2015,-500
17-11-2015,-500
18-11-2015,-500
19-11-2015,-500
20-11-2015,-500
23-11-2015,-500
24-11-2015,-500
25-11-2015,-500
26-11-2015,-500
27-11-2015,-500
30-11-2015,-500
01-12-2015,-500
02-12-2015,-500
03-12-2015,-500
04-12-2015,-500
07-12-2015,-500
08-12-2015,-500
09-12-2015,-500
10-12-2015,-500
11-12-2015,-500
14-12-2015,-500
15-12-2015,-500
16-12-2015,-500
17-12-2015,-500
18-12-2015,-500
21-12-2015,-500
22-12-2015,-500
23-12-2015,-500
24-12-2015,-500
29-12-2015,-500
30-12-2015,-500
31-12-2015,-500
04-01-2016,-500
05-01-2016,-500
06-01-2016,-500
07-01-2016,-500
08-01-2016,-500
11-01-2016,-500
12-01-2016,-500
13-01-2016,-500
14-01-2016,-500
15-01-2016,-500
18-01-2016,-500
19-01-2016,-500
20-01-2016,-500
21-01-2016,-500
22-01-2016,-500
25-01-2016,-500
27-01-2016,-500
28-01-2016,-500
29-01-2016,-500
01-02-2016,-500
02-02-2016,-500
03-02-2016,-500
04-02-2016,-500
05-02-2016,-500
08-02-2016,-500
09-02-2016,-500
10-02-2016,-500
11-02-2016,-500
12-02-2016,-500
15-02-2016,-500
16-02-2016,-500
17-02-2016,-500
18-02-2016,-500
19-02-2016,-500
22-02-2016,-500
23-02-2016,-500
24-02-2016,-500
25-02-2016,-500
26-02-2016,-500
29-02-2016,-500
01-03-2016,-500
02-03-2016,-500
03-03-2016,-500
04-03-2016,-500
07-03-2016,-500
08-03-2016,-500
09-03-2016,-500
10-03-2016,-500
11-03-2016,-500
14-03-2016,-500
15-03-2016,-500
16-03-2016,-500
17-03-2016,-500
18-03-2016,-500
21-03-2016,-500
22-03-2016,-500
23-03-2016,-500
24-03-2016,-500
29-03-2016,-500
30-03-2016,-500
31-03-2016,-500
01-04-2016,-500
04-04-2016,-500
05-04-2016,-500
06-04-2016,-500
07-04-2016,-500
08-04-2016,-500
11-04-2016,-500
12-04-2016,-500
13-04-2016,-500
14-04-2016,-500
15-04-2016,-500
18-04-2016,-500
19-04-2016,-500
20-04-2016,-500
21-04-2016,-500
22-04-2016,-500
26-04-2016,-500
27-04-2016,-500
28-04-2016,-500
29-04-2016,-500
02-05-2016,-500
03-05-2016,-500
04-05-2016,-500
05-05-2016,-500
06-05-2016,-500
09-05-2016,-500
10-05-2016,-500
11-05-2016,-500
12-05-2016,-500
13-05-2016,-500
16-05-2016,-500
17-05-2016,-500
18-05-2016,-500
19-05-2016,-500
20-05-2016,-500
23-05-2016,-500
24-05-2016,-500
25-05-2016,-500
26-05-2016,-500
27-05-2016,-500
30-05-2016,-500
31-05-2016,-500
01-06-2016,-500
02-06-2016,-500
03-06-2016,-500
06-06-2016,-500
07-06-2016,-500
08-06-2016,-500
09-06-2016,-500
10-06-2016,-500
14-06-2016,-500
15-06-2016,-500
16-06-2016,-500
17-06-2016,-500
20-06-2016,-500
21-06-2016,-500
22-06-2016,-500
23-06-2016,-500
24-06-2016,-500
27-06-2016,-500
28-06-2016,-500
29-06-2016,-500
30-06-2016,-500
01-07-2016,-500
04-07-2016,-500
05-07-2016,-500
06-07-2016,-500
07-07-2016,-500
08-07-2016,-500
11-07-2016,-500
12-07-2016,-500
13-07-2016,-500
14-07-2016,-500
15-07-2016,-500
18-07-2016,-500
19-07-2016,-500
20-07-2016,-500
21-07-2016,-500
22-07-2016,-500
25-07-2016,-500
26-07-2016,-500
27-07-2016,-500
28-07-2016,-500
29-07-2016,-500
01-08-2016,-500
02-08-2016,-500
03-08-2016,-500
04-08-2016,-500
05-08-2016,-500
08-08-2016,-500
09-08-2016,-500
10-08-2016,-500
11-08-2016,-500
12-08-2016,-500
15-08-2016,-500
16-08-2016,-500
17-08-2016,-500
18-08-2016,-500
19-08-2016,-500
22-08-2016,-500
23-08-2016,-500
24-08-2016,-500
25-08-2016,-500
26-08-2016,-500
29-08-2016,-500
30-08-2016,-500
31-08-2016,-500
01-09-2016,-500
02-09-2016,-500
05-09-2016,-500
06-09-2016,-500
07-09-2016,-500
08-09-2016,-500
09-09-2016,-500
12-09-2016,-500
13-09-2016,-500
14-09-2016,-500
15-09-2016,-500
16-09-2016,-500
19-09-2016,-500
20-09-2016,-500
21-09-2016,-500
22-09-2016,-500
23-09-2016,-500
26-09-2016,-500
27-09-2016,-500
28-09-2016,-500
29-09-2016,-500
30-09-2016,-500
03-10-2016,-500
04-10-2016,-500
05-10-2016,-500
06-10-2016,-500
07-10-2016,-500
10-10-2016,-500
11-10-2016,-500
12-10-2016,-500
13-10-2016,-500
14-10-2016,-500
17-10-2016,-500
18-10-2016,-500
19-10-2016,-500
20-10-2016,-500
21-10-2016,-500
24-10-2016,-500
25-10-2016,-500
26-10-2016,-500
27-10-2016,-500
28-10-2016,-500
31-10-2016,-500
01-11-2016,-500
02-11-2016,-500
03-11-2016,-500
04-11-2016,-500
07-11-2016,-500
08-11-2016,-500
09-11-2016,-500
10-11-2016,-500
11-11-2016,-500
14-11-2016,-500
15-11-2016,-500
16-11-2016,-500
17-11-2016,-500
18-11-2016,-500
21-11-2016,-500
22-11-2016,-500
23-11-2016,-500
24-11-2016,-500
25-11-2016,-500
28-11-2016,-500
29-11-2016,-500
30-11-2016,-500
01-12-2016,-500
02-12-2016,-500
05-12-2016,-500
06-12-2016,-500
07-12-2016,-500
08-12-2016,-500
09-12-2016,-500
12-12-2016,-500
13-12-2016,-500
14-12-2016,-500
15-12-2016,-500
16-12-2016,-500
19-12-2016,-500
20-12-2016,-500
21-12-2016,-500
22-12-2016,-500
23-12-2016,-500
28-12-2016,-500
29-12-2016,-500
30-12-2016,-500
03-01-2017,-500
04-01-2017,-500
05-01-2017,-500
06-01-2017,-500
09-01-2017,-500
10-01-2017,-500
11-01-2017,-500
12-01-2017,-500
13-01-2017,-500
16-01-2017,-500
17-01-2017,-500
18-01-2017,-500
19-01-2017,-500
20-01-2017,-500
23-01-2017,-500
24-01-2017,-500
25-01-2017,-500
27-01-2017,-500
30-01-2017,-500
31-01-2017,-500
01-02-2017,-500
02-02-2017,-500
03-02-2017,-500
06-02-2017,-500
07-02-2017,-500
08-02-2017,-500
09-02-2017,-500
10-02-2017,-500
13-02-2017,-500
14-02-2017,-500
15-02-2017,-500
16-02-2017,-500
17-02-2017,-500
20-02-2017,-500
21-02-2017,-500
22-02-2017,-500
23-02-2017,-500
24-02-2017,-500
27-02-2017,-500
28-02-2017,-500
01-03-2017,-500
02-03-2017,-500
03-03-2017,-500
06-03-2017,-500
07-03-2017,-500
08-03-2017,-500
09-03-2017,-500
10-03-2017,-500
13-03-2017,-500
14-03-2017,-500
15-03-2017,-500
16-03-2017,-500
17-03-2017,-500
20-03-2017,-500
21-03-2017,-500
22-03-2017,-500
23-03-2017,-500
24-03-2017,-500
27-03-2017,-500
28-03-2017,-500
29-03-2017,-500
30-03-2017,-500
31-03-2017,-500
03-04-2017,-500
04-04-2017,-500
05-04-2017,-500
06-04-2017,-500
07-04-2017,-500
10-04-2017,-500
11-04-2017,-500
12-04-2017,-500
13-04-2017,-500
18-04-2017,-500
19-04-2017,-500
20-04-2017,-500
21-04-2017,-500
24-04-2017,-500
26-04-2017,-500
27-04-2017,-500
28-04-2017,-500
01-05-2017,-500
02-05-2017,-500
03-05-2017,-500
04-05-2017,-500
05-05-2017,-500
08-05-2017,-500
09-05-2017,-500
10-05-2017,-500
11-05-2017,-500
12-05-2017,-500
15-05-2017,-500
16-05-2017,-500
17-05-2017,-500
18-05-2017,-500
19-05-2017,-500
22-05-2017,-500
23-05-2017,-500
24-05-2017,-500
25-05-2017,-500
26-05-2017,-500
29-05-2017,-500
30-05-2017,-500
31-05-2017,-500
01-06-2017,-500
02-06-2017,-500
05-06-2017,-500
06-06-2017,-500
07-06-2017,-500
08-06-2017,-500
09-06-2017,-500
13-06-2017,-500
14-06-2017,-500
15-06-2017,-500
16-06-2017,-500
19-06-2017,-500
20-06-2017,-500
21-06-2017,-500
22-06-2017,-500
23-06-2017,-500
26-06-2017,-500
27-06-2017,-500
28-06-2017,-500
29-06-2017,-500
30-06-2017,-500
03-07-2017,-500
04-07-2017,-500
05-07-2017,-500
06-07-2017,-500
07-07-2017,-500
10-07-2017,-500
11-07-2017,-500
12-07-2017,-500
13-07-2017,-500
14-07-2017,-500
17-07-2017,-500
18-07-2017,-500
19-07-2017,-500
20-07-2017,-500
21-07-2017,-500
24-07-2017,-500
25-07-2017,-500
26-07-2017,-500
27-07-2017,-500
28-07-2017,-500
31-07-2017,-500
01-08-2017,-500
02-08-2017,-500
03-08-2017,-500
04-08-2017,-500
07-08-2017,-500
08-08-2017,-500
09-08-2017,-500
10-08-2017,-500
11-08-2017,-500
14-08-2017,-500
15-08-2017,-500
16-08-2017,-500
17-08-2017,-500
18-08-2017,-500
21-08-2017,-500
22-08-2017,-500
23-08-2017,-500
24-08-2017,-500
25-08-2017,-500
28-08-2017,-500
29-08-2017,-500
30-08-2017,-500
31-08-2017,-500
01-09-2017,-500
04-09-2017,-500
05-09-2017,-500
06-09-2017,-500
07-09-2017,-500
08-09-2017,-500
11-09-2017,-500
12-09-2017,-500
13-09-2017,-500
14-09-2017,-500
15-09-2017,-500
18-09-2017,-500
19-09-2017,-500
20-09-2017,-500
21-09-2017,-500
22-09-2017,-500
25-09-2017,-500
26-09-2017,-500
27-09-2017,-500
28-09-2017,-500
29-09-2017,-500
02-10-2017,-500
03-10-2017,-500
04-10-2017,-500
05-10-2017,-500
06-10-2017,-500
09-10-2017,-500
10-10-2017,-500
11-10-2017,-500
12-10-2017,-500
13-10-2017,-500
16-10-2017,-500
17-10-2017,-500
18-10-2017,-500
19-10-2017,-500
20-10-2017,-500
23-10-2017,-500
24-10-2017,-500
25-10-2017,-500
26-10-2017,-500
27-10-2017,-500
30-10-2017,-500
31-10-2017,-500
01-11-2017,-500
02-11-2017,-500
03-11-2017,-500
06-11-2017,-500
07-11-2017,-500
08-11-2017,-500
09-11-2017,-500
10-11-2017,-500
13-11-2017,-500
14-11-2017,-500
15-11-2017,-500
16-11-2017,-500
17-11-2017,-500
20-11-2017,-500
21-11-2017,-500
22-11-2017,-500
23-11-2017,-500
24-11-2017,-500
27-11-2017,-500
28-11-2017,-500
29-11-2017,-500
30-11-2017,-500
01-12-2017,-500
04-12-2017,-500
05-12-2017,-500
06-12-2017,-500
07-12-2017,-500
08-12-2017,-500
11-12-2017,-500
12-12-2017,-500
13-12-2017,-500
14-12-2017,-500
15-12-2017,-500
18-12-2017,-500
19-12-2017,-500
20-12-2017,-500
21-12-2017,-500
22-12-2017,-500
27-12-2017,-500
28-12-2017,-500
29-12-2017,-500
02-01-2018,-500
03-01-2018,-500
04-01-2018,-500
05-01-2018,-500
08-01-2018,-500
09-01-2018,-500
10-01-2018,-500
11-01-2018,-500
12-01-2018,-500
15-01-2018,-500
16-01-2018,-500
17-01-2018,-500
18-01-2018,-500
19-01-2018,-500
22-01-2018,-500
23-01-2018,-500
24-01-2018,-500
25-01-2018,-500
29-01-2018,-500
30-01-2018,-500
31-01-2018,-500
01-02-2018,-500
02-02-2018,-500
05-02-2018,-500
06-02-2018,-500
07-02-2018,-500
08-02-2018,-500
09-02-2018,-500
12-02-2018,-500
13-02-2018,-500
14-02-2018,-500
15-02-2018,-500
16-02-2018,-500
19-02-2018,-500
20-02-2018,-500
21-02-2018,-500
22-02-2018,-500
23-02-2018,-500
26-02-2018,-500
27-02-2018,-500
28-02-2018,-500
01-03-2018,-500
02-03-2018,-500
05-03-2018,-500
06-03-2018,-500
07-03-2018,-500
08-03-2018,-500
09-03-2018,-500
12-03-2018,-500
13-03-2018,-500
14-03-2018,-500
15-03-2018,-500
16-03-2018,-500
19-03-2018,-500
20-03-2018,-500
21-03-2018,-500
22-03-2018,-500
23-03-2018,-500
26-03-2018,-500
27-03-2018,-500
28-03-2018,-500
29-03-2018,-500
03-04-2018,-500
04-04-2018,-500
05-04-2018,-500
06-04-2018,-500
09-04-2018,-500
10-04-2018,-500
11-04-2018,-500
12-04-2018,-500
13-04-2018,-500
16-04-2018,-500
17-04-2018,-500
18-04-2018,-500
19-04-2018,-500
20-04-2018,-500
23-04-2018,-500
24-04-2018,-500
26-04-2018,-500
27-04-2018,-500
30-04-2018,-500
01-05-2018,-500
02-05-2018,-500
03-05-2018,-500
04-05-2018,-500
07-05-2018,-500
08-05-2018,-500
09-05-2018,-500
10-05-2018,-500
11-05-2018,-500
14-05-2018,-500
15-05-2018,-500
16-05-2018,-500
17-05-2018,-500
18-05-2018,-500
21-05-2018,-500
22-05-2018,-500
23-05-2018,-500
24-05-2018,-500
25-05-2018,-500
28-05-2018,-500
29-05-2018,-500
30-05-2018,-500
31-05-2018,-500
01-06-2018,-500
04-06-2018,-500
05-06-2018,-500
06-06-2018,-500
07-06-2018,-500
08-06-2018,-500
12-06-2018,-500
13-06-2018,-500
14-06-2018,-500
15-06-2018,-500
18-06-2018,-500
19-06-2018,-500
20-06-2018,-500
21-06-2018,-500
22-06-2018,-500
25-06-2018,-500
26-06-2018,-500
27-06-2018,-500
28-06-2018,-500
29-06-2018,-500
02-07-2018,-500
03-07-2018,-500
04-07-2018,-500
05-07-2018,-500
06-07-2018,-500
09-07-2018,-500
10-07-2018,-500
11-07-2018,-500
12-07-2018,-500
13-07-2018,-500
16-07-2018,-500
17-07-2018,-500
18-07-2018,-500
19-07-2018,-500
20-07-2018,-500
23-07-2018,-500
24-07-2018,-500
25-07-2018,-500
26-07-2018,-500
27-07-2018,-500
30-07-2018,-500
31-07-2018,-500
01-08-2018,-500
02-08-2018,-500
03-08-2018,-500
06-08-2018,-500
07-08-2018,-500
08-08-2018,-500
09-08-2018,-500
10-08-2018,-500
13-08-2018,-500
14-08-2018,-500
15-08-2018,-500
16-08-2018,-500
17-08-2018,-500
20-08-2018,-500
21-08-2018,-500
22-08-2018,-500
23-08-2018,-500
24-08-2018,-500
27-08-2018,-500
28-08-2018,-500
29-08-2018,-500
30-08-2018,-500
31-08-2018,-500
03-09-2018,-500
04-09-2018,-500
05-09-2018,-500
06-09-2018,-500
07-09-2018,-500
10-09-2018,-500
11-09-2018,-500
12-09-2018,-500
13-09-2018,-500
14-09-2018,-500
17-09-2018,-500
18-09-2018,-500
19-09-2018,-500
20-09-2018,-500
21-09-2018,-500
24-09-2018,-500
25-09-2018,-500
26-09-2018,-500
27-09-2018,-500
28-09-2018,-500
01-10-2018,-500
02-10-2018,-500
03-10-2018,-500
04-10-2018,-500
05-10-2018,-500
08-10-2018,-500
09-10-2018,-500
10-10-2018,-500
11-10-2018,-500
12-10-2018,-500
15-10-2018,-500
16-10-2018,-500
17-10-2018,-500
18-10-2018,-500
19-10-2018,-500
22-10-2018,-500
23-10-2018,-500
24-10-2018,-500
25-10-2018,-500
26-10-2018,-500
29-10-2018,-500
30-10-2018,-500
31-10-2018,-500
01-11-2018,-500
02-11-2018,-500
05-11-2018,-500
06-11-2018,-500
07-11-2018,-500
08-11-2018,-500
09-11-2018,-500
12-11-2018,-500
13-11-2018,-500
14-11-2018,-500
15-11-2018,-500
16-11-2018,-500
19-11-2018,-500
20-11-2018,-500
21-11-2018,-500
22-11-2018,-500
23-11-2018,-500
26-11-2018,-500
27-11-2018,-500
28-11-2018,-500
29-11-2018,-500
30-11-2018,-500
03-12-2018,-500
04-12-2018,-500
05-12-2018,-500
06-12-2018,-500
07-12-2018,-500
10-12-2018,-500
11-12-2018,-500
12-12-2018,-500
13-12-2018,-500
14-12-2018,-500
17-12-2018,-500
18-12-2018,-500
19-12-2018,-500
20-12-2018,-500
21-12-2018,-500
24-12-2018,-500
27-12-2018,-500
28-12-2018,-500
31-12-2018,-500
02-01-2019,-500
03-01-2019,-500
04-01-2019,-500
07-01-2019,-500
08-01-2019,-500
09-01-2019,-500
10-01-2019,-500
11-01-2019,-500
14-01-2019,-500
15-01-2019,-500
16-01-2019,-500
17-01-2019,-500
18-01-2019,-500
21-01-2019,-500
22-01-2019,-500
23-01-2019,-500
24-01-2019,-500
25-01-2019,-500
29-01-2019,-500
30-01-2019,-500
31-01-2019,-500
01-02-2019,-500
04-02-2019,-500
05-02-2019,-500
06-02-2019,-500
07-02-2019,-500
08-02-2019,-500
11-02-2019,-500
12-02-2019,-500
13-02-2019,-500
14-02-2019,-500
15-02-2019,-500
18-02-2019,-500
19-02-2019,-500
20-02-2019,-500
21-02-2019,-500
22-02-2019,-500
25-02-2019,-500
26-02-2019,-500
27-02-2019,-500
28-02-2019,-500
01-03-2019,-500
04-03-2019,-500
05-03-2019,-500
06-03-2019,-500
07-03-2019,-500
08-03-2019,-500
11-03-2019,-500
12-03-2019,-500
13-03-2019,-500
14-03-2019,-500
15-03-2019,-500
18-03-2019,-500
19-03-2019,-500
20-03-2019,-500
21-03-2019,-500
22-03-2019,-500
25-03-2019,-500
26-03-2019,-500
27-03-2019,-500
28-03-2019,-500
29-03-2019,-500
01-04-2019,-500
02-04-2019,-500
03-04-2019,-500
04-04-2019,-500
05-04-2019,-500
08-04-2019,-500
09-04-2019,-500
10-04-2019,-500
11-04-2019,-500
12-04-2019,-500
15-04-2019,-500
16-04-2019,-500
17-04-2019,-500
18-04-2019,-500
23-04-2019,-500
24-04-2019,-500
26-04-2019,-500
29-04-2019,-500
30-04-2019,-500
01-05-2019,-500
02-05-2019,-500
03-05-2019,-500
06-05-2019,-500
07-05-2019,-500
08-05-2019,-500
09-05-2019,-500
10-05-2019,-500
13-05-2019,-500
14-05-2019,-500
15-05-2019,-500
16-05-2019,-500
17-05-2019,-500
20-05-2019,-500
21-05-2019,-500
22-05-2019,-500
23-05-2019,-500
24-05-2019,-500
27-05-2019,-500
28-05-2019,-500
29-05-2019,-500
30-05-2019,-500
31-05-2019,-500
03-06-2019,-500
04-06-2019,-500
05-06-2019,-500
06-06-2019,-500
07-06-2019,-500
11-06-2019,-500
12-06-2019,-500
13-06-2019,-500
14-06-2019,-500
17-06-2019,-500
18-06-2019,-500
19-06-2019,-500
20-06-2019,-500
21-06-2019,-500
24-06-2019,-500
25-06-2019,-500
26-06-2019,-500
27-06-2019,-500
28-06-2019,-500
01-07-2019,-500
02-07-2019,-500
03-07-2019,-500
04-07-2019,-500
05-07-2019,-500
08-07-2019,-500
09-07-2019,-500
10-07-2019,-500
11-07-2019,-500
12-07-2019,-500
15-07-2019,-500
16-07-2019,-500
17-07-2019,-500
18-07-2019,-500
19-07-2019,-500
22-07-2019,-500
23-07-2019,-500
24-07-2019,-500
25-07-2019,-500
26-07-2019,-500
29-07-2019,-500
30-07-2019,-500
31-07-2019,-500
01-08-2019,-500
02-08-2019,-500
05-08-2019,-500
06-08-2019,-500
07-08-2019,-500
08-08-2019,-500
09-08-2019,-500
12-08-2019,-500
13-08-2019,-500
14-08-2019,-500
15-08-2019,-500
16-08-2019,-500
19-08-2019,-500
20-08-2019,-500
21-08-2019,-500
22-08-2019,-500
23-08-2019,-500
26-08-2019,-500
27-08-2019,-500
28-08-2019,-500
29-08-2019,-500
30-08-2019,-500
02-09-2019,-500
03-09-2019,-500
04-09-2019,-500
05-09-2019,-500
06-09-2019,-500
09-09-2019,-500
10-09-2019,-500
11-09-2019,-500
12-09-2019,-500
13-09-2019,-500
16-09-2019,-500
17-09-2019,-500
18-09-2019,-500
19-09-2019,-500
20-09-2019,-500
23-09-2019,-500
24-09-2019,-500
25-09-2019,-500
26-09-2019,-500
27-09-2019,-500
30-09-2019,-500
01-10-2019,-500
02-10-2019,-500
03-10-2019,-500
04-10-2019,-500
07-10-2019,-500
08-10-2019,-500
09-10-2019,-500
10-10-2019,-500
11-10-2019,-500
14-10-2019,-500
15-10-2019,-500
16-10-2019,-500
17-10-2019,-500
18-10-2019,-500
21-10-2019,-500
22-10-2019,-500
23-10-2019,-500
24-10-2019,-500
25-10-2019,-500
28-10-2019,-500
29-10-2019,-500
30-10-2019,-500
31-10-2019,-500
01-11-2019,-500
04-11-2019,-500
05-11-2019,-500
06-11-2019,-500
07-11-2019,-500
08-11-2019,-500
11-11-2019,-500
12-11-2019,-500
13-11-2019,-500
14-11-2019,-500
15-11-2019,-500
18-11-2019,-500
19-11-2019,-500
20-11-2019,-500
21-11-2019,-500
22-11-2019,-500
25-11-2019,-500
26-11-2019,-500
27-11-2019,-500
28-11-2019,-500
29-11-2019,-500
02-12-2019,-500
03-12-2019,-500
04-12-2019,-500
05-12-2019,-500
06-12-2019,-500
09-12-2019,-500
10-12-2019,-500
11-12-2019,-500
12-12-2019,-500
13-12-2019,-500
16-12-2019,-500
17-12-2019,-500
18-12-2019,-500
19-12-2019,-500
20-12-2019,-500
23-12-2019,-500
24-12-2019,-500
27-12-2019,-500
30-12-2019,-500
31-12-2019,-500
02-01-2020,-500
03-01-2020,-500
06-01-2020,-500
07-01-2020,-500
08-01-2020,-500
09-01-2020,-500
10-01-2020,-500
13-01-2020,-500
14-01-2020,-500
15-01-2020,-500
16-01-2020,-500
17-01-2020,-500
20-01-2020,-500
21-01-2020,-500
22-01-2020,-500
23-01-2020,-500
24-01-2020,-500
28-01-2020,-500
29-01-2020,-500
30-01-2020,-500
31-01-2020,-500
03-02-2020,-500
04-02-2020,-500
05-02-2020,-500
06-02-2020,-500
07-02-2020,-500
10-02-2020,-500
11-02-2020,-500
12-02-2020,-500
13-02-2020,-500
14-02-2020,-500
17-02-2020,-500
18-02-2020,-500
19-02-2020,-500
20-02-2020,-500
21-02-2020,-500
24-02-2020,-500
25-02-2020,-500
26-02-2020,-500
27-02-2020,-500
28-02-2020,-500
02-03-2020,-500
03-03-2020,-500
04-03-2020,-500
05-03-2020,-500
06-03-2020,-500
09-03-2020,-500
10-03-2020,-500
11-03-2020,-500
12-03-2020,-500
13-03-2020,-500
16-03-2020,-500
17-03-2020,-500
18-03-2020,-500
19-03-2020,-500
20-03-2020,-500
23-03-2020,-500
24-03-2020,-500
25-03-2020,-500
26-03-2020,-500
27-03-2020,-500
30-03-2020,-500
31-03-2020,-500
01-04-2020,-500
02-04-2020,-500
03-04-2020,-500
06-04-2020,-500
07-04-2020,-500
08-04-2020,-500
09-04-2020,-500
14-04-2020,-500
15-04-2020,-500
16-04-2020,-500
17-04-2020,-500
20-04-2020,-500
21-04-2020,-500
22-04-2020,-500
23-04-2020,-500
24-04-2020,-500
27-04-2020,-500
28-04-2020,-500
29-04-2020,-500
30-04-2020,-500
01-05-2020,-500
04-05-2020,-500
05-05-2020,-500
06-05-2020,-500
07-05-2020,-500
08-05-2020,-500
11-05-2020,-500
12-05-2020,-500
13-05-2020,-500
14-05-2020,-500
15-05-2020,-500
18-05-2020,-500
19-05-2020,-500
20-05-2020,-500
21-05-2020,-500
22-05-2020,-500
25-05-2020,-500
26-05-2020,-500
27-05-2020,-500
28-05-2020,-500
29-05-2020,-500
01-06-2020,-500
02-06-2020,-500
03-06-2020,-500
04-06-2020,-500
05-06-2020,-500
09-06-2020,-500
10-06-2020,-500
11-06-2020,-500
12-06-2020,-500
15-06-2020,-500
16-06-2020,-500
17-06-2020,-500
18-06-2020,-500
19-06-2020,-500
22-06-2020,-500
23-06-2020,-500
24-06-2020,-500
25-06-2020,-500
26-06-2020,-500
29-06-2020,-500
30-06-2020,-500
01-07-2020,-500
02-07-2020,-500
03-07-2020,-500
06-07-2020,-500
07-07-2020,-500
08-07-2020,-500
09-07-2020,-500
10-07-2020,-500
13-07-2020,-500
14-07-2020,-500
15-07-2020,-500
16-07-2020,-500
17-07-2020,-500
20-07-2020,-500
21-07-2020,-500
22-07-2020,-500
23-07-2020,-500
24-07-2020,-500
27-07-2020,-500
28-07-2020,-500
29-07-2020,-500
30-07-2020,-500
31-07-2020,-500
03-08-2020,-500
04-08-2020,-500
05-08-2020,-500
06-08-2020,-500
07-08-2020,-500
10-08-2020,-500
11-08-2020,-500
12-08-2020,-500
13-08-2020,-500
14-08-2020,-500
17-08-2020,-500
18-08-2020,-500
19-08-2020,-500
20-08-2020,-500
21-08-2020,-500
24-08-2020,-500
25-08-2020,-500
26-08-2020,-500
27-08-2020,-500
28-08-2020,-500
31-08-2020,-500
01-09-2020,-500
02-09-2020,-500
03-09-2020,-500
04-09-2020,-500
07-09-2020,-500
08-09-2020,-500
09-09-2020,-500
10-09-2020,-500
11-09-2020,-500
14-09-2020,-500
15-09-2020,-500
16-09-2020,-500
17-09-2020,-500
18-09-2020,-500
21-09-2020,-500
22-09-2020,-500
23-09-2020,-500
24-09-2020,-500
25-09-2020,-500
28-09-2020,-500
29-09-2020,-500
30-09-2020,-500
01-10-2020,-500
02-10-2020,-500
05-10-2020,-500
06-10-2020,-500
07-10-2020,-500
08-10-2020,-500
09-10-2020,-500
12-10-2020,-500
13-10-2020,-500
14-10-2020,-500
15-10-2020,-500
16-10-2020,-500
19-10-2020,-500
20-10-2020,-500
21-10-2020,-500
22-10-2020,-500
23-10-2020,-500
26-10-2020,-500
27-10-2020,-500
28-10-2020,-500
29-10-2020,-500
30-10-2020,-500
02-11-2020,-500
03-11-2020,-500
04-11-2020,-500
05-11-2020,-500
06-11-2020,-500
09-11-2020,-500
10-11-2020,-500
11-11-2020,-500
12-11-2020,-500
13-11-2020,-500
16-11-2020,-500
17-11-2020,-500
18-11-2020,-500
19-11-2020,-500
20-11-2020,-500
23-11-2020,-500
24-11-2020,-500
25-11-2020,-500
26-11-2020,-500
27-11-2020,-500
30-11-2020,-500
01-12-2020,-500
02-12-2020,-500
03-12-2020,-500
04-12-2020,-500
07-12-2020,-500
08-12-2020,-500
09-12-2020,-500
10-12-2020,-500
11-12-2020,-500
14-12-2020,-500
15-12-2020,-500
16-12-2020,-500
17-12-2020,-500
18-12-2020,-500
21-12-2020,-500
22-12-2020,-500
23-12-2020,-500
24-12-2020,-500
29-12-2020,-500
30-12-2020,-500
31-12-2020,-500
04-01-2021,-500
05-01-2021,-500
06-01-2021,-500
07-01-2021,-500
08-01-2021,-500
11-01-2021,-500
12-01-2021,-500
13-01-2021,-500
14-01-2021,-500
15-01-2021,-500
18-01-2021,-500
19-01-2021,-500
20-01-2021,-500
21-01-2021,-500
22-01-2021,-500
25-01-2021,-500
27-01-2021,-500
28-01-2021,-500
29-01-2021,-500
01-02-2021,-500
02-02-2021,-500
03-02-2021,-500
04-02-2021,-500
05-02-2021,-500
08-02-2021,-500
09-02-2021,-500
10-02-2021,-500
11-02-2021,-500
12-02-2021,-500
15-02-2021,-500
16-02-2021,-500
17-02-2021,-500
18-02-2021,-500
19-02-2021,-500
22-02-2021,-500
23-02-2021,-500
24-02-2021,-500
25-02-2021,-500
26-02-2021,-500
01-03-2021,-500
02-03-2021,-500
03-03-2021,-500
04-03-2021,-500
05-03-2021,-500
08-03-2021,-500
09-03-2021,-500
10-03-2021,-500
11-03-2021,-500
12-03-2021,-500
15-03-2021,-500
16-03-2021,-500
17-03-2021,-500
18-03-2021,-500
19-03-2021,-500
22-03-2021,-500
23-03-2021,-500
24-03-2021,-500
25-03-2021,-500
26-03-2021,-500
29-03-2021,-500
30-03-2021,-500
31-03-2021,-500
01-04-2021,-500
06-04-2021,-500
07-04-2021,-500
08-04-2021,-500
09-04-2021,-500
12-04-2021,-500
13-04-2021,-500
14-04-2021,-500
15-04-2021,-500
16-04-2021,-500
19-04-2021,-500
20-04-2021,-500
21-04-2021,-500
22-04-2021,-500
23-04-2021,-500
26-04-2021,-500
27-04-2021,-500
28-04-2021,-500
29-04-2021,-500
30-04-2021,-500
03-05-2021,-500
04-05-2021,-500
05-05-2021,-500
06-05-2021,-500
07-05-2021,-500
10-05-2021,-500
11-05-2021,-500
12-05-2021,-500
13-05-2021,-500
14-05-2021,-500
17-05-2021,-500
18-05-2021,-500
19-05-2021,-500
20-05-2021,-500
21-05-2021,-500
24-05-2021,-500
25-05-2021,-500
26-05-2021,-500
27-05-2021,-500
28-05-2021,-500
31-05-2021,-500
01-06-2021,-500
02-06-2021,-500
03-06-2021,-500
04-06-2021,-500
07-06-2021,-500
08-06-2021,-500
09-06-2021,-500
10-06-2021,-500
11-06-2021,-500
15-06-2021,-500
16-06-2021,-500
17-06-2021,-500
18-06-2021,-500
21-06-2021,-500
22-06-2021,-500
23-06-2021,-500
24-06-2021,-500
25-06-2021,-500
28-06-2021,-500
29-06-2021,-500
30-06-2021,-500
01-07-2021,-500
02-07-2021,-500
05-07-2021,-500
06-07-2021,-500
07-07-2021,-500
08-07-2021,-500
09-07-2021,-500
12-07-2021,-500
13-07-2021,-500
14-07-2021,-500
15-07-2021,-500
16-07-2021,-500
19-07-2021,-500
20-07-2021,-500
21-07-2021,-500
22-07-2021,-500
23-07-2021,-500
26-07-2021,-500
27-07-2021,-500
28-07-2021,-500
29-07-2021,-500
30-07-2021,-500
02-08-2021,-500
03-08-2021,-500
04-08-2021,-500
05-08-2021,-500
06-08-2021,-500
09-08-2021,-500
10-08-2021,-500
11-08-2021,-500
12-08-2021,-500
13-08-2021,-500
16-08-2021,-500
17-08-2021,-500
18-08-2021,-500
19-08-2021,-500
20-08-2021,-500
23-08-2021,-500
24-08-2021,-500
25-08-2021,-500
26-08-2021,-500
27-08-2021,-500
30-08-2021,-500
31-08-2021,-500
01-09-2021,-500
02-09-2021,-500
03-09-2021,-500
06-09-2021,-500
07-09-2021,-500
08-09-2021,-500
09-09-2021,-500
10-09-2021,-500
13-09-2021,-500
14-09-2021,-500
15-09-2021,-500
16-09-2021,-500
17-09-2021,-500
20-09-2021,-500
21-09-2021,-500
22-09-2021,-500
23-09-2021,-500
24-09-2021,-500
27-09-2021,-500
28-09-2021,-500
29-09-2021,-500
30-09-2021,-500
01-10-2021,-500
04-10-2021,-500
05-10-2021,-500
06-10-2021,-500
07-10-2021,-500
08-10-2021,-500
11-10-2021,-500
12-10-2021,-500
13-10-2021,-500
14-10-2021,-500
15-10-2021,-500
18-10-2021,-500
19-10-2021,-500
20-10-2021,-500
21-10-2021,-500
22-10-2021,-500
25-10-2021,-500
26-10-2021,-500
27-10-2021,-500
28-10-2021,-500
29-10-2021,-500
01-11-2021,-500
02-11-2021,-500
03-11-2021,-500
04-11-2021,-500
05-11-2021,-500
08-11-2021,-500
09-11-2021,-500
10-11-2021,-500
11-11-2021,-500
12-11-2021,-500
15-11-2021,-500
16-11-2021,-500
17-11-2021,-500
18-11-2021,-500
19-11-2021,-500
22-11-2021,-500
23-11-2021,-500
24-11-2021,-500
25-11-2021,-500
26-11-2021,-500
29-11-2021,-500
30-11-2021,-500
01-12-2021,-500
02-12-2021,-500
03-12-2021,-500
06-12-2021,-500
07-12-2021,-500
08-12-2021,-500
09-12-2021,-500
10-12-2021,-500
13-12-2021,-500
14-12-2021,-500
15-12-2021,-500
16-12-2021,-500
17-12-2021,-500
20-12-2021,-500
21-12-2021,-500
22-12-2021,-500
23-12-2021,-500
24-12-2021,-500
29-12-2021,-500
30-12-2021,-500
31-12-2021,-500
04-01-2022,-500
05-01-2022,-500
06-01-2022,-500
07-01-2022,-500
10-01-2022,-500
11-01-2022,-500
12-01-2022,-500
13-01-2022,-500
14-01-2022,-500
17-01-2022,-500
18-01-2022,-500
19-01-2022,-500
20-01-2022,-500
21-01-2022,-500
24-01-2022,-500
25-01-2022,-500
27-01-2022,-500
28-01-2022,-500
31-01-2022,-500
01-02-2022,-500
02-02-2022,-500
03-02-2022,-500
04-02-2022,-500
07-02-2022,-500
08-02-2022,-500
09-02-2022,-500
10-02-2022,-500
11-02-2022,-500
14-02-2022,-500
15-02-2022,-500
16-02-2022,-500
17-02-2022,-500
18-02-2022,-500
21-02-2022,-500
22-02-2022,-500
23-02-2022,-500
24-02-2022,-500
25-02-2022,-500
28-02-2022,-500
01-03-2022,-500
02-03-2022,-500
03-03-2022,-500
04-03-2022,-500
07-03-2022,-500
08-03-2022,-500
09-03-2022,-500
10-03-2022,-500
11-03-2022,-500
14-03-2022,-500
15-03-2022,-500
16-03-2022,-500
17-03-2022,-500
18-03-2022,-500
21-03-2022,-500
22-03-2022,-500
23-03-2022,-500
24-03-2022,-500
25-03-2022,-500
28-03-2022,-500
29-03-2022,-500
30-03-2022,-500
31-03-2022,-500
01-04-2022,-500
04-04-2022,-500
05-04-2022,-500
06-04-2022,-500
07-04-2022,-500
08-04-2022,-500
11-04-2022,-500
12-04-2022,-500
13-04-2022,-500
14-04-2022,-500
19-04-2022,-500
20-04-2022,-500
21-04-2022,-500
22-04-2022,-500
26-04-2022,-500
27-04-2022,-500
28-04-2022,-500
29-04-2022,-500
02-05-2022,-500
03-05-2022,-500
04-05-2022,-500
05-05-2022,-500
06-05-2022,-500
09-05-2022,-500
10-05-2022,-500
11-05-2022,-500
12-05-2022,-500
13-05-2022,-500
16-05-2022,-500
17-05-2022,-500
18-05-2022,-500
19-05-2022,-500
20-05-2022,-500
23-05-2022,-500
24-05-2022,-500
25-05-2022,-500
26-05-2022,-500
27-05-2022,-500
30-05-2022,-500
31-05-2022,-500
01-06-2022,-500
02-06-2022,-500
03-06-2022,-500
06-06-2022,-500
07-06-2022,-500
08-06-2022,-500
09-06-2022,-500
10-06-2022,-500
14-06-2022,-500
15-06-2022,-500
16-06-2022,-500
17-06-2022,-500
20-06-2022,-500
21-06-2022,-500
22-06-2022,-500
23-06-2022,-500
24-06-2022,-500
27-06-2022,-500
28-06-2022,-500
29-06-2022,-500
30-06-2022,-500
01-07-2022,-500
04-07-2022,-500
05-07-2022,-500
06-07-2022,-500
07-07-2022,-500
08-07-2022,-500
11-07-2022,-500
12-07-2022,-500
13-07-2022,-500
14-07-2022,-500
15-07-2022,-500
18-07-2022,-500
19-07-2022,-500
20-07-2022,-500
21-07-2022,-500
22-07-2022,-500
25-07-2022,-500
26-07-2022,-500
27-07-2022,-500
28-07-2022,-500
29-07-2022,-500
01-08-2022,-500
02-08-2022,-500
03-08-2022,-500
04-08-2022,-500
05-08-2022,-500
08-08-2022,-500
09-08-2022,-500
10-08-2022,-500
11-08-2022,-500
12-08-2022,-500
15-08-2022,-500
16-08-2022,-500
17-08-2022,-500
18-08-2022,-500
19-08-2022,-500
22-08-2022,-500
23-08-2022,-500
24-08-2022,-500
25-08-2022,-500
26-08-2022,-500
29-08-2022,-500
30-08-2022,-500
31-08-2022,-500
01-09-2022,-500
02-09-2022,-500
05-09-2022,-500
06-09-2022,-500
07-09-2022,-500
08-09-2022,-500
09-09-2022,-500
12-09-2022,-500
13-09-2022,-500
14-09-2022,-500
15-09-2022,-500
16-09-2022,-500
19-09-2022,-500
20-09-2022,-500
21-09-2022,-500
23-09-2022,-500
26-09-2022,-500
27-09-2022,-500
28-09-2022,-500
29-09-2022,-500
30-09-2022,-500
03-10-2022,-500
04-10-2022,-500
05-10-2022,-500
06-10-2022,-500
07-10-2022,-500
10-10-2022,-500
11-10-2022,-500
12-10-2022,-500
13-10-2022,-500
14-10-2022,-500
17-10-2022,-500
18-10-2022,-500
19-10-2022,-500
20-10-2022,-500
21-10-2022,-500
24-10-2022,-500
25-10-2022,-500
26-10-2022,-500
27-10-2022,-500
28-10-2022,-500
31-10-2022,-500
01-11-2022,-500
02-11-2022,-500
03-11-2022,-500
04-11-2022,-500
07-11-2022,-500
08-11-2022,-500
09-11-2022,-500
10-11-2022,-500
11-11-2022,-500
14-11-2022,-500
15-11-2022,-500
16-11-2022,-500
17-11-2022,-500
18-11-2022,-500
21-11-2022,-500
22-11-2022,-500
23-11-2022,-500
24-11-2022,-500
25-11-2022,-500
28-11-2022,-500
29-11-2022,-500
30-11-2022,-500
01-12-2022,-500
02-12-2022,-500
05-12-2022,-500
06-12-2022,-500
07-12-2022,-500
08-12-2022,-500
09-12-2022,-500
12-12-2022,-500
13-12-2022,-500
14-12-2022,-500
15-12-2022,-500
16-12-2022,-500
19-12-2022,-500
20-12-2022,-500
21-12-2022,-500
22-12-2022,-500
23-12-2022,-500
28-12-2022,-500
29-12-2022,-500
30-12-2022,-500
03-01-2023,-500
04-01-2023,-500
05-01-2023,-500
06-01-2023,-500
09-01-2023,-500
10-01-2023,-500
11-01-2023,-500
12-01-2023,-500
13-01-2023,-500
16-01-2023,-500
17-01-2023,-500
18-01-2023,-500
19-01-2023,-500
20-01-2023,-500
23-01-2023,-500
24-01-2023,-500
25-01-2023,-500
27-01-2023,-500
30-01-2023,-500
31-01-2023,-500
01-02-2023,-500
02-02-2023,-500
03-02-2023,-500
06-02-2023,-500
07-02-2023,-500
08-02-2023,-500
09-02-2023,-500
10-02-2023,-500
13-02-2023,-500
14-02-2023,-500
15-02-2023,-500
16-02-2023,-500
17-02-2023,-500
20-02-2023,-500
21-02-2023,-500
22-02-2023,-500
23-02-2023,-500
24-02-2023,-500
27-02-2023,-500
28-02-2023,-500
01-03-2023,-500
02-03-2023,-500
03-03-2023,-500
06-03-2023,-500
07-03-2023,-500
08-03-2023,-500
09-03-2023,-500
10-03-2023,-500
13-03-2023,-500
14-03-2023,-500
15-03-2023,-500
16-03-2023,-500
17-03-2023,-500
20-03-2023,-500
21-03-2023,-500
22-03-2023,-500
23-03-2023,-500
24-03-2023,-500
27-03-2023,-500
28-03-2023,-500
29-03-2023,-500
30-03-2023,-500
31-03-2023,-500
03-04-2023,-500
04-04-2023,-500
05-04-2023,-500
06-04-2023,-500
11-04-2023,-500
12-04-2023,-500
13-04-2023,-500
14-04-2023,-500
17-04-2023,-500
18-04-2023,-500
19-04-2023,-500
20-04-2023,-500
21-04-2023,-500
24-04-2023,-500
26-04-2023,-500
27-04-2023,-500
28-04-2023,-500
01-05-2023,-500
02-05-2023,-500
03-05-2023,-500
04-05-2023,-500
05-05-2023,-500
08-05-2023,-500
09-05-2023,-500
10-05-2023,-500
11-05-2023,-500
12-05-2023,-500
15-05-2023,-500
16-05-2023,-500
17-05-2023,-500
18-05-2023,-500
19-05-2023,-500
22-05-2023,-500
23-05-2023,-500
24-05-2023,-500
25-05-2023,-500
26-05-2023,-500
29-05-2023,-500
30-05-2023,-500
31-05-2023,-500
01-06-2023,-500
02-06-2023,-500
05-06-2023,-500
06-06-2023,-500
07-06-2023,-500
08-06-2023,-500
09-06-2023,-500
13-06-2023,-500
14-06-2023,-500
15-06-2023,-500
16-06-2023,-500
19-06-2023,-500
20-06-2023,-500
21-06-2023,-500
22-06-2023,-500
23-06-2023,-500
26-06-2023,-500
27-06-2023,-500
28-06-2023,-500
29-06-2023,-500
30-06-2023,-500
03-07-2023,-500
04-07-2023,-500
05-07-2023,-500
06-07-2023,-500
07-07-2023,-500
10-07-2023,-500
11-07-2023,-500
12-07-2023,-500
13-07-2023,-500
14-07-2023,-500
17-07-2023,-500
18-07-2023,-500
19-07-2023,-500
20-07-2023,-500
21-07-2023,-500
24-07-2023,-500
25-07-2023,-500
26-07-2023,-500
27-07-2023,-500
28-07-2023,-500
31-07-2023,-500
01-08-2023,-500
02-08-2023,-500
03-08-2023,-500
04-08-2023,-500
07-08-2023,-500
08-08-2023,-500
09-08-2023,-500
10-08-2023,-500
11-08-2023,-500
14-08-2023,-500
15-08-2023,-500
16-08-2023,-500
17-08-2023,-500
18-08-2023,-500
21-08-2023,-500
22-08-2023,-500
23-08-2023,-500
24-08-2023,-500
25-08-2023,-500
28-08-2023,-500
29-08-2023,-500
30-08-2023,-500
31-08-2023,-500
01-09-2023,-500
04-09-2023,-500
05-09-2023,-500
06-09-2023,-500
07-09-2023,-500
08-09-2023,-500
11-09-2023,-500
12-09-2023,-500
13-09-2023,-500
14-09-2023,-500
15-09-2023,-500
18-09-2023,-500
19-09-2023,-500
20-09-2023,-500
21-09-2023,-500
22-09-2023,-500
25-09-2023,-500
26-09-2023,-500
27-09-2023,-500
28-09-2023,-500
29-09-2023,-500
02-10-2023,-500
03-10-2023,-500
04-10-2023,-500
05-10-2023,-500
06-10-2023,-500
09-10-2023,-500
10-10-2023,-500
11-10-2023,-500
12-10-2023,-500
13-10-2023,-500
16-10-2023,-500
17-10-2023,-500
18-10-2023,-500
19-10-2023,-500
20-10-2023,-500
23-10-2023,-500
24-10-2023,-500
25-10-2023,-500
26-10-2023,-500
27-10-2023,-500
30-10-2023,-500
31-10-2023,-500
01-11-2023,-500
02-11-2023,-500
03-11-2023,-500
06-11-2023,-500
07-11-2023,-500
08-11-2023,-500
09-11-2023,-500
10-11-2023,-500
13-11-2023,-500
14-11-2023,-500
15-11-2023,-500
16-11-2023,-500
17-11-2023,-500
20-11-2023,-500
21-11-2023,-500
22-11-2023,-500
23-11-2023,-500
24-11-2023,-500
27-11-2023,-500
28-11-2023,-500
29-11-2023,-500
30-11-2023,-500
01-12-2023,-500
04-12-2023,-500
05-12-2023,-500
06-12-2023,-500
07-12-2023,-500
08-12-2023,-500
11-12-2023,-500
12-12-2023,-500
13-12-2023,-500
14-12-2023,-500
15-12-2023,-500
18-12-2023,-500
19-12-2023,-500
20-12-2023,-500
21-12-2023,-500
22-12-2023,-500
27-12-2023,-500
28-12-2023,-500
29-12-2023,-500
02-01-2024,-500
03-01-2024,-500
04-01-2024,-500
05-01-2024,-500
08-01-2024,-500
09-01-2024,-500
10-01-2024,-500
11-01-2024,-500
12-01-2024,-500
15-01-2024,-500
16-01-2024,-500
17-01-2024,-500
18-01-2024,-500
19-01-2024,-500
22-01-2024,-500
23-01-2024,-500
24-01-2024,-500
25-01-2024,-500
29-01-2024,-500
30-01-2024,-500
31-01-2024,-500
01-02-2024,-500
02-02-2024,-500
05-02-2024,-500
06-02-2024,-500
07-02-2024,-500
08-02-2024,-500
09-02-2024,-500
12-02-2024,-500
13-02-2024,-500
14-02-2024,-500
15-02-2024,-500
16-02-2024,-500
19-02-2024,-500
20-02-2024,-500
21-02-2024,-500
22-02-2024,-500
23-02-2024,-500
26-02-2024,-500
27-02-2024,-500
28-02-2024,-500
29-02-2024,-500
01-03-2024,-500
04-03-2024,-500
05-03-2024,-500
06-03-2024,-500
07-03-2024,-500
08-03-2024,-500
11-03-2024,-500
12-03-2024,-500
13-03-2024,-500
14-03-2024,-500
15-03-2024,-500
18-03-2024,-500
19-03-2024,-500
20-03-2024,-500
21-03-2024,-500
22-03-2024,-500
25-03-2024,-500
26-03-2024,-500
27-03-2024,-500
28-03-2024,-500
02-04-2024,-500
03-04-2024,-500
04-04-2024,-500
05-04-2024,-500
08-04-2024,-500
09-04-2024,-500
10-04-2024,-500
11-04-2024,-500
12-04-2024,-500
15-04-2024,-500
16-04-2024,-500
17-04-2024,-500
18-04-2024,-500
19-04-2024,-500
22-04-2024,-500
23-04-2024,-500
24-04-2024,-500
26-04-2024,-500
29-04-2024,-500
30-04-2024,-500
01-05-2024,-500
02-05-2024,-500
03-05-2024,-500
06-05-2024,-500
07-05-2024,-500
08-05-2024,-500
09-05-2024,-500
10-05-2024,-500
13-05-2024,-500
14-05-2024,-500
15-05-2024,-500
16-05-2024,-500
17-05-2024,-500
20-05-2024,-500
21-05-2024,-500
22-05-2024,-500
23-05-2024,-500
24-05-2024,-500
27-05-2024,-500
28-05-2024,-500
29-05-2024,-500
30-05-2024,-500
31-05-2024,-500
03-06-2024,-500
04-06-2024,-500
05-06-2024,-500
06-06-2024,-500
07-06-2024,-500
11-06-2024,-500
12-06-2024,-500
13-06-2024,-500
14-06-2024,-500
================================================
FILE: python/rateslib/data/historical/cad_rfr.csv
================================================
reference_date,rate
12-08-1997,-500
18-08-1997,-500
19-08-1997,-500
20-08-1997,-500
21-08-1997,-500
22-08-1997,-500
25-08-1997,-500
26-08-1997,-500
27-08-1997,-500
28-08-1997,-500
02-09-1997,-500
03-09-1997,-500
04-09-1997,-500
05-09-1997,-500
08-09-1997,-500
09-09-1997,-500
10-09-1997,-500
11-09-1997,-500
12-09-1997,-500
15-09-1997,-500
16-09-1997,-500
17-09-1997,-500
18-09-1997,-500
19-09-1997,-500
22-09-1997,-500
23-09-1997,-500
24-09-1997,-500
25-09-1997,-500
26-09-1997,-500
29-09-1997,-500
30-09-1997,-500
01-10-1997,-500
02-10-1997,-500
03-10-1997,-500
06-10-1997,-500
07-10-1997,-500
08-10-1997,-500
09-10-1997,-500
10-10-1997,-500
14-10-1997,-500
15-10-1997,-500
16-10-1997,-500
17-10-1997,-500
20-10-1997,-500
21-10-1997,-500
22-10-1997,-500
23-10-1997,-500
24-10-1997,-500
27-10-1997,-500
28-10-1997,-500
29-10-1997,-500
30-10-1997,-500
31-10-1997,-500
03-11-1997,-500
04-11-1997,-500
05-11-1997,-500
06-11-1997,-500
07-11-1997,-500
10-11-1997,-500
12-11-1997,-500
13-11-1997,-500
14-11-1997,-500
17-11-1997,-500
18-11-1997,-500
19-11-1997,-500
20-11-1997,-500
21-11-1997,-500
24-11-1997,-500
25-11-1997,-500
26-11-1997,-500
27-11-1997,-500
28-11-1997,-500
01-12-1997,-500
02-12-1997,-500
03-12-1997,-500
04-12-1997,-500
05-12-1997,-500
08-12-1997,-500
09-12-1997,-500
10-12-1997,-500
11-12-1997,-500
12-12-1997,-500
15-12-1997,-500
16-12-1997,-500
17-12-1997,-500
18-12-1997,-500
19-12-1997,-500
23-12-1997,-500
24-12-1997,-500
29-12-1997,-500
30-12-1997,-500
31-12-1997,-500
02-01-1998,-500
05-01-1998,-500
06-01-1998,-500
07-01-1998,-500
08-01-1998,-500
09-01-1998,-500
12-01-1998,-500
13-01-1998,-500
14-01-1998,-500
15-01-1998,-500
16-01-1998,-500
19-01-1998,-500
20-01-1998,-500
21-01-1998,-500
22-01-1998,-500
23-01-1998,-500
26-01-1998,-500
27-01-1998,-500
28-01-1998,-500
29-01-1998,-500
30-01-1998,-500
02-02-1998,-500
03-02-1998,-500
04-02-1998,-500
05-02-1998,-500
06-02-1998,-500
09-02-1998,-500
10-02-1998,-500
11-02-1998,-500
12-02-1998,-500
13-02-1998,-500
16-02-1998,-500
17-02-1998,-500
18-02-1998,-500
19-02-1998,-500
20-02-1998,-500
23-02-1998,-500
24-02-1998,-500
25-02-1998,-500
26-02-1998,-500
27-02-1998,-500
02-03-1998,-500
03-03-1998,-500
04-03-1998,-500
05-03-1998,-500
06-03-1998,-500
09-03-1998,-500
10-03-1998,-500
11-03-1998,-500
12-03-1998,-500
13-03-1998,-500
16-03-1998,-500
17-03-1998,-500
18-03-1998,-500
19-03-1998,-500
20-03-1998,-500
23-03-1998,-500
24-03-1998,-500
25-03-1998,-500
26-03-1998,-500
27-03-1998,-500
30-03-1998,-500
31-03-1998,-500
01-04-1998,-500
02-04-1998,-500
03-04-1998,-500
06-04-1998,-500
07-04-1998,-500
08-04-1998,-500
13-04-1998,-500
14-04-1998,-500
15-04-1998,-500
16-04-1998,-500
17-04-1998,-500
20-04-1998,-500
21-04-1998,-500
22-04-1998,-500
23-04-1998,-500
24-04-1998,-500
27-04-1998,-500
28-04-1998,-500
30-04-1998,-500
01-05-1998,-500
04-05-1998,-500
05-05-1998,-500
06-05-1998,-500
07-05-1998,-500
08-05-1998,-500
11-05-1998,-500
12-05-1998,-500
13-05-1998,-500
14-05-1998,-500
15-05-1998,-500
19-05-1998,-500
20-05-1998,-500
21-05-1998,-500
22-05-1998,-500
25-05-1998,-500
26-05-1998,-500
27-05-1998,-500
28-05-1998,-500
29-05-1998,-500
01-06-1998,-500
02-06-1998,-500
03-06-1998,-500
04-06-1998,-500
05-06-1998,-500
08-06-1998,-500
09-06-1998,-500
10-06-1998,-500
11-06-1998,-500
12-06-1998,-500
15-06-1998,-500
16-06-1998,-500
17-06-1998,-500
18-06-1998,-500
19-06-1998,-500
22-06-1998,-500
23-06-1998,-500
24-06-1998,-500
25-06-1998,-500
26-06-1998,-500
29-06-1998,-500
30-06-1998,-500
02-07-1998,-500
03-07-1998,-500
06-07-1998,-500
07-07-1998,-500
08-07-1998,-500
09-07-1998,-500
10-07-1998,-500
13-07-1998,-500
14-07-1998,-500
15-07-1998,-500
16-07-1998,-500
17-07-1998,-500
20-07-1998,-500
21-07-1998,-500
22-07-1998,-500
23-07-1998,-500
24-07-1998,-500
27-07-1998,-500
28-07-1998,-500
29-07-1998,-500
30-07-1998,-500
31-07-1998,-500
04-08-1998,-500
05-08-1998,-500
06-08-1998,-500
07-08-1998,-500
10-08-1998,-500
11-08-1998,-500
12-08-1998,-500
13-08-1998,-500
14-08-1998,-500
17-08-1998,-500
18-08-1998,-500
19-08-1998,-500
20-08-1998,-500
21-08-1998,-500
24-08-1998,-500
25-08-1998,-500
26-08-1998,-500
27-08-1998,-500
28-08-1998,-500
31-08-1998,-500
01-09-1998,-500
02-09-1998,-500
03-09-1998,-500
04-09-1998,-500
08-09-1998,-500
09-09-1998,-500
10-09-1998,-500
11-09-1998,-500
14-09-1998,-500
15-09-1998,-500
16-09-1998,-500
17-09-1998,-500
18-09-1998,-500
21-09-1998,-500
22-09-1998,-500
23-09-1998,-500
24-09-1998,-500
25-09-1998,-500
28-09-1998,-500
29-09-1998,-500
30-09-1998,-500
01-10-1998,-500
02-10-1998,-500
05-10-1998,-500
06-10-1998,-500
07-10-1998,-500
08-10-1998,-500
09-10-1998,-500
13-10-1998,-500
14-10-1998,-500
15-10-1998,-500
16-10-1998,-500
19-10-1998,-500
20-10-1998,-500
21-10-1998,-500
22-10-1998,-500
23-10-1998,-500
26-10-1998,-500
27-10-1998,-500
28-10-1998,-500
29-10-1998,-500
30-10-1998,-500
02-11-1998,-500
03-11-1998,-500
04-11-1998,-500
05-11-1998,-500
06-11-1998,-500
09-11-1998,-500
10-11-1998,-500
12-11-1998,-500
13-11-1998,-500
16-11-1998,-500
17-11-1998,-500
18-11-1998,-500
19-11-1998,-500
20-11-1998,-500
23-11-1998,-500
24-11-1998,-500
25-11-1998,-500
26-11-1998,-500
27-11-1998,-500
30-11-1998,-500
01-12-1998,-500
02-12-1998,-500
03-12-1998,-500
04-12-1998,-500
07-12-1998,-500
08-12-1998,-500
09-12-1998,-500
10-12-1998,-500
11-12-1998,-500
14-12-1998,-500
15-12-1998,-500
16-12-1998,-500
17-12-1998,-500
18-12-1998,-500
21-12-1998,-500
22-12-1998,-500
23-12-1998,-500
24-12-1998,-500
29-12-1998,-500
30-12-1998,-500
31-12-1998,-500
04-01-1999,-500
05-01-1999,-500
06-01-1999,-500
07-01-1999,-500
08-01-1999,-500
11-01-1999,-500
12-01-1999,-500
13-01-1999,-500
14-01-1999,-500
15-01-1999,-500
18-01-1999,-500
19-01-1999,-500
20-01-1999,-500
21-01-1999,-500
22-01-1999,-500
25-01-1999,-500
26-01-1999,-500
27-01-1999,-500
28-01-1999,-500
29-01-1999,-500
01-02-1999,-500
02-02-1999,-500
03-02-1999,-500
04-02-1999,-500
05-02-1999,-500
08-02-1999,-500
09-02-1999,-500
10-02-1999,-500
11-02-1999,-500
12-02-1999,-500
15-02-1999,-500
16-02-1999,-500
17-02-1999,-500
18-02-1999,-500
19-02-1999,-500
22-02-1999,-500
23-02-1999,-500
24-02-1999,-500
25-02-1999,-500
26-02-1999,-500
01-03-1999,-500
02-03-1999,-500
03-03-1999,-500
04-03-1999,-500
05-03-1999,-500
08-03-1999,-500
09-03-1999,-500
10-03-1999,-500
11-03-1999,-500
12-03-1999,-500
15-03-1999,-500
16-03-1999,-500
17-03-1999,-500
18-03-1999,-500
19-03-1999,-500
22-03-1999,-500
23-03-1999,-500
24-03-1999,-500
25-03-1999,-500
26-03-1999,-500
29-03-1999,-500
30-03-1999,-500
31-03-1999,-500
01-04-1999,-500
05-04-1999,-500
06-04-1999,-500
07-04-1999,-500
08-04-1999,-500
09-04-1999,-500
12-04-1999,-500
13-04-1999,-500
14-04-1999,-500
15-04-1999,-500
16-04-1999,-500
19-04-1999,-500
20-04-1999,-500
21-04-1999,-500
22-04-1999,-500
23-04-1999,-500
26-04-1999,-500
27-04-1999,-500
28-04-1999,-500
29-04-1999,-500
30-04-1999,-500
03-05-1999,-500
04-05-1999,-500
05-05-1999,-500
06-05-1999,-500
07-05-1999,-500
10-05-1999,-500
11-05-1999,-500
12-05-1999,-500
13-05-1999,-500
14-05-1999,-500
17-05-1999,-500
18-05-1999,-500
19-05-1999,-500
20-05-1999,-500
21-05-1999,-500
25-05-1999,-500
26-05-1999,-500
27-05-1999,-500
28-05-1999,-500
31-05-1999,-500
01-06-1999,-500
02-06-1999,-500
03-06-1999,-500
04-06-1999,-500
07-06-1999,-500
08-06-1999,-500
09-06-1999,-500
10-06-1999,-500
11-06-1999,-500
14-06-1999,-500
15-06-1999,-500
16-06-1999,-500
17-06-1999,-500
18-06-1999,-500
21-06-1999,-500
22-06-1999,-500
23-06-1999,-500
24-06-1999,-500
25-06-1999,-500
28-06-1999,-500
29-06-1999,-500
30-06-1999,-500
02-07-1999,-500
05-07-1999,-500
06-07-1999,-500
07-07-1999,-500
08-07-1999,-500
09-07-1999,-500
12-07-1999,-500
13-07-1999,-500
14-07-1999,-500
15-07-1999,-500
16-07-1999,-500
19-07-1999,-500
20-07-1999,-500
21-07-1999,-500
22-07-1999,-500
23-07-1999,-500
26-07-1999,-500
27-07-1999,-500
28-07-1999,-500
29-07-1999,-500
30-07-1999,-500
03-08-1999,-500
04-08-1999,-500
05-08-1999,-500
06-08-1999,-500
09-08-1999,-500
10-08-1999,-500
11-08-1999,-500
12-08-1999,-500
13-08-1999,-500
16-08-1999,-500
17-08-1999,-500
18-08-1999,-500
19-08-1999,-500
20-08-1999,-500
23-08-1999,-500
24-08-1999,-500
25-08-1999,-500
26-08-1999,-500
27-08-1999,-500
30-08-1999,-500
31-08-1999,-500
01-09-1999,-500
02-09-1999,-500
03-09-1999,-500
07-09-1999,-500
08-09-1999,-500
09-09-1999,-500
10-09-1999,-500
13-09-1999,-500
14-09-1999,-500
15-09-1999,-500
16-09-1999,-500
17-09-1999,-500
20-09-1999,-500
21-09-1999,-500
22-09-1999,-500
23-09-1999,-500
24-09-1999,-500
27-09-1999,-500
28-09-1999,-500
29-09-1999,-500
30-09-1999,-500
01-10-1999,-500
04-10-1999,-500
05-10-1999,-500
06-10-1999,-500
07-10-1999,-500
08-10-1999,-500
12-10-1999,-500
13-10-1999,-500
14-10-1999,-500
15-10-1999,-500
18-10-1999,-500
19-10-1999,-500
20-10-1999,-500
21-10-1999,-500
22-10-1999,-500
25-10-1999,-500
26-10-1999,-500
27-10-1999,-500
28-10-1999,-500
29-10-1999,-500
01-11-1999,-500
02-11-1999,-500
03-11-1999,-500
04-11-1999,-500
05-11-1999,-500
08-11-1999,-500
09-11-1999,-500
10-11-1999,-500
12-11-1999,-500
15-11-1999,-500
16-11-1999,-500
17-11-1999,-500
18-11-1999,-500
19-11-1999,-500
22-11-1999,-500
23-11-1999,-500
24-11-1999,-500
25-11-1999,-500
26-11-1999,-500
29-11-1999,-500
30-11-1999,-500
01-12-1999,-500
02-12-1999,-500
03-12-1999,-500
06-12-1999,-500
07-12-1999,-500
08-12-1999,-500
09-12-1999,-500
10-12-1999,-500
13-12-1999,-500
14-12-1999,-500
15-12-1999,-500
16-12-1999,-500
17-12-1999,-500
20-12-1999,-500
21-12-1999,-500
22-12-1999,-500
23-12-1999,-500
24-12-1999,-500
29-12-1999,-500
30-12-1999,-500
31-12-1999,-500
04-01-2000,-500
05-01-2000,-500
06-01-2000,-500
07-01-2000,-500
10-01-2000,-500
11-01-2000,-500
12-01-2000,-500
13-01-2000,-500
14-01-2000,-500
17-01-2000,-500
18-01-2000,-500
19-01-2000,-500
20-01-2000,-500
21-01-2000,-500
24-01-2000,-500
25-01-2000,-500
26-01-2000,-500
27-01-2000,-500
28-01-2000,-500
31-01-2000,-500
01-02-2000,-500
02-02-2000,-500
03-02-2000,-500
04-02-2000,-500
07-02-2000,-500
08-02-2000,-500
09-02-2000,-500
10-02-2000,-500
11-02-2000,-500
14-02-2000,-500
15-02-2000,-500
16-02-2000,-500
17-02-2000,-500
18-02-2000,-500
21-02-2000,-500
22-02-2000,-500
23-02-2000,-500
24-02-2000,-500
25-02-2000,-500
28-02-2000,-500
29-02-2000,-500
01-03-2000,-500
02-03-2000,-500
03-03-2000,-500
06-03-2000,-500
07-03-2000,-500
08-03-2000,-500
09-03-2000,-500
10-03-2000,-500
13-03-2000,-500
14-03-2000,-500
15-03-2000,-500
16-03-2000,-500
17-03-2000,-500
20-03-2000,-500
21-03-2000,-500
22-03-2000,-500
23-03-2000,-500
24-03-2000,-500
27-03-2000,-500
28-03-2000,-500
29-03-2000,-500
30-03-2000,-500
31-03-2000,-500
03-04-2000,-500
04-04-2000,-500
05-04-2000,-500
06-04-2000,-500
07-04-2000,-500
10-04-2000,-500
11-04-2000,-500
12-04-2000,-500
13-04-2000,-500
14-04-2000,-500
17-04-2000,-500
18-04-2000,-500
19-04-2000,-500
20-04-2000,-500
24-04-2000,-500
25-04-2000,-500
26-04-2000,-500
27-04-2000,-500
28-04-2000,-500
01-05-2000,-500
02-05-2000,-500
03-05-2000,-500
04-05-2000,-500
05-05-2000,-500
08-05-2000,-500
09-05-2000,-500
10-05-2000,-500
11-05-2000,-500
12-05-2000,-500
15-05-2000,-500
16-05-2000,-500
17-05-2000,-500
18-05-2000,-500
19-05-2000,-500
23-05-2000,-500
24-05-2000,-500
25-05-2000,-500
26-05-2000,-500
29-05-2000,-500
30-05-2000,-500
31-05-2000,-500
01-06-2000,-500
02-06-2000,-500
05-06-2000,-500
06-06-2000,-500
07-06-2000,-500
08-06-2000,-500
09-06-2000,-500
12-06-2000,-500
13-06-2000,-500
14-06-2000,-500
15-06-2000,-500
16-06-2000,-500
19-06-2000,-500
20-06-2000,-500
21-06-2000,-500
22-06-2000,-500
23-06-2000,-500
26-06-2000,-500
27-06-2000,-500
28-06-2000,-500
29-06-2000,-500
30-06-2000,-500
04-07-2000,-500
05-07-2000,-500
06-07-2000,-500
07-07-2000,-500
10-07-2000,-500
11-07-2000,-500
12-07-2000,-500
13-07-2000,-500
14-07-2000,-500
17-07-2000,-500
18-07-2000,-500
19-07-2000,-500
20-07-2000,-500
21-07-2000,-500
24-07-2000,-500
25-07-2000,-500
26-07-2000,-500
27-07-2000,-500
28-07-2000,-500
31-07-2000,-500
01-08-2000,-500
02-08-2000,-500
03-08-2000,-500
04-08-2000,-500
08-08-2000,-500
09-08-2000,-500
10-08-2000,-500
11-08-2000,-500
14-08-2000,-500
15-08-2000,-500
16-08-2000,-500
17-08-2000,-500
18-08-2000,-500
21-08-2000,-500
22-08-2000,-500
23-08-2000,-500
24-08-2000,-500
25-08-2000,-500
28-08-2000,-500
29-08-2000,-500
30-08-2000,-500
31-08-2000,-500
01-09-2000,-500
05-09-2000,-500
06-09-2000,-500
07-09-2000,-500
08-09-2000,-500
11-09-2000,-500
12-09-2000,-500
13-09-2000,-500
14-09-2000,-500
15-09-2000,-500
18-09-2000,-500
19-09-2000,-500
20-09-2000,-500
21-09-2000,-500
22-09-2000,-500
25-09-2000,-500
26-09-2000,-500
27-09-2000,-500
28-09-2000,-500
29-09-2000,-500
02-10-2000,-500
03-10-2000,-500
04-10-2000,-500
05-10-2000,-500
06-10-2000,-500
10-10-2000,-500
11-10-2000,-500
12-10-2000,-500
13-10-2000,-500
16-10-2000,-500
17-10-2000,-500
18-10-2000,-500
19-10-2000,-500
20-10-2000,-500
23-10-2000,-500
24-10-2000,-500
25-10-2000,-500
26-10-2000,-500
27-10-2000,-500
30-10-2000,-500
31-10-2000,-500
01-11-2000,-500
02-11-2000,-500
03-11-2000,-500
06-11-2000,-500
07-11-2000,-500
08-11-2000,-500
09-11-2000,-500
10-11-2000,-500
14-11-2000,-500
15-11-2000,-500
16-11-2000,-500
17-11-2000,-500
20-11-2000,-500
21-11-2000,-500
22-11-2000,-500
23-11-2000,-500
24-11-2000,-500
27-11-2000,-500
28-11-2000,-500
29-11-2000,-500
30-11-2000,-500
01-12-2000,-500
04-12-2000,-500
05-12-2000,-500
06-12-2000,-500
07-12-2000,-500
08-12-2000,-500
11-12-2000,-500
12-12-2000,-500
13-12-2000,-500
14-12-2000,-500
15-12-2000,-500
18-12-2000,-500
19-12-2000,-500
20-12-2000,-500
21-12-2000,-500
22-12-2000,-500
27-12-2000,-500
28-12-2000,-500
29-12-2000,-500
02-01-2001,-500
03-01-2001,-500
04-01-2001,-500
05-01-2001,-500
08-01-2001,-500
09-01-2001,-500
10-01-2001,-500
11-01-2001,-500
12-01-2001,-500
15-01-2001,-500
16-01-2001,-500
17-01-2001,-500
18-01-2001,-500
19-01-2001,-500
22-01-2001,-500
23-01-2001,-500
24-01-2001,-500
25-01-2001,-500
26-01-2001,-500
29-01-2001,-500
30-01-2001,-500
31-01-2001,-500
01-02-2001,-500
02-02-2001,-500
05-02-2001,-500
06-02-2001,-500
07-02-2001,-500
08-02-2001,-500
09-02-2001,-500
12-02-2001,-500
13-02-2001,-500
14-02-2001,-500
15-02-2001,-500
16-02-2001,-500
19-02-2001,-500
20-02-2001,-500
21-02-2001,-500
22-02-2001,-500
23-02-2001,-500
26-02-2001,-500
27-02-2001,-500
28-02-2001,-500
01-03-2001,-500
02-03-2001,-500
05-03-2001,-500
06-03-2001,-500
07-03-2001,-500
08-03-2001,-500
09-03-2001,-500
12-03-2001,-500
13-03-2001,-500
14-03-2001,-500
15-03-2001,-500
16-03-2001,-500
19-03-2001,-500
20-03-2001,-500
21-03-2001,-500
22-03-2001,-500
23-03-2001,-500
26-03-2001,-500
27-03-2001,-500
28-03-2001,-500
29-03-2001,-500
30-03-2001,-500
02-04-2001,-500
03-04-2001,-500
04-04-2001,-500
05-04-2001,-500
06-04-2001,-500
09-04-2001,-500
10-04-2001,-500
11-04-2001,-500
12-04-2001,-500
16-04-2001,-500
17-04-2001,-500
18-04-2001,-500
19-04-2001,-500
20-04-2001,-500
23-04-2001,-500
24-04-2001,-500
25-04-2001,-500
26-04-2001,-500
27-04-2001,-500
30-04-2001,-500
01-05-2001,-500
02-05-2001,-500
03-05-2001,-500
04-05-2001,-500
07-05-2001,-500
08-05-2001,-500
09-05-2001,-500
10-05-2001,-500
11-05-2001,-500
14-05-2001,-500
15-05-2001,-500
16-05-2001,-500
17-05-2001,-500
18-05-2001,-500
22-05-2001,-500
23-05-2001,-500
24-05-2001,-500
25-05-2001,-500
28-05-2001,-500
29-05-2001,-500
30-05-2001,-500
31-05-2001,-500
01-06-2001,-500
04-06-2001,-500
05-06-2001,-500
06-06-2001,-500
07-06-2001,-500
08-06-2001,-500
11-06-2001,-500
12-06-2001,-500
13-06-2001,-500
14-06-2001,-500
15-06-2001,-500
18-06-2001,-500
19-06-2001,-500
20-06-2001,-500
21-06-2001,-500
22-06-2001,-500
25-06-2001,-500
26-06-2001,-500
27-06-2001,-500
28-06-2001,-500
29-06-2001,-500
03-07-2001,-500
04-07-2001,-500
05-07-2001,-500
06-07-2001,-500
09-07-2001,-500
10-07-2001,-500
11-07-2001,-500
12-07-2001,-500
13-07-2001,-500
16-07-2001,-500
17-07-2001,-500
18-07-2001,-500
19-07-2001,-500
20-07-2001,-500
23-07-2001,-500
24-07-2001,-500
25-07-2001,-500
26-07-2001,-500
27-07-2001,-500
30-07-2001,-500
31-07-2001,-500
01-08-2001,-500
02-08-2001,-500
03-08-2001,-500
07-08-2001,-500
08-08-2001,-500
09-08-2001,-500
10-08-2001,-500
13-08-2001,-500
14-08-2001,-500
15-08-2001,-500
16-08-2001,-500
17-08-2001,-500
20-08-2001,-500
21-08-2001,-500
22-08-2001,-500
23-08-2001,-500
24-08-2001,-500
27-08-2001,-500
28-08-2001,-500
29-08-2001,-500
30-08-2001,-500
31-08-2001,-500
04-09-2001,-500
05-09-2001,-500
06-09-2001,-500
07-09-2001,-500
10-09-2001,-500
11-09-2001,-500
12-09-2001,-500
13-09-2001,-500
14-09-2001,-500
17-09-2001,-500
18-09-2001,-500
19-09-2001,-500
20-09-2001,-500
21-09-2001,-500
24-09-2001,-500
25-09-2001,-500
26-09-2001,-500
27-09-2001,-500
28-09-2001,-500
01-10-2001,-500
02-10-2001,-500
03-10-2001,-500
04-10-2001,-500
05-10-2001,-500
09-10-2001,-500
10-10-2001,-500
11-10-2001,-500
12-10-2001,-500
15-10-2001,-500
16-10-2001,-500
17-10-2001,-500
18-10-2001,-500
19-10-2001,-500
22-10-2001,-500
23-10-2001,-500
24-10-2001,-500
25-10-2001,-500
26-10-2001,-500
29-10-2001,-500
30-10-2001,-500
31-10-2001,-500
01-11-2001,-500
02-11-2001,-500
05-11-2001,-500
06-11-2001,-500
07-11-2001,-500
08-11-2001,-500
09-11-2001,-500
13-11-2001,-500
14-11-2001,-500
15-11-2001,-500
16-11-2001,-500
19-11-2001,-500
20-11-2001,-500
21-11-2001,-500
22-11-2001,-500
23-11-2001,-500
26-11-2001,-500
27-11-2001,-500
28-11-2001,-500
29-11-2001,-500
30-11-2001,-500
03-12-2001,-500
04-12-2001,-500
05-12-2001,-500
06-12-2001,-500
07-12-2001,-500
10-12-2001,-500
11-12-2001,-500
12-12-2001,-500
13-12-2001,-500
14-12-2001,-500
17-12-2001,-500
18-12-2001,-500
19-12-2001,-500
20-12-2001,-500
21-12-2001,-500
24-12-2001,-500
27-12-2001,-500
28-12-2001,-500
31-12-2001,-500
02-01-2002,-500
03-01-2002,-500
04-01-2002,-500
07-01-2002,-500
08-01-2002,-500
09-01-2002,-500
10-01-2002,-500
11-01-2002,-500
14-01-2002,-500
15-01-2002,-500
16-01-2002,-500
17-01-2002,-500
18-01-2002,-500
21-01-2002,-500
22-01-2002,-500
23-01-2002,-500
24-01-2002,-500
25-01-2002,-500
28-01-2002,-500
29-01-2002,-500
30-01-2002,-500
31-01-2002,-500
01-02-2002,-500
04-02-2002,-500
05-02-2002,-500
06-02-2002,-500
07-02-2002,-500
08-02-2002,-500
11-02-2002,-500
12-02-2002,-500
13-02-2002,-500
14-02-2002,-500
15-02-2002,-500
18-02-2002,-500
19-02-2002,-500
20-02-2002,-500
21-02-2002,-500
22-02-2002,-500
25-02-2002,-500
26-02-2002,-500
27-02-2002,-500
28-02-2002,-500
01-03-2002,-500
04-03-2002,-500
05-03-2002,-500
06-03-2002,-500
07-03-2002,-500
08-03-2002,-500
11-03-2002,-500
12-03-2002,-500
13-03-2002,-500
14-03-2002,-500
15-03-2002,-500
18-03-2002,-500
19-03-2002,-500
20-03-2002,-500
21-03-2002,-500
22-03-2002,-500
25-03-2002,-500
26-03-2002,-500
27-03-2002,-500
28-03-2002,-500
01-04-2002,-500
02-04-2002,-500
03-04-2002,-500
04-04-2002,-500
05-04-2002,-500
08-04-2002,-500
09-04-2002,-500
10-04-2002,-500
11-04-2002,-500
12-04-2002,-500
15-04-2002,-500
16-04-2002,-500
17-04-2002,-500
18-04-2002,-500
19-04-2002,-500
22-04-2002,-500
23-04-2002,-500
24-04-2002,-500
25-04-2002,-500
26-04-2002,-500
29-04-2002,-500
30-04-2002,-500
01-05-2002,-500
02-05-2002,-500
03-05-2002,-500
06-05-2002,-500
07-05-2002,-500
08-05-2002,-500
09-05-2002,-500
10-05-2002,-500
13-05-2002,-500
14-05-2002,-500
15-05-2002,-500
16-05-2002,-500
17-05-2002,-500
21-05-2002,-500
22-05-2002,-500
23-05-2002,-500
24-05-2002,-500
27-05-2002,-500
28-05-2002,-500
29-05-2002,-500
30-05-2002,-500
31-05-2002,-500
03-06-2002,-500
04-06-2002,-500
05-06-2002,-500
06-06-2002,-500
07-06-2002,-500
10-06-2002,-500
11-06-2002,-500
12-06-2002,-500
13-06-2002,-500
14-06-2002,-500
17-06-2002,-500
18-06-2002,-500
19-06-2002,-500
20-06-2002,-500
21-06-2002,-500
24-06-2002,-500
25-06-2002,-500
26-06-2002,-500
27-06-2002,-500
28-06-2002,-500
02-07-2002,-500
03-07-2002,-500
04-07-2002,-500
05-07-2002,-500
08-07-2002,-500
09-07-2002,-500
10-07-2002,-500
11-07-2002,-500
12-07-2002,-500
15-07-2002,-500
16-07-2002,-500
17-07-2002,-500
18-07-2002,-500
19-07-2002,-500
22-07-2002,-500
23-07-2002,-500
24-07-2002,-500
25-07-2002,-500
26-07-2002,-500
29-07-2002,-500
30-07-2002,-500
31-07-2002,-500
01-08-2002,-500
02-08-2002,-500
06-08-2002,-500
07-08-2002,-500
08-08-2002,-500
09-08-2002,-500
12-08-2002,-500
13-08-2002,-500
14-08-2002,-500
15-08-2002,-500
16-08-2002,-500
19-08-2002,-500
20-08-2002,-500
21-08-2002,-500
22-08-2002,-500
23-08-2002,-500
26-08-2002,-500
27-08-2002,-500
28-08-2002,-500
29-08-2002,-500
30-08-2002,-500
03-09-2002,-500
04-09-2002,-500
05-09-2002,-500
06-09-2002,-500
09-09-2002,-500
10-09-2002,-500
11-09-2002,-500
12-09-2002,-500
13-09-2002,-500
16-09-2002,-500
17-09-2002,-500
18-09-2002,-500
19-09-2002,-500
20-09-2002,-500
23-09-2002,-500
24-09-2002,-500
25-09-2002,-500
26-09-2002,-500
27-09-2002,-500
30-09-2002,-500
01-10-2002,-500
02-10-2002,-500
03-10-2002,-500
04-10-2002,-500
07-10-2002,-500
08-10-2002,-500
09-10-2002,-500
10-10-2002,-500
11-10-2002,-500
15-10-2002,-500
16-10-2002,-500
17-10-2002,-500
18-10-2002,-500
21-10-2002,-500
22-10-2002,-500
23-10-2002,-500
24-10-2002,-500
25-10-2002,-500
28-10-2002,-500
29-10-2002,-500
30-10-2002,-500
31-10-2002,-500
01-11-2002,-500
04-11-2002,-500
05-11-2002,-500
06-11-2002,-500
07-11-2002,-500
08-11-2002,-500
12-11-2002,-500
13-11-2002,-500
14-11-2002,-500
15-11-2002,-500
18-11-2002,-500
19-11-2002,-500
20-11-2002,-500
21-11-2002,-500
22-11-2002,-500
25-11-2002,-500
26-11-2002,-500
27-11-2002,-500
28-11-2002,-500
29-11-2002,-500
02-12-2002,-500
03-12-2002,-500
04-12-2002,-500
05-12-2002,-500
06-12-2002,-500
09-12-2002,-500
10-12-2002,-500
11-12-2002,-500
12-12-2002,-500
13-12-2002,-500
16-12-2002,-500
17-12-2002,-500
18-12-2002,-500
19-12-2002,-500
20-12-2002,-500
23-12-2002,-500
24-12-2002,-500
27-12-2002,-500
30-12-2002,-500
31-12-2002,-500
02-01-2003,-500
03-01-2003,-500
06-01-2003,-500
07-01-2003,-500
08-01-2003,-500
09-01-2003,-500
10-01-2003,-500
13-01-2003,-500
14-01-2003,-500
15-01-2003,-500
16-01-2003,-500
17-01-2003,-500
20-01-2003,-500
21-01-2003,-500
22-01-2003,-500
23-01-2003,-500
24-01-2003,-500
27-01-2003,-500
28-01-2003,-500
29-01-2003,-500
30-01-2003,-500
31-01-2003,-500
03-02-2003,-500
04-02-2003,-500
05-02-2003,-500
06-02-2003,-500
07-02-2003,-500
10-02-2003,-500
11-02-2003,-500
12-02-2003,-500
13-02-2003,-500
14-02-2003,-500
17-02-2003,-500
18-02-2003,-500
19-02-2003,-500
20-02-2003,-500
21-02-2003,-500
24-02-2003,-500
25-02-2003,-500
26-02-2003,-500
27-02-2003,-500
28-02-2003,-500
03-03-2003,-500
04-03-2003,-500
05-03-2003,-500
06-03-2003,-500
07-03-2003,-500
10-03-2003,-500
11-03-2003,-500
12-03-2003,-500
13-03-2003,-500
14-03-2003,-500
17-03-2003,-500
18-03-2003,-500
19-03-2003,-500
20-03-2003,-500
21-03-2003,-500
24-03-2003,-500
25-03-2003,-500
26-03-2003,-500
27-03-2003,-500
28-03-2003,-500
31-03-2003,-500
01-04-2003,-500
02-04-2003,-500
03-04-2003,-500
04-04-2003,-500
07-04-2003,-500
08-04-2003,-500
09-04-2003,-500
10-04-2003,-500
11-04-2003,-500
14-04-2003,-500
15-04-2003,-500
16-04-2003,-500
17-04-2003,-500
21-04-2003,-500
22-04-2003,-500
23-04-2003,-500
24-04-2003,-500
25-04-2003,-500
28-04-2003,-500
29-04-2003,-500
30-04-2003,-500
01-05-2003,-500
02-05-2003,-500
05-05-2003,-500
06-05-2003,-500
07-05-2003,-500
08-05-2003,-500
09-05-2003,-500
12-05-2003,-500
13-05-2003,-500
14-05-2003,-500
15-05-2003,-500
16-05-2003,-500
20-05-2003,-500
21-05-2003,-500
22-05-2003,-500
23-05-2003,-500
26-05-2003,-500
27-05-2003,-500
28-05-2003,-500
29-05-2003,-500
30-05-2003,-500
02-06-2003,-500
03-06-2003,-500
04-06-2003,-500
05-06-2003,-500
06-06-2003,-500
09-06-2003,-500
10-06-2003,-500
11-06-2003,-500
12-06-2003,-500
13-06-2003,-500
16-06-2003,-500
17-06-2003,-500
18-06-2003,-500
19-06-2003,-500
20-06-2003,-500
23-06-2003,-500
24-06-2003,-500
25-06-2003,-500
26-06-2003,-500
27-06-2003,-500
30-06-2003,-500
02-07-2003,-500
03-07-2003,-500
04-07-2003,-500
07-07-2003,-500
08-07-2003,-500
09-07-2003,-500
10-07-2003,-500
11-07-2003,-500
14-07-2003,-500
15-07-2003,-500
16-07-2003,-500
17-07-2003,-500
18-07-2003,-500
21-07-2003,-500
22-07-2003,-500
23-07-2003,-500
24-07-2003,-500
25-07-2003,-500
28-07-2003,-500
29-07-2003,-500
30-07-2003,-500
31-07-2003,-500
01-08-2003,-500
05-08-2003,-500
06-08-2003,-500
07-08-2003,-500
08-08-2003,-500
11-08-2003,-500
12-08-2003,-500
13-08-2003,-500
14-08-2003,-500
15-08-2003,-500
18-08-2003,-500
19-08-2003,-500
20-08-2003,-500
21-08-2003,-500
22-08-2003,-500
25-08-2003,-500
26-08-2003,-500
27-08-2003,-500
28-08-2003,-500
29-08-2003,-500
02-09-2003,-500
03-09-2003,-500
04-09-2003,-500
05-09-2003,-500
08-09-2003,-500
09-09-2003,-500
10-09-2003,-500
11-09-2003,-500
12-09-2003,-500
15-09-2003,-500
16-09-2003,-500
17-09-2003,-500
18-09-2003,-500
19-09-2003,-500
22-09-2003,-500
23-09-2003,-500
24-09-2003,-500
25-09-2003,-500
26-09-2003,-500
29-09-2003,-500
30-09-2003,-500
01-10-2003,-500
02-10-2003,-500
03-10-2003,-500
06-10-2003,-500
07-10-2003,-500
08-10-2003,-500
09-10-2003,-500
10-10-2003,-500
14-10-2003,-500
15-10-2003,-500
16-10-2003,-500
17-10-2003,-500
20-10-2003,-500
21-10-2003,-500
22-10-2003,-500
23-10-2003,-500
24-10-2003,-500
27-10-2003,-500
28-10-2003,-500
29-10-2003,-500
30-10-2003,-500
31-10-2003,-500
03-11-2003,-500
04-11-2003,-500
05-11-2003,-500
06-11-2003,-500
07-11-2003,-500
10-11-2003,-500
12-11-2003,-500
13-11-2003,-500
14-11-2003,-500
17-11-2003,-500
18-11-2003,-500
19-11-2003,-500
20-11-2003,-500
21-11-2003,-500
24-11-2003,-500
25-11-2003,-500
26-11-2003,-500
27-11-2003,-500
28-11-2003,-500
01-12-2003,-500
02-12-2003,-500
03-12-2003,-500
04-12-2003,-500
05-12-2003,-500
08-12-2003,-500
09-12-2003,-500
10-12-2003,-500
11-12-2003,-500
12-12-2003,-500
15-12-2003,-500
16-12-2003,-500
17-12-2003,-500
18-12-2003,-500
19-12-2003,-500
22-12-2003,-500
23-12-2003,-500
24-12-2003,-500
29-12-2003,-500
30-12-2003,-500
31-12-2003,-500
02-01-2004,-500
05-01-2004,-500
06-01-2004,-500
07-01-2004,-500
08-01-2004,-500
09-01-2004,-500
12-01-2004,-500
13-01-2004,-500
14-01-2004,-500
15-01-2004,-500
16-01-2004,-500
19-01-2004,-500
20-01-2004,-500
21-01-2004,-500
22-01-2004,-500
23-01-2004,-500
26-01-2004,-500
27-01-2004,-500
28-01-2004,-500
29-01-2004,-500
30-01-2004,-500
02-02-2004,-500
03-02-2004,-500
04-02-2004,-500
05-02-2004,-500
06-02-2004,-500
09-02-2004,-500
10-02-2004,-500
11-02-2004,-500
12-02-2004,-500
13-02-2004,-500
16-02-2004,-500
17-02-2004,-500
18-02-2004,-500
19-02-2004,-500
20-02-2004,-500
23-02-2004,-500
24-02-2004,-500
25-02-2004,-500
26-02-2004,-500
27-02-2004,-500
01-03-2004,-500
02-03-2004,-500
03-03-2004,-500
04-03-2004,-500
05-03-2004,-500
08-03-2004,-500
09-03-2004,-500
10-03-2004,-500
11-03-2004,-500
12-03-2004,-500
15-03-2004,-500
16-03-2004,-500
17-03-2004,-500
18-03-2004,-500
19-03-2004,-500
22-03-2004,-500
23-03-2004,-500
24-03-2004,-500
25-03-2004,-500
26-03-2004,-500
29-03-2004,-500
30-03-2004,-500
31-03-2004,-500
01-04-2004,-500
02-04-2004,-500
05-04-2004,-500
06-04-2004,-500
07-04-2004,-500
08-04-2004,-500
12-04-2004,-500
13-04-2004,-500
14-04-2004,-500
15-04-2004,-500
16-04-2004,-500
19-04-2004,-500
20-04-2004,-500
21-04-2004,-500
22-04-2004,-500
23-04-2004,-500
26-04-2004,-500
27-04-2004,-500
28-04-2004,-500
29-04-2004,-500
30-04-2004,-500
03-05-2004,-500
04-05-2004,-500
05-05-2004,-500
06-05-2004,-500
07-05-2004,-500
10-05-2004,-500
11-05-2004,-500
12-05-2004,-500
13-05-2004,-500
14-05-2004,-500
17-05-2004,-500
18-05-2004,-500
19-05-2004,-500
20-05-2004,-500
21-05-2004,-500
25-05-2004,-500
26-05-2004,-500
27-05-2004,-500
28-05-2004,-500
31-05-2004,-500
01-06-2004,-500
02-06-2004,-500
03-06-2004,-500
04-06-2004,-500
07-06-2004,-500
08-06-2004,-500
09-06-2004,-500
10-06-2004,-500
11-06-2004,-500
14-06-2004,-500
15-06-2004,-500
16-06-2004,-500
17-06-2004,-500
18-06-2004,-500
21-06-2004,-500
22-06-2004,-500
23-06-2004,-500
24-06-2004,-500
25-06-2004,-500
28-06-2004,-500
29-06-2004,-500
30-06-2004,-500
02-07-2004,-500
05-07-2004,-500
06-07-2004,-500
07-07-2004,-500
08-07-2004,-500
09-07-2004,-500
12-07-2004,-500
13-07-2004,-500
14-07-2004,-500
15-07-2004,-500
16-07-2004,-500
19-07-2004,-500
20-07-2004,-500
21-07-2004,-500
22-07-2004,-500
23-07-2004,-500
26-07-2004,-500
27-07-2004,-500
28-07-2004,-500
29-07-2004,-500
30-07-2004,-500
03-08-2004,-500
04-08-2004,-500
05-08-2004,-500
06-08-2004,-500
09-08-2004,-500
10-08-2004,-500
11-08-2004,-500
12-08-2004,-500
13-08-2004,-500
16-08-2004,-500
17-08-2004,-500
18-08-2004,-500
19-08-2004,-500
20-08-2004,-500
23-08-2004,-500
24-08-2004,-500
25-08-2004,-500
26-08-2004,-500
27-08-2004,-500
30-08-2004,-500
31-08-2004,-500
01-09-2004,-500
02-09-2004,-500
03-09-2004,-500
07-09-2004,-500
08-09-2004,-500
09-09-2004,-500
10-09-2004,-500
13-09-2004,-500
14-09-2004,-500
15-09-2004,-500
16-09-2004,-500
17-09-2004,-500
20-09-2004,-500
21-09-2004,-500
22-09-2004,-500
23-09-2004,-500
24-09-2004,-500
27-09-2004,-500
28-09-2004,-500
29-09-2004,-500
30-09-2004,-500
01-10-2004,-500
04-10-2004,-500
05-10-2004,-500
06-10-2004,-500
07-10-2004,-500
08-10-2004,-500
12-10-2004,-500
13-10-2004,-500
14-10-2004,-500
15-10-2004,-500
18-10-2004,-500
19-10-2004,-500
20-10-2004,-500
21-10-2004,-500
22-10-2004,-500
25-10-2004,-500
26-10-2004,-500
27-10-2004,-500
28-10-2004,-500
29-10-2004,-500
01-11-2004,-500
02-11-2004,-500
03-11-2004,-500
04-11-2004,-500
05-11-2004,-500
08-11-2004,-500
09-11-2004,-500
10-11-2004,-500
12-11-2004,-500
15-11-2004,-500
16-11-2004,-500
17-11-2004,-500
18-11-2004,-500
19-11-2004,-500
22-11-2004,-500
23-11-2004,-500
24-11-2004,-500
25-11-2004,-500
26-11-2004,-500
29-11-2004,-500
30-11-2004,-500
01-12-2004,-500
02-12-2004,-500
03-12-2004,-500
06-12-2004,-500
07-12-2004,-500
08-12-2004,-500
09-12-2004,-500
10-12-2004,-500
13-12-2004,-500
14-12-2004,-500
15-12-2004,-500
16-12-2004,-500
17-12-2004,-500
20-12-2004,-500
21-12-2004,-500
22-12-2004,-500
23-12-2004,-500
24-12-2004,-500
29-12-2004,-500
30-12-2004,-500
31-12-2004,-500
04-01-2005,-500
05-01-2005,-500
06-01-2005,-500
07-01-2005,-500
10-01-2005,-500
11-01-2005,-500
12-01-2005,-500
13-01-2005,-500
14-01-2005,-500
17-01-2005,-500
18-01-2005,-500
19-01-2005,-500
20-01-2005,-500
21-01-2005,-500
24-01-2005,-500
25-01-2005,-500
26-01-2005,-500
27-01-2005,-500
28-01-2005,-500
31-01-2005,-500
01-02-2005,-500
02-02-2005,-500
03-02-2005,-500
04-02-2005,-500
07-02-2005,-500
08-02-2005,-500
09-02-2005,-500
10-02-2005,-500
11-02-2005,-500
14-02-2005,-500
15-02-2005,-500
16-02-2005,-500
17-02-2005,-500
18-02-2005,-500
21-02-2005,-500
22-02-2005,-500
23-02-2005,-500
24-02-2005,-500
25-02-2005,-500
28-02-2005,-500
01-03-2005,-500
02-03-2005,-500
03-03-2005,-500
04-03-2005,-500
07-03-2005,-500
08-03-2005,-500
09-03-2005,-500
10-03-2005,-500
11-03-2005,-500
14-03-2005,-500
15-03-2005,-500
16-03-2005,-500
17-03-2005,-500
18-03-2005,-500
21-03-2005,-500
22-03-2005,-500
23-03-2005,-500
24-03-2005,-500
28-03-2005,-500
29-03-2005,-500
30-03-2005,-500
31-03-2005,-500
01-04-2005,-500
04-04-2005,-500
05-04-2005,-500
06-04-2005,-500
07-04-2005,-500
08-04-2005,-500
11-04-2005,-500
12-04-2005,-500
13-04-2005,-500
14-04-2005,-500
15-04-2005,-500
18-04-2005,-500
19-04-2005,-500
20-04-2005,-500
21-04-2005,-500
22-04-2005,-500
25-04-2005,-500
26-04-2005,-500
27-04-2005,-500
28-04-2005,-500
29-04-2005,-500
02-05-2005,-500
03-05-2005,-500
04-05-2005,-500
05-05-2005,-500
06-05-2005,-500
09-05-2005,-500
10-05-2005,-500
11-05-2005,-500
12-05-2005,-500
13-05-2005,-500
16-05-2005,-500
17-05-2005,-500
18-05-2005,-500
19-05-2005,-500
20-05-2005,-500
24-05-2005,-500
25-05-2005,-500
26-05-2005,-500
27-05-2005,-500
30-05-2005,-500
31-05-2005,-500
01-06-2005,-500
02-06-2005,-500
03-06-2005,-500
06-06-2005,-500
07-06-2005,-500
08-06-2005,-500
09-06-2005,-500
10-06-2005,-500
13-06-2005,-500
14-06-2005,-500
15-06-2005,-500
16-06-2005,-500
17-06-2005,-500
20-06-2005,-500
21-06-2005,-500
22-06-2005,-500
23-06-2005,-500
24-06-2005,-500
27-06-2005,-500
28-06-2005,-500
29-06-2005,-500
30-06-2005,-500
04-07-2005,-500
05-07-2005,-500
06-07-2005,-500
07-07-2005,-500
08-07-2005,-500
11-07-2005,-500
12-07-2005,-500
13-07-2005,-500
14-07-2005,-500
15-07-2005,-500
18-07-2005,-500
19-07-2005,-500
20-07-2005,-500
21-07-2005,-500
22-07-2005,-500
25-07-2005,-500
26-07-2005,-500
27-07-2005,-500
28-07-2005,-500
29-07-2005,-500
02-08-2005,-500
03-08-2005,-500
04-08-2005,-500
05-08-2005,-500
08-08-2005,-500
09-08-2005,-500
10-08-2005,-500
11-08-2005,-500
12-08-2005,-500
15-08-2005,-500
16-08-2005,-500
17-08-2005,-500
18-08-2005,-500
19-08-2005,-500
22-08-2005,-500
23-08-2005,-500
24-08-2005,-500
25-08-2005,-500
26-08-2005,-500
29-08-2005,-500
30-08-2005,-500
31-08-2005,-500
01-09-2005,-500
02-09-2005,-500
06-09-2005,-500
07-09-2005,-500
08-09-2005,-500
09-09-2005,-500
12-09-2005,-500
13-09-2005,-500
14-09-2005,-500
15-09-2005,-500
16-09-2005,-500
19-09-2005,-500
20-09-2005,-500
21-09-2005,-500
22-09-2005,-500
23-09-2005,-500
26-09-2005,-500
27-09-2005,-500
28-09-2005,-500
29-09-2005,-500
30-09-2005,-500
03-10-2005,-500
04-10-2005,-500
05-10-2005,-500
06-10-2005,-500
07-10-2005,-500
11-10-2005,-500
12-10-2005,-500
13-10-2005,-500
14-10-2005,-500
17-10-2005,-500
18-10-2005,-500
19-10-2005,-500
20-10-2005,-500
21-10-2005,-500
24-10-2005,-500
25-10-2005,-500
26-10-2005,-500
27-10-2005,-500
28-10-2005,-500
31-10-2005,-500
01-11-2005,-500
02-11-2005,-500
03-11-2005,-500
04-11-2005,-500
07-11-2005,-500
08-11-2005,-500
09-11-2005,-500
10-11-2005,-500
14-11-2005,-500
15-11-2005,-500
16-11-2005,-500
17-11-2005,-500
18-11-2005,-500
21-11-2005,-500
22-11-2005,-500
23-11-2005,-500
24-11-2005,-500
25-11-2005,-500
28-11-2005,-500
29-11-2005,-500
30-11-2005,-500
01-12-2005,-500
02-12-2005,-500
05-12-2005,-500
06-12-2005,-500
07-12-2005,-500
08-12-2005,-500
09-12-2005,-500
12-12-2005,-500
13-12-2005,-500
14-12-2005,-500
15-12-2005,-500
16-12-2005,-500
19-12-2005,-500
20-12-2005,-500
21-12-2005,-500
22-12-2005,-500
23-12-2005,-500
28-12-2005,-500
29-12-2005,-500
30-12-2005,-500
03-01-2006,-500
04-01-2006,-500
05-01-2006,-500
06-01-2006,-500
09-01-2006,-500
10-01-2006,-500
11-01-2006,-500
12-01-2006,-500
13-01-2006,-500
16-01-2006,-500
17-01-2006,-500
18-01-2006,-500
19-01-2006,-500
20-01-2006,-500
23-01-2006,-500
24-01-2006,-500
25-01-2006,-500
26-01-2006,-500
27-01-2006,-500
30-01-2006,-500
31-01-2006,-500
01-02-2006,-500
02-02-2006,-500
03-02-2006,-500
06-02-2006,-500
07-02-2006,-500
08-02-2006,-500
09-02-2006,-500
10-02-2006,-500
13-02-2006,-500
14-02-2006,-500
15-02-2006,-500
16-02-2006,-500
17-02-2006,-500
20-02-2006,-500
21-02-2006,-500
22-02-2006,-500
23-02-2006,-500
24-02-2006,-500
27-02-2006,-500
28-02-2006,-500
01-03-2006,-500
02-03-2006,-500
03-03-2006,-500
06-03-2006,-500
07-03-2006,-500
08-03-2006,-500
09-03-2006,-500
10-03-2006,-500
13-03-2006,-500
14-03-2006,-500
15-03-2006,-500
16-03-2006,-500
17-03-2006,-500
20-03-2006,-500
21-03-2006,-500
22-03-2006,-500
23-03-2006,-500
24-03-2006,-500
27-03-2006,-500
28-03-2006,-500
29-03-2006,-500
30-03-2006,-500
31-03-2006,-500
03-04-2006,-500
04-04-2006,-500
05-04-2006,-500
06-04-2006,-500
07-04-2006,-500
10-04-2006,-500
11-04-2006,-500
12-04-2006,-500
13-04-2006,-500
17-04-2006,-500
18-04-2006,-500
19-04-2006,-500
20-04-2006,-500
21-04-2006,-500
24-04-2006,-500
25-04-2006,-500
26-04-2006,-500
27-04-2006,-500
28-04-2006,-500
01-05-2006,-500
02-05-2006,-500
03-05-2006,-500
04-05-2006,-500
05-05-2006,-500
08-05-2006,-500
09-05-2006,-500
10-05-2006,-500
11-05-2006,-500
12-05-2006,-500
15-05-2006,-500
16-05-2006,-500
17-05-2006,-500
18-05-2006,-500
19-05-2006,-500
23-05-2006,-500
24-05-2006,-500
25-05-2006,-500
26-05-2006,-500
29-05-2006,-500
30-05-2006,-500
31-05-2006,-500
01-06-2006,-500
02-06-2006,-500
05-06-2006,-500
06-06-2006,-500
07-06-2006,-500
08-06-2006,-500
09-06-2006,-500
12-06-2006,-500
13-06-2006,-500
14-06-2006,-500
15-06-2006,-500
16-06-2006,-500
19-06-2006,-500
20-06-2006,-500
21-06-2006,-500
22-06-2006,-500
23-06-2006,-500
26-06-2006,-500
27-06-2006,-500
28-06-2006,-500
29-06-2006,-500
30-06-2006,-500
04-07-2006,-500
05-07-2006,-500
06-07-2006,-500
07-07-2006,-500
10-07-2006,-500
11-07-2006,-500
12-07-2006,-500
13-07-2006,-500
14-07-2006,-500
17-07-2006,-500
18-07-2006,-500
19-07-2006,-500
20-07-2006,-500
21-07-2006,-500
24-07-2006,-500
25-07-2006,-500
26-07-2006,-500
27-07-2006,-500
28-07-2006,-500
31-07-2006,-500
01-08-2006,-500
02-08-2006,-500
03-08-2006,-500
04-08-2006,-500
08-08-2006,-500
09-08-2006,-500
10-08-2006,-500
11-08-2006,-500
14-08-2006,-500
15-08-2006,-500
16-08-2006,-500
17-08-2006,-500
18-08-2006,-500
21-08-2006,-500
22-08-2006,-500
23-08-2006,-500
24-08-2006,-500
25-08-2006,-500
28-08-2006,-500
29-08-2006,-500
30-08-2006,-500
31-08-2006,-500
01-09-2006,-500
05-09-2006,-500
06-09-2006,-500
07-09-2006,-500
08-09-2006,-500
11-09-2006,-500
12-09-2006,-500
13-09-2006,-500
14-09-2006,-500
15-09-2006,-500
18-09-2006,-500
19-09-2006,-500
20-09-2006,-500
21-09-2006,-500
22-09-2006,-500
25-09-2006,-500
26-09-2006,-500
27-09-2006,-500
28-09-2006,-500
29-09-2006,-500
02-10-2006,-500
03-10-2006,-500
04-10-2006,-500
05-10-2006,-500
06-10-2006,-500
10-10-2006,-500
11-10-2006,-500
12-10-2006,-500
13-10-2006,-500
16-10-2006,-500
17-10-2006,-500
18-10-2006,-500
19-10-2006,-500
20-10-2006,-500
23-10-2006,-500
24-10-2006,-500
25-10-2006,-500
26-10-2006,-500
27-10-2006,-500
30-10-2006,-500
31-10-2006,-500
01-11-2006,-500
02-11-2006,-500
03-11-2006,-500
06-11-2006,-500
07-11-2006,-500
08-11-2006,-500
09-11-2006,-500
10-11-2006,-500
14-11-2006,-500
15-11-2006,-500
16-11-2006,-500
17-11-2006,-500
20-11-2006,-500
21-11-2006,-500
22-11-2006,-500
23-11-2006,-500
24-11-2006,-500
27-11-2006,-500
28-11-2006,-500
29-11-2006,-500
30-11-2006,-500
01-12-2006,-500
04-12-2006,-500
05-12-2006,-500
06-12-2006,-500
07-12-2006,-500
08-12-2006,-500
11-12-2006,-500
12-12-2006,-500
13-12-2006,-500
14-12-2006,-500
15-12-2006,-500
18-12-2006,-500
19-12-2006,-500
20-12-2006,-500
21-12-2006,-500
22-12-2006,-500
27-12-2006,-500
28-12-2006,-500
29-12-2006,-500
02-01-2007,-500
03-01-2007,-500
04-01-2007,-500
05-01-2007,-500
08-01-2007,-500
09-01-2007,-500
10-01-2007,-500
11-01-2007,-500
12-01-2007,-500
15-01-2007,-500
16-01-2007,-500
17-01-2007,-500
18-01-2007,-500
19-01-2007,-500
22-01-2007,-500
23-01-2007,-500
24-01-2007,-500
25-01-2007,-500
26-01-2007,-500
29-01-2007,-500
30-01-2007,-500
31-01-2007,-500
01-02-2007,-500
02-02-2007,-500
05-02-2007,-500
06-02-2007,-500
07-02-2007,-500
08-02-2007,-500
09-02-2007,-500
12-02-2007,-500
13-02-2007,-500
14-02-2007,-500
15-02-2007,-500
16-02-2007,-500
19-02-2007,-500
20-02-2007,-500
21-02-2007,-500
22-02-2007,-500
23-02-2007,-500
26-02-2007,-500
27-02-2007,-500
28-02-2007,-500
01-03-2007,-500
02-03-2007,-500
05-03-2007,-500
06-03-2007,-500
07-03-2007,-500
08-03-2007,-500
09-03-2007,-500
12-03-2007,-500
13-03-2007,-500
14-03-2007,-500
15-03-2007,-500
16-03-2007,-500
19-03-2007,-500
20-03-2007,-500
21-03-2007,-500
22-03-2007,-500
23-03-2007,-500
26-03-2007,-500
27-03-2007,-500
28-03-2007,-500
29-03-2007,-500
30-03-2007,-500
02-04-2007,-500
03-04-2007,-500
04-04-2007,-500
05-04-2007,-500
09-04-2007,-500
10-04-2007,-500
11-04-2007,-500
12-04-2007,-500
13-04-2007,-500
16-04-2007,-500
17-04-2007,-500
18-04-2007,-500
19-04-2007,-500
20-04-2007,-500
23-04-2007,-500
24-04-2007,-500
25-04-2007,-500
26-04-2007,-500
27-04-2007,-500
30-04-2007,-500
01-05-2007,-500
02-05-2007,-500
03-05-2007,-500
04-05-2007,-500
07-05-2007,-500
08-05-2007,-500
09-05-2007,-500
10-05-2007,-500
11-05-2007,-500
14-05-2007,-500
15-05-2007,-500
16-05-2007,-500
17-05-2007,-500
18-05-2007,-500
22-05-2007,-500
23-05-2007,-500
24-05-2007,-500
25-05-2007,-500
28-05-2007,-500
29-05-2007,-500
30-05-2007,-500
31-05-2007,-500
01-06-2007,-500
04-06-2007,-500
05-06-2007,-500
06-06-2007,-500
07-06-2007,-500
08-06-2007,-500
11-06-2007,-500
12-06-2007,-500
13-06-2007,-500
14-06-2007,-500
15-06-2007,-500
18-06-2007,-500
19-06-2007,-500
20-06-2007,-500
21-06-2007,-500
22-06-2007,-500
25-06-2007,-500
26-06-2007,-500
27-06-2007,-500
28-06-2007,-500
29-06-2007,-500
03-07-2007,-500
04-07-2007,-500
05-07-2007,-500
06-07-2007,-500
09-07-2007,-500
10-07-2007,-500
11-07-2007,-500
12-07-2007,-500
13-07-2007,-500
16-07-2007,-500
17-07-2007,-500
18-07-2007,-500
19-07-2007,-500
20-07-2007,-500
23-07-2007,-500
24-07-2007,-500
25-07-2007,-500
26-07-2007,-500
27-07-2007,-500
30-07-2007,-500
31-07-2007,-500
01-08-2007,-500
02-08-2007,-500
03-08-2007,-500
07-08-2007,-500
08-08-2007,-500
09-08-2007,-500
10-08-2007,-500
13-08-2007,-500
14-08-2007,-500
15-08-2007,-500
16-08-2007,-500
17-08-2007,-500
20-08-2007,-500
21-08-2007,-500
22-08-2007,-500
23-08-2007,-500
24-08-2007,-500
27-08-2007,-500
28-08-2007,-500
29-08-2007,-500
30-08-2007,-500
31-08-2007,-500
04-09-2007,-500
05-09-2007,-500
06-09-2007,-500
07-09-2007,-500
10-09-2007,-500
11-09-2007,-500
12-09-2007,-500
13-09-2007,-500
14-09-2007,-500
17-09-2007,-500
18-09-2007,-500
19-09-2007,-500
20-09-2007,-500
21-09-2007,-500
24-09-2007,-500
25-09-2007,-500
26-09-2007,-500
27-09-2007,-500
28-09-2007,-500
01-10-2007,-500
02-10-2007,-500
03-10-2007,-500
04-10-2007,-500
05-10-2007,-500
09-10-2007,-500
10-10-2007,-500
11-10-2007,-500
12-10-2007,-500
15-10-2007,-500
16-10-2007,-500
17-10-2007,-500
18-10-2007,-500
19-10-2007,-500
22-10-2007,-500
23-10-2007,-500
24-10-2007,-500
25-10-2007,-500
26-10-2007,-500
29-10-2007,-500
30-10-2007,-500
31-10-2007,-500
01-11-2007,-500
02-11-2007,-500
05-11-2007,-500
06-11-2007,-500
07-11-2007,-500
08-11-2007,-500
09-11-2007,-500
13-11-2007,-500
14-11-2007,-500
15-11-2007,-500
16-11-2007,-500
19-11-2007,-500
20-11-2007,-500
21-11-2007,-500
22-11-2007,-500
23-11-2007,-500
26-11-2007,-500
27-11-2007,-500
28-11-2007,-500
29-11-2007,-500
30-11-2007,-500
03-12-2007,-500
04-12-2007,-500
05-12-2007,-500
06-12-2007,-500
07-12-2007,-500
10-12-2007,-500
11-12-2007,-500
12-12-2007,-500
13-12-2007,-500
14-12-2007,-500
17-12-2007,-500
18-12-2007,-500
19-12-2007,-500
20-12-2007,-500
21-12-2007,-500
24-12-2007,-500
27-12-2007,-500
28-12-2007,-500
31-12-2007,-500
02-01-2008,-500
03-01-2008,-500
04-01-2008,-500
07-01-2008,-500
08-01-2008,-500
09-01-2008,-500
10-01-2008,-500
11-01-2008,-500
14-01-2008,-500
15-01-2008,-500
16-01-2008,-500
17-01-2008,-500
18-01-2008,-500
21-01-2008,-500
22-01-2008,-500
23-01-2008,-500
24-01-2008,-500
25-01-2008,-500
28-01-2008,-500
29-01-2008,-500
30-01-2008,-500
31-01-2008,-500
01-02-2008,-500
04-02-2008,-500
05-02-2008,-500
06-02-2008,-500
07-02-2008,-500
08-02-2008,-500
11-02-2008,-500
12-02-2008,-500
13-02-2008,-500
14-02-2008,-500
15-02-2008,-500
19-02-2008,-500
20-02-2008,-500
21-02-2008,-500
22-02-2008,-500
25-02-2008,-500
26-02-2008,-500
27-02-2008,-500
28-02-2008,-500
29-02-2008,-500
03-03-2008,-500
04-03-2008,-500
05-03-2008,-500
06-03-2008,-500
07-03-2008,-500
10-03-2008,-500
11-03-2008,-500
12-03-2008,-500
13-03-2008,-500
14-03-2008,-500
17-03-2008,-500
18-03-2008,-500
19-03-2008,-500
20-03-2008,-500
24-03-2008,-500
25-03-2008,-500
26-03-2008,-500
27-03-2008,-500
28-03-2008,-500
31-03-2008,-500
01-04-2008,-500
02-04-2008,-500
03-04-2008,-500
04-04-2008,-500
07-04-2008,-500
08-04-2008,-500
09-04-2008,-500
10-04-2008,-500
11-04-2008,-500
14-04-2008,-500
15-04-2008,-500
16-04-2008,-500
17-04-2008,-500
18-04-2008,-500
21-04-2008,-500
22-04-2008,-500
23-04-2008,-500
24-04-2008,-500
25-04-2008,-500
28-04-2008,-500
29-04-2008,-500
30-04-2008,-500
01-05-2008,-500
02-05-2008,-500
05-05-2008,-500
06-05-2008,-500
07-05-2008,-500
08-05-2008,-500
09-05-2008,-500
12-05-2008,-500
13-05-2008,-500
14-05-2008,-500
15-05-2008,-500
16-05-2008,-500
20-05-2008,-500
21-05-2008,-500
22-05-2008,-500
23-05-2008,-500
26-05-2008,-500
27-05-2008,-500
28-05-2008,-500
29-05-2008,-500
30-05-2008,-500
02-06-2008,-500
03-06-2008,-500
04-06-2008,-500
05-06-2008,-500
06-06-2008,-500
09-06-2008,-500
10-06-2008,-500
11-06-2008,-500
12-06-2008,-500
13-06-2008,-500
16-06-2008,-500
17-06-2008,-500
18-06-2008,-500
19-06-2008,-500
20-06-2008,-500
23-06-2008,-500
24-06-2008,-500
25-06-2008,-500
26-06-2008,-500
27-06-2008,-500
30-06-2008,-500
02-07-2008,-500
03-07-2008,-500
04-07-2008,-500
07-07-2008,-500
08-07-2008,-500
09-07-2008,-500
10-07-2008,-500
11-07-2008,-500
14-07-2008,-500
15-07-2008,-500
16-07-2008,-500
17-07-2008,-500
18-07-2008,-500
21-07-2008,-500
22-07-2008,-500
23-07-2008,-500
24-07-2008,-500
25-07-2008,-500
28-07-2008,-500
29-07-2008,-500
30-07-2008,-500
31-07-2008,-500
01-08-2008,-500
05-08-2008,-500
06-08-2008,-500
07-08-2008,-500
08-08-2008,-500
11-08-2008,-500
12-08-2008,-500
13-08-2008,-500
14-08-2008,-500
15-08-2008,-500
18-08-2008,-500
19-08-2008,-500
20-08-2008,-500
21-08-2008,-500
22-08-2008,-500
25-08-2008,-500
26-08-2008,-500
27-08-2008,-500
28-08-2008,-500
29-08-2008,-500
02-09-2008,-500
03-09-2008,-500
04-09-2008,-500
05-09-2008,-500
08-09-2008,-500
09-09-2008,-500
10-09-2008,-500
11-09-2008,-500
12-09-2008,-500
15-09-2008,-500
16-09-2008,-500
17-09-2008,-500
18-09-2008,-500
19-09-2008,-500
22-09-2008,-500
23-09-2008,-500
24-09-2008,-500
25-09-2008,-500
26-09-2008,-500
29-09-2008,-500
30-09-2008,-500
01-10-2008,-500
02-10-2008,-500
03-10-2008,-500
06-10-2008,-500
07-10-2008,-500
08-10-2008,-500
09-10-2008,-500
10-10-2008,-500
14-10-2008,-500
15-10-2008,-500
16-10-2008,-500
17-10-2008,-500
20-10-2008,-500
21-10-2008,-500
22-10-2008,-500
23-10-2008,-500
24-10-2008,-500
27-10-2008,-500
28-10-2008,-500
29-10-2008,-500
30-10-2008,-500
31-10-2008,-500
03-11-2008,-500
04-11-2008,-500
05-11-2008,-500
06-11-2008,-500
07-11-2008,-500
10-11-2008,-500
12-11-2008,-500
13-11-2008,-500
14-11-2008,-500
17-11-2008,-500
18-11-2008,-500
19-11-2008,-500
20-11-2008,-500
21-11-2008,-500
24-11-2008,-500
25-11-2008,-500
26-11-2008,-500
27-11-2008,-500
28-11-2008,-500
01-12-2008,-500
02-12-2008,-500
03-12-2008,-500
04-12-2008,-500
05-12-2008,-500
08-12-2008,-500
09-12-2008,-500
10-12-2008,-500
11-12-2008,-500
12-12-2008,-500
15-12-2008,-500
16-12-2008,-500
17-12-2008,-500
18-12-2008,-500
19-12-2008,-500
22-12-2008,-500
23-12-2008,-500
24-12-2008,-500
29-12-2008,-500
30-12-2008,-500
31-12-2008,-500
02-01-2009,-500
05-01-2009,-500
06-01-2009,-500
07-01-2009,-500
08-01-2009,-500
09-01-2009,-500
12-01-2009,-500
13-01-2009,-500
14-01-2009,-500
15-01-2009,-500
16-01-2009,-500
19-01-2009,-500
20-01-2009,-500
21-01-2009,-500
22-01-2009,-500
23-01-2009,-500
26-01-2009,-500
27-01-2009,-500
28-01-2009,-500
29-01-2009,-500
30-01-2009,-500
02-02-2009,-500
03-02-2009,-500
04-02-2009,-500
05-02-2009,-500
06-02-2009,-500
09-02-2009,-500
10-02-2009,-500
11-02-2009,-500
12-02-2009,-500
13-02-2009,-500
17-02-2009,-500
18-02-2009,-500
19-02-2009,-500
20-02-2009,-500
23-02-2009,-500
24-02-2009,-500
25-02-2009,-500
26-02-2009,-500
27-02-2009,-500
02-03-2009,-500
03-03-2009,-500
04-03-2009,-500
05-03-2009,-500
06-03-2009,-500
09-03-2009,-500
10-03-2009,-500
11-03-2009,-500
12-03-2009,-500
13-03-2009,-500
16-03-2009,-500
17-03-2009,-500
18-03-2009,-500
19-03-2009,-500
20-03-2009,-500
23-03-2009,-500
24-03-2009,-500
25-03-2009,-500
26-03-2009,-500
27-03-2009,-500
30-03-2009,-500
31-03-2009,-500
01-04-2009,-500
02-04-2009,-500
03-04-2009,-500
06-04-2009,-500
07-04-2009,-500
08-04-2009,-500
09-04-2009,-500
13-04-2009,-500
14-04-2009,-500
15-04-2009,-500
16-04-2009,-500
17-04-2009,-500
20-04-2009,-500
21-04-2009,-500
22-04-2009,-500
23-04-2009,-500
24-04-2009,-500
27-04-2009,-500
28-04-2009,-500
29-04-2009,-500
30-04-2009,-500
01-05-2009,-500
04-05-2009,-500
05-05-2009,-500
06-05-2009,-500
07-05-2009,-500
08-05-2009,-500
11-05-2009,-500
12-05-2009,-500
13-05-2009,-500
14-05-2009,-500
15-05-2009,-500
19-05-2009,-500
20-05-2009,-500
21-05-2009,-500
22-05-2009,-500
25-05-2009,-500
26-05-2009,-500
27-05-2009,-500
28-05-2009,-500
29-05-2009,-500
01-06-2009,-500
02-06-2009,-500
03-06-2009,-500
04-06-2009,-500
05-06-2009,-500
08-06-2009,-500
09-06-2009,-500
10-06-2009,-500
11-06-2009,-500
12-06-2009,-500
15-06-2009,-500
16-06-2009,-500
17-06-2009,-500
18-06-2009,-500
19-06-2009,-500
22-06-2009,-500
23-06-2009,-500
24-06-2009,-500
25-06-2009,-500
26-06-2009,-500
29-06-2009,-500
30-06-2009,-500
02-07-2009,-500
03-07-2009,-500
06-07-2009,-500
07-07-2009,-500
08-07-2009,-500
09-07-2009,-500
10-07-2009,-500
13-07-2009,-500
14-07-2009,-500
15-07-2009,-500
16-07-2009,-500
17-07-2009,-500
20-07-2009,-500
21-07-2009,-500
22-07-2009,-500
23-07-2009,-500
24-07-2009,-500
27-07-2009,-500
28-07-2009,-500
29-07-2009,-500
30-07-2009,-500
31-07-2009,-500
04-08-2009,-500
05-08-2009,-500
06-08-2009,-500
07-08-2009,-500
10-08-2009,-500
11-08-2009,-500
12-08-2009,-500
13-08-2009,-500
14-08-2009,-500
17-08-2009,-500
18-08-2009,-500
19-08-2009,-500
20-08-2009,-500
21-08-2009,-500
24-08-2009,-500
25-08-2009,-500
26-08-2009,-500
27-08-2009,-500
28-08-2009,-500
31-08-2009,-500
01-09-2009,-500
02-09-2009,-500
03-09-2009,-500
04-09-2009,-500
08-09-2009,-500
09-09-2009,-500
10-09-2009,-500
11-09-2009,-500
14-09-2009,-500
15-09-2009,-500
16-09-2009,-500
17-09-2009,-500
18-09-2009,-500
21-09-2009,-500
22-09-2009,-500
23-09-2009,-500
24-09-2009,-500
25-09-2009,-500
28-09-2009,-500
29-09-2009,-500
30-09-2009,-500
01-10-2009,-500
02-10-2009,-500
05-10-2009,-500
06-10-2009,-500
07-10-2009,-500
08-10-2009,-500
09-10-2009,-500
13-10-2009,-500
14-10-2009,-500
15-10-2009,-500
16-10-2009,-500
19-10-2009,-500
20-10-2009,-500
21-10-2009,-500
22-10-2009,-500
23-10-2009,-500
26-10-2009,-500
27-10-2009,-500
28-10-2009,-500
29-10-2009,-500
30-10-2009,-500
02-11-2009,-500
03-11-2009,-500
04-11-2009,-500
05-11-2009,-500
06-11-2009,-500
09-11-2009,-500
10-11-2009,-500
12-11-2009,-500
13-11-2009,-500
16-11-2009,-500
17-11-2009,-500
18-11-2009,-500
19-11-2009,-500
20-11-2009,-500
23-11-2009,-500
24-11-2009,-500
25-11-2009,-500
26-11-2009,-500
27-11-2009,-500
30-11-2009,-500
01-12-2009,-500
02-12-2009,-500
03-12-2009,-500
04-12-2009,-500
07-12-2009,-500
08-12-2009,-500
09-12-2009,-500
10-12-2009,-500
11-12-2009,-500
14-12-2009,-500
15-12-2009,-500
16-12-2009,-500
17-12-2009,-500
18-12-2009,-500
21-12-2009,-500
22-12-2009,-500
23-12-2009,-500
24-12-2009,-500
29-12-2009,-500
30-12-2009,-500
31-12-2009,-500
04-01-2010,-500
05-01-2010,-500
06-01-2010,-500
07-01-2010,-500
08-01-2010,-500
11-01-2010,-500
12-01-2010,-500
13-01-2010,-500
14-01-2010,-500
15-01-2010,-500
18-01-2010,-500
19-01-2010,-500
20-01-2010,-500
21-01-2010,-500
22-01-2010,-500
25-01-2010,-500
26-01-2010,-500
27-01-2010,-500
28-01-2010,-500
29-01-2010,-500
01-02-2010,-500
02-02-2010,-500
03-02-2010,-500
04-02-2010,-500
05-02-2010,-500
08-02-2010,-500
09-02-2010,-500
10-02-2010,-500
11-02-2010,-500
12-02-2010,-500
16-02-2010,-500
17-02-2010,-500
18-02-2010,-500
19-02-2010,-500
22-02-2010,-500
23-02-2010,-500
24-02-2010,-500
25-02-2010,-500
26-02-2010,-500
01-03-2010,-500
02-03-2010,-500
03-03-2010,-500
04-03-2010,-500
05-03-2010,-500
08-03-2010,-500
09-03-2010,-500
10-03-2010,-500
11-03-2010,-500
12-03-2010,-500
15-03-2010,-500
16-03-2010,-500
17-03-2010,-500
18-03-2010,-500
19-03-2010,-500
22-03-2010,-500
23-03-2010,-500
24-03-2010,-500
25-03-2010,-500
26-03-2010,-500
29-03-2010,-500
30-03-2010,-500
31-03-2010,-500
01-04-2010,-500
05-04-2010,-500
06-04-2010,-500
07-04-2010,-500
08-04-2010,-500
09-04-2010,-500
12-04-2010,-500
13-04-2010,-500
14-04-2010,-500
15-04-2010,-500
16-04-2010,-500
19-04-2010,-500
20-04-2010,-500
21-04-2010,-500
22-04-2010,-500
23-04-2010,-500
26-04-2010,-500
27-04-2010,-500
28-04-2010,-500
29-04-2010,-500
30-04-2010,-500
03-05-2010,-500
04-05-2010,-500
05-05-2010,-500
06-05-2010,-500
07-05-2010,-500
10-05-2010,-500
11-05-2010,-500
12-05-2010,-500
13-05-2010,-500
14-05-2010,-500
17-05-2010,-500
18-05-2010,-500
19-05-2010,-500
20-05-2010,-500
21-05-2010,-500
25-05-2010,-500
26-05-2010,-500
27-05-2010,-500
28-05-2010,-500
31-05-2010,-500
01-06-2010,-500
02-06-2010,-500
03-06-2010,-500
04-06-2010,-500
07-06-2010,-500
08-06-2010,-500
09-06-2010,-500
10-06-2010,-500
11-06-2010,-500
14-06-2010,-500
15-06-2010,-500
16-06-2010,-500
17-06-2010,-500
18-06-2010,-500
21-06-2010,-500
22-06-2010,-500
23-06-2010,-500
24-06-2010,-500
25-06-2010,-500
28-06-2010,-500
29-06-2010,-500
30-06-2010,-500
02-07-2010,-500
05-07-2010,-500
06-07-2010,-500
07-07-2010,-500
08-07-2010,-500
09-07-2010,-500
12-07-2010,-500
13-07-2010,-500
14-07-2010,-500
15-07-2010,-500
16-07-2010,-500
19-07-2010,-500
20-07-2010,-500
21-07-2010,-500
22-07-2010,-500
23-07-2010,-500
26-07-2010,-500
27-07-2010,-500
28-07-2010,-500
29-07-2010,-500
30-07-2010,-500
03-08-2010,-500
04-08-2010,-500
05-08-2010,-500
06-08-2010,-500
09-08-2010,-500
10-08-2010,-500
11-08-2010,-500
12-08-2010,-500
13-08-2010,-500
16-08-2010,-500
17-08-2010,-500
18-08-2010,-500
19-08-2010,-500
20-08-2010,-500
23-08-2010,-500
24-08-2010,-500
25-08-2010,-500
26-08-2010,-500
27-08-2010,-500
30-08-2010,-500
31-08-2010,-500
01-09-2010,-500
02-09-2010,-500
03-09-2010,-500
07-09-2010,-500
08-09-2010,-500
09-09-2010,-500
10-09-2010,-500
13-09-2010,-500
14-09-2010,-500
15-09-2010,-500
16-09-2010,-500
17-09-2010,-500
20-09-2010,-500
21-09-2010,-500
22-09-2010,-500
23-09-2010,-500
24-09-2010,-500
27-09-2010,-500
28-09-2010,-500
29-09-2010,-500
30-09-2010,-500
01-10-2010,-500
04-10-2010,-500
05-10-2010,-500
06-10-2010,-500
07-10-2010,-500
08-10-2010,-500
12-10-2010,-500
13-10-2010,-500
14-10-2010,-500
15-10-2010,-500
18-10-2010,-500
19-10-2010,-500
20-10-2010,-500
21-10-2010,-500
22-10-2010,-500
25-10-2010,-500
26-10-2010,-500
27-10-2010,-500
28-10-2010,-500
29-10-2010,-500
01-11-2010,-500
02-11-2010,-500
03-11-2010,-500
04-11-2010,-500
05-11-2010,-500
08-11-2010,-500
09-11-2010,-500
10-11-2010,-500
12-11-2010,-500
15-11-2010,-500
16-11-2010,-500
17-11-2010,-500
18-11-2010,-500
19-11-2010,-500
22-11-2010,-500
23-11-2010,-500
24-11-2010,-500
25-11-2010,-500
26-11-2010,-500
29-11-2010,-500
30-11-2010,-500
01-12-2010,-500
02-12-2010,-500
03-12-2010,-500
06-12-2010,-500
07-12-2010,-500
08-12-2010,-500
09-12-2010,-500
10-12-2010,-500
13-12-2010,-500
14-12-2010,-500
15-12-2010,-500
16-12-2010,-500
17-12-2010,-500
20-12-2010,-500
21-12-2010,-500
22-12-2010,-500
23-12-2010,-500
24-12-2010,-500
29-12-2010,-500
30-12-2010,-500
31-12-2010,-500
04-01-2011,-500
05-01-2011,-500
06-01-2011,-500
07-01-2011,-500
10-01-2011,-500
11-01-2011,-500
12-01-2011,-500
13-01-2011,-500
14-01-2011,-500
17-01-2011,-500
18-01-2011,-500
19-01-2011,-500
20-01-2011,-500
21-01-2011,-500
24-01-2011,-500
25-01-2011,-500
26-01-2011,-500
27-01-2011,-500
28-01-2011,-500
31-01-2011,-500
01-02-2011,-500
02-02-2011,-500
03-02-2011,-500
04-02-2011,-500
07-02-2011,-500
08-02-2011,-500
09-02-2011,-500
10-02-2011,-500
11-02-2011,-500
14-02-2011,-500
15-02-2011,-500
16-02-2011,-500
17-02-2011,-500
18-02-2011,-500
22-02-2011,-500
23-02-2011,-500
24-02-2011,-500
25-02-2011,-500
28-02-2011,-500
01-03-2011,-500
02-03-2011,-500
03-03-2011,-500
04-03-2011,-500
07-03-2011,-500
08-03-2011,-500
09-03-2011,-500
10-03-2011,-500
11-03-2011,-500
14-03-2011,-500
15-03-2011,-500
16-03-2011,-500
17-03-2011,-500
18-03-2011,-500
21-03-2011,-500
22-03-2011,-500
23-03-2011,-500
24-03-2011,-500
25-03-2011,-500
28-03-2011,-500
29-03-2011,-500
30-03-2011,-500
31-03-2011,-500
01-04-2011,-500
04-04-2011,-500
05-04-2011,-500
06-04-2011,-500
07-04-2011,-500
08-04-2011,-500
11-04-2011,-500
12-04-2011,-500
13-04-2011,-500
14-04-2011,-500
15-04-2011,-500
18-04-2011,-500
19-04-2011,-500
20-04-2011,-500
21-04-2011,-500
25-04-2011,-500
26-04-2011,-500
27-04-2011,-500
28-04-2011,-500
29-04-2011,-500
02-05-2011,-500
03-05-2011,-500
04-05-2011,-500
05-05-2011,-500
06-05-2011,-500
09-05-2011,-500
10-05-2011,-500
11-05-2011,-500
12-05-2011,-500
13-05-2011,-500
16-05-2011,-500
17-05-2011,-500
18-05-2011,-500
19-05-2011,-500
20-05-2011,-500
24-05-2011,-500
25-05-2011,-500
26-05-2011,-500
27-05-2011,-500
30-05-2011,-500
31-05-2011,-500
01-06-2011,-500
02-06-2011,-500
03-06-2011,-500
06-06-2011,-500
07-06-2011,-500
08-06-2011,-500
09-06-2011,-500
10-06-2011,-500
13-06-2011,-500
14-06-2011,-500
15-06-2011,-500
16-06-2011,-500
17-06-2011,-500
20-06-2011,-500
21-06-2011,-500
22-06-2011,-500
23-06-2011,-500
24-06-2011,-500
27-06-2011,-500
28-06-2011,-500
29-06-2011,-500
30-06-2011,-500
04-07-2011,-500
05-07-2011,-500
06-07-2011,-500
07-07-2011,-500
08-07-2011,-500
11-07-2011,-500
12-07-2011,-500
13-07-2011,-500
14-07-2011,-500
15-07-2011,-500
18-07-2011,-500
19-07-2011,-500
20-07-2011,-500
21-07-2011,-500
22-07-2011,-500
25-07-2011,-500
26-07-2011,-500
27-07-2011,-500
28-07-2011,-500
29-07-2011,-500
02-08-2011,-500
03-08-2011,-500
04-08-2011,-500
05-08-2011,-500
08-08-2011,-500
09-08-2011,-500
10-08-2011,-500
11-08-2011,-500
12-08-2011,-500
15-08-2011,-500
16-08-2011,-500
17-08-2011,-500
18-08-2011,-500
19-08-2011,-500
22-08-2011,-500
23-08-2011,-500
24-08-2011,-500
25-08-2011,-500
26-08-2011,-500
29-08-2011,-500
30-08-2011,-500
31-08-2011,-500
01-09-2011,-500
02-09-2011,-500
06-09-2011,-500
07-09-2011,-500
08-09-2011,-500
09-09-2011,-500
12-09-2011,-500
13-09-2011,-500
14-09-2011,-500
15-09-2011,-500
16-09-2011,-500
19-09-2011,-500
20-09-2011,-500
21-09-2011,-500
22-09-2011,-500
23-09-2011,-500
26-09-2011,-500
27-09-2011,-500
28-09-2011,-500
29-09-2011,-500
30-09-2011,-500
03-10-2011,-500
04-10-2011,-500
05-10-2011,-500
06-10-2011,-500
07-10-2011,-500
11-10-2011,-500
12-10-2011,-500
13-10-2011,-500
14-10-2011,-500
17-10-2011,-500
18-10-2011,-500
19-10-2011,-500
20-10-2011,-500
21-10-2011,-500
24-10-2011,-500
25-10-2011,-500
26-10-2011,-500
27-10-2011,-500
28-10-2011,-500
31-10-2011,-500
01-11-2011,-500
02-11-2011,-500
03-11-2011,-500
04-11-2011,-500
07-11-2011,-500
08-11-2011,-500
09-11-2011,-500
10-11-2011,-500
14-11-2011,-500
15-11-2011,-500
16-11-2011,-500
17-11-2011,-500
18-11-2011,-500
21-11-2011,-500
22-11-2011,-500
23-11-2011,-500
24-11-2011,-500
25-11-2011,-500
28-11-2011,-500
29-11-2011,-500
30-11-2011,-500
01-12-2011,-500
02-12-2011,-500
05-12-2011,-500
06-12-2011,-500
07-12-2011,-500
08-12-2011,-500
09-12-2011,-500
12-12-2011,-500
13-12-2011,-500
14-12-2011,-500
15-12-2011,-500
16-12-2011,-500
19-12-2011,-500
20-12-2011,-500
21-12-2011,-500
22-12-2011,-500
23-12-2011,-500
28-12-2011,-500
29-12-2011,-500
30-12-2011,-500
03-01-2012,-500
04-01-2012,-500
05-01-2012,-500
06-01-2012,-500
09-01-2012,-500
10-01-2012,-500
11-01-2012,-500
12-01-2012,-500
13-01-2012,-500
16-01-2012,-500
17-01-2012,-500
18-01-2012,-500
19-01-2012,-500
20-01-2012,-500
23-01-2012,-500
24-01-2012,-500
25-01-2012,-500
26-01-2012,-500
27-01-2012,-500
30-01-2012,-500
31-01-2012,-500
01-02-2012,-500
02-02-2012,-500
03-02-2012,-500
06-02-2012,-500
07-02-2012,-500
08-02-2012,-500
09-02-2012,-500
10-02-2012,-500
13-02-2012,-500
14-02-2012,-500
15-02-2012,-500
16-02-2012,-500
17-02-2012,-500
21-02-2012,-500
22-02-2012,-500
23-02-2012,-500
24-02-2012,-500
27-02-2012,-500
28-02-2012,-500
29-02-2012,-500
01-03-2012,-500
02-03-2012,-500
05-03-2012,-500
06-03-2012,-500
07-03-2012,-500
08-03-2012,-500
09-03-2012,-500
12-03-2012,-500
13-03-2012,-500
14-03-2012,-500
15-03-2012,-500
16-03-2012,-500
19-03-2012,-500
20-03-2012,-500
21-03-2012,-500
22-03-2012,-500
23-03-2012,-500
26-03-2012,-500
27-03-2012,-500
28-03-2012,-500
29-03-2012,-500
30-03-2012,-500
02-04-2012,-500
03-04-2012,-500
04-04-2012,-500
05-04-2012,-500
09-04-2012,-500
10-04-2012,-500
11-04-2012,-500
12-04-2012,-500
13-04-2012,-500
16-04-2012,-500
17-04-2012,-500
18-04-2012,-500
19-04-2012,-500
20-04-2012,-500
23-04-2012,-500
24-04-2012,-500
25-04-2012,-500
26-04-2012,-500
27-04-2012,-500
30-04-2012,-500
01-05-2012,-500
02-05-2012,-500
03-05-2012,-500
04-05-2012,-500
07-05-2012,-500
08-05-2012,-500
09-05-2012,-500
10-05-2012,-500
11-05-2012,-500
14-05-2012,-500
15-05-2012,-500
16-05-2012,-500
17-05-2012,-500
18-05-2012,-500
22-05-2012,-500
23-05-2012,-500
24-05-2012,-500
25-05-2012,-500
28-05-2012,-500
29-05-2012,-500
30-05-2012,-500
31-05-2012,-500
01-06-2012,-500
04-06-2012,-500
05-06-2012,-500
06-06-2012,-500
07-06-2012,-500
08-06-2012,-500
11-06-2012,-500
12-06-2012,-500
13-06-2012,-500
14-06-2012,-500
15-06-2012,-500
18-06-2012,-500
19-06-2012,-500
20-06-2012,-500
21-06-2012,-500
22-06-2012,-500
25-06-2012,-500
26-06-2012,-500
27-06-2012,-500
28-06-2012,-500
29-06-2012,-500
03-07-2012,-500
04-07-2012,-500
05-07-2012,-500
06-07-2012,-500
09-07-2012,-500
10-07-2012,-500
11-07-2012,-500
12-07-2012,-500
13-07-2012,-500
16-07-2012,-500
17-07-2012,-500
18-07-2012,-500
19-07-2012,-500
20-07-2012,-500
23-07-2012,-500
24-07-2012,-500
25-07-2012,-500
26-07-2012,-500
27-07-2012,-500
30-07-2012,-500
31-07-2012,-500
01-08-2012,-500
02-08-2012,-500
03-08-2012,-500
07-08-2012,-500
08-08-2012,-500
09-08-2012,-500
10-08-2012,-500
13-08-2012,-500
14-08-2012,-500
15-08-2012,-500
16-08-2012,-500
17-08-2012,-500
20-08-2012,-500
21-08-2012,-500
22-08-2012,-500
23-08-2012,-500
24-08-2012,-500
27-08-2012,-500
28-08-2012,-500
29-08-2012,-500
30-08-2012,-500
31-08-2012,-500
04-09-2012,-500
05-09-2012,-500
06-09-2012,-500
07-09-2012,-500
10-09-2012,-500
11-09-2012,-500
12-09-2012,-500
13-09-2012,-500
14-09-2012,-500
17-09-2012,-500
18-09-2012,-500
19-09-2012,-500
20-09-2012,-500
21-09-2012,-500
24-09-2012,-500
25-09-2012,-500
26-09-2012,-500
27-09-2012,-500
28-09-2012,-500
01-10-2012,-500
02-10-2012,-500
03-10-2012,-500
04-10-2012,-500
05-10-2012,-500
09-10-2012,-500
10-10-2012,-500
11-10-2012,-500
12-10-2012,-500
15-10-2012,-500
16-10-2012,-500
17-10-2012,-500
18-10-2012,-500
19-10-2012,-500
22-10-2012,-500
23-10-2012,-500
24-10-2012,-500
25-10-2012,-500
26-10-2012,-500
29-10-2012,-500
30-10-2012,-500
31-10-2012,-500
01-11-2012,-500
02-11-2012,-500
05-11-2012,-500
06-11-2012,-500
07-11-2012,-500
08-11-2012,-500
09-11-2012,-500
13-11-2012,-500
14-11-2012,-500
15-11-2012,-500
16-11-2012,-500
19-11-2012,-500
20-11-2012,-500
21-11-2012,-500
22-11-2012,-500
23-11-2012,-500
26-11-2012,-500
27-11-2012,-500
28-11-2012,-500
29-11-2012,-500
30-11-2012,-500
03-12-2012,-500
04-12-2012,-500
05-12-2012,-500
06-12-2012,-500
07-12-2012,-500
10-12-2012,-500
11-12-2012,-500
12-12-2012,-500
13-12-2012,-500
14-12-2012,-500
17-12-2012,-500
18-12-2012,-500
19-12-2012,-500
20-12-2012,-500
21-12-2012,-500
24-12-2012,-500
27-12-2012,-500
28-12-2012,-500
31-12-2012,-500
02-01-2013,-500
03-01-2013,-500
04-01-2013,-500
07-01-2013,-500
08-01-2013,-500
09-01-2013,-500
10-01-2013,-500
11-01-2013,-500
14-01-2013,-500
15-01-2013,-500
16-01-2013,-500
17-01-2013,-500
18-01-2013,-500
21-01-2013,-500
22-01-2013,-500
23-01-2013,-500
24-01-2013,-500
25-01-2013,-500
28-01-2013,-500
29-01-2013,-500
30-01-2013,-500
31-01-2013,-500
01-02-2013,-500
04-02-2013,-500
05-02-2013,-500
06-02-2013,-500
07-02-2013,-500
08-02-2013,-500
11-02-2013,-500
12-02-2013,-500
13-02-2013,-500
14-02-2013,-500
15-02-2013,-500
19-02-2013,-500
20-02-2013,-500
21-02-2013,-500
22-02-2013,-500
25-02-2013,-500
26-02-2013,-500
27-02-2013,-500
28-02-2013,-500
01-03-2013,-500
04-03-2013,-500
05-03-2013,-500
06-03-2013,-500
07-03-2013,-500
08-03-2013,-500
11-03-2013,-500
12-03-2013,-500
13-03-2013,-500
14-03-2013,-500
15-03-2013,-500
18-03-2013,-500
19-03-2013,-500
20-03-2013,-500
21-03-2013,-500
22-03-2013,-500
25-03-2013,-500
26-03-2013,-500
27-03-2013,-500
28-03-2013,-500
01-04-2013,-500
02-04-2013,-500
03-04-2013,-500
04-04-2013,-500
05-04-2013,-500
08-04-2013,-500
09-04-2013,-500
10-04-2013,-500
11-04-2013,-500
12-04-2013,-500
15-04-2013,-500
16-04-2013,-500
17-04-2013,-500
18-04-2013,-500
19-04-2013,-500
22-04-2013,-500
23-04-2013,-500
24-04-2013,-500
25-04-2013,-500
26-04-2013,-500
29-04-2013,-500
30-04-2013,-500
01-05-2013,-500
02-05-2013,-500
03-05-2013,-500
06-05-2013,-500
07-05-2013,-500
08-05-2013,-500
09-05-2013,-500
10-05-2013,-500
13-05-2013,-500
14-05-2013,-500
15-05-2013,-500
16-05-2013,-500
17-05-2013,-500
21-05-2013,-500
22-05-2013,-500
23-05-2013,-500
24-05-2013,-500
27-05-2013,-500
28-05-2013,-500
29-05-2013,-500
30-05-2013,-500
31-05-2013,-500
03-06-2013,-500
04-06-2013,-500
05-06-2013,-500
06-06-2013,-500
07-06-2013,-500
10-06-2013,-500
11-06-2013,-500
12-06-2013,-500
13-06-2013,-500
14-06-2013,-500
17-06-2013,-500
18-06-2013,-500
19-06-2013,-500
20-06-2013,-500
21-06-2013,-500
24-06-2013,-500
25-06-2013,-500
26-06-2013,-500
27-06-2013,-500
28-06-2013,-500
02-07-2013,-500
03-07-2013,-500
04-07-2013,-500
05-07-2013,-500
08-07-2013,-500
09-07-2013,-500
10-07-2013,-500
11-07-2013,-500
12-07-2013,-500
15-07-2013,-500
16-07-2013,-500
17-07-2013,-500
18-07-2013,-500
19-07-2013,-500
22-07-2013,-500
23-07-2013,-500
24-07-2013,-500
25-07-2013,-500
26-07-2013,-500
29-07-2013,-500
30-07-2013,-500
31-07-2013,-500
01-08-2013,-500
02-08-2013,-500
06-08-2013,-500
07-08-2013,-500
08-08-2013,-500
09-08-2013,-500
12-08-2013,-500
13-08-2013,-500
14-08-2013,-500
15-08-2013,-500
16-08-2013,-500
19-08-2013,-500
20-08-2013,-500
21-08-2013,-500
22-08-2013,-500
23-08-2013,-500
26-08-2013,-500
27-08-2013,-500
28-08-2013,-500
29-08-2013,-500
30-08-2013,-500
03-09-2013,-500
04-09-2013,-500
05-09-2013,-500
06-09-2013,-500
09-09-2013,-500
10-09-2013,-500
11-09-2013,-500
12-09-2013,-500
13-09-2013,-500
16-09-2013,-500
17-09-2013,-500
18-09-2013,-500
19-09-2013,-500
20-09-2013,-500
23-09-2013,-500
24-09-2013,-500
25-09-2013,-500
26-09-2013,-500
27-09-2013,-500
30-09-2013,-500
01-10-2013,-500
02-10-2013,-500
03-10-2013,-500
04-10-2013,-500
07-10-2013,-500
08-10-2013,-500
09-10-2013,-500
10-10-2013,-500
11-10-2013,-500
15-10-2013,-500
16-10-2013,-500
17-10-2013,-500
18-10-2013,-500
21-10-2013,-500
22-10-2013,-500
23-10-2013,-500
24-10-2013,-500
25-10-2013,-500
28-10-2013,-500
29-10-2013,-500
30-10-2013,-500
31-10-2013,-500
01-11-2013,-500
04-11-2013,-500
05-11-2013,-500
06-11-2013,-500
07-11-2013,-500
08-11-2013,-500
12-11-2013,-500
13-11-2013,-500
14-11-2013,-500
15-11-2013,-500
18-11-2013,-500
19-11-2013,-500
20-11-2013,-500
21-11-2013,-500
22-11-2013,-500
25-11-2013,-500
26-11-2013,-500
27-11-2013,-500
28-11-2013,-500
29-11-2013,-500
02-12-2013,-500
03-12-2013,-500
04-12-2013,-500
05-12-2013,-500
06-12-2013,-500
09-12-2013,-500
10-12-2013,-500
11-12-2013,-500
12-12-2013,-500
13-12-2013,-500
16-12-2013,-500
17-12-2013,-500
18-12-2013,-500
19-12-2013,-500
20-12-2013,-500
23-12-2013,-500
24-12-2013,-500
27-12-2013,-500
30-12-2013,-500
31-12-2013,-500
02-01-2014,-500
03-01-2014,-500
06-01-2014,-500
07-01-2014,-500
08-01-2014,-500
09-01-2014,-500
10-01-2014,-500
13-01-2014,-500
14-01-2014,-500
15-01-2014,-500
16-01-2014,-500
17-01-2014,-500
20-01-2014,-500
21-01-2014,-500
22-01-2014,-500
23-01-2014,-500
24-01-2014,-500
27-01-2014,-500
28-01-2014,-500
29-01-2014,-500
30-01-2014,-500
31-01-2014,-500
03-02-2014,-500
04-02-2014,-500
05-02-2014,-500
06-02-2014,-500
07-02-2014,-500
10-02-2014,-500
11-02-2014,-500
12-02-2014,-500
13-02-2014,-500
14-02-2014,-500
18-02-2014,-500
19-02-2014,-500
20-02-2014,-500
21-02-2014,-500
24-02-2014,-500
25-02-2014,-500
26-02-2014,-500
27-02-2014,-500
28-02-2014,-500
03-03-2014,-500
04-03-2014,-500
05-03-2014,-500
06-03-2014,-500
07-03-2014,-500
10-03-2014,-500
11-03-2014,-500
12-03-2014,-500
13-03-2014,-500
14-03-2014,-500
17-03-2014,-500
18-03-2014,-500
19-03-2014,-500
20-03-2014,-500
21-03-2014,-500
24-03-2014,-500
25-03-2014,-500
26-03-2014,-500
27-03-2014,-500
28-03-2014,-500
31-03-2014,-500
01-04-2014,-500
02-04-2014,-500
03-04-2014,-500
04-04-2014,-500
07-04-2014,-500
08-04-2014,-500
09-04-2014,-500
10-04-2014,-500
11-04-2014,-500
14-04-2014,-500
15-04-2014,-500
16-04-2014,-500
17-04-2014,-500
21-04-2014,-500
22-04-2014,-500
23-04-2014,-500
24-04-2014,-500
25-04-2014,-500
28-04-2014,-500
29-04-2014,-500
30-04-2014,-500
01-05-2014,-500
02-05-2014,-500
05-05-2014,-500
06-05-2014,-500
07-05-2014,-500
08-05-2014,-500
09-05-2014,-500
12-05-2014,-500
13-05-2014,-500
14-05-2014,-500
15-05-2014,-500
16-05-2014,-500
20-05-2014,-500
21-05-2014,-500
22-05-2014,-500
23-05-2014,-500
26-05-2014,-500
27-05-2014,-500
28-05-2014,-500
29-05-2014,-500
30-05-2014,-500
02-06-2014,-500
03-06-2014,-500
04-06-2014,-500
05-06-2014,-500
06-06-2014,-500
09-06-2014,-500
10-06-2014,-500
11-06-2014,-500
12-06-2014,-500
13-06-2014,-500
16-06-2014,-500
17-06-2014,-500
18-06-2014,-500
19-06-2014,-500
20-06-2014,-500
23-06-2014,-500
24-06-2014,-500
25-06-2014,-500
26-06-2014,-500
27-06-2014,-500
30-06-2014,-500
02-07-2014,-500
03-07-2014,-500
04-07-2014,-500
07-07-2014,-500
08-07-2014,-500
09-07-2014,-500
10-07-2014,-500
11-07-2014,-500
14-07-2014,-500
15-07-2014,-500
16-07-2014,-500
17-07-2014,-500
18-07-2014,-500
21-07-2014,-500
22-07-2014,-500
23-07-2014,-500
24-07-2014,-500
25-07-2014,-500
28-07-2014,-500
29-07-2014,-500
30-07-2014,-500
31-07-2014,-500
01-08-2014,-500
05-08-2014,-500
06-08-2014,-500
07-08-2014,-500
08-08-2014,-500
11-08-2014,-500
12-08-2014,-500
13-08-2014,-500
14-08-2014,-500
15-08-2014,-500
18-08-2014,-500
19-08-2014,-500
20-08-2014,-500
21-08-2014,-500
22-08-2014,-500
25-08-2014,-500
26-08-2014,-500
27-08-2014,-500
28-08-2014,-500
29-08-2014,-500
02-09-2014,-500
03-09-2014,-500
04-09-2014,-500
05-09-2014,-500
08-09-2014,-500
09-09-2014,-500
10-09-2014,-500
11-09-2014,-500
12-09-2014,-500
15-09-2014,-500
16-09-2014,-500
17-09-2014,-500
18-09-2014,-500
19-09-2014,-500
22-09-2014,-500
23-09-2014,-500
24-09-2014,-500
25-09-2014,-500
26-09-2014,-500
29-09-2014,-500
30-09-2014,-500
01-10-2014,-500
02-10-2014,-500
03-10-2014,-500
06-10-2014,-500
07-10-2014,-500
08-10-2014,-500
09-10-2014,-500
10-10-2014,-500
14-10-2014,-500
15-10-2014,-500
16-10-2014,-500
17-10-2014,-500
20-10-2014,-500
21-10-2014,-500
22-10-2014,-500
23-10-2014,-500
24-10-2014,-500
27-10-2014,-500
28-10-2014,-500
29-10-2014,-500
30-10-2014,-500
31-10-2014,-500
03-11-2014,-500
04-11-2014,-500
05-11-2014,-500
06-11-2014,-500
07-11-2014,-500
10-11-2014,-500
12-11-2014,-500
13-11-2014,-500
14-11-2014,-500
17-11-2014,-500
18-11-2014,-500
19-11-2014,-500
20-11-2014,-500
21-11-2014,-500
24-11-2014,-500
25-11-2014,-500
26-11-2014,-500
27-11-2014,-500
28-11-2014,-500
01-12-2014,-500
02-12-2014,-500
03-12-2014,-500
04-12-2014,-500
05-12-2014,-500
08-12-2014,-500
09-12-2014,-500
10-12-2014,-500
11-12-2014,-500
12-12-2014,-500
15-12-2014,-500
16-12-2014,-500
17-12-2014,-500
18-12-2014,-500
19-12-2014,-500
22-12-2014,-500
23-12-2014,-500
24-12-2014,-500
29-12-2014,-500
30-12-2014,-500
31-12-2014,-500
02-01-2015,-500
05-01-2015,-500
06-01-2015,-500
07-01-2015,-500
08-01-2015,-500
09-01-2015,-500
12-01-2015,-500
13-01-2015,-500
14-01-2015,-500
15-01-2015,-500
16-01-2015,-500
19-01-2015,-500
20-01-2015,-500
21-01-2015,-500
22-01-2015,-500
23-01-2015,-500
26-01-2015,-500
27-01-2015,-500
28-01-2015,-500
29-01-2015,-500
30-01-2015,-500
02-02-2015,-500
03-02-2015,-500
04-02-2015,-500
05-02-2015,-500
06-02-2015,-500
09-02-2015,-500
10-02-2015,-500
11-02-2015,-500
12-02-2015,-500
13-02-2015,-500
17-02-2015,-500
18-02-2015,-500
19-02-2015,-500
20-02-2015,-500
23-02-2015,-500
24-02-2015,-500
25-02-2015,-500
26-02-2015,-500
27-02-2015,-500
02-03-2015,-500
03-03-2015,-500
04-03-2015,-500
05-03-2015,-500
06-03-2015,-500
09-03-2015,-500
10-03-2015,-500
11-03-2015,-500
12-03-2015,-500
13-03-2015,-500
16-03-2015,-500
17-03-2015,-500
18-03-2015,-500
19-03-2015,-500
20-03-2015,-500
23-03-2015,-500
24-03-2015,-500
25-03-2015,-500
26-03-2015,-500
27-03-2015,-500
30-03-2015,-500
31-03-2015,-500
01-04-2015,-500
02-04-2015,-500
06-04-2015,-500
07-04-2015,-500
08-04-2015,-500
09-04-2015,-500
10-04-2015,-500
13-04-2015,-500
14-04-2015,-500
15-04-2015,-500
16-04-2015,-500
17-04-2015,-500
20-04-2015,-500
21-04-2015,-500
22-04-2015,-500
23-04-2015,-500
24-04-2015,-500
27-04-2015,-500
28-04-2015,-500
29-04-2015,-500
30-04-2015,-500
01-05-2015,-500
04-05-2015,-500
05-05-2015,-500
06-05-2015,-500
07-05-2015,-500
08-05-2015,-500
11-05-2015,-500
12-05-2015,-500
13-05-2015,-500
14-05-2015,-500
15-05-2015,-500
19-05-2015,-500
20-05-2015,-500
21-05-2015,-500
22-05-2015,-500
25-05-2015,-500
26-05-2015,-500
27-05-2015,-500
28-05-2015,-500
29-05-2015,-500
01-06-2015,-500
02-06-2015,-500
03-06-2015,-500
04-06-2015,-500
05-06-2015,-500
08-06-2015,-500
09-06-2015,-500
10-06-2015,-500
11-06-2015,-500
12-06-2015,-500
15-06-2015,-500
16-06-2015,-500
17-06-2015,-500
18-06-2015,-500
19-06-2015,-500
22-06-2015,-500
23-06-2015,-500
24-06-2015,-500
25-06-2015,-500
26-06-2015,-500
29-06-2015,-500
30-06-2015,-500
02-07-2015,-500
03-07-2015,-500
06-07-2015,-500
07-07-2015,-500
08-07-2015,-500
09-07-2015,-500
10-07-2015,-500
13-07-2015,-500
14-07-2015,-500
15-07-2015,-500
16-07-2015,-500
17-07-2015,-500
20-07-2015,-500
21-07-2015,-500
22-07-2015,-500
23-07-2015,-500
24-07-2015,-500
27-07-2015,-500
28-07-2015,-500
29-07-2015,-500
30-07-2015,-500
31-07-2015,-500
04-08-2015,-500
05-08-2015,-500
06-08-2015,-500
07-08-2015,-500
10-08-2015,-500
11-08-2015,-500
12-08-2015,-500
13-08-2015,-500
14-08-2015,-500
17-08-2015,-500
18-08-2015,-500
19-08-2015,-500
20-08-2015,-500
21-08-2015,-500
24-08-2015,-500
25-08-2015,-500
26-08-2015,-500
27-08-2015,-500
28-08-2015,-500
31-08-2015,-500
01-09-2015,-500
02-09-2015,-500
03-09-2015,-500
04-09-2015,-500
08-09-2015,-500
09-09-2015,-500
10-09-2015,-500
11-09-2015,-500
14-09-2015,-500
15-09-2015,-500
16-09-2015,-500
17-09-2015,-500
18-09-2015,-500
21-09-2015,-500
22-09-2015,-500
23-09-2015,-500
24-09-2015,-500
25-09-2015,-500
28-09-2015,-500
29-09-2015,-500
30-09-2015,-500
01-10-2015,-500
02-10-2015,-500
05-10-2015,-500
06-10-2015,-500
07-10-2015,-500
08-10-2015,-500
09-10-2015,-500
13-10-2015,-500
14-10-2015,-500
15-10-2015,-500
16-10-2015,-500
19-10-2015,-500
20-10-2015,-500
21-10-2015,-500
22-10-2015,-500
23-10-2015,-500
26-10-2015,-500
27-10-2015,-500
28-10-2015,-500
29-10-2015,-500
30-10-2015,-500
02-11-2015,-500
03-11-2015,-500
04-11-2015,-500
05-11-2015,-500
06-11-2015,-500
09-11-2015,-500
10-11-2015,-500
12-11-2015,-500
13-11-2015,-500
16-11-2015,-500
17-11-2015,-500
18-11-2015,-500
19-11-2015,-500
20-11-2015,-500
23-11-2015,-500
24-11-2015,-500
25-11-2015,-500
26-11-2015,-500
27-11-2015,-500
30-11-2015,-500
01-12-2015,-500
02-12-2015,-500
03-12-2015,-500
04-12-2015,-500
07-12-2015,-500
08-12-2015,-500
09-12-2015,-500
10-12-2015,-500
11-12-2015,-500
14-12-2015,-500
15-12-2015,-500
16-12-2015,-500
17-12-2015,-500
18-12-2015,-500
21-12-2015,-500
22-12-2015,-500
23-12-2015,-500
24-12-2015,-500
29-12-2015,-500
30-12-2015,-500
31-12-2015,-500
04-01-2016,-500
05-01-2016,-500
06-01-2016,-500
07-01-2016,-500
08-01-2016,-500
11-01-2016,-500
12-01-2016,-500
13-01-2016,-500
14-01-2016,-500
15-01-2016,-500
18-01-2016,-500
19-01-2016,-500
20-01-2016,-500
21-01-2016,-500
22-01-2016,-500
25-01-2016,-500
26-01-2016,-500
27-01-2016,-500
28-01-2016,-500
29-01-2016,-500
01-02-2016,-500
02-02-2016,-500
03-02-2016,-500
04-02-2016,-500
05-02-2016,-500
08-02-2016,-500
09-02-2016,-500
10-02-2016,-500
11-02-2016,-500
12-02-2016,-500
16-02-2016,-500
17-02-2016,-500
18-02-2016,-500
19-02-2016,-500
22-02-2016,-500
23-02-2016,-500
24-02-2016,-500
25-02-2016,-500
26-02-2016,-500
29-02-2016,-500
01-03-2016,-500
02-03-2016,-500
03-03-2016,-500
04-03-2016,-500
07-03-2016,-500
08-03-2016,-500
09-03-2016,-500
10-03-2016,-500
11-03-2016,-500
14-03-2016,-500
15-03-2016,-500
16-03-2016,-500
17-03-2016,-500
18-03-2016,-500
21-03-2016,-500
22-03-2016,-500
23-03-2016,-500
24-03-2016,-500
28-03-2016,-500
29-03-2016,-500
30-03-2016,-500
31-03-2016,-500
01-04-2016,-500
04-04-2016,-500
05-04-2016,-500
06-04-2016,-500
07-04-2016,-500
08-04-2016,-500
11-04-2016,-500
12-04-2016,-500
13-04-2016,-500
14-04-2016,-500
15-04-2016,-500
18-04-2016,-500
19-04-2016,-500
20-04-2016,-500
21-04-2016,-500
22-04-2016,-500
25-04-2016,-500
26-04-2016,-500
27-04-2016,-500
28-04-2016,-500
29-04-2016,-500
02-05-2016,-500
03-05-2016,-500
04-05-2016,-500
05-05-2016,-500
06-05-2016,-500
09-05-2016,-500
10-05-2016,-500
11-05-2016,-500
12-05-2016,-500
13-05-2016,-500
16-05-2016,-500
17-05-2016,-500
18-05-2016,-500
19-05-2016,-500
20-05-2016,-500
24-05-2016,-500
25-05-2016,-500
26-05-2016,-500
27-05-2016,-500
30-05-2016,-500
31-05-2016,-500
01-06-2016,-500
02-06-2016,-500
03-06-2016,-500
06-06-2016,-500
07-06-2016,-500
08-06-2016,-500
09-06-2016,-500
10-06-2016,-500
13-06-2016,-500
14-06-2016,-500
15-06-2016,-500
16-06-2016,-500
17-06-2016,-500
20-06-2016,-500
21-06-2016,-500
22-06-2016,-500
23-06-2016,-500
24-06-2016,-500
27-06-2016,-500
28-06-2016,-500
29-06-2016,-500
30-06-2016,-500
04-07-2016,-500
05-07-2016,-500
06-07-2016,-500
07-07-2016,-500
08-07-2016,-500
11-07-2016,-500
12-07-2016,-500
13-07-2016,-500
14-07-2016,-500
15-07-2016,-500
18-07-2016,-500
19-07-2016,-500
20-07-2016,-500
21-07-2016,-500
22-07-2016,-500
25-07-2016,-500
26-07-2016,-500
27-07-2016,-500
28-07-2016,-500
29-07-2016,-500
02-08-2016,-500
03-08-2016,-500
04-08-2016,-500
05-08-2016,-500
08-08-2016,-500
09-08-2016,-500
10-08-2016,-500
11-08-2016,-500
12-08-2016,-500
15-08-2016,-500
16-08-2016,-500
17-08-2016,-500
18-08-2016,-500
19-08-2016,-500
22-08-2016,-500
23-08-2016,-500
24-08-2016,-500
25-08-2016,-500
26-08-2016,-500
29-08-2016,-500
30-08-2016,-500
31-08-2016,-500
01-09-2016,-500
02-09-2016,-500
06-09-2016,-500
07-09-2016,-500
08-09-2016,-500
09-09-2016,-500
12-09-2016,-500
13-09-2016,-500
14-09-2016,-500
15-09-2016,-500
16-09-2016,-500
19-09-2016,-500
20-09-2016,-500
21-09-2016,-500
22-09-2016,-500
23-09-2016,-500
26-09-2016,-500
27-09-2016,-500
28-09-2016,-500
29-09-2016,-500
30-09-2016,-500
03-10-2016,-500
04-10-2016,-500
05-10-2016,-500
06-10-2016,-500
07-10-2016,-500
11-10-2016,-500
12-10-2016,-500
13-10-2016,-500
14-10-2016,-500
17-10-2016,-500
18-10-2016,-500
19-10-2016,-500
20-10-2016,-500
21-10-2016,-500
24-10-2016,-500
25-10-2016,-500
26-10-2016,-500
27-10-2016,-500
28-10-2016,-500
31-10-2016,-500
01-11-2016,-500
02-11-2016,-500
03-11-2016,-500
04-11-2016,-500
07-11-2016,-500
08-11-2016,-500
09-11-2016,-500
10-11-2016,-500
14-11-2016,-500
15-11-2016,-500
16-11-2016,-500
17-11-2016,-500
18-11-2016,-500
21-11-2016,-500
22-11-2016,-500
23-11-2016,-500
24-11-2016,-500
25-11-2016,-500
28-11-2016,-500
29-11-2016,-500
30-11-2016,-500
01-12-2016,-500
02-12-2016,-500
05-12-2016,-500
06-12-2016,-500
07-12-2016,-500
08-12-2016,-500
09-12-2016,-500
12-12-2016,-500
13-12-2016,-500
14-12-2016,-500
15-12-2016,-500
16-12-2016,-500
19-12-2016,-500
20-12-2016,-500
21-12-2016,-500
22-12-2016,-500
23-12-2016,-500
28-12-2016,-500
29-12-2016,-500
30-12-2016,-500
03-01-2017,-500
04-01-2017,-500
05-01-2017,-500
06-01-2017,-500
09-01-2017,-500
10-01-2017,-500
11-01-2017,-500
12-01-2017,-500
13-01-2017,-500
16-01-2017,-500
17-01-2017,-500
18-01-2017,-500
19-01-2017,-500
20-01-2017,-500
23-01-2017,-500
24-01-2017,-500
25-01-2017,-500
26-01-2017,-500
27-01-2017,-500
30-01-2017,-500
31-01-2017,-500
01-02-2017,-500
02-02-2017,-500
03-02-2017,-500
06-02-2017,-500
07-02-2017,-500
08-02-2017,-500
09-02-2017,-500
10-02-2017,-500
13-02-2017,-500
14-02-2017,-500
15-02-2017,-500
16-02-2017,-500
17-02-2017,-500
21-02-2017,-500
22-02-2017,-500
23-02-2017,-500
24-02-2017,-500
27-02-2017,-500
28-02-2017,-500
01-03-2017,-500
02-03-2017,-500
03-03-2017,-500
06-03-2017,-500
07-03-2017,-500
08-03-2017,-500
09-03-2017,-500
10-03-2017,-500
13-03-2017,-500
14-03-2017,-500
15-03-2017,-500
16-03-2017,-500
17-03-2017,-500
20-03-2017,-500
21-03-2017,-500
22-03-2017,-500
23-03-2017,-500
24-03-2017,-500
27-03-2017,-500
28-03-2017,-500
29-03-2017,-500
30-03-2017,-500
31-03-2017,-500
03-04-2017,-500
04-04-2017,-500
05-04-2017,-500
06-04-2017,-500
07-04-2017,-500
10-04-2017,-500
11-04-2017,-500
12-04-2017,-500
13-04-2017,-500
17-04-2017,-500
18-04-2017,-500
19-04-2017,-500
20-04-2017,-500
21-04-2017,-500
24-04-2017,-500
25-04-2017,-500
26-04-2017,-500
27-04-2017,-500
28-04-2017,-500
01-05-2017,-500
02-05-2017,-500
03-05-2017,-500
04-05-2017,-500
05-05-2017,-500
08-05-2017,-500
09-05-2017,-500
10-05-2017,-500
11-05-2017,-500
12-05-2017,-500
15-05-2017,-500
16-05-2017,-500
17-05-2017,-500
18-05-2017,-500
19-05-2017,-500
23-05-2017,-500
24-05-2017,-500
25-05-2017,-500
26-05-2017,-500
29-05-2017,-500
30-05-2017,-500
31-05-2017,-500
01-06-2017,-500
02-06-2017,-500
05-06-2017,-500
06-06-2017,-500
07-06-2017,-500
08-06-2017,-500
09-06-2017,-500
12-06-2017,-500
13-06-2017,-500
14-06-2017,-500
15-06-2017,-500
16-06-2017,-500
19-06-2017,-500
20-06-2017,-500
21-06-2017,-500
22-06-2017,-500
23-06-2017,-500
26-06-2017,-500
27-06-2017,-500
28-06-2017,-500
29-06-2017,-500
30-06-2017,-500
04-07-2017,-500
05-07-2017,-500
06-07-2017,-500
07-07-2017,-500
10-07-2017,-500
11-07-2017,-500
12-07-2017,-500
13-07-2017,-500
14-07-2017,-500
17-07-2017,-500
18-07-2017,-500
19-07-2017,-500
20-07-2017,-500
21-07-2017,-500
24-07-2017,-500
25-07-2017,-500
26-07-2017,-500
27-07-2017,-500
28-07-2017,-500
31-07-2017,-500
01-08-2017,-500
02-08-2017,-500
03-08-2017,-500
04-08-2017,-500
08-08-2017,-500
09-08-2017,-500
10-08-2017,-500
11-08-2017,-500
14-08-2017,-500
15-08-2017,-500
16-08-2017,-500
17-08-2017,-500
18-08-2017,-500
21-08-2017,-500
22-08-2017,-500
23-08-2017,-500
24-08-2017,-500
25-08-2017,-500
28-08-2017,-500
29-08-2017,-500
30-08-2017,-500
31-08-2017,-500
01-09-2017,-500
05-09-2017,-500
06-09-2017,-500
07-09-2017,-500
08-09-2017,-500
11-09-2017,-500
12-09-2017,-500
13-09-2017,-500
14-09-2017,-500
15-09-2017,-500
18-09-2017,-500
19-09-2017,-500
20-09-2017,-500
21-09-2017,-500
22-09-2017,-500
25-09-2017,-500
26-09-2017,-500
27-09-2017,-500
28-09-2017,-500
29-09-2017,-500
02-10-2017,-500
03-10-2017,-500
04-10-2017,-500
05-10-2017,-500
06-10-2017,-500
10-10-2017,-500
11-10-2017,-500
12-10-2017,-500
13-10-2017,-500
16-10-2017,-500
17-10-2017,-500
18-10-2017,-500
19-10-2017,-500
20-10-2017,-500
23-10-2017,-500
24-10-2017,-500
25-10-2017,-500
26-10-2017,-500
27-10-2017,-500
30-10-2017,-500
31-10-2017,-500
01-11-2017,-500
02-11-2017,-500
03-11-2017,-500
06-11-2017,-500
07-11-2017,-500
08-11-2017,-500
09-11-2017,-500
10-11-2017,-500
14-11-2017,-500
15-11-2017,-500
16-11-2017,-500
17-11-2017,-500
20-11-2017,-500
21-11-2017,-500
22-11-2017,-500
23-11-2017,-500
24-11-2017,-500
27-11-2017,-500
28-11-2017,-500
29-11-2017,-500
30-11-2017,-500
01-12-2017,-500
04-12-2017,-500
05-12-2017,-500
06-12-2017,-500
07-12-2017,-500
08-12-2017,-500
11-12-2017,-500
12-12-2017,-500
13-12-2017,-500
14-12-2017,-500
15-12-2017,-500
18-12-2017,-500
19-12-2017,-500
20-12-2017,-500
21-12-2017,-500
22-12-2017,-500
27-12-2017,-500
28-12-2017,-500
29-12-2017,-500
02-01-2018,-500
03-01-2018,-500
04-01-2018,-500
05-01-2018,-500
08-01-2018,-500
09-01-2018,-500
10-01-2018,-500
11-01-2018,-500
12-01-2018,-500
15-01-2018,-500
16-01-2018,-500
17-01-2018,-500
18-01-2018,-500
19-01-2018,-500
22-01-2018,-500
23-01-2018,-500
24-01-2018,-500
25-01-2018,-500
26-01-2018,-500
29-01-2018,-500
30-01-2018,-500
31-01-2018,-500
01-02-2018,-500
02-02-2018,-500
05-02-2018,-500
06-02-2018,-500
07-02-2018,-500
08-02-2018,-500
09-02-2018,-500
12-02-2018,-500
13-02-2018,-500
14-02-2018,-500
15-02-2018,-500
16-02-2018,-500
20-02-2018,-500
21-02-2018,-500
22-02-2018,-500
23-02-2018,-500
26-02-2018,-500
27-02-2018,-500
28-02-2018,-500
01-03-2018,-500
02-03-2018,-500
05-03-2018,-500
06-03-2018,-500
07-03-2018,-500
08-03-2018,-500
09-03-2018,-500
12-03-2018,-500
13-03-2018,-500
14-03-2018,-500
15-03-2018,-500
16-03-2018,-500
19-03-2018,-500
20-03-2018,-500
21-03-2018,-500
22-03-2018,-500
23-03-2018,-500
26-03-2018,-500
27-03-2018,-500
28-03-2018,-500
29-03-2018,-500
02-04-2018,-500
03-04-2018,-500
04-04-2018,-500
05-04-2018,-500
06-04-2018,-500
09-04-2018,-500
10-04-2018,-500
11-04-2018,-500
12-04-2018,-500
13-04-2018,-500
16-04-2018,-500
17-04-2018,-500
18-04-2018,-500
19-04-2018,-500
20-04-2018,-500
23-04-2018,-500
24-04-2018,-500
25-04-2018,-500
26-04-2018,-500
27-04-2018,-500
30-04-2018,-500
01-05-2018,-500
02-05-2018,-500
03-05-2018,-500
04-05-2018,-500
07-05-2018,-500
08-05-2018,-500
09-05-2018,-500
10-05-2018,-500
11-05-2018,-500
14-05-2018,-500
15-05-2018,-500
16-05-2018,-500
17-05-2018,-500
18-05-2018,-500
22-05-2018,-500
23-05-2018,-500
24-05-2018,-500
25-05-2018,-500
28-05-2018,-500
29-05-2018,-500
30-05-2018,-500
31-05-2018,-500
01-06-2018,-500
04-06-2018,-500
05-06-2018,-500
06-06-2018,-500
07-06-2018,-500
08-06-2018,-500
11-06-2018,-500
12-06-2018,-500
13-06-2018,-500
14-06-2018,-500
15-06-2018,-500
18-06-2018,-500
19-06-2018,-500
20-06-2018,-500
21-06-2018,-500
22-06-2018,-500
25-06-2018,-500
26-06-2018,-500
27-06-2018,-500
28-06-2018,-500
29-06-2018,-500
03-07-2018,-500
04-07-2018,-500
05-07-2018,-500
06-07-2018,-500
09-07-2018,-500
10-07-2018,-500
11-07-2018,-500
12-07-2018,-500
13-07-2018,-500
16-07-2018,-500
17-07-2018,-500
18-07-2018,-500
19-07-2018,-500
20-07-2018,-500
23-07-2018,-500
24-07-2018,-500
25-07-2018,-500
26-07-2018,-500
27-07-2018,-500
30-07-2018,-500
31-07-2018,-500
01-08-2018,-500
02-08-2018,-500
03-08-2018,-500
07-08-2018,-500
08-08-2018,-500
09-08-2018,-500
10-08-2018,-500
13-08-2018,-500
14-08-2018,-500
15-08-2018,-500
16-08-2018,-500
17-08-2018,-500
20-08-2018,-500
21-08-2018,-500
22-08-2018,-500
23-08-2018,-500
24-08-2018,-500
27-08-2018,-500
28-08-2018,-500
29-08-2018,-500
30-08-2018,-500
31-08-2018,-500
04-09-2018,-500
05-09-2018,-500
06-09-2018,-500
07-09-2018,-500
10-09-2018,-500
11-09-2018,-500
12-09-2018,-500
13-09-2018,-500
14-09-2018,-500
17-09-2018,-500
18-09-2018,-500
19-09-2018,-500
20-09-2018,-500
21-09-2018,-500
24-09-2018,-500
25-09-2018,-500
26-09-2018,-500
27-09-2018,-500
28-09-2018,-500
01-10-2018,-500
02-10-2018,-500
03-10-2018,-500
04-10-2018,-500
05-10-2018,-500
09-10-2018,-500
10-10-2018,-500
11-10-2018,-500
12-10-2018,-500
15-10-2018,-500
16-10-2018,-500
17-10-2018,-500
18-10-2018,-500
19-10-2018,-500
22-10-2018,-500
23-10-2018,-500
24-10-2018,-500
25-10-2018,-500
26-10-2018,-500
29-10-2018,-500
30-10-2018,-500
31-10-2018,-500
01-11-2018,-500
02-11-2018,-500
05-11-2018,-500
06-11-2018,-500
07-11-2018,-500
08-11-2018,-500
09-11-2018,-500
13-11-2018,-500
14-11-2018,-500
15-11-2018,-500
16-11-2018,-500
19-11-2018,-500
20-11-2018,-500
21-11-2018,-500
22-11-2018,-500
23-11-2018,-500
26-11-2018,-500
27-11-2018,-500
28-11-2018,-500
29-11-2018,-500
30-11-2018,-500
03-12-2018,-500
04-12-2018,-500
05-12-2018,-500
06-12-2018,-500
07-12-2018,-500
10-12-2018,-500
11-12-2018,-500
12-12-2018,-500
13-12-2018,-500
14-12-2018,-500
17-12-2018,-500
18-12-2018,-500
19-12-2018,-500
20-12-2018,-500
21-12-2018,-500
24-12-2018,-500
27-12-2018,-500
28-12-2018,-500
31-12-2018,-500
02-01-2019,-500
03-01-2019,-500
04-01-2019,-500
07-01-2019,-500
08-01-2019,-500
09-01-2019,-500
10-01-2019,-500
11-01-2019,-500
14-01-2019,-500
15-01-2019,-500
16-01-2019,-500
17-01-2019,-500
18-01-2019,-500
21-01-2019,-500
22-01-2019,-500
23-01-2019,-500
24-01-2019,-500
25-01-2019,-500
28-01-2019,-500
29-01-2019,-500
30-01-2019,-500
31-01-2019,-500
01-02-2019,-500
04-02-2019,-500
05-02-2019,-500
06-02-2019,-500
07-02-2019,-500
08-02-2019,-500
11-02-2019,-500
12-02-2019,-500
13-02-2019,-500
14-02-2019,-500
15-02-2019,-500
19-02-2019,-500
20-02-2019,-500
21-02-2019,-500
22-02-2019,-500
25-02-2019,-500
26-02-2019,-500
27-02-2019,-500
28-02-2019,-500
01-03-2019,-500
04-03-2019,-500
05-03-2019,-500
06-03-2019,-500
07-03-2019,-500
08-03-2019,-500
11-03-2019,-500
12-03-2019,-500
13-03-2019,-500
14-03-2019,-500
15-03-2019,-500
18-03-2019,-500
19-03-2019,-500
20-03-2019,-500
21-03-2019,-500
22-03-2019,-500
25-03-2019,-500
26-03-2019,-500
27-03-2019,-500
28-03-2019,-500
29-03-2019,-500
01-04-2019,-500
02-04-2019,-500
03-04-2019,-500
04-04-2019,-500
05-04-2019,-500
08-04-2019,-500
09-04-2019,-500
10-04-2019,-500
11-04-2019,-500
12-04-2019,-500
15-04-2019,-500
16-04-2019,-500
17-04-2019,-500
18-04-2019,-500
22-04-2019,-500
23-04-2019,-500
24-04-2019,-500
25-04-2019,-500
26-04-2019,-500
29-04-2019,-500
30-04-2019,-500
01-05-2019,-500
02-05-2019,-500
03-05-2019,-500
06-05-2019,-500
07-05-2019,-500
08-05-2019,-500
09-05-2019,-500
10-05-2019,-500
13-05-2019,-500
14-05-2019,-500
15-05-2019,-500
16-05-2019,-500
17-05-2019,-500
21-05-2019,-500
22-05-2019,-500
23-05-2019,-500
24-05-2019,-500
27-05-2019,-500
28-05-2019,-500
29-05-2019,-500
30-05-2019,-500
31-05-2019,-500
03-06-2019,-500
04-06-2019,-500
05-06-2019,-500
06-06-2019,-500
07-06-2019,-500
10-06-2019,-500
11-06-2019,-500
12-06-2019,-500
13-06-2019,-500
14-06-2019,-500
17-06-2019,-500
18-06-2019,-500
19-06-2019,-500
20-06-2019,-500
21-06-2019,-500
24-06-2019,-500
25-06-2019,-500
26-06-2019,-500
27-06-2019,-500
28-06-2019,-500
02-07-2019,-500
03-07-2019,-500
04-07-2019,-500
05-07-2019,-500
08-07-2019,-500
09-07-2019,-500
10-07-2019,-500
11-07-2019,-500
12-07-2019,-500
15-07-2019,-500
16-07-2019,-500
17-07-2019,-500
18-07-2019,-500
19-07-2019,-500
22-07-2019,-500
23-07-2019,-500
24-07-2019,-500
25-07-2019,-500
26-07-2019,-500
29-07-2019,-500
30-07-2019,-500
31-07-2019,-500
01-08-2019,-500
02-08-2019,-500
06-08-2019,-500
07-08-2019,-500
08-08-2019,-500
09-08-2019,-500
12-08-2019,-500
13-08-2019,-500
14-08-2019,-500
15-08-2019,-500
16-08-2019,-500
19-08-2019,-500
20-08-2019,-500
21-08-2019,-500
22-08-2019,-500
23-08-2019,-500
26-08-2019,-500
27-08-2019,-500
28-08-2019,-500
29-08-2019,-500
30-08-2019,-500
03-09-2019,-500
04-09-2019,-500
05-09-2019,-500
06-09-2019,-500
09-09-2019,-500
10-09-2019,-500
11-09-2019,-500
12-09-2019,-500
13-09-2019,-500
16-09-2019,-500
17-09-2019,-500
18-09-2019,-500
19-09-2019,-500
20-09-2019,-500
23-09-2019,-500
24-09-2019,-500
25-09-2019,-500
26-09-2019,-500
27-09-2019,-500
30-09-2019,-500
01-10-2019,-500
02-10-2019,-500
03-10-2019,-500
04-10-2019,-500
07-10-2019,-500
08-10-2019,-500
09-10-2019,-500
10-10-2019,-500
11-10-2019,-500
15-10-2019,-500
16-10-2019,-500
17-10-2019,-500
18-10-2019,-500
21-10-2019,-500
22-10-2019,-500
23-10-2019,-500
24-10-2019,-500
25-10-2019,-500
28-10-2019,-500
29-10-2019,-500
30-10-2019,-500
31-10-2019,-500
01-11-2019,-500
04-11-2019,-500
05-11-2019,-500
06-11-2019,-500
07-11-2019,-500
08-11-2019,-500
12-11-2019,-500
13-11-2019,-500
14-11-2019,-500
15-11-2019,-500
18-11-2019,-500
19-11-2019,-500
20-11-2019,-500
21-11-2019,-500
22-11-2019,-500
25-11-2019,-500
26-11-2019,-500
27-11-2019,-500
28-11-2019,-500
29-11-2019,-500
02-12-2019,-500
03-12-2019,-500
04-12-2019,-500
05-12-2019,-500
06-12-2019,-500
09-12-2019,-500
10-12-2019,-500
11-12-2019,-500
12-12-2019,-500
13-12-2019,-500
16-12-2019,-500
17-12-2019,-500
18-12-2019,-500
19-12-2019,-500
20-12-2019,-500
23-12-2019,-500
24-12-2019,-500
27-12-2019,-500
30-12-2019,-500
31-12-2019,-500
02-01-2020,-500
03-01-2020,-500
06-01-2020,-500
07-01-2020,-500
08-01-2020,-500
09-01-2020,-500
10-01-2020,-500
13-01-2020,-500
14-01-2020,-500
15-01-2020,-500
16-01-2020,-500
17-01-2020,-500
20-01-2020,-500
21-01-2020,-500
22-01-2020,-500
23-01-2020,-500
24-01-2020,-500
27-01-2020,-500
28-01-2020,-500
29-01-2020,-500
30-01-2020,-500
31-01-2020,-500
03-02-2020,-500
04-02-2020,-500
05-02-2020,-500
06-02-2020,-500
07-02-2020,-500
10-02-2020,-500
11-02-2020,-500
12-02-2020,-500
13-02-2020,-500
14-02-2020,-500
18-02-2020,-500
19-02-2020,-500
20-02-2020,-500
21-02-2020,-500
24-02-2020,-500
25-02-2020,-500
26-02-2020,-500
27-02-2020,-500
28-02-2020,-500
02-03-2020,-500
03-03-2020,-500
04-03-2020,-500
05-03-2020,-500
06-03-2020,-500
09-03-2020,-500
10-03-2020,-500
11-03-2020,-500
12-03-2020,-500
13-03-2020,-500
16-03-2020,-500
17-03-2020,-500
18-03-2020,-500
19-03-2020,-500
20-03-2020,-500
23-03-2020,-500
24-03-2020,-500
25-03-2020,-500
26-03-2020,-500
27-03-2020,-500
30-03-2020,-500
31-03-2020,-500
01-04-2020,-500
02-04-2020,-500
03-04-2020,-500
06-04-2020,-500
07-04-2020,-500
08-04-2020,-500
09-04-2020,-500
13-04-2020,-500
14-04-2020,-500
15-04-2020,-500
16-04-2020,-500
17-04-2020,-500
20-04-2020,-500
21-04-2020,-500
22-04-2020,-500
23-04-2020,-500
24-04-2020,-500
27-04-2020,-500
28-04-2020,-500
29-04-2020,-500
30-04-2020,-500
01-05-2020,-500
04-05-2020,-500
05-05-2020,-500
06-05-2020,-500
07-05-2020,-500
08-05-2020,-500
11-05-2020,-500
12-05-2020,-500
13-05-2020,-500
14-05-2020,-500
15-05-2020,-500
19-05-2020,-500
20-05-2020,-500
21-05-2020,-500
22-05-2020,-500
25-05-2020,-500
26-05-2020,-500
27-05-2020,-500
28-05-2020,-500
29-05-2020,-500
01-06-2020,-500
02-06-2020,-500
03-06-2020,-500
04-06-2020,-500
05-06-2020,-500
08-06-2020,-500
09-06-2020,-500
10-06-2020,-500
11-06-2020,-500
12-06-2020,-500
15-06-2020,-500
16-06-2020,-500
17-06-2020,-500
18-06-2020,-500
19-06-2020,-500
22-06-2020,-500
23-06-2020,-500
24-06-2020,-500
25-06-2020,-500
26-06-2020,-500
29-06-2020,-500
30-06-2020,-500
02-07-2020,-500
03-07-2020,-500
06-07-2020,-500
07-07-2020,-500
08-07-2020,-500
09-07-2020,-500
10-07-2020,-500
13-07-2020,-500
14-07-2020,-500
15-07-2020,-500
16-07-2020,-500
17-07-2020,-500
20-07-2020,-500
21-07-2020,-500
22-07-2020,-500
23-07-2020,-500
24-07-2020,-500
27-07-2020,-500
28-07-2020,-500
29-07-2020,-500
30-07-2020,-500
31-07-2020,-500
04-08-2020,-500
05-08-2020,-500
06-08-2020,-500
07-08-2020,-500
10-08-2020,-500
11-08-2020,-500
12-08-2020,-500
13-08-2020,-500
14-08-2020,-500
17-08-2020,-500
18-08-2020,-500
19-08-2020,-500
20-08-2020,-500
21-08-2020,-500
24-08-2020,-500
25-08-2020,-500
26-08-2020,-500
27-08-2020,-500
28-08-2020,-500
31-08-2020,-500
01-09-2020,-500
02-09-2020,-500
03-09-2020,-500
04-09-2020,-500
08-09-2020,-500
09-09-2020,-500
10-09-2020,-500
11-09-2020,-500
14-09-2020,-500
15-09-2020,-500
16-09-2020,-500
17-09-2020,-500
18-09-2020,-500
21-09-2020,-500
22-09-2020,-500
23-09-2020,-500
24-09-2020,-500
25-09-2020,-500
28-09-2020,-500
29-09-2020,-500
30-09-2020,-500
01-10-2020,-500
02-10-2020,-500
05-10-2020,-500
06-10-2020,-500
07-10-2020,-500
08-10-2020,-500
09-10-2020,-500
13-10-2020,-500
14-10-2020,-500
15-10-2020,-500
16-10-2020,-500
19-10-2020,-500
20-10-2020,-500
21-10-2020,-500
22-10-2020,-500
23-10-2020,-500
26-10-2020,-500
27-10-2020,-500
28-10-2020,-500
29-10-2020,-500
30-10-2020,-500
02-11-2020,-500
03-11-2020,-500
04-11-2020,-500
05-11-2020,-500
06-11-2020,-500
09-11-2020,-500
10-11-2020,-500
12-11-2020,-500
13-11-2020,-500
16-11-2020,-500
17-11-2020,-500
18-11-2020,-500
19-11-2020,-500
20-11-2020,-500
23-11-2020,-500
24-11-2020,-500
25-11-2020,-500
26-11-2020,-500
27-11-2020,-500
30-11-2020,-500
01-12-2020,-500
02-12-2020,-500
03-12-2020,-500
04-12-2020,-500
07-12-2020,-500
08-12-2020,-500
09-12-2020,-500
10-12-2020,-500
11-12-2020,-500
14-12-2020,-500
15-12-2020,-500
16-12-2020,-500
17-12-2020,-500
18-12-2020,-500
21-12-2020,-500
22-12-2020,-500
23-12-2020,-500
24-12-2020,-500
29-12-2020,-500
30-12-2020,-500
31-12-2020,-500
04-01-2021,-500
05-01-2021,-500
06-01-2021,-500
07-01-2021,-500
08-01-2021,-500
11-01-2021,-500
12-01-2021,-500
13-01-2021,-500
14-01-2021,-500
15-01-2021,-500
18-01-2021,-500
19-01-2021,-500
20-01-2021,-500
21-01-2021,-500
22-01-2021,-500
25-01-2021,-500
26-01-2021,-500
27-01-2021,-500
28-01-2021,-500
29-01-2021,-500
01-02-2021,-500
02-02-2021,-500
03-02-2021,-500
04-02-2021,-500
05-02-2021,-500
08-02-2021,-500
09-02-2021,-500
10-02-2021,-500
11-02-2021,-500
12-02-2021,-500
16-02-2021,-500
17-02-2021,-500
18-02-2021,-500
19-02-2021,-500
22-02-2021,-500
23-02-2021,-500
24-02-2021,-500
25-02-2021,-500
26-02-2021,-500
01-03-2021,-500
02-03-2021,-500
03-03-2021,-500
04-03-2021,-500
05-03-2021,-500
08-03-2021,-500
09-03-2021,-500
10-03-2021,-500
11-03-2021,-500
12-03-2021,-500
15-03-2021,-500
16-03-2021,-500
17-03-2021,-500
18-03-2021,-500
19-03-2021,-500
22-03-2021,-500
23-03-2021,-500
24-03-2021,-500
25-03-2021,-500
26-03-2021,-500
29-03-2021,-500
30-03-2021,-500
31-03-2021,-500
01-04-2021,-500
05-04-2021,-500
06-04-2021,-500
07-04-2021,-500
08-04-2021,-500
09-04-2021,-500
12-04-2021,-500
13-04-2021,-500
14-04-2021,-500
15-04-2021,-500
16-04-2021,-500
19-04-2021,-500
20-04-2021,-500
21-04-2021,-500
22-04-2021,-500
23-04-2021,-500
26-04-2021,-500
27-04-2021,-500
28-04-2021,-500
29-04-2021,-500
30-04-2021,-500
03-05-2021,-500
04-05-2021,-500
05-05-2021,-500
06-05-2021,-500
07-05-2021,-500
10-05-2021,-500
11-05-2021,-500
12-05-2021,-500
13-05-2021,-500
14-05-2021,-500
17-05-2021,-500
18-05-2021,-500
19-05-2021,-500
20-05-2021,-500
21-05-2021,-500
25-05-2021,-500
26-05-2021,-500
27-05-2021,-500
28-05-2021,-500
31-05-2021,-500
01-06-2021,-500
02-06-2021,-500
03-06-2021,-500
04-06-2021,-500
07-06-2021,-500
08-06-2021,-500
09-06-2021,-500
10-06-2021,-500
11-06-2021,-500
14-06-2021,-500
15-06-2021,-500
16-06-2021,-500
17-06-2021,-500
18-06-2021,-500
21-06-2021,-500
22-06-2021,-500
23-06-2021,-500
24-06-2021,-500
25-06-2021,-500
28-06-2021,-500
29-06-2021,-500
30-06-2021,-500
02-07-2021,-500
05-07-2021,-500
06-07-2021,-500
07-07-2021,-500
08-07-2021,-500
09-07-2021,-500
12-07-2021,-500
13-07-2021,-500
14-07-2021,-500
15-07-2021,-500
16-07-2021,-500
19-07-2021,-500
20-07-2021,-500
21-07-2021,-500
22-07-2021,-500
23-07-2021,-500
26-07-2021,-500
27-07-2021,-500
28-07-2021,-500
29-07-2021,-500
30-07-2021,-500
03-08-2021,-500
04-08-2021,-500
05-08-2021,-500
06-08-2021,-500
09-08-2021,-500
10-08-2021,-500
11-08-2021,-500
12-08-2021,-500
13-08-2021,-500
16-08-2021,-500
17-08-2021,-500
18-08-2021,-500
19-08-2021,-500
20-08-2021,-500
23-08-2021,-500
24-08-2021,-500
25-08-2021,-500
26-08-2021,-500
27-08-2021,-500
30-08-2021,-500
31-08-2021,-500
01-09-2021,-500
02-09-2021,-500
03-09-2021,-500
07-09-2021,-500
08-09-2021,-500
09-09-2021,-500
10-09-2021,-500
13-09-2021,-500
14-09-2021,-500
15-09-2021,-500
16-09-2021,-500
17-09-2021,-500
20-09-2021,-500
21-09-2021,-500
22-09-2021,-500
23-09-2021,-500
24-09-2021,-500
27-09-2021,-500
28-09-2021,-500
29-09-2021,-500
01-10-2021,-500
04-10-2021,-500
05-10-2021,-500
06-10-2021,-500
07-10-2021,-500
08-10-2021,-500
12-10-2021,-500
13-10-2021,-500
14-10-2021,-500
15-10-2021,-500
18-10-2021,-500
19-10-2021,-500
20-10-2021,-500
21-10-2021,-500
22-10-2021,-500
25-10-2021,-500
26-10-2021,-500
27-10-2021,-500
28-10-2021,-500
29-10-2021,-500
01-11-2021,-500
02-11-2021,-500
03-11-2021,-500
04-11-2021,-500
05-11-2021,-500
08-11-2021,-500
09-11-2021,-500
10-11-2021,-500
12-11-2021,-500
15-11-2021,-500
16-11-2021,-500
17-11-2021,-500
18-11-2021,-500
19-11-2021,-500
22-11-2021,-500
23-11-2021,-500
24-11-2021,-500
25-11-2021,-500
26-11-2021,-500
29-11-2021,-500
30-11-2021,-500
01-12-2021,-500
02-12-2021,-500
03-12-2021,-500
06-12-2021,-500
07-12-2021,-500
08-12-2021,-500
09-12-2021,-500
10-12-2021,-500
13-12-2021,-500
14-12-2021,-500
15-12-2021,-500
16-12-2021,-500
17-12-2021,-500
20-12-2021,-500
21-12-2021,-500
22-12-2021,-500
23-12-2021,-500
24-12-2021,-500
29-12-2021,-500
30-12-2021,-500
31-12-2021,-500
04-01-2022,-500
05-01-2022,-500
06-01-2022,-500
07-01-2022,-500
10-01-2022,-500
11-01-2022,-500
12-01-2022,-500
13-01-2022,-500
14-01-2022,-500
17-01-2022,-500
18-01-2022,-500
19-01-2022,-500
20-01-2022,-500
21-01-2022,-500
24-01-2022,-500
25-01-2022,-500
26-01-2022,-500
27-01-2022,-500
28-01-2022,-500
31-01-2022,-500
01-02-2022,-500
02-02-2022,-500
03-02-2022,-500
04-02-2022,-500
07-02-2022,-500
08-02-2022,-500
09-02-2022,-500
10-02-2022,-500
11-02-2022,-500
14-02-2022,-500
15-02-2022,-500
16-02-2022,-500
17-02-2022,-500
18-02-2022,-500
22-02-2022,-500
23-02-2022,-500
24-02-2022,-500
25-02-2022,-500
28-02-2022,-500
01-03-2022,-500
02-03-2022,-500
03-03-2022,-500
04-03-2022,-500
07-03-2022,-500
08-03-2022,-500
09-03-2022,-500
10-03-2022,-500
11-03-2022,-500
14-03-2022,-500
15-03-2022,-500
16-03-2022,-500
17-03-2022,-500
18-03-2022,-500
21-03-2022,-500
22-03-2022,-500
23-03-2022,-500
24-03-2022,-500
25-03-2022,-500
28-03-2022,-500
29-03-2022,-500
30-03-2022,-500
31-03-2022,-500
01-04-2022,-500
04-04-2022,-500
05-04-2022,-500
06-04-2022,-500
07-04-2022,-500
08-04-2022,-500
11-04-2022,-500
12-04-2022,-500
13-04-2022,-500
14-04-2022,-500
18-04-2022,-500
19-04-2022,-500
20-04-2022,-500
21-04-2022,-500
22-04-2022,-500
25-04-2022,-500
26-04-2022,-500
27-04-2022,-500
28-04-2022,-500
29-04-2022,-500
02-05-2022,-500
03-05-2022,-500
04-05-2022,-500
05-05-2022,-500
06-05-2022,-500
09-05-2022,-500
10-05-2022,-500
11-05-2022,-500
12-05-2022,-500
13-05-2022,-500
16-05-2022,-500
17-05-2022,-500
18-05-2022,-500
19-05-2022,-500
20-05-2022,-500
24-05-2022,-500
25-05-2022,-500
26-05-2022,-500
27-05-2022,-500
30-05-2022,-500
31-05-2022,-500
01-06-2022,-500
02-06-2022,-500
03-06-2022,-500
06-06-2022,-500
07-06-2022,-500
08-06-2022,-500
09-06-2022,-500
10-06-2022,-500
13-06-2022,-500
14-06-2022,-500
15-06-2022,-500
16-06-2022,-500
17-06-2022,-500
20-06-2022,-500
21-06-2022,-500
22-06-2022,-500
23-06-2022,-500
24-06-2022,-500
27-06-2022,-500
28-06-2022,-500
29-06-2022,-500
30-06-2022,-500
04-07-2022,-500
05-07-2022,-500
06-07-2022,-500
07-07-2022,-500
08-07-2022,-500
11-07-2022,-500
12-07-2022,-500
13-07-2022,-500
14-07-2022,-500
15-07-2022,-500
18-07-2022,-500
19-07-2022,-500
20-07-2022,-500
21-07-2022,-500
22-07-2022,-500
25-07-2022,-500
26-07-2022,-500
27-07-2022,-500
28-07-2022,-500
29-07-2022,-500
02-08-2022,-500
03-08-2022,-500
04-08-2022,-500
05-08-2022,-500
08-08-2022,-500
09-08-2022,-500
10-08-2022,-500
11-08-2022,-500
12-08-2022,-500
15-08-2022,-500
16-08-2022,-500
17-08-2022,-500
18-08-2022,-500
19-08-2022,-500
22-08-2022,-500
23-08-2022,-500
24-08-2022,-500
25-08-2022,-500
26-08-2022,-500
29-08-2022,-500
30-08-2022,-500
31-08-2022,-500
01-09-2022,-500
02-09-2022,-500
06-09-2022,-500
07-09-2022,-500
08-09-2022,-500
09-09-2022,-500
12-09-2022,-500
13-09-2022,-500
14-09-2022,-500
15-09-2022,-500
16-09-2022,-500
19-09-2022,-500
20-09-2022,-500
21-09-2022,-500
22-09-2022,-500
23-09-2022,-500
26-09-2022,-500
27-09-2022,-500
28-09-2022,-500
29-09-2022,-500
03-10-2022,-500
04-10-2022,-500
05-10-2022,-500
06-10-2022,-500
07-10-2022,-500
11-10-2022,-500
12-10-2022,-500
13-10-2022,-500
14-10-2022,-500
17-10-2022,-500
18-10-2022,-500
19-10-2022,-500
20-10-2022,-500
21-10-2022,-500
24-10-2022,-500
25-10-2022,-500
26-10-2022,-500
27-10-2022,-500
28-10-2022,-500
31-10-2022,-500
01-11-2022,-500
02-11-2022,-500
03-11-2022,-500
04-11-2022,-500
07-11-2022,-500
08-11-2022,-500
09-11-2022,-500
10-11-2022,-500
14-11-2022,-500
15-11-2022,-500
16-11-2022,-500
17-11-2022,-500
18-11-2022,-500
21-11-2022,-500
22-11-2022,-500
23-11-2022,-500
24-11-2022,-500
25-11-2022,-500
28-11-2022,-500
29-11-2022,-500
30-11-2022,-500
01-12-2022,-500
02-12-2022,-500
05-12-2022,-500
06-12-2022,-500
07-12-2022,-500
08-12-2022,-500
09-12-2022,-500
12-12-2022,-500
13-12-2022,-500
14-12-2022,-500
15-12-2022,-500
16-12-2022,-500
19-12-2022,-500
20-12-2022,-500
21-12-2022,-500
22-12-2022,-500
23-12-2022,-500
28-12-2022,-500
29-12-2022,-500
30-12-2022,-500
03-01-2023,-500
04-01-2023,-500
05-01-2023,-500
06-01-2023,-500
09-01-2023,-500
10-01-2023,-500
11-01-2023,-500
12-01-2023,-500
13-01-2023,-500
16-01-2023,-500
17-01-2023,-500
18-01-2023,-500
19-01-2023,-500
20-01-2023,-500
23-01-2023,-500
24-01-2023,-500
25-01-2023,-500
26-01-2023,-500
27-01-2023,-500
30-01-2023,-500
31-01-2023,-500
01-02-2023,-500
02-02-2023,-500
03-02-2023,-500
06-02-2023,-500
07-02-2023,-500
08-02-2023,-500
09-02-2023,-500
10-02-2023,-500
13-02-2023,-500
14-02-2023,-500
15-02-2023,-500
16-02-2023,-500
17-02-2023,-500
21-02-2023,-500
22-02-2023,-500
23-02-2023,-500
24-02-2023,-500
27-02-2023,-500
28-02-2023,-500
01-03-2023,-500
02-03-2023,-500
03-03-2023,-500
06-03-2023,-500
07-03-2023,-500
08-03-2023,-500
09-03-2023,-500
10-03-2023,-500
13-03-2023,-500
14-03-2023,-500
15-03-2023,-500
16-03-2023,-500
17-03-2023,-500
20-03-2023,-500
21-03-2023,-500
22-03-2023,-500
23-03-2023,-500
24-03-2023,-500
27-03-2023,-500
28-03-2023,-500
29-03-2023,-500
30-03-2023,-500
31-03-2023,-500
03-04-2023,-500
04-04-2023,-500
05-04-2023,-500
06-04-2023,-500
10-04-2023,-500
11-04-2023,-500
12-04-2023,-500
13-04-2023,-500
14-04-2023,-500
17-04-2023,-500
18-04-2023,-500
19-04-2023,-500
20-04-2023,-500
21-04-2023,-500
24-04-2023,-500
25-04-2023,-500
26-04-2023,-500
27-04-2023,-500
28-04-2023,-500
01-05-2023,-500
02-05-2023,-500
03-05-2023,-500
04-05-2023,-500
05-05-2023,-500
08-05-2023,-500
09-05-2023,-500
10-05-2023,-500
11-05-2023,-500
12-05-2023,-500
15-05-2023,-500
16-05-2023,-500
17-05-2023,-500
18-05-2023,-500
19-05-2023,-500
23-05-2023,-500
24-05-2023,-500
25-05-2023,-500
26-05-2023,-500
29-05-2023,-500
30-05-2023,-500
31-05-2023,-500
01-06-2023,-500
02-06-2023,-500
05-06-2023,-500
06-06-2023,-500
07-06-2023,-500
08-06-2023,-500
09-06-2023,-500
12-06-2023,-500
13-06-2023,-500
14-06-2023,-500
15-06-2023,-500
16-06-2023,-500
19-06-2023,-500
20-06-2023,-500
21-06-2023,-500
22-06-2023,-500
23-06-2023,-500
26-06-2023,-500
27-06-2023,-500
28-06-2023,-500
29-06-2023,-500
30-06-2023,-500
04-07-2023,-500
05-07-2023,-500
06-07-2023,-500
07-07-2023,-500
10-07-2023,-500
11-07-2023,-500
12-07-2023,-500
13-07-2023,-500
14-07-2023,-500
17-07-2023,-500
18-07-2023,-500
19-07-2023,-500
20-07-2023,-500
21-07-2023,-500
24-07-2023,-500
25-07-2023,-500
26-07-2023,-500
27-07-2023,-500
28-07-2023,-500
31-07-2023,-500
01-08-2023,-500
02-08-2023,-500
03-08-2023,-500
04-08-2023,-500
08-08-2023,-500
09-08-2023,-500
10-08-2023,-500
11-08-2023,-500
14-08-2023,-500
15-08-2023,-500
16-08-2023,-500
17-08-2023,-500
18-08-2023,-500
21-08-2023,-500
22-08-2023,-500
23-08-2023,-500
24-08-2023,-500
25-08-2023,-500
28-08-2023,-500
29-08-2023,-500
30-08-2023,-500
31-08-2023,-500
01-09-2023,-500
05-09-2023,-500
06-09-2023,-500
07-09-2023,-500
================================================
FILE: python/rateslib/data/historical/corra.csv
================================================
reference_date,rate
12-08-1997,-500
18-08-1997,-500
19-08-1997,-500
20-08-1997,-500
21-08-1997,-500
22-08-1997,-500
25-08-1997,-500
26-08-1997,-500
27-08-1997,-500
28-08-1997,-500
02-09-1997,-500
03-09-1997,-500
04-09-1997,-500
05-09-1997,-500
08-09-1997,-500
09-09-1997,-500
10-09-1997,-500
11-09-1997,-500
12-09-1997,-500
15-09-1997,-500
16-09-1997,-500
17-09-1997,-500
18-09-1997,-500
19-09-1997,-500
22-09-1997,-500
23-09-1997,-500
24-09-1997,-500
25-09-1997,-500
26-09-1997,-500
29-09-1997,-500
30-09-1997,-500
01-10-1997,-500
02-10-1997,-500
03-10-1997,-500
06-10-1997,-500
07-10-1997,-500
08-10-1997,-500
09-10-1997,-500
10-10-1997,-500
14-10-1997,-500
15-10-1997,-500
16-10-1997,-500
17-10-1997,-500
20-10-1997,-500
21-10-1997,-500
22-10-1997,-500
23-10-1997,-500
24-10-1997,-500
27-10-1997,-500
28-10-1997,-500
29-10-1997,-500
30-10-1997,-500
31-10-1997,-500
03-11-1997,-500
04-11-1997,-500
05-11-1997,-500
06-11-1997,-500
07-11-1997,-500
10-11-1997,-500
12-11-1997,-500
13-11-1997,-500
14-11-1997,-500
17-11-1997,-500
18-11-1997,-500
19-11-1997,-500
20-11-1997,-500
21-11-1997,-500
24-11-1997,-500
25-11-1997,-500
26-11-1997,-500
27-11-1997,-500
28-11-1997,-500
01-12-1997,-500
02-12-1997,-500
03-12-1997,-500
04-12-1997,-500
05-12-1997,-500
08-12-1997,-500
09-12-1997,-500
10-12-1997,-500
11-12-1997,-500
12-12-1997,-500
15-12-1997,-500
16-12-1997,-500
17-12-1997,-500
18-12-1997,-500
19-12-1997,-500
23-12-1997,-500
24-12-1997,-500
29-12-1997,-500
30-12-1997,-500
31-12-1997,-500
02-01-1998,-500
05-01-1998,-500
06-01-1998,-500
07-01-1998,-500
08-01-1998,-500
09-01-1998,-500
12-01-1998,-500
13-01-1998,-500
14-01-1998,-500
15-01-1998,-500
16-01-1998,-500
19-01-1998,-500
20-01-1998,-500
21-01-1998,-500
22-01-1998,-500
23-01-1998,-500
26-01-1998,-500
27-01-1998,-500
28-01-1998,-500
29-01-1998,-500
30-01-1998,-500
02-02-1998,-500
03-02-1998,-500
04-02-1998,-500
05-02-1998,-500
06-02-1998,-500
09-02-1998,-500
10-02-1998,-500
11-02-1998,-500
12-02-1998,-500
13-02-1998,-500
16-02-1998,-500
17-02-1998,-500
18-02-1998,-500
19-02-1998,-500
20-02-1998,-500
23-02-1998,-500
24-02-1998,-500
25-02-1998,-500
26-02-1998,-500
27-02-1998,-500
02-03-1998,-500
03-03-1998,-500
04-03-1998,-500
05-03-1998,-500
06-03-1998,-500
09-03-1998,-500
10-03-1998,-500
11-03-1998,-500
12-03-1998,-500
13-03-1998,-500
16-03-1998,-500
17-03-1998,-500
18-03-1998,-500
19-03-1998,-500
20-03-1998,-500
23-03-1998,-500
24-03-1998,-500
25-03-1998,-500
26-03-1998,-500
27-03-1998,-500
30-03-1998,-500
31-03-1998,-500
01-04-1998,-500
02-04-1998,-500
03-04-1998,-500
06-04-1998,-500
07-04-1998,-500
08-04-1998,-500
13-04-1998,-500
14-04-1998,-500
15-04-1998,-500
16-04-1998,-500
17-04-1998,-500
20-04-1998,-500
21-04-1998,-500
22-04-1998,-500
23-04-1998,-500
24-04-1998,-500
27-04-1998,-500
28-04-1998,-500
30-04-1998,-500
01-05-1998,-500
04-05-1998,-500
05-05-1998,-500
06-05-1998,-500
07-05-1998,-500
08-05-1998,-500
11-05-1998,-500
12-05-1998,-500
13-05-1998,-500
14-05-1998,-500
15-05-1998,-500
19-05-1998,-500
20-05-1998,-500
21-05-1998,-500
22-05-1998,-500
25-05-1998,-500
26-05-1998,-500
27-05-1998,-500
28-05-1998,-500
29-05-1998,-500
01-06-1998,-500
02-06-1998,-500
03-06-1998,-500
04-06-1998,-500
05-06-1998,-500
08-06-1998,-500
09-06-1998,-500
10-06-1998,-500
11-06-1998,-500
12-06-1998,-500
15-06-1998,-500
16-06-1998,-500
17-06-1998,-500
18-06-1998,-500
19-06-1998,-500
22-06-1998,-500
23-06-1998,-500
24-06-1998,-500
25-06-1998,-500
26-06-1998,-500
29-06-1998,-500
30-06-1998,-500
02-07-1998,-500
03-07-1998,-500
06-07-1998,-500
07-07-1998,-500
08-07-1998,-500
09-07-1998,-500
10-07-1998,-500
13-07-1998,-500
14-07-1998,-500
15-07-1998,-500
16-07-1998,-500
17-07-1998,-500
20-07-1998,-500
21-07-1998,-500
22-07-1998,-500
23-07-1998,-500
24-07-1998,-500
27-07-1998,-500
28-07-1998,-500
29-07-1998,-500
30-07-1998,-500
31-07-1998,-500
04-08-1998,-500
05-08-1998,-500
06-08-1998,-500
07-08-1998,-500
10-08-1998,-500
11-08-1998,-500
12-08-1998,-500
13-08-1998,-500
14-08-1998,-500
17-08-1998,-500
18-08-1998,-500
19-08-1998,-500
20-08-1998,-500
21-08-1998,-500
24-08-1998,-500
25-08-1998,-500
26-08-1998,-500
27-08-1998,-500
28-08-1998,-500
31-08-1998,-500
01-09-1998,-500
02-09-1998,-500
03-09-1998,-500
04-09-1998,-500
08-09-1998,-500
09-09-1998,-500
10-09-1998,-500
11-09-1998,-500
14-09-1998,-500
15-09-1998,-500
16-09-1998,-500
17-09-1998,-500
18-09-1998,-500
21-09-1998,-500
22-09-1998,-500
23-09-1998,-500
24-09-1998,-500
25-09-1998,-500
28-09-1998,-500
29-09-1998,-500
30-09-1998,-500
01-10-1998,-500
02-10-1998,-500
05-10-1998,-500
06-10-1998,-500
07-10-1998,-500
08-10-1998,-500
09-10-1998,-500
13-10-1998,-500
14-10-1998,-500
15-10-1998,-500
16-10-1998,-500
19-10-1998,-500
20-10-1998,-500
21-10-1998,-500
22-10-1998,-500
23-10-1998,-500
26-10-1998,-500
27-10-1998,-500
28-10-1998,-500
29-10-1998,-500
30-10-1998,-500
02-11-1998,-500
03-11-1998,-500
04-11-1998,-500
05-11-1998,-500
06-11-1998,-500
09-11-1998,-500
10-11-1998,-500
12-11-1998,-500
13-11-1998,-500
16-11-1998,-500
17-11-1998,-500
18-11-1998,-500
19-11-1998,-500
20-11-1998,-500
23-11-1998,-500
24-11-1998,-500
25-11-1998,-500
26-11-1998,-500
27-11-1998,-500
30-11-1998,-500
01-12-1998,-500
02-12-1998,-500
03-12-1998,-500
04-12-1998,-500
07-12-1998,-500
08-12-1998,-500
09-12-1998,-500
10-12-1998,-500
11-12-1998,-500
14-12-1998,-500
15-12-1998,-500
16-12-1998,-500
17-12-1998,-500
18-12-1998,-500
21-12-1998,-500
22-12-1998,-500
23-12-1998,-500
24-12-1998,-500
29-12-1998,-500
30-12-1998,-500
31-12-1998,-500
04-01-1999,-500
05-01-1999,-500
06-01-1999,-500
07-01-1999,-500
08-01-1999,-500
11-01-1999,-500
12-01-1999,-500
13-01-1999,-500
14-01-1999,-500
15-01-1999,-500
18-01-1999,-500
19-01-1999,-500
20-01-1999,-500
21-01-1999,-500
22-01-1999,-500
25-01-1999,-500
26-01-1999,-500
27-01-1999,-500
28-01-1999,-500
29-01-1999,-500
01-02-1999,-500
02-02-1999,-500
03-02-1999,-500
04-02-1999,-500
05-02-1999,-500
08-02-1999,-500
09-02-1999,-500
10-02-1999,-500
11-02-1999,-500
12-02-1999,-500
15-02-1999,-500
16-02-1999,-500
17-02-1999,-500
18-02-1999,-500
19-02-1999,-500
22-02-1999,-500
23-02-1999,-500
24-02-1999,-500
25-02-1999,-500
26-02-1999,-500
01-03-1999,-500
02-03-1999,-500
03-03-1999,-500
04-03-1999,-500
05-03-1999,-500
08-03-1999,-500
09-03-1999,-500
10-03-1999,-500
11-03-1999,-500
12-03-1999,-500
15-03-1999,-500
16-03-1999,-500
17-03-1999,-500
18-03-1999,-500
19-03-1999,-500
22-03-1999,-500
23-03-1999,-500
24-03-1999,-500
25-03-1999,-500
26-03-1999,-500
29-03-1999,-500
30-03-1999,-500
31-03-1999,-500
01-04-1999,-500
05-04-1999,-500
06-04-1999,-500
07-04-1999,-500
08-04-1999,-500
09-04-1999,-500
12-04-1999,-500
13-04-1999,-500
14-04-1999,-500
15-04-1999,-500
16-04-1999,-500
19-04-1999,-500
20-04-1999,-500
21-04-1999,-500
22-04-1999,-500
23-04-1999,-500
26-04-1999,-500
27-04-1999,-500
28-04-1999,-500
29-04-1999,-500
30-04-1999,-500
03-05-1999,-500
04-05-1999,-500
05-05-1999,-500
06-05-1999,-500
07-05-1999,-500
10-05-1999,-500
11-05-1999,-500
12-05-1999,-500
13-05-1999,-500
14-05-1999,-500
17-05-1999,-500
18-05-1999,-500
19-05-1999,-500
20-05-1999,-500
21-05-1999,-500
25-05-1999,-500
26-05-1999,-500
27-05-1999,-500
28-05-1999,-500
31-05-1999,-500
01-06-1999,-500
02-06-1999,-500
03-06-1999,-500
04-06-1999,-500
07-06-1999,-500
08-06-1999,-500
09-06-1999,-500
10-06-1999,-500
11-06-1999,-500
14-06-1999,-500
15-06-1999,-500
16-06-1999,-500
17-06-1999,-500
18-06-1999,-500
21-06-1999,-500
22-06-1999,-500
23-06-1999,-500
24-06-1999,-500
25-06-1999,-500
28-06-1999,-500
29-06-1999,-500
30-06-1999,-500
02-07-1999,-500
05-07-1999,-500
06-07-1999,-500
07-07-1999,-500
08-07-1999,-500
09-07-1999,-500
12-07-1999,-500
13-07-1999,-500
14-07-1999,-500
15-07-1999,-500
16-07-1999,-500
19-07-1999,-500
20-07-1999,-500
21-07-1999,-500
22-07-1999,-500
23-07-1999,-500
26-07-1999,-500
27-07-1999,-500
28-07-1999,-500
29-07-1999,-500
30-07-1999,-500
03-08-1999,-500
04-08-1999,-500
05-08-1999,-500
06-08-1999,-500
09-08-1999,-500
10-08-1999,-500
11-08-1999,-500
12-08-1999,-500
13-08-1999,-500
16-08-1999,-500
17-08-1999,-500
18-08-1999,-500
19-08-1999,-500
20-08-1999,-500
23-08-1999,-500
24-08-1999,-500
25-08-1999,-500
26-08-1999,-500
27-08-1999,-500
30-08-1999,-500
31-08-1999,-500
01-09-1999,-500
02-09-1999,-500
03-09-1999,-500
07-09-1999,-500
08-09-1999,-500
09-09-1999,-500
10-09-1999,-500
13-09-1999,-500
14-09-1999,-500
15-09-1999,-500
16-09-1999,-500
17-09-1999,-500
20-09-1999,-500
21-09-1999,-500
22-09-1999,-500
23-09-1999,-500
24-09-1999,-500
27-09-1999,-500
28-09-1999,-500
29-09-1999,-500
30-09-1999,-500
01-10-1999,-500
04-10-1999,-500
05-10-1999,-500
06-10-1999,-500
07-10-1999,-500
08-10-1999,-500
12-10-1999,-500
13-10-1999,-500
14-10-1999,-500
15-10-1999,-500
18-10-1999,-500
19-10-1999,-500
20-10-1999,-500
21-10-1999,-500
22-10-1999,-500
25-10-1999,-500
26-10-1999,-500
27-10-1999,-500
28-10-1999,-500
29-10-1999,-500
01-11-1999,-500
02-11-1999,-500
03-11-1999,-500
04-11-1999,-500
05-11-1999,-500
08-11-1999,-500
09-11-1999,-500
10-11-1999,-500
12-11-1999,-500
15-11-1999,-500
16-11-1999,-500
17-11-1999,-500
18-11-1999,-500
19-11-1999,-500
22-11-1999,-500
23-11-1999,-500
24-11-1999,-500
25-11-1999,-500
26-11-1999,-500
29-11-1999,-500
30-11-1999,-500
01-12-1999,-500
02-12-1999,-500
03-12-1999,-500
06-12-1999,-500
07-12-1999,-500
08-12-1999,-500
09-12-1999,-500
10-12-1999,-500
13-12-1999,-500
14-12-1999,-500
15-12-1999,-500
16-12-1999,-500
17-12-1999,-500
20-12-1999,-500
21-12-1999,-500
22-12-1999,-500
23-12-1999,-500
24-12-1999,-500
29-12-1999,-500
30-12-1999,-500
31-12-1999,-500
04-01-2000,-500
05-01-2000,-500
06-01-2000,-500
07-01-2000,-500
10-01-2000,-500
11-01-2000,-500
12-01-2000,-500
13-01-2000,-500
14-01-2000,-500
17-01-2000,-500
18-01-2000,-500
19-01-2000,-500
20-01-2000,-500
21-01-2000,-500
24-01-2000,-500
25-01-2000,-500
26-01-2000,-500
27-01-2000,-500
28-01-2000,-500
31-01-2000,-500
01-02-2000,-500
02-02-2000,-500
03-02-2000,-500
04-02-2000,-500
07-02-2000,-500
08-02-2000,-500
09-02-2000,-500
10-02-2000,-500
11-02-2000,-500
14-02-2000,-500
15-02-2000,-500
16-02-2000,-500
17-02-2000,-500
18-02-2000,-500
21-02-2000,-500
22-02-2000,-500
23-02-2000,-500
24-02-2000,-500
25-02-2000,-500
28-02-2000,-500
29-02-2000,-500
01-03-2000,-500
02-03-2000,-500
03-03-2000,-500
06-03-2000,-500
07-03-2000,-500
08-03-2000,-500
09-03-2000,-500
10-03-2000,-500
13-03-2000,-500
14-03-2000,-500
15-03-2000,-500
16-03-2000,-500
17-03-2000,-500
20-03-2000,-500
21-03-2000,-500
22-03-2000,-500
23-03-2000,-500
24-03-2000,-500
27-03-2000,-500
28-03-2000,-500
29-03-2000,-500
30-03-2000,-500
31-03-2000,-500
03-04-2000,-500
04-04-2000,-500
05-04-2000,-500
06-04-2000,-500
07-04-2000,-500
10-04-2000,-500
11-04-2000,-500
12-04-2000,-500
13-04-2000,-500
14-04-2000,-500
17-04-2000,-500
18-04-2000,-500
19-04-2000,-500
20-04-2000,-500
24-04-2000,-500
25-04-2000,-500
26-04-2000,-500
27-04-2000,-500
28-04-2000,-500
01-05-2000,-500
02-05-2000,-500
03-05-2000,-500
04-05-2000,-500
05-05-2000,-500
08-05-2000,-500
09-05-2000,-500
10-05-2000,-500
11-05-2000,-500
12-05-2000,-500
15-05-2000,-500
16-05-2000,-500
17-05-2000,-500
18-05-2000,-500
19-05-2000,-500
23-05-2000,-500
24-05-2000,-500
25-05-2000,-500
26-05-2000,-500
29-05-2000,-500
30-05-2000,-500
31-05-2000,-500
01-06-2000,-500
02-06-2000,-500
05-06-2000,-500
06-06-2000,-500
07-06-2000,-500
08-06-2000,-500
09-06-2000,-500
12-06-2000,-500
13-06-2000,-500
14-06-2000,-500
15-06-2000,-500
16-06-2000,-500
19-06-2000,-500
20-06-2000,-500
21-06-2000,-500
22-06-2000,-500
23-06-2000,-500
26-06-2000,-500
27-06-2000,-500
28-06-2000,-500
29-06-2000,-500
30-06-2000,-500
04-07-2000,-500
05-07-2000,-500
06-07-2000,-500
07-07-2000,-500
10-07-2000,-500
11-07-2000,-500
12-07-2000,-500
13-07-2000,-500
14-07-2000,-500
17-07-2000,-500
18-07-2000,-500
19-07-2000,-500
20-07-2000,-500
21-07-2000,-500
24-07-2000,-500
25-07-2000,-500
26-07-2000,-500
27-07-2000,-500
28-07-2000,-500
31-07-2000,-500
01-08-2000,-500
02-08-2000,-500
03-08-2000,-500
04-08-2000,-500
08-08-2000,-500
09-08-2000,-500
10-08-2000,-500
11-08-2000,-500
14-08-2000,-500
15-08-2000,-500
16-08-2000,-500
17-08-2000,-500
18-08-2000,-500
21-08-2000,-500
22-08-2000,-500
23-08-2000,-500
24-08-2000,-500
25-08-2000,-500
28-08-2000,-500
29-08-2000,-500
30-08-2000,-500
31-08-2000,-500
01-09-2000,-500
05-09-2000,-500
06-09-2000,-500
07-09-2000,-500
08-09-2000,-500
11-09-2000,-500
12-09-2000,-500
13-09-2000,-500
14-09-2000,-500
15-09-2000,-500
18-09-2000,-500
19-09-2000,-500
20-09-2000,-500
21-09-2000,-500
22-09-2000,-500
25-09-2000,-500
26-09-2000,-500
27-09-2000,-500
28-09-2000,-500
29-09-2000,-500
02-10-2000,-500
03-10-2000,-500
04-10-2000,-500
05-10-2000,-500
06-10-2000,-500
10-10-2000,-500
11-10-2000,-500
12-10-2000,-500
13-10-2000,-500
16-10-2000,-500
17-10-2000,-500
18-10-2000,-500
19-10-2000,-500
20-10-2000,-500
23-10-2000,-500
24-10-2000,-500
25-10-2000,-500
26-10-2000,-500
27-10-2000,-500
30-10-2000,-500
31-10-2000,-500
01-11-2000,-500
02-11-2000,-500
03-11-2000,-500
06-11-2000,-500
07-11-2000,-500
08-11-2000,-500
09-11-2000,-500
10-11-2000,-500
14-11-2000,-500
15-11-2000,-500
16-11-2000,-500
17-11-2000,-500
20-11-2000,-500
21-11-2000,-500
22-11-2000,-500
23-11-2000,-500
24-11-2000,-500
27-11-2000,-500
28-11-2000,-500
29-11-2000,-500
30-11-2000,-500
01-12-2000,-500
04-12-2000,-500
05-12-2000,-500
06-12-2000,-500
07-12-2000,-500
08-12-2000,-500
11-12-2000,-500
12-12-2000,-500
13-12-2000,-500
14-12-2000,-500
15-12-2000,-500
18-12-2000,-500
19-12-2000,-500
20-12-2000,-500
21-12-2000,-500
22-12-2000,-500
27-12-2000,-500
28-12-2000,-500
29-12-2000,-500
02-01-2001,-500
03-01-2001,-500
04-01-2001,-500
05-01-2001,-500
08-01-2001,-500
09-01-2001,-500
10-01-2001,-500
11-01-2001,-500
12-01-2001,-500
15-01-2001,-500
16-01-2001,-500
17-01-2001,-500
18-01-2001,-500
19-01-2001,-500
22-01-2001,-500
23-01-2001,-500
24-01-2001,-500
25-01-2001,-500
26-01-2001,-500
29-01-2001,-500
30-01-2001,-500
31-01-2001,-500
01-02-2001,-500
02-02-2001,-500
05-02-2001,-500
06-02-2001,-500
07-02-2001,-500
08-02-2001,-500
09-02-2001,-500
12-02-2001,-500
13-02-2001,-500
14-02-2001,-500
15-02-2001,-500
16-02-2001,-500
19-02-2001,-500
20-02-2001,-500
21-02-2001,-500
22-02-2001,-500
23-02-2001,-500
26-02-2001,-500
27-02-2001,-500
28-02-2001,-500
01-03-2001,-500
02-03-2001,-500
05-03-2001,-500
06-03-2001,-500
07-03-2001,-500
08-03-2001,-500
09-03-2001,-500
12-03-2001,-500
13-03-2001,-500
14-03-2001,-500
15-03-2001,-500
16-03-2001,-500
19-03-2001,-500
20-03-2001,-500
21-03-2001,-500
22-03-2001,-500
23-03-2001,-500
26-03-2001,-500
27-03-2001,-500
28-03-2001,-500
29-03-2001,-500
30-03-2001,-500
02-04-2001,-500
03-04-2001,-500
04-04-2001,-500
05-04-2001,-500
06-04-2001,-500
09-04-2001,-500
10-04-2001,-500
11-04-2001,-500
12-04-2001,-500
16-04-2001,-500
17-04-2001,-500
18-04-2001,-500
19-04-2001,-500
20-04-2001,-500
23-04-2001,-500
24-04-2001,-500
25-04-2001,-500
26-04-2001,-500
27-04-2001,-500
30-04-2001,-500
01-05-2001,-500
02-05-2001,-500
03-05-2001,-500
04-05-2001,-500
07-05-2001,-500
08-05-2001,-500
09-05-2001,-500
10-05-2001,-500
11-05-2001,-500
14-05-2001,-500
15-05-2001,-500
16-05-2001,-500
17-05-2001,-500
18-05-2001,-500
22-05-2001,-500
23-05-2001,-500
24-05-2001,-500
25-05-2001,-500
28-05-2001,-500
29-05-2001,-500
30-05-2001,-500
31-05-2001,-500
01-06-2001,-500
04-06-2001,-500
05-06-2001,-500
06-06-2001,-500
07-06-2001,-500
08-06-2001,-500
11-06-2001,-500
12-06-2001,-500
13-06-2001,-500
14-06-2001,-500
15-06-2001,-500
18-06-2001,-500
19-06-2001,-500
20-06-2001,-500
21-06-2001,-500
22-06-2001,-500
25-06-2001,-500
26-06-2001,-500
27-06-2001,-500
28-06-2001,-500
29-06-2001,-500
03-07-2001,-500
04-07-2001,-500
05-07-2001,-500
06-07-2001,-500
09-07-2001,-500
10-07-2001,-500
11-07-2001,-500
12-07-2001,-500
13-07-2001,-500
16-07-2001,-500
17-07-2001,-500
18-07-2001,-500
19-07-2001,-500
20-07-2001,-500
23-07-2001,-500
24-07-2001,-500
25-07-2001,-500
26-07-2001,-500
27-07-2001,-500
30-07-2001,-500
31-07-2001,-500
01-08-2001,-500
02-08-2001,-500
03-08-2001,-500
07-08-2001,-500
08-08-2001,-500
09-08-2001,-500
10-08-2001,-500
13-08-2001,-500
14-08-2001,-500
15-08-2001,-500
16-08-2001,-500
17-08-2001,-500
20-08-2001,-500
21-08-2001,-500
22-08-2001,-500
23-08-2001,-500
24-08-2001,-500
27-08-2001,-500
28-08-2001,-500
29-08-2001,-500
30-08-2001,-500
31-08-2001,-500
04-09-2001,-500
05-09-2001,-500
06-09-2001,-500
07-09-2001,-500
10-09-2001,-500
11-09-2001,-500
12-09-2001,-500
13-09-2001,-500
14-09-2001,-500
17-09-2001,-500
18-09-2001,-500
19-09-2001,-500
20-09-2001,-500
21-09-2001,-500
24-09-2001,-500
25-09-2001,-500
26-09-2001,-500
27-09-2001,-500
28-09-2001,-500
01-10-2001,-500
02-10-2001,-500
03-10-2001,-500
04-10-2001,-500
05-10-2001,-500
09-10-2001,-500
10-10-2001,-500
11-10-2001,-500
12-10-2001,-500
15-10-2001,-500
16-10-2001,-500
17-10-2001,-500
18-10-2001,-500
19-10-2001,-500
22-10-2001,-500
23-10-2001,-500
24-10-2001,-500
25-10-2001,-500
26-10-2001,-500
29-10-2001,-500
30-10-2001,-500
31-10-2001,-500
01-11-2001,-500
02-11-2001,-500
05-11-2001,-500
06-11-2001,-500
07-11-2001,-500
08-11-2001,-500
09-11-2001,-500
13-11-2001,-500
14-11-2001,-500
15-11-2001,-500
16-11-2001,-500
19-11-2001,-500
20-11-2001,-500
21-11-2001,-500
22-11-2001,-500
23-11-2001,-500
26-11-2001,-500
27-11-2001,-500
28-11-2001,-500
29-11-2001,-500
30-11-2001,-500
03-12-2001,-500
04-12-2001,-500
05-12-2001,-500
06-12-2001,-500
07-12-2001,-500
10-12-2001,-500
11-12-2001,-500
12-12-2001,-500
13-12-2001,-500
14-12-2001,-500
17-12-2001,-500
18-12-2001,-500
19-12-2001,-500
20-12-2001,-500
21-12-2001,-500
24-12-2001,-500
27-12-2001,-500
28-12-2001,-500
31-12-2001,-500
02-01-2002,-500
03-01-2002,-500
04-01-2002,-500
07-01-2002,-500
08-01-2002,-500
09-01-2002,-500
10-01-2002,-500
11-01-2002,-500
14-01-2002,-500
15-01-2002,-500
16-01-2002,-500
17-01-2002,-500
18-01-2002,-500
21-01-2002,-500
22-01-2002,-500
23-01-2002,-500
24-01-2002,-500
25-01-2002,-500
28-01-2002,-500
29-01-2002,-500
30-01-2002,-500
31-01-2002,-500
01-02-2002,-500
04-02-2002,-500
05-02-2002,-500
06-02-2002,-500
07-02-2002,-500
08-02-2002,-500
11-02-2002,-500
12-02-2002,-500
13-02-2002,-500
14-02-2002,-500
15-02-2002,-500
18-02-2002,-500
19-02-2002,-500
20-02-2002,-500
21-02-2002,-500
22-02-2002,-500
25-02-2002,-500
26-02-2002,-500
27-02-2002,-500
28-02-2002,-500
01-03-2002,-500
04-03-2002,-500
05-03-2002,-500
06-03-2002,-500
07-03-2002,-500
08-03-2002,-500
11-03-2002,-500
12-03-2002,-500
13-03-2002,-500
14-03-2002,-500
15-03-2002,-500
18-03-2002,-500
19-03-2002,-500
20-03-2002,-500
21-03-2002,-500
22-03-2002,-500
25-03-2002,-500
26-03-2002,-500
27-03-2002,-500
28-03-2002,-500
01-04-2002,-500
02-04-2002,-500
03-04-2002,-500
04-04-2002,-500
05-04-2002,-500
08-04-2002,-500
09-04-2002,-500
10-04-2002,-500
11-04-2002,-500
12-04-2002,-500
15-04-2002,-500
16-04-2002,-500
17-04-2002,-500
18-04-2002,-500
19-04-2002,-500
22-04-2002,-500
23-04-2002,-500
24-04-2002,-500
25-04-2002,-500
26-04-2002,-500
29-04-2002,-500
30-04-2002,-500
01-05-2002,-500
02-05-2002,-500
03-05-2002,-500
06-05-2002,-500
07-05-2002,-500
08-05-2002,-500
09-05-2002,-500
10-05-2002,-500
13-05-2002,-500
14-05-2002,-500
15-05-2002,-500
16-05-2002,-500
17-05-2002,-500
21-05-2002,-500
22-05-2002,-500
23-05-2002,-500
24-05-2002,-500
27-05-2002,-500
28-05-2002,-500
29-05-2002,-500
30-05-2002,-500
31-05-2002,-500
03-06-2002,-500
04-06-2002,-500
05-06-2002,-500
06-06-2002,-500
07-06-2002,-500
10-06-2002,-500
11-06-2002,-500
12-06-2002,-500
13-06-2002,-500
14-06-2002,-500
17-06-2002,-500
18-06-2002,-500
19-06-2002,-500
20-06-2002,-500
21-06-2002,-500
24-06-2002,-500
25-06-2002,-500
26-06-2002,-500
27-06-2002,-500
28-06-2002,-500
02-07-2002,-500
03-07-2002,-500
04-07-2002,-500
05-07-2002,-500
08-07-2002,-500
09-07-2002,-500
10-07-2002,-500
11-07-2002,-500
12-07-2002,-500
15-07-2002,-500
16-07-2002,-500
17-07-2002,-500
18-07-2002,-500
19-07-2002,-500
22-07-2002,-500
23-07-2002,-500
24-07-2002,-500
25-07-2002,-500
26-07-2002,-500
29-07-2002,-500
30-07-2002,-500
31-07-2002,-500
01-08-2002,-500
02-08-2002,-500
06-08-2002,-500
07-08-2002,-500
08-08-2002,-500
09-08-2002,-500
12-08-2002,-500
13-08-2002,-500
14-08-2002,-500
15-08-2002,-500
16-08-2002,-500
19-08-2002,-500
20-08-2002,-500
21-08-2002,-500
22-08-2002,-500
23-08-2002,-500
26-08-2002,-500
27-08-2002,-500
28-08-2002,-500
29-08-2002,-500
30-08-2002,-500
03-09-2002,-500
04-09-2002,-500
05-09-2002,-500
06-09-2002,-500
09-09-2002,-500
10-09-2002,-500
11-09-2002,-500
12-09-2002,-500
13-09-2002,-500
16-09-2002,-500
17-09-2002,-500
18-09-2002,-500
19-09-2002,-500
20-09-2002,-500
23-09-2002,-500
24-09-2002,-500
25-09-2002,-500
26-09-2002,-500
27-09-2002,-500
30-09-2002,-500
01-10-2002,-500
02-10-2002,-500
03-10-2002,-500
04-10-2002,-500
07-10-2002,-500
08-10-2002,-500
09-10-2002,-500
10-10-2002,-500
11-10-2002,-500
15-10-2002,-500
16-10-2002,-500
17-10-2002,-500
18-10-2002,-500
21-10-2002,-500
22-10-2002,-500
23-10-2002,-500
24-10-2002,-500
25-10-2002,-500
28-10-2002,-500
29-10-2002,-500
30-10-2002,-500
31-10-2002,-500
01-11-2002,-500
04-11-2002,-500
05-11-2002,-500
06-11-2002,-500
07-11-2002,-500
08-11-2002,-500
12-11-2002,-500
13-11-2002,-500
14-11-2002,-500
15-11-2002,-500
18-11-2002,-500
19-11-2002,-500
20-11-2002,-500
21-11-2002,-500
22-11-2002,-500
25-11-2002,-500
26-11-2002,-500
27-11-2002,-500
28-11-2002,-500
29-11-2002,-500
02-12-2002,-500
03-12-2002,-500
04-12-2002,-500
05-12-2002,-500
06-12-2002,-500
09-12-2002,-500
10-12-2002,-500
11-12-2002,-500
12-12-2002,-500
13-12-2002,-500
16-12-2002,-500
17-12-2002,-500
18-12-2002,-500
19-12-2002,-500
20-12-2002,-500
23-12-2002,-500
24-12-2002,-500
27-12-2002,-500
30-12-2002,-500
31-12-2002,-500
02-01-2003,-500
03-01-2003,-500
06-01-2003,-500
07-01-2003,-500
08-01-2003,-500
09-01-2003,-500
10-01-2003,-500
13-01-2003,-500
14-01-2003,-500
15-01-2003,-500
16-01-2003,-500
17-01-2003,-500
20-01-2003,-500
21-01-2003,-500
22-01-2003,-500
23-01-2003,-500
24-01-2003,-500
27-01-2003,-500
28-01-2003,-500
29-01-2003,-500
30-01-2003,-500
31-01-2003,-500
03-02-2003,-500
04-02-2003,-500
05-02-2003,-500
06-02-2003,-500
07-02-2003,-500
10-02-2003,-500
11-02-2003,-500
12-02-2003,-500
13-02-2003,-500
14-02-2003,-500
17-02-2003,-500
18-02-2003,-500
19-02-2003,-500
20-02-2003,-500
21-02-2003,-500
24-02-2003,-500
25-02-2003,-500
26-02-2003,-500
27-02-2003,-500
28-02-2003,-500
03-03-2003,-500
04-03-2003,-500
05-03-2003,-500
06-03-2003,-500
07-03-2003,-500
10-03-2003,-500
11-03-2003,-500
12-03-2003,-500
13-03-2003,-500
14-03-2003,-500
17-03-2003,-500
18-03-2003,-500
19-03-2003,-500
20-03-2003,-500
21-03-2003,-500
24-03-2003,-500
25-03-2003,-500
26-03-2003,-500
27-03-2003,-500
28-03-2003,-500
31-03-2003,-500
01-04-2003,-500
02-04-2003,-500
03-04-2003,-500
04-04-2003,-500
07-04-2003,-500
08-04-2003,-500
09-04-2003,-500
10-04-2003,-500
11-04-2003,-500
14-04-2003,-500
15-04-2003,-500
16-04-2003,-500
17-04-2003,-500
21-04-2003,-500
22-04-2003,-500
23-04-2003,-500
24-04-2003,-500
25-04-2003,-500
28-04-2003,-500
29-04-2003,-500
30-04-2003,-500
01-05-2003,-500
02-05-2003,-500
05-05-2003,-500
06-05-2003,-500
07-05-2003,-500
08-05-2003,-500
09-05-2003,-500
12-05-2003,-500
13-05-2003,-500
14-05-2003,-500
15-05-2003,-500
16-05-2003,-500
20-05-2003,-500
21-05-2003,-500
22-05-2003,-500
23-05-2003,-500
26-05-2003,-500
27-05-2003,-500
28-05-2003,-500
29-05-2003,-500
30-05-2003,-500
02-06-2003,-500
03-06-2003,-500
04-06-2003,-500
05-06-2003,-500
06-06-2003,-500
09-06-2003,-500
10-06-2003,-500
11-06-2003,-500
12-06-2003,-500
13-06-2003,-500
16-06-2003,-500
17-06-2003,-500
18-06-2003,-500
19-06-2003,-500
20-06-2003,-500
23-06-2003,-500
24-06-2003,-500
25-06-2003,-500
26-06-2003,-500
27-06-2003,-500
30-06-2003,-500
02-07-2003,-500
03-07-2003,-500
04-07-2003,-500
07-07-2003,-500
08-07-2003,-500
09-07-2003,-500
10-07-2003,-500
11-07-2003,-500
14-07-2003,-500
15-07-2003,-500
16-07-2003,-500
17-07-2003,-500
18-07-2003,-500
21-07-2003,-500
22-07-2003,-500
23-07-2003,-500
24-07-2003,-500
25-07-2003,-500
28-07-2003,-500
29-07-2003,-500
30-07-2003,-500
31-07-2003,-500
01-08-2003,-500
05-08-2003,-500
06-08-2003,-500
07-08-2003,-500
08-08-2003,-500
11-08-2003,-500
12-08-2003,-500
13-08-2003,-500
14-08-2003,-500
15-08-2003,-500
18-08-2003,-500
19-08-2003,-500
20-08-2003,-500
21-08-2003,-500
22-08-2003,-500
25-08-2003,-500
26-08-2003,-500
27-08-2003,-500
28-08-2003,-500
29-08-2003,-500
02-09-2003,-500
03-09-2003,-500
04-09-2003,-500
05-09-2003,-500
08-09-2003,-500
09-09-2003,-500
10-09-2003,-500
11-09-2003,-500
12-09-2003,-500
15-09-2003,-500
16-09-2003,-500
17-09-2003,-500
18-09-2003,-500
19-09-2003,-500
22-09-2003,-500
23-09-2003,-500
24-09-2003,-500
25-09-2003,-500
26-09-2003,-500
29-09-2003,-500
30-09-2003,-500
01-10-2003,-500
02-10-2003,-500
03-10-2003,-500
06-10-2003,-500
07-10-2003,-500
08-10-2003,-500
09-10-2003,-500
10-10-2003,-500
14-10-2003,-500
15-10-2003,-500
16-10-2003,-500
17-10-2003,-500
20-10-2003,-500
21-10-2003,-500
22-10-2003,-500
23-10-2003,-500
24-10-2003,-500
27-10-2003,-500
28-10-2003,-500
29-10-2003,-500
30-10-2003,-500
31-10-2003,-500
03-11-2003,-500
04-11-2003,-500
05-11-2003,-500
06-11-2003,-500
07-11-2003,-500
10-11-2003,-500
12-11-2003,-500
13-11-2003,-500
14-11-2003,-500
17-11-2003,-500
18-11-2003,-500
19-11-2003,-500
20-11-2003,-500
21-11-2003,-500
24-11-2003,-500
25-11-2003,-500
26-11-2003,-500
27-11-2003,-500
28-11-2003,-500
01-12-2003,-500
02-12-2003,-500
03-12-2003,-500
04-12-2003,-500
05-12-2003,-500
08-12-2003,-500
09-12-2003,-500
10-12-2003,-500
11-12-2003,-500
12-12-2003,-500
15-12-2003,-500
16-12-2003,-500
17-12-2003,-500
18-12-2003,-500
19-12-2003,-500
22-12-2003,-500
23-12-2003,-500
24-12-2003,-500
29-12-2003,-500
30-12-2003,-500
31-12-2003,-500
02-01-2004,-500
05-01-2004,-500
06-01-2004,-500
07-01-2004,-500
08-01-2004,-500
09-01-2004,-500
12-01-2004,-500
13-01-2004,-500
14-01-2004,-500
15-01-2004,-500
16-01-2004,-500
19-01-2004,-500
20-01-2004,-500
21-01-2004,-500
22-01-2004,-500
23-01-2004,-500
26-01-2004,-500
27-01-2004,-500
28-01-2004,-500
29-01-2004,-500
30-01-2004,-500
02-02-2004,-500
03-02-2004,-500
04-02-2004,-500
05-02-2004,-500
06-02-2004,-500
09-02-2004,-500
10-02-2004,-500
11-02-2004,-500
12-02-2004,-500
13-02-2004,-500
16-02-2004,-500
17-02-2004,-500
18-02-2004,-500
19-02-2004,-500
20-02-2004,-500
23-02-2004,-500
24-02-2004,-500
25-02-2004,-500
26-02-2004,-500
27-02-2004,-500
01-03-2004,-500
02-03-2004,-500
03-03-2004,-500
04-03-2004,-500
05-03-2004,-500
08-03-2004,-500
09-03-2004,-500
10-03-2004,-500
11-03-2004,-500
12-03-2004,-500
15-03-2004,-500
16-03-2004,-500
17-03-2004,-500
18-03-2004,-500
19-03-2004,-500
22-03-2004,-500
23-03-2004,-500
24-03-2004,-500
25-03-2004,-500
26-03-2004,-500
29-03-2004,-500
30-03-2004,-500
31-03-2004,-500
01-04-2004,-500
02-04-2004,-500
05-04-2004,-500
06-04-2004,-500
07-04-2004,-500
08-04-2004,-500
12-04-2004,-500
13-04-2004,-500
14-04-2004,-500
15-04-2004,-500
16-04-2004,-500
19-04-2004,-500
20-04-2004,-500
21-04-2004,-500
22-04-2004,-500
23-04-2004,-500
26-04-2004,-500
27-04-2004,-500
28-04-2004,-500
29-04-2004,-500
30-04-2004,-500
03-05-2004,-500
04-05-2004,-500
05-05-2004,-500
06-05-2004,-500
07-05-2004,-500
10-05-2004,-500
11-05-2004,-500
12-05-2004,-500
13-05-2004,-500
14-05-2004,-500
17-05-2004,-500
18-05-2004,-500
19-05-2004,-500
20-05-2004,-500
21-05-2004,-500
25-05-2004,-500
26-05-2004,-500
27-05-2004,-500
28-05-2004,-500
31-05-2004,-500
01-06-2004,-500
02-06-2004,-500
03-06-2004,-500
04-06-2004,-500
07-06-2004,-500
08-06-2004,-500
09-06-2004,-500
10-06-2004,-500
11-06-2004,-500
14-06-2004,-500
15-06-2004,-500
16-06-2004,-500
17-06-2004,-500
18-06-2004,-500
21-06-2004,-500
22-06-2004,-500
23-06-2004,-500
24-06-2004,-500
25-06-2004,-500
28-06-2004,-500
29-06-2004,-500
30-06-2004,-500
02-07-2004,-500
05-07-2004,-500
06-07-2004,-500
07-07-2004,-500
08-07-2004,-500
09-07-2004,-500
12-07-2004,-500
13-07-2004,-500
14-07-2004,-500
15-07-2004,-500
16-07-2004,-500
19-07-2004,-500
20-07-2004,-500
21-07-2004,-500
22-07-2004,-500
23-07-2004,-500
26-07-2004,-500
27-07-2004,-500
28-07-2004,-500
29-07-2004,-500
30-07-2004,-500
03-08-2004,-500
04-08-2004,-500
05-08-2004,-500
06-08-2004,-500
09-08-2004,-500
10-08-2004,-500
11-08-2004,-500
12-08-2004,-500
13-08-2004,-500
16-08-2004,-500
17-08-2004,-500
18-08-2004,-500
19-08-2004,-500
20-08-2004,-500
23-08-2004,-500
24-08-2004,-500
25-08-2004,-500
26-08-2004,-500
27-08-2004,-500
30-08-2004,-500
31-08-2004,-500
01-09-2004,-500
02-09-2004,-500
03-09-2004,-500
07-09-2004,-500
08-09-2004,-500
09-09-2004,-500
10-09-2004,-500
13-09-2004,-500
14-09-2004,-500
15-09-2004,-500
16-09-2004,-500
17-09-2004,-500
20-09-2004,-500
21-09-2004,-500
22-09-2004,-500
23-09-2004,-500
24-09-2004,-500
27-09-2004,-500
28-09-2004,-500
29-09-2004,-500
30-09-2004,-500
01-10-2004,-500
04-10-2004,-500
05-10-2004,-500
06-10-2004,-500
07-10-2004,-500
08-10-2004,-500
12-10-2004,-500
13-10-2004,-500
14-10-2004,-500
15-10-2004,-500
18-10-2004,-500
19-10-2004,-500
20-10-2004,-500
21-10-2004,-500
22-10-2004,-500
25-10-2004,-500
26-10-2004,-500
27-10-2004,-500
28-10-2004,-500
29-10-2004,-500
01-11-2004,-500
02-11-2004,-500
03-11-2004,-500
04-11-2004,-500
05-11-2004,-500
08-11-2004,-500
09-11-2004,-500
10-11-2004,-500
12-11-2004,-500
15-11-2004,-500
16-11-2004,-500
17-11-2004,-500
18-11-2004,-500
19-11-2004,-500
22-11-2004,-500
23-11-2004,-500
24-11-2004,-500
25-11-2004,-500
26-11-2004,-500
29-11-2004,-500
30-11-2004,-500
01-12-2004,-500
02-12-2004,-500
03-12-2004,-500
06-12-2004,-500
07-12-2004,-500
08-12-2004,-500
09-12-2004,-500
10-12-2004,-500
13-12-2004,-500
14-12-2004,-500
15-12-2004,-500
16-12-2004,-500
17-12-2004,-500
20-12-2004,-500
21-12-2004,-500
22-12-2004,-500
23-12-2004,-500
24-12-2004,-500
29-12-2004,-500
30-12-2004,-500
31-12-2004,-500
04-01-2005,-500
05-01-2005,-500
06-01-2005,-500
07-01-2005,-500
10-01-2005,-500
11-01-2005,-500
12-01-2005,-500
13-01-2005,-500
14-01-2005,-500
17-01-2005,-500
18-01-2005,-500
19-01-2005,-500
20-01-2005,-500
21-01-2005,-500
24-01-2005,-500
25-01-2005,-500
26-01-2005,-500
27-01-2005,-500
28-01-2005,-500
31-01-2005,-500
01-02-2005,-500
02-02-2005,-500
03-02-2005,-500
04-02-2005,-500
07-02-2005,-500
08-02-2005,-500
09-02-2005,-500
10-02-2005,-500
11-02-2005,-500
14-02-2005,-500
15-02-2005,-500
16-02-2005,-500
17-02-2005,-500
18-02-2005,-500
21-02-2005,-500
22-02-2005,-500
23-02-2005,-500
24-02-2005,-500
25-02-2005,-500
28-02-2005,-500
01-03-2005,-500
02-03-2005,-500
03-03-2005,-500
04-03-2005,-500
07-03-2005,-500
08-03-2005,-500
09-03-2005,-500
10-03-2005,-500
11-03-2005,-500
14-03-2005,-500
15-03-2005,-500
16-03-2005,-500
17-03-2005,-500
18-03-2005,-500
21-03-2005,-500
22-03-2005,-500
23-03-2005,-500
24-03-2005,-500
28-03-2005,-500
29-03-2005,-500
30-03-2005,-500
31-03-2005,-500
01-04-2005,-500
04-04-2005,-500
05-04-2005,-500
06-04-2005,-500
07-04-2005,-500
08-04-2005,-500
11-04-2005,-500
12-04-2005,-500
13-04-2005,-500
14-04-2005,-500
15-04-2005,-500
18-04-2005,-500
19-04-2005,-500
20-04-2005,-500
21-04-2005,-500
22-04-2005,-500
25-04-2005,-500
26-04-2005,-500
27-04-2005,-500
28-04-2005,-500
29-04-2005,-500
02-05-2005,-500
03-05-2005,-500
04-05-2005,-500
05-05-2005,-500
06-05-2005,-500
09-05-2005,-500
10-05-2005,-500
11-05-2005,-500
12-05-2005,-500
13-05-2005,-500
16-05-2005,-500
17-05-2005,-500
18-05-2005,-500
19-05-2005,-500
20-05-2005,-500
24-05-2005,-500
25-05-2005,-500
26-05-2005,-500
27-05-2005,-500
30-05-2005,-500
31-05-2005,-500
01-06-2005,-500
02-06-2005,-500
03-06-2005,-500
06-06-2005,-500
07-06-2005,-500
08-06-2005,-500
09-06-2005,-500
10-06-2005,-500
13-06-2005,-500
14-06-2005,-500
15-06-2005,-500
16-06-2005,-500
17-06-2005,-500
20-06-2005,-500
21-06-2005,-500
22-06-2005,-500
23-06-2005,-500
24-06-2005,-500
27-06-2005,-500
28-06-2005,-500
29-06-2005,-500
30-06-2005,-500
04-07-2005,-500
05-07-2005,-500
06-07-2005,-500
07-07-2005,-500
08-07-2005,-500
11-07-2005,-500
12-07-2005,-500
13-07-2005,-500
14-07-2005,-500
15-07-2005,-500
18-07-2005,-500
19-07-2005,-500
20-07-2005,-500
21-07-2005,-500
22-07-2005,-500
25-07-2005,-500
26-07-2005,-500
27-07-2005,-500
28-07-2005,-500
29-07-2005,-500
02-08-2005,-500
03-08-2005,-500
04-08-2005,-500
05-08-2005,-500
08-08-2005,-500
09-08-2005,-500
10-08-2005,-500
11-08-2005,-500
12-08-2005,-500
15-08-2005,-500
16-08-2005,-500
17-08-2005,-500
18-08-2005,-500
19-08-2005,-500
22-08-2005,-500
23-08-2005,-500
24-08-2005,-500
25-08-2005,-500
26-08-2005,-500
29-08-2005,-500
30-08-2005,-500
31-08-2005,-500
01-09-2005,-500
02-09-2005,-500
06-09-2005,-500
07-09-2005,-500
08-09-2005,-500
09-09-2005,-500
12-09-2005,-500
13-09-2005,-500
14-09-2005,-500
15-09-2005,-500
16-09-2005,-500
19-09-2005,-500
20-09-2005,-500
21-09-2005,-500
22-09-2005,-500
23-09-2005,-500
26-09-2005,-500
27-09-2005,-500
28-09-2005,-500
29-09-2005,-500
30-09-2005,-500
03-10-2005,-500
04-10-2005,-500
05-10-2005,-500
06-10-2005,-500
07-10-2005,-500
11-10-2005,-500
12-10-2005,-500
13-10-2005,-500
14-10-2005,-500
17-10-2005,-500
18-10-2005,-500
19-10-2005,-500
20-10-2005,-500
21-10-2005,-500
24-10-2005,-500
25-10-2005,-500
26-10-2005,-500
27-10-2005,-500
28-10-2005,-500
31-10-2005,-500
01-11-2005,-500
02-11-2005,-500
03-11-2005,-500
04-11-2005,-500
07-11-2005,-500
08-11-2005,-500
09-11-2005,-500
10-11-2005,-500
14-11-2005,-500
15-11-2005,-500
16-11-2005,-500
17-11-2005,-500
18-11-2005,-500
21-11-2005,-500
22-11-2005,-500
23-11-2005,-500
24-11-2005,-500
25-11-2005,-500
28-11-2005,-500
29-11-2005,-500
30-11-2005,-500
01-12-2005,-500
02-12-2005,-500
05-12-2005,-500
06-12-2005,-500
07-12-2005,-500
08-12-2005,-500
09-12-2005,-500
12-12-2005,-500
13-12-2005,-500
14-12-2005,-500
15-12-2005,-500
16-12-2005,-500
19-12-2005,-500
20-12-2005,-500
21-12-2005,-500
22-12-2005,-500
23-12-2005,-500
28-12-2005,-500
29-12-2005,-500
30-12-2005,-500
03-01-2006,-500
04-01-2006,-500
05-01-2006,-500
06-01-2006,-500
09-01-2006,-500
10-01-2006,-500
11-01-2006,-500
12-01-2006,-500
13-01-2006,-500
16-01-2006,-500
17-01-2006,-500
18-01-2006,-500
19-01-2006,-500
20-01-2006,-500
23-01-2006,-500
24-01-2006,-500
25-01-2006,-500
26-01-2006,-500
27-01-2006,-500
30-01-2006,-500
31-01-2006,-500
01-02-2006,-500
02-02-2006,-500
03-02-2006,-500
06-02-2006,-500
07-02-2006,-500
08-02-2006,-500
09-02-2006,-500
10-02-2006,-500
13-02-2006,-500
14-02-2006,-500
15-02-2006,-500
16-02-2006,-500
17-02-2006,-500
20-02-2006,-500
21-02-2006,-500
22-02-2006,-500
23-02-2006,-500
24-02-2006,-500
27-02-2006,-500
28-02-2006,-500
01-03-2006,-500
02-03-2006,-500
03-03-2006,-500
06-03-2006,-500
07-03-2006,-500
08-03-2006,-500
09-03-2006,-500
10-03-2006,-500
13-03-2006,-500
14-03-2006,-500
15-03-2006,-500
16-03-2006,-500
17-03-2006,-500
20-03-2006,-500
21-03-2006,-500
22-03-2006,-500
23-03-2006,-500
24-03-2006,-500
27-03-2006,-500
28-03-2006,-500
29-03-2006,-500
30-03-2006,-500
31-03-2006,-500
03-04-2006,-500
04-04-2006,-500
05-04-2006,-500
06-04-2006,-500
07-04-2006,-500
10-04-2006,-500
11-04-2006,-500
12-04-2006,-500
13-04-2006,-500
17-04-2006,-500
18-04-2006,-500
19-04-2006,-500
20-04-2006,-500
21-04-2006,-500
24-04-2006,-500
25-04-2006,-500
26-04-2006,-500
27-04-2006,-500
28-04-2006,-500
01-05-2006,-500
02-05-2006,-500
03-05-2006,-500
04-05-2006,-500
05-05-2006,-500
08-05-2006,-500
09-05-2006,-500
10-05-2006,-500
11-05-2006,-500
12-05-2006,-500
15-05-2006,-500
16-05-2006,-500
17-05-2006,-500
18-05-2006,-500
19-05-2006,-500
23-05-2006,-500
24-05-2006,-500
25-05-2006,-500
26-05-2006,-500
29-05-2006,-500
30-05-2006,-500
31-05-2006,-500
01-06-2006,-500
02-06-2006,-500
05-06-2006,-500
06-06-2006,-500
07-06-2006,-500
08-06-2006,-500
09-06-2006,-500
12-06-2006,-500
13-06-2006,-500
14-06-2006,-500
15-06-2006,-500
16-06-2006,-500
19-06-2006,-500
20-06-2006,-500
21-06-2006,-500
22-06-2006,-500
23-06-2006,-500
26-06-2006,-500
27-06-2006,-500
28-06-2006,-500
29-06-2006,-500
30-06-2006,-500
04-07-2006,-500
05-07-2006,-500
06-07-2006,-500
07-07-2006,-500
10-07-2006,-500
11-07-2006,-500
12-07-2006,-500
13-07-2006,-500
14-07-2006,-500
17-07-2006,-500
18-07-2006,-500
19-07-2006,-500
20-07-2006,-500
21-07-2006,-500
24-07-2006,-500
25-07-2006,-500
26-07-2006,-500
27-07-2006,-500
28-07-2006,-500
31-07-2006,-500
01-08-2006,-500
02-08-2006,-500
03-08-2006,-500
04-08-2006,-500
08-08-2006,-500
09-08-2006,-500
10-08-2006,-500
11-08-2006,-500
14-08-2006,-500
15-08-2006,-500
16-08-2006,-500
17-08-2006,-500
18-08-2006,-500
21-08-2006,-500
22-08-2006,-500
23-08-2006,-500
24-08-2006,-500
25-08-2006,-500
28-08-2006,-500
29-08-2006,-500
30-08-2006,-500
31-08-2006,-500
01-09-2006,-500
05-09-2006,-500
06-09-2006,-500
07-09-2006,-500
08-09-2006,-500
11-09-2006,-500
12-09-2006,-500
13-09-2006,-500
14-09-2006,-500
15-09-2006,-500
18-09-2006,-500
19-09-2006,-500
20-09-2006,-500
21-09-2006,-500
22-09-2006,-500
25-09-2006,-500
26-09-2006,-500
27-09-2006,-500
28-09-2006,-500
29-09-2006,-500
02-10-2006,-500
03-10-2006,-500
04-10-2006,-500
05-10-2006,-500
06-10-2006,-500
10-10-2006,-500
11-10-2006,-500
12-10-2006,-500
13-10-2006,-500
16-10-2006,-500
17-10-2006,-500
18-10-2006,-500
19-10-2006,-500
20-10-2006,-500
23-10-2006,-500
24-10-2006,-500
25-10-2006,-500
26-10-2006,-500
27-10-2006,-500
30-10-2006,-500
31-10-2006,-500
01-11-2006,-500
02-11-2006,-500
03-11-2006,-500
06-11-2006,-500
07-11-2006,-500
08-11-2006,-500
09-11-2006,-500
10-11-2006,-500
14-11-2006,-500
15-11-2006,-500
16-11-2006,-500
17-11-2006,-500
20-11-2006,-500
21-11-2006,-500
22-11-2006,-500
23-11-2006,-500
24-11-2006,-500
27-11-2006,-500
28-11-2006,-500
29-11-2006,-500
30-11-2006,-500
01-12-2006,-500
04-12-2006,-500
05-12-2006,-500
06-12-2006,-500
07-12-2006,-500
08-12-2006,-500
11-12-2006,-500
12-12-2006,-500
13-12-2006,-500
14-12-2006,-500
15-12-2006,-500
18-12-2006,-500
19-12-2006,-500
20-12-2006,-500
21-12-2006,-500
22-12-2006,-500
27-12-2006,-500
28-12-2006,-500
29-12-2006,-500
02-01-2007,-500
03-01-2007,-500
04-01-2007,-500
05-01-2007,-500
08-01-2007,-500
09-01-2007,-500
10-01-2007,-500
11-01-2007,-500
12-01-2007,-500
15-01-2007,-500
16-01-2007,-500
17-01-2007,-500
18-01-2007,-500
19-01-2007,-500
22-01-2007,-500
23-01-2007,-500
24-01-2007,-500
25-01-2007,-500
26-01-2007,-500
29-01-2007,-500
30-01-2007,-500
31-01-2007,-500
01-02-2007,-500
02-02-2007,-500
05-02-2007,-500
06-02-2007,-500
07-02-2007,-500
08-02-2007,-500
09-02-2007,-500
12-02-2007,-500
13-02-2007,-500
14-02-2007,-500
15-02-2007,-500
16-02-2007,-500
19-02-2007,-500
20-02-2007,-500
21-02-2007,-500
22-02-2007,-500
23-02-2007,-500
26-02-2007,-500
27-02-2007,-500
28-02-2007,-500
01-03-2007,-500
02-03-2007,-500
05-03-2007,-500
06-03-2007,-500
07-03-2007,-500
08-03-2007,-500
09-03-2007,-500
12-03-2007,-500
13-03-2007,-500
14-03-2007,-500
15-03-2007,-500
16-03-2007,-500
19-03-2007,-500
20-03-2007,-500
21-03-2007,-500
22-03-2007,-500
23-03-2007,-500
26-03-2007,-500
27-03-2007,-500
28-03-2007,-500
29-03-2007,-500
30-03-2007,-500
02-04-2007,-500
03-04-2007,-500
04-04-2007,-500
05-04-2007,-500
09-04-2007,-500
10-04-2007,-500
11-04-2007,-500
12-04-2007,-500
13-04-2007,-500
16-04-2007,-500
17-04-2007,-500
18-04-2007,-500
19-04-2007,-500
20-04-2007,-500
23-04-2007,-500
24-04-2007,-500
25-04-2007,-500
26-04-2007,-500
27-04-2007,-500
30-04-2007,-500
01-05-2007,-500
02-05-2007,-500
03-05-2007,-500
04-05-2007,-500
07-05-2007,-500
08-05-2007,-500
09-05-2007,-500
10-05-2007,-500
11-05-2007,-500
14-05-2007,-500
15-05-2007,-500
16-05-2007,-500
17-05-2007,-500
18-05-2007,-500
22-05-2007,-500
23-05-2007,-500
24-05-2007,-500
25-05-2007,-500
28-05-2007,-500
29-05-2007,-500
30-05-2007,-500
31-05-2007,-500
01-06-2007,-500
04-06-2007,-500
05-06-2007,-500
06-06-2007,-500
07-06-2007,-500
08-06-2007,-500
11-06-2007,-500
12-06-2007,-500
13-06-2007,-500
14-06-2007,-500
15-06-2007,-500
18-06-2007,-500
19-06-2007,-500
20-06-2007,-500
21-06-2007,-500
22-06-2007,-500
25-06-2007,-500
26-06-2007,-500
27-06-2007,-500
28-06-2007,-500
29-06-2007,-500
03-07-2007,-500
04-07-2007,-500
05-07-2007,-500
06-07-2007,-500
09-07-2007,-500
10-07-2007,-500
11-07-2007,-500
12-07-2007,-500
13-07-2007,-500
16-07-2007,-500
17-07-2007,-500
18-07-2007,-500
19-07-2007,-500
20-07-2007,-500
23-07-2007,-500
24-07-2007,-500
25-07-2007,-500
26-07-2007,-500
27-07-2007,-500
30-07-2007,-500
31-07-2007,-500
01-08-2007,-500
02-08-2007,-500
03-08-2007,-500
07-08-2007,-500
08-08-2007,-500
09-08-2007,-500
10-08-2007,-500
13-08-2007,-500
14-08-2007,-500
15-08-2007,-500
16-08-2007,-500
17-08-2007,-500
20-08-2007,-500
21-08-2007,-500
22-08-2007,-500
23-08-2007,-500
24-08-2007,-500
27-08-2007,-500
28-08-2007,-500
29-08-2007,-500
30-08-2007,-500
31-08-2007,-500
04-09-2007,-500
05-09-2007,-500
06-09-2007,-500
07-09-2007,-500
10-09-2007,-500
11-09-2007,-500
12-09-2007,-500
13-09-2007,-500
14-09-2007,-500
17-09-2007,-500
18-09-2007,-500
19-09-2007,-500
20-09-2007,-500
21-09-2007,-500
24-09-2007,-500
25-09-2007,-500
26-09-2007,-500
27-09-2007,-500
28-09-2007,-500
01-10-2007,-500
02-10-2007,-500
03-10-2007,-500
04-10-2007,-500
05-10-2007,-500
09-10-2007,-500
10-10-2007,-500
11-10-2007,-500
12-10-2007,-500
15-10-2007,-500
16-10-2007,-500
17-10-2007,-500
18-10-2007,-500
19-10-2007,-500
22-10-2007,-500
23-10-2007,-500
24-10-2007,-500
25-10-2007,-500
26-10-2007,-500
29-10-2007,-500
30-10-2007,-500
31-10-2007,-500
01-11-2007,-500
02-11-2007,-500
05-11-2007,-500
06-11-2007,-500
07-11-2007,-500
08-11-2007,-500
09-11-2007,-500
13-11-2007,-500
14-11-2007,-500
15-11-2007,-500
16-11-2007,-500
19-11-2007,-500
20-11-2007,-500
21-11-2007,-500
22-11-2007,-500
23-11-2007,-500
26-11-2007,-500
27-11-2007,-500
28-11-2007,-500
29-11-2007,-500
30-11-2007,-500
03-12-2007,-500
04-12-2007,-500
05-12-2007,-500
06-12-2007,-500
07-12-2007,-500
10-12-2007,-500
11-12-2007,-500
12-12-2007,-500
13-12-2007,-500
14-12-2007,-500
17-12-2007,-500
18-12-2007,-500
19-12-2007,-500
20-12-2007,-500
21-12-2007,-500
24-12-2007,-500
27-12-2007,-500
28-12-2007,-500
31-12-2007,-500
02-01-2008,-500
03-01-2008,-500
04-01-2008,-500
07-01-2008,-500
08-01-2008,-500
09-01-2008,-500
10-01-2008,-500
11-01-2008,-500
14-01-2008,-500
15-01-2008,-500
16-01-2008,-500
17-01-2008,-500
18-01-2008,-500
21-01-2008,-500
22-01-2008,-500
23-01-2008,-500
24-01-2008,-500
25-01-2008,-500
28-01-2008,-500
29-01-2008,-500
30-01-2008,-500
31-01-2008,-500
01-02-2008,-500
04-02-2008,-500
05-02-2008,-500
06-02-2008,-500
07-02-2008,-500
08-02-2008,-500
11-02-2008,-500
12-02-2008,-500
13-02-2008,-500
14-02-2008,-500
15-02-2008,-500
19-02-2008,-500
20-02-2008,-500
21-02-2008,-500
22-02-2008,-500
25-02-2008,-500
26-02-2008,-500
27-02-2008,-500
28-02-2008,-500
29-02-2008,-500
03-03-2008,-500
04-03-2008,-500
05-03-2008,-500
06-03-2008,-500
07-03-2008,-500
10-03-2008,-500
11-03-2008,-500
12-03-2008,-500
13-03-2008,-500
14-03-2008,-500
17-03-2008,-500
18-03-2008,-500
19-03-2008,-500
20-03-2008,-500
24-03-2008,-500
25-03-2008,-500
26-03-2008,-500
27-03-2008,-500
28-03-2008,-500
31-03-2008,-500
01-04-2008,-500
02-04-2008,-500
03-04-2008,-500
04-04-2008,-500
07-04-2008,-500
08-04-2008,-500
09-04-2008,-500
10-04-2008,-500
11-04-2008,-500
14-04-2008,-500
15-04-2008,-500
16-04-2008,-500
17-04-2008,-500
18-04-2008,-500
21-04-2008,-500
22-04-2008,-500
23-04-2008,-500
24-04-2008,-500
25-04-2008,-500
28-04-2008,-500
29-04-2008,-500
30-04-2008,-500
01-05-2008,-500
02-05-2008,-500
05-05-2008,-500
06-05-2008,-500
07-05-2008,-500
08-05-2008,-500
09-05-2008,-500
12-05-2008,-500
13-05-2008,-500
14-05-2008,-500
15-05-2008,-500
16-05-2008,-500
20-05-2008,-500
21-05-2008,-500
22-05-2008,-500
23-05-2008,-500
26-05-2008,-500
27-05-2008,-500
28-05-2008,-500
29-05-2008,-500
30-05-2008,-500
02-06-2008,-500
03-06-2008,-500
04-06-2008,-500
05-06-2008,-500
06-06-2008,-500
09-06-2008,-500
10-06-2008,-500
11-06-2008,-500
12-06-2008,-500
13-06-2008,-500
16-06-2008,-500
17-06-2008,-500
18-06-2008,-500
19-06-2008,-500
20-06-2008,-500
23-06-2008,-500
24-06-2008,-500
25-06-2008,-500
26-06-2008,-500
27-06-2008,-500
30-06-2008,-500
02-07-2008,-500
03-07-2008,-500
04-07-2008,-500
07-07-2008,-500
08-07-2008,-500
09-07-2008,-500
10-07-2008,-500
11-07-2008,-500
14-07-2008,-500
15-07-2008,-500
16-07-2008,-500
17-07-2008,-500
18-07-2008,-500
21-07-2008,-500
22-07-2008,-500
23-07-2008,-500
24-07-2008,-500
25-07-2008,-500
28-07-2008,-500
29-07-2008,-500
30-07-2008,-500
31-07-2008,-500
01-08-2008,-500
05-08-2008,-500
06-08-2008,-500
07-08-2008,-500
08-08-2008,-500
11-08-2008,-500
12-08-2008,-500
13-08-2008,-500
14-08-2008,-500
15-08-2008,-500
18-08-2008,-500
19-08-2008,-500
20-08-2008,-500
21-08-2008,-500
22-08-2008,-500
25-08-2008,-500
26-08-2008,-500
27-08-2008,-500
28-08-2008,-500
29-08-2008,-500
02-09-2008,-500
03-09-2008,-500
04-09-2008,-500
05-09-2008,-500
08-09-2008,-500
09-09-2008,-500
10-09-2008,-500
11-09-2008,-500
12-09-2008,-500
15-09-2008,-500
16-09-2008,-500
17-09-2008,-500
18-09-2008,-500
19-09-2008,-500
22-09-2008,-500
23-09-2008,-500
24-09-2008,-500
25-09-2008,-500
26-09-2008,-500
29-09-2008,-500
30-09-2008,-500
01-10-2008,-500
02-10-2008,-500
03-10-2008,-500
06-10-2008,-500
07-10-2008,-500
08-10-2008,-500
09-10-2008,-500
10-10-2008,-500
14-10-2008,-500
15-10-2008,-500
16-10-2008,-500
17-10-2008,-500
20-10-2008,-500
21-10-2008,-500
22-10-2008,-500
23-10-2008,-500
24-10-2008,-500
27-10-2008,-500
28-10-2008,-500
29-10-2008,-500
30-10-2008,-500
31-10-2008,-500
03-11-2008,-500
04-11-2008,-500
05-11-2008,-500
06-11-2008,-500
07-11-2008,-500
10-11-2008,-500
12-11-2008,-500
13-11-2008,-500
14-11-2008,-500
17-11-2008,-500
18-11-2008,-500
19-11-2008,-500
20-11-2008,-500
21-11-2008,-500
24-11-2008,-500
25-11-2008,-500
26-11-2008,-500
27-11-2008,-500
28-11-2008,-500
01-12-2008,-500
02-12-2008,-500
03-12-2008,-500
04-12-2008,-500
05-12-2008,-500
08-12-2008,-500
09-12-2008,-500
10-12-2008,-500
11-12-2008,-500
12-12-2008,-500
15-12-2008,-500
16-12-2008,-500
17-12-2008,-500
18-12-2008,-500
19-12-2008,-500
22-12-2008,-500
23-12-2008,-500
24-12-2008,-500
29-12-2008,-500
30-12-2008,-500
31-12-2008,-500
02-01-2009,-500
05-01-2009,-500
06-01-2009,-500
07-01-2009,-500
08-01-2009,-500
09-01-2009,-500
12-01-2009,-500
13-01-2009,-500
14-01-2009,-500
15-01-2009,-500
16-01-2009,-500
19-01-2009,-500
20-01-2009,-500
21-01-2009,-500
22-01-2009,-500
23-01-2009,-500
26-01-2009,-500
27-01-2009,-500
28-01-2009,-500
29-01-2009,-500
30-01-2009,-500
02-02-2009,-500
03-02-2009,-500
04-02-2009,-500
05-02-2009,-500
06-02-2009,-500
09-02-2009,-500
10-02-2009,-500
11-02-2009,-500
12-02-2009,-500
13-02-2009,-500
17-02-2009,-500
18-02-2009,-500
19-02-2009,-500
20-02-2009,-500
23-02-2009,-500
24-02-2009,-500
25-02-2009,-500
26-02-2009,-500
27-02-2009,-500
02-03-2009,-500
03-03-2009,-500
04-03-2009,-500
05-03-2009,-500
06-03-2009,-500
09-03-2009,-500
10-03-2009,-500
11-03-2009,-500
12-03-2009,-500
13-03-2009,-500
16-03-2009,-500
17-03-2009,-500
18-03-2009,-500
19-03-2009,-500
20-03-2009,-500
23-03-2009,-500
24-03-2009,-500
25-03-2009,-500
26-03-2009,-500
27-03-2009,-500
30-03-2009,-500
31-03-2009,-500
01-04-2009,-500
02-04-2009,-500
03-04-2009,-500
06-04-2009,-500
07-04-2009,-500
08-04-2009,-500
09-04-2009,-500
13-04-2009,-500
14-04-2009,-500
15-04-2009,-500
16-04-2009,-500
17-04-2009,-500
20-04-2009,-500
21-04-2009,-500
22-04-2009,-500
23-04-2009,-500
24-04-2009,-500
27-04-2009,-500
28-04-2009,-500
29-04-2009,-500
30-04-2009,-500
01-05-2009,-500
04-05-2009,-500
05-05-2009,-500
06-05-2009,-500
07-05-2009,-500
08-05-2009,-500
11-05-2009,-500
12-05-2009,-500
13-05-2009,-500
14-05-2009,-500
15-05-2009,-500
19-05-2009,-500
20-05-2009,-500
21-05-2009,-500
22-05-2009,-500
25-05-2009,-500
26-05-2009,-500
27-05-2009,-500
28-05-2009,-500
29-05-2009,-500
01-06-2009,-500
02-06-2009,-500
03-06-2009,-500
04-06-2009,-500
05-06-2009,-500
08-06-2009,-500
09-06-2009,-500
10-06-2009,-500
11-06-2009,-500
12-06-2009,-500
15-06-2009,-500
16-06-2009,-500
17-06-2009,-500
18-06-2009,-500
19-06-2009,-500
22-06-2009,-500
23-06-2009,-500
24-06-2009,-500
25-06-2009,-500
26-06-2009,-500
29-06-2009,-500
30-06-2009,-500
02-07-2009,-500
03-07-2009,-500
06-07-2009,-500
07-07-2009,-500
08-07-2009,-500
09-07-2009,-500
10-07-2009,-500
13-07-2009,-500
14-07-2009,-500
15-07-2009,-500
16-07-2009,-500
17-07-2009,-500
20-07-2009,-500
21-07-2009,-500
22-07-2009,-500
23-07-2009,-500
24-07-2009,-500
27-07-2009,-500
28-07-2009,-500
29-07-2009,-500
30-07-2009,-500
31-07-2009,-500
04-08-2009,-500
05-08-2009,-500
06-08-2009,-500
07-08-2009,-500
10-08-2009,-500
11-08-2009,-500
12-08-2009,-500
13-08-2009,-500
14-08-2009,-500
17-08-2009,-500
18-08-2009,-500
19-08-2009,-500
20-08-2009,-500
21-08-2009,-500
24-08-2009,-500
25-08-2009,-500
26-08-2009,-500
27-08-2009,-500
28-08-2009,-500
31-08-2009,-500
01-09-2009,-500
02-09-2009,-500
03-09-2009,-500
04-09-2009,-500
08-09-2009,-500
09-09-2009,-500
10-09-2009,-500
11-09-2009,-500
14-09-2009,-500
15-09-2009,-500
16-09-2009,-500
17-09-2009,-500
18-09-2009,-500
21-09-2009,-500
22-09-2009,-500
23-09-2009,-500
24-09-2009,-500
25-09-2009,-500
28-09-2009,-500
29-09-2009,-500
30-09-2009,-500
01-10-2009,-500
02-10-2009,-500
05-10-2009,-500
06-10-2009,-500
07-10-2009,-500
08-10-2009,-500
09-10-2009,-500
13-10-2009,-500
14-10-2009,-500
15-10-2009,-500
16-10-2009,-500
19-10-2009,-500
20-10-2009,-500
21-10-2009,-500
22-10-2009,-500
23-10-2009,-500
26-10-2009,-500
27-10-2009,-500
28-10-2009,-500
29-10-2009,-500
30-10-2009,-500
02-11-2009,-500
03-11-2009,-500
04-11-2009,-500
05-11-2009,-500
06-11-2009,-500
09-11-2009,-500
10-11-2009,-500
12-11-2009,-500
13-11-2009,-500
16-11-2009,-500
17-11-2009,-500
18-11-2009,-500
19-11-2009,-500
20-11-2009,-500
23-11-2009,-500
24-11-2009,-500
25-11-2009,-500
26-11-2009,-500
27-11-2009,-500
30-11-2009,-500
01-12-2009,-500
02-12-2009,-500
03-12-2009,-500
04-12-2009,-500
07-12-2009,-500
08-12-2009,-500
09-12-2009,-500
10-12-2009,-500
11-12-2009,-500
14-12-2009,-500
15-12-2009,-500
16-12-2009,-500
17-12-2009,-500
18-12-2009,-500
21-12-2009,-500
22-12-2009,-500
23-12-2009,-500
24-12-2009,-500
29-12-2009,-500
30-12-2009,-500
31-12-2009,-500
04-01-2010,-500
05-01-2010,-500
06-01-2010,-500
07-01-2010,-500
08-01-2010,-500
11-01-2010,-500
12-01-2010,-500
13-01-2010,-500
14-01-2010,-500
15-01-2010,-500
18-01-2010,-500
19-01-2010,-500
20-01-2010,-500
21-01-2010,-500
22-01-2010,-500
25-01-2010,-500
26-01-2010,-500
27-01-2010,-500
28-01-2010,-500
29-01-2010,-500
01-02-2010,-500
02-02-2010,-500
03-02-2010,-500
04-02-2010,-500
05-02-2010,-500
08-02-2010,-500
09-02-2010,-500
10-02-2010,-500
11-02-2010,-500
12-02-2010,-500
16-02-2010,-500
17-02-2010,-500
18-02-2010,-500
19-02-2010,-500
22-02-2010,-500
23-02-2010,-500
24-02-2010,-500
25-02-2010,-500
26-02-2010,-500
01-03-2010,-500
02-03-2010,-500
03-03-2010,-500
04-03-2010,-500
05-03-2010,-500
08-03-2010,-500
09-03-2010,-500
10-03-2010,-500
11-03-2010,-500
12-03-2010,-500
15-03-2010,-500
16-03-2010,-500
17-03-2010,-500
18-03-2010,-500
19-03-2010,-500
22-03-2010,-500
23-03-2010,-500
24-03-2010,-500
25-03-2010,-500
26-03-2010,-500
29-03-2010,-500
30-03-2010,-500
31-03-2010,-500
01-04-2010,-500
05-04-2010,-500
06-04-2010,-500
07-04-2010,-500
08-04-2010,-500
09-04-2010,-500
12-04-2010,-500
13-04-2010,-500
14-04-2010,-500
15-04-2010,-500
16-04-2010,-500
19-04-2010,-500
20-04-2010,-500
21-04-2010,-500
22-04-2010,-500
23-04-2010,-500
26-04-2010,-500
27-04-2010,-500
28-04-2010,-500
29-04-2010,-500
30-04-2010,-500
03-05-2010,-500
04-05-2010,-500
05-05-2010,-500
06-05-2010,-500
07-05-2010,-500
10-05-2010,-500
11-05-2010,-500
12-05-2010,-500
13-05-2010,-500
14-05-2010,-500
17-05-2010,-500
18-05-2010,-500
19-05-2010,-500
20-05-2010,-500
21-05-2010,-500
25-05-2010,-500
26-05-2010,-500
27-05-2010,-500
28-05-2010,-500
31-05-2010,-500
01-06-2010,-500
02-06-2010,-500
03-06-2010,-500
04-06-2010,-500
07-06-2010,-500
08-06-2010,-500
09-06-2010,-500
10-06-2010,-500
11-06-2010,-500
14-06-2010,-500
15-06-2010,-500
16-06-2010,-500
17-06-2010,-500
18-06-2010,-500
21-06-2010,-500
22-06-2010,-500
23-06-2010,-500
24-06-2010,-500
25-06-2010,-500
28-06-2010,-500
29-06-2010,-500
30-06-2010,-500
02-07-2010,-500
05-07-2010,-500
06-07-2010,-500
07-07-2010,-500
08-07-2010,-500
09-07-2010,-500
12-07-2010,-500
13-07-2010,-500
14-07-2010,-500
15-07-2010,-500
16-07-2010,-500
19-07-2010,-500
20-07-2010,-500
21-07-2010,-500
22-07-2010,-500
23-07-2010,-500
26-07-2010,-500
27-07-2010,-500
28-07-2010,-500
29-07-2010,-500
30-07-2010,-500
03-08-2010,-500
04-08-2010,-500
05-08-2010,-500
06-08-2010,-500
09-08-2010,-500
10-08-2010,-500
11-08-2010,-500
12-08-2010,-500
13-08-2010,-500
16-08-2010,-500
17-08-2010,-500
18-08-2010,-500
19-08-2010,-500
20-08-2010,-500
23-08-2010,-500
24-08-2010,-500
25-08-2010,-500
26-08-2010,-500
27-08-2010,-500
30-08-2010,-500
31-08-2010,-500
01-09-2010,-500
02-09-2010,-500
03-09-2010,-500
07-09-2010,-500
08-09-2010,-500
09-09-2010,-500
10-09-2010,-500
13-09-2010,-500
14-09-2010,-500
15-09-2010,-500
16-09-2010,-500
17-09-2010,-500
20-09-2010,-500
21-09-2010,-500
22-09-2010,-500
23-09-2010,-500
24-09-2010,-500
27-09-2010,-500
28-09-2010,-500
29-09-2010,-500
30-09-2010,-500
01-10-2010,-500
04-10-2010,-500
05-10-2010,-500
06-10-2010,-500
07-10-2010,-500
08-10-2010,-500
12-10-2010,-500
13-10-2010,-500
14-10-2010,-500
15-10-2010,-500
18-10-2010,-500
19-10-2010,-500
20-10-2010,-500
21-10-2010,-500
22-10-2010,-500
25-10-2010,-500
26-10-2010,-500
27-10-2010,-500
28-10-2010,-500
29-10-2010,-500
01-11-2010,-500
02-11-2010,-500
03-11-2010,-500
04-11-2010,-500
05-11-2010,-500
08-11-2010,-500
09-11-2010,-500
10-11-2010,-500
12-11-2010,-500
15-11-2010,-500
16-11-2010,-500
17-11-2010,-500
18-11-2010,-500
19-11-2010,-500
22-11-2010,-500
23-11-2010,-500
24-11-2010,-500
25-11-2010,-500
26-11-2010,-500
29-11-2010,-500
30-11-2010,-500
01-12-2010,-500
02-12-2010,-500
03-12-2010,-500
06-12-2010,-500
07-12-2010,-500
08-12-2010,-500
09-12-2010,-500
10-12-2010,-500
13-12-2010,-500
14-12-2010,-500
15-12-2010,-500
16-12-2010,-500
17-12-2010,-500
20-12-2010,-500
21-12-2010,-500
22-12-2010,-500
23-12-2010,-500
24-12-2010,-500
29-12-2010,-500
30-12-2010,-500
31-12-2010,-500
04-01-2011,-500
05-01-2011,-500
06-01-2011,-500
07-01-2011,-500
10-01-2011,-500
11-01-2011,-500
12-01-2011,-500
13-01-2011,-500
14-01-2011,-500
17-01-2011,-500
18-01-2011,-500
19-01-2011,-500
20-01-2011,-500
21-01-2011,-500
24-01-2011,-500
25-01-2011,-500
26-01-2011,-500
27-01-2011,-500
28-01-2011,-500
31-01-2011,-500
01-02-2011,-500
02-02-2011,-500
03-02-2011,-500
04-02-2011,-500
07-02-2011,-500
08-02-2011,-500
09-02-2011,-500
10-02-2011,-500
11-02-2011,-500
14-02-2011,-500
15-02-2011,-500
16-02-2011,-500
17-02-2011,-500
18-02-2011,-500
22-02-2011,-500
23-02-2011,-500
24-02-2011,-500
25-02-2011,-500
28-02-2011,-500
01-03-2011,-500
02-03-2011,-500
03-03-2011,-500
04-03-2011,-500
07-03-2011,-500
08-03-2011,-500
09-03-2011,-500
10-03-2011,-500
11-03-2011,-500
14-03-2011,-500
15-03-2011,-500
16-03-2011,-500
17-03-2011,-500
18-03-2011,-500
21-03-2011,-500
22-03-2011,-500
23-03-2011,-500
24-03-2011,-500
25-03-2011,-500
28-03-2011,-500
29-03-2011,-500
30-03-2011,-500
31-03-2011,-500
01-04-2011,-500
04-04-2011,-500
05-04-2011,-500
06-04-2011,-500
07-04-2011,-500
08-04-2011,-500
11-04-2011,-500
12-04-2011,-500
13-04-2011,-500
14-04-2011,-500
15-04-2011,-500
18-04-2011,-500
19-04-2011,-500
20-04-2011,-500
21-04-2011,-500
25-04-2011,-500
26-04-2011,-500
27-04-2011,-500
28-04-2011,-500
29-04-2011,-500
02-05-2011,-500
03-05-2011,-500
04-05-2011,-500
05-05-2011,-500
06-05-2011,-500
09-05-2011,-500
10-05-2011,-500
11-05-2011,-500
12-05-2011,-500
13-05-2011,-500
16-05-2011,-500
17-05-2011,-500
18-05-2011,-500
19-05-2011,-500
20-05-2011,-500
24-05-2011,-500
25-05-2011,-500
26-05-2011,-500
27-05-2011,-500
30-05-2011,-500
31-05-2011,-500
01-06-2011,-500
02-06-2011,-500
03-06-2011,-500
06-06-2011,-500
07-06-2011,-500
08-06-2011,-500
09-06-2011,-500
10-06-2011,-500
13-06-2011,-500
14-06-2011,-500
15-06-2011,-500
16-06-2011,-500
17-06-2011,-500
20-06-2011,-500
21-06-2011,-500
22-06-2011,-500
23-06-2011,-500
24-06-2011,-500
27-06-2011,-500
28-06-2011,-500
29-06-2011,-500
30-06-2011,-500
04-07-2011,-500
05-07-2011,-500
06-07-2011,-500
07-07-2011,-500
08-07-2011,-500
11-07-2011,-500
12-07-2011,-500
13-07-2011,-500
14-07-2011,-500
15-07-2011,-500
18-07-2011,-500
19-07-2011,-500
20-07-2011,-500
21-07-2011,-500
22-07-2011,-500
25-07-2011,-500
26-07-2011,-500
27-07-2011,-500
28-07-2011,-500
29-07-2011,-500
02-08-2011,-500
03-08-2011,-500
04-08-2011,-500
05-08-2011,-500
08-08-2011,-500
09-08-2011,-500
10-08-2011,-500
11-08-2011,-500
12-08-2011,-500
15-08-2011,-500
16-08-2011,-500
17-08-2011,-500
18-08-2011,-500
19-08-2011,-500
22-08-2011,-500
23-08-2011,-500
24-08-2011,-500
25-08-2011,-500
26-08-2011,-500
29-08-2011,-500
30-08-2011,-500
31-08-2011,-500
01-09-2011,-500
02-09-2011,-500
06-09-2011,-500
07-09-2011,-500
08-09-2011,-500
09-09-2011,-500
12-09-2011,-500
13-09-2011,-500
14-09-2011,-500
15-09-2011,-500
16-09-2011,-500
19-09-2011,-500
20-09-2011,-500
21-09-2011,-500
22-09-2011,-500
23-09-2011,-500
26-09-2011,-500
27-09-2011,-500
28-09-2011,-500
29-09-2011,-500
30-09-2011,-500
03-10-2011,-500
04-10-2011,-500
05-10-2011,-500
06-10-2011,-500
07-10-2011,-500
11-10-2011,-500
12-10-2011,-500
13-10-2011,-500
14-10-2011,-500
17-10-2011,-500
18-10-2011,-500
19-10-2011,-500
20-10-2011,-500
21-10-2011,-500
24-10-2011,-500
25-10-2011,-500
26-10-2011,-500
27-10-2011,-500
28-10-2011,-500
31-10-2011,-500
01-11-2011,-500
02-11-2011,-500
03-11-2011,-500
04-11-2011,-500
07-11-2011,-500
08-11-2011,-500
09-11-2011,-500
10-11-2011,-500
14-11-2011,-500
15-11-2011,-500
16-11-2011,-500
17-11-2011,-500
18-11-2011,-500
21-11-2011,-500
22-11-2011,-500
23-11-2011,-500
24-11-2011,-500
25-11-2011,-500
28-11-2011,-500
29-11-2011,-500
30-11-2011,-500
01-12-2011,-500
02-12-2011,-500
05-12-2011,-500
06-12-2011,-500
07-12-2011,-500
08-12-2011,-500
09-12-2011,-500
12-12-2011,-500
13-12-2011,-500
14-12-2011,-500
15-12-2011,-500
16-12-2011,-500
19-12-2011,-500
20-12-2011,-500
21-12-2011,-500
22-12-2011,-500
23-12-2011,-500
28-12-2011,-500
29-12-2011,-500
30-12-2011,-500
03-01-2012,-500
04-01-2012,-500
05-01-2012,-500
06-01-2012,-500
09-01-2012,-500
10-01-2012,-500
11-01-2012,-500
12-01-2012,-500
13-01-2012,-500
16-01-2012,-500
17-01-2012,-500
18-01-2012,-500
19-01-2012,-500
20-01-2012,-500
23-01-2012,-500
24-01-2012,-500
25-01-2012,-500
26-01-2012,-500
27-01-2012,-500
30-01-2012,-500
31-01-2012,-500
01-02-2012,-500
02-02-2012,-500
03-02-2012,-500
06-02-2012,-500
07-02-2012,-500
08-02-2012,-500
09-02-2012,-500
10-02-2012,-500
13-02-2012,-500
14-02-2012,-500
15-02-2012,-500
16-02-2012,-500
17-02-2012,-500
21-02-2012,-500
22-02-2012,-500
23-02-2012,-500
24-02-2012,-500
27-02-2012,-500
28-02-2012,-500
29-02-2012,-500
01-03-2012,-500
02-03-2012,-500
05-03-2012,-500
06-03-2012,-500
07-03-2012,-500
08-03-2012,-500
09-03-2012,-500
12-03-2012,-500
13-03-2012,-500
14-03-2012,-500
15-03-2012,-500
16-03-2012,-500
19-03-2012,-500
20-03-2012,-500
21-03-2012,-500
22-03-2012,-500
23-03-2012,-500
26-03-2012,-500
27-03-2012,-500
28-03-2012,-500
29-03-2012,-500
30-03-2012,-500
02-04-2012,-500
03-04-2012,-500
04-04-2012,-500
05-04-2012,-500
09-04-2012,-500
10-04-2012,-500
11-04-2012,-500
12-04-2012,-500
13-04-2012,-500
16-04-2012,-500
17-04-2012,-500
18-04-2012,-500
19-04-2012,-500
20-04-2012,-500
23-04-2012,-500
24-04-2012,-500
25-04-2012,-500
26-04-2012,-500
27-04-2012,-500
30-04-2012,-500
01-05-2012,-500
02-05-2012,-500
03-05-2012,-500
04-05-2012,-500
07-05-2012,-500
08-05-2012,-500
09-05-2012,-500
10-05-2012,-500
11-05-2012,-500
14-05-2012,-500
15-05-2012,-500
16-05-2012,-500
17-05-2012,-500
18-05-2012,-500
22-05-2012,-500
23-05-2012,-500
24-05-2012,-500
25-05-2012,-500
28-05-2012,-500
29-05-2012,-500
30-05-2012,-500
31-05-2012,-500
01-06-2012,-500
04-06-2012,-500
05-06-2012,-500
06-06-2012,-500
07-06-2012,-500
08-06-2012,-500
11-06-2012,-500
12-06-2012,-500
13-06-2012,-500
14-06-2012,-500
15-06-2012,-500
18-06-2012,-500
19-06-2012,-500
20-06-2012,-500
21-06-2012,-500
22-06-2012,-500
25-06-2012,-500
26-06-2012,-500
27-06-2012,-500
28-06-2012,-500
29-06-2012,-500
03-07-2012,-500
04-07-2012,-500
05-07-2012,-500
06-07-2012,-500
09-07-2012,-500
10-07-2012,-500
11-07-2012,-500
12-07-2012,-500
13-07-2012,-500
16-07-2012,-500
17-07-2012,-500
18-07-2012,-500
19-07-2012,-500
20-07-2012,-500
23-07-2012,-500
24-07-2012,-500
25-07-2012,-500
26-07-2012,-500
27-07-2012,-500
30-07-2012,-500
31-07-2012,-500
01-08-2012,-500
02-08-2012,-500
03-08-2012,-500
07-08-2012,-500
08-08-2012,-500
09-08-2012,-500
10-08-2012,-500
13-08-2012,-500
14-08-2012,-500
15-08-2012,-500
16-08-2012,-500
17-08-2012,-500
20-08-2012,-500
21-08-2012,-500
22-08-2012,-500
23-08-2012,-500
24-08-2012,-500
27-08-2012,-500
28-08-2012,-500
29-08-2012,-500
30-08-2012,-500
31-08-2012,-500
04-09-2012,-500
05-09-2012,-500
06-09-2012,-500
07-09-2012,-500
10-09-2012,-500
11-09-2012,-500
12-09-2012,-500
13-09-2012,-500
14-09-2012,-500
17-09-2012,-500
18-09-2012,-500
19-09-2012,-500
20-09-2012,-500
21-09-2012,-500
24-09-2012,-500
25-09-2012,-500
26-09-2012,-500
27-09-2012,-500
28-09-2012,-500
01-10-2012,-500
02-10-2012,-500
03-10-2012,-500
04-10-2012,-500
05-10-2012,-500
09-10-2012,-500
10-10-2012,-500
11-10-2012,-500
12-10-2012,-500
15-10-2012,-500
16-10-2012,-500
17-10-2012,-500
18-10-2012,-500
19-10-2012,-500
22-10-2012,-500
23-10-2012,-500
24-10-2012,-500
25-10-2012,-500
26-10-2012,-500
29-10-2012,-500
30-10-2012,-500
31-10-2012,-500
01-11-2012,-500
02-11-2012,-500
05-11-2012,-500
06-11-2012,-500
07-11-2012,-500
08-11-2012,-500
09-11-2012,-500
13-11-2012,-500
14-11-2012,-500
15-11-2012,-500
16-11-2012,-500
19-11-2012,-500
20-11-2012,-500
21-11-2012,-500
22-11-2012,-500
23-11-2012,-500
26-11-2012,-500
27-11-2012,-500
28-11-2012,-500
29-11-2012,-500
30-11-2012,-500
03-12-2012,-500
04-12-2012,-500
05-12-2012,-500
06-12-2012,-500
07-12-2012,-500
10-12-2012,-500
11-12-2012,-500
12-12-2012,-500
13-12-2012,-500
14-12-2012,-500
17-12-2012,-500
18-12-2012,-500
19-12-2012,-500
20-12-2012,-500
21-12-2012,-500
24-12-2012,-500
27-12-2012,-500
28-12-2012,-500
31-12-2012,-500
02-01-2013,-500
03-01-2013,-500
04-01-2013,-500
07-01-2013,-500
08-01-2013,-500
09-01-2013,-500
10-01-2013,-500
11-01-2013,-500
14-01-2013,-500
15-01-2013,-500
16-01-2013,-500
17-01-2013,-500
18-01-2013,-500
21-01-2013,-500
22-01-2013,-500
23-01-2013,-500
24-01-2013,-500
25-01-2013,-500
28-01-2013,-500
29-01-2013,-500
30-01-2013,-500
31-01-2013,-500
01-02-2013,-500
04-02-2013,-500
05-02-2013,-500
06-02-2013,-500
07-02-2013,-500
08-02-2013,-500
11-02-2013,-500
12-02-2013,-500
13-02-2013,-500
14-02-2013,-500
15-02-2013,-500
19-02-2013,-500
20-02-2013,-500
21-02-2013,-500
22-02-2013,-500
25-02-2013,-500
26-02-2013,-500
27-02-2013,-500
28-02-2013,-500
01-03-2013,-500
04-03-2013,-500
05-03-2013,-500
06-03-2013,-500
07-03-2013,-500
08-03-2013,-500
11-03-2013,-500
12-03-2013,-500
13-03-2013,-500
14-03-2013,-500
15-03-2013,-500
18-03-2013,-500
19-03-2013,-500
20-03-2013,-500
21-03-2013,-500
22-03-2013,-500
25-03-2013,-500
26-03-2013,-500
27-03-2013,-500
28-03-2013,-500
01-04-2013,-500
02-04-2013,-500
03-04-2013,-500
04-04-2013,-500
05-04-2013,-500
08-04-2013,-500
09-04-2013,-500
10-04-2013,-500
11-04-2013,-500
12-04-2013,-500
15-04-2013,-500
16-04-2013,-500
17-04-2013,-500
18-04-2013,-500
19-04-2013,-500
22-04-2013,-500
23-04-2013,-500
24-04-2013,-500
25-04-2013,-500
26-04-2013,-500
29-04-2013,-500
30-04-2013,-500
01-05-2013,-500
02-05-2013,-500
03-05-2013,-500
06-05-2013,-500
07-05-2013,-500
08-05-2013,-500
09-05-2013,-500
10-05-2013,-500
13-05-2013,-500
14-05-2013,-500
15-05-2013,-500
16-05-2013,-500
17-05-2013,-500
21-05-2013,-500
22-05-2013,-500
23-05-2013,-500
24-05-2013,-500
27-05-2013,-500
28-05-2013,-500
29-05-2013,-500
30-05-2013,-500
31-05-2013,-500
03-06-2013,-500
04-06-2013,-500
05-06-2013,-500
06-06-2013,-500
07-06-2013,-500
10-06-2013,-500
11-06-2013,-500
12-06-2013,-500
13-06-2013,-500
14-06-2013,-500
17-06-2013,-500
18-06-2013,-500
19-06-2013,-500
20-06-2013,-500
21-06-2013,-500
24-06-2013,-500
25-06-2013,-500
26-06-2013,-500
27-06-2013,-500
28-06-2013,-500
02-07-2013,-500
03-07-2013,-500
04-07-2013,-500
05-07-2013,-500
08-07-2013,-500
09-07-2013,-500
10-07-2013,-500
11-07-2013,-500
12-07-2013,-500
15-07-2013,-500
16-07-2013,-500
17-07-2013,-500
18-07-2013,-500
19-07-2013,-500
22-07-2013,-500
23-07-2013,-500
24-07-2013,-500
25-07-2013,-500
26-07-2013,-500
29-07-2013,-500
30-07-2013,-500
31-07-2013,-500
01-08-2013,-500
02-08-2013,-500
06-08-2013,-500
07-08-2013,-500
08-08-2013,-500
09-08-2013,-500
12-08-2013,-500
13-08-2013,-500
14-08-2013,-500
15-08-2013,-500
16-08-2013,-500
19-08-2013,-500
20-08-2013,-500
21-08-2013,-500
22-08-2013,-500
23-08-2013,-500
26-08-2013,-500
27-08-2013,-500
28-08-2013,-500
29-08-2013,-500
30-08-2013,-500
03-09-2013,-500
04-09-2013,-500
05-09-2013,-500
06-09-2013,-500
09-09-2013,-500
10-09-2013,-500
11-09-2013,-500
12-09-2013,-500
13-09-2013,-500
16-09-2013,-500
17-09-2013,-500
18-09-2013,-500
19-09-2013,-500
20-09-2013,-500
23-09-2013,-500
24-09-2013,-500
25-09-2013,-500
26-09-2013,-500
27-09-2013,-500
30-09-2013,-500
01-10-2013,-500
02-10-2013,-500
03-10-2013,-500
04-10-2013,-500
07-10-2013,-500
08-10-2013,-500
09-10-2013,-500
10-10-2013,-500
11-10-2013,-500
15-10-2013,-500
16-10-2013,-500
17-10-2013,-500
18-10-2013,-500
21-10-2013,-500
22-10-2013,-500
23-10-2013,-500
24-10-2013,-500
25-10-2013,-500
28-10-2013,-500
29-10-2013,-500
30-10-2013,-500
31-10-2013,-500
01-11-2013,-500
04-11-2013,-500
05-11-2013,-500
06-11-2013,-500
07-11-2013,-500
08-11-2013,-500
12-11-2013,-500
13-11-2013,-500
14-11-2013,-500
15-11-2013,-500
18-11-2013,-500
19-11-2013,-500
20-11-2013,-500
21-11-2013,-500
22-11-2013,-500
25-11-2013,-500
26-11-2013,-500
27-11-2013,-500
28-11-2013,-500
29-11-2013,-500
02-12-2013,-500
03-12-2013,-500
04-12-2013,-500
05-12-2013,-500
06-12-2013,-500
09-12-2013,-500
10-12-2013,-500
11-12-2013,-500
12-12-2013,-500
13-12-2013,-500
16-12-2013,-500
17-12-2013,-500
18-12-2013,-500
19-12-2013,-500
20-12-2013,-500
23-12-2013,-500
24-12-2013,-500
27-12-2013,-500
30-12-2013,-500
31-12-2013,-500
02-01-2014,-500
03-01-2014,-500
06-01-2014,-500
07-01-2014,-500
08-01-2014,-500
09-01-2014,-500
10-01-2014,-500
13-01-2014,-500
14-01-2014,-500
15-01-2014,-500
16-01-2014,-500
17-01-2014,-500
20-01-2014,-500
21-01-2014,-500
22-01-2014,-500
23-01-2014,-500
24-01-2014,-500
27-01-2014,-500
28-01-2014,-500
29-01-2014,-500
30-01-2014,-500
31-01-2014,-500
03-02-2014,-500
04-02-2014,-500
05-02-2014,-500
06-02-2014,-500
07-02-2014,-500
10-02-2014,-500
11-02-2014,-500
12-02-2014,-500
13-02-2014,-500
14-02-2014,-500
18-02-2014,-500
19-02-2014,-500
20-02-2014,-500
21-02-2014,-500
24-02-2014,-500
25-02-2014,-500
26-02-2014,-500
27-02-2014,-500
28-02-2014,-500
03-03-2014,-500
04-03-2014,-500
05-03-2014,-500
06-03-2014,-500
07-03-2014,-500
10-03-2014,-500
11-03-2014,-500
12-03-2014,-500
13-03-2014,-500
14-03-2014,-500
17-03-2014,-500
18-03-2014,-500
19-03-2014,-500
20-03-2014,-500
21-03-2014,-500
24-03-2014,-500
25-03-2014,-500
26-03-2014,-500
27-03-2014,-500
28-03-2014,-500
31-03-2014,-500
01-04-2014,-500
02-04-2014,-500
03-04-2014,-500
04-04-2014,-500
07-04-2014,-500
08-04-2014,-500
09-04-2014,-500
10-04-2014,-500
11-04-2014,-500
14-04-2014,-500
15-04-2014,-500
16-04-2014,-500
17-04-2014,-500
21-04-2014,-500
22-04-2014,-500
23-04-2014,-500
24-04-2014,-500
25-04-2014,-500
28-04-2014,-500
29-04-2014,-500
30-04-2014,-500
01-05-2014,-500
02-05-2014,-500
05-05-2014,-500
06-05-2014,-500
07-05-2014,-500
08-05-2014,-500
09-05-2014,-500
12-05-2014,-500
13-05-2014,-500
14-05-2014,-500
15-05-2014,-500
16-05-2014,-500
20-05-2014,-500
21-05-2014,-500
22-05-2014,-500
23-05-2014,-500
26-05-2014,-500
27-05-2014,-500
28-05-2014,-500
29-05-2014,-500
30-05-2014,-500
02-06-2014,-500
03-06-2014,-500
04-06-2014,-500
05-06-2014,-500
06-06-2014,-500
09-06-2014,-500
10-06-2014,-500
11-06-2014,-500
12-06-2014,-500
13-06-2014,-500
16-06-2014,-500
17-06-2014,-500
18-06-2014,-500
19-06-2014,-500
20-06-2014,-500
23-06-2014,-500
24-06-2014,-500
25-06-2014,-500
26-06-2014,-500
27-06-2014,-500
30-06-2014,-500
02-07-2014,-500
03-07-2014,-500
04-07-2014,-500
07-07-2014,-500
08-07-2014,-500
09-07-2014,-500
10-07-2014,-500
11-07-2014,-500
14-07-2014,-500
15-07-2014,-500
16-07-2014,-500
17-07-2014,-500
18-07-2014,-500
21-07-2014,-500
22-07-2014,-500
23-07-2014,-500
24-07-2014,-500
25-07-2014,-500
28-07-2014,-500
29-07-2014,-500
30-07-2014,-500
31-07-2014,-500
01-08-2014,-500
05-08-2014,-500
06-08-2014,-500
07-08-2014,-500
08-08-2014,-500
11-08-2014,-500
12-08-2014,-500
13-08-2014,-500
14-08-2014,-500
15-08-2014,-500
18-08-2014,-500
19-08-2014,-500
20-08-2014,-500
21-08-2014,-500
22-08-2014,-500
25-08-2014,-500
26-08-2014,-500
27-08-2014,-500
28-08-2014,-500
29-08-2014,-500
02-09-2014,-500
03-09-2014,-500
04-09-2014,-500
05-09-2014,-500
08-09-2014,-500
09-09-2014,-500
10-09-2014,-500
11-09-2014,-500
12-09-2014,-500
15-09-2014,-500
16-09-2014,-500
17-09-2014,-500
18-09-2014,-500
19-09-2014,-500
22-09-2014,-500
23-09-2014,-500
24-09-2014,-500
25-09-2014,-500
26-09-2014,-500
29-09-2014,-500
30-09-2014,-500
01-10-2014,-500
02-10-2014,-500
03-10-2014,-500
06-10-2014,-500
07-10-2014,-500
08-10-2014,-500
09-10-2014,-500
10-10-2014,-500
14-10-2014,-500
15-10-2014,-500
16-10-2014,-500
17-10-2014,-500
20-10-2014,-500
21-10-2014,-500
22-10-2014,-500
23-10-2014,-500
24-10-2014,-500
27-10-2014,-500
28-10-2014,-500
29-10-2014,-500
30-10-2014,-500
31-10-2014,-500
03-11-2014,-500
04-11-2014,-500
05-11-2014,-500
06-11-2014,-500
07-11-2014,-500
10-11-2014,-500
12-11-2014,-500
13-11-2014,-500
14-11-2014,-500
17-11-2014,-500
18-11-2014,-500
19-11-2014,-500
20-11-2014,-500
21-11-2014,-500
24-11-2014,-500
25-11-2014,-500
26-11-2014,-500
27-11-2014,-500
28-11-2014,-500
01-12-2014,-500
02-12-2014,-500
03-12-2014,-500
04-12-2014,-500
05-12-2014,-500
08-12-2014,-500
09-12-2014,-500
10-12-2014,-500
11-12-2014,-500
12-12-2014,-500
15-12-2014,-500
16-12-2014,-500
17-12-2014,-500
18-12-2014,-500
19-12-2014,-500
22-12-2014,-500
23-12-2014,-500
24-12-2014,-500
29-12-2014,-500
30-12-2014,-500
31-12-2014,-500
02-01-2015,-500
05-01-2015,-500
06-01-2015,-500
07-01-2015,-500
08-01-2015,-500
09-01-2015,-500
12-01-2015,-500
13-01-2015,-500
14-01-2015,-500
15-01-2015,-500
16-01-2015,-500
19-01-2015,-500
20-01-2015,-500
21-01-2015,-500
22-01-2015,-500
23-01-2015,-500
26-01-2015,-500
27-01-2015,-500
28-01-2015,-500
29-01-2015,-500
30-01-2015,-500
02-02-2015,-500
03-02-2015,-500
04-02-2015,-500
05-02-2015,-500
06-02-2015,-500
09-02-2015,-500
10-02-2015,-500
11-02-2015,-500
12-02-2015,-500
13-02-2015,-500
17-02-2015,-500
18-02-2015,-500
19-02-2015,-500
20-02-2015,-500
23-02-2015,-500
24-02-2015,-500
25-02-2015,-500
26-02-2015,-500
27-02-2015,-500
02-03-2015,-500
03-03-2015,-500
04-03-2015,-500
05-03-2015,-500
06-03-2015,-500
09-03-2015,-500
10-03-2015,-500
11-03-2015,-500
12-03-2015,-500
13-03-2015,-500
16-03-2015,-500
17-03-2015,-500
18-03-2015,-500
19-03-2015,-500
20-03-2015,-500
23-03-2015,-500
24-03-2015,-500
25-03-2015,-500
26-03-2015,-500
27-03-2015,-500
30-03-2015,-500
31-03-2015,-500
01-04-2015,-500
02-04-2015,-500
06-04-2015,-500
07-04-2015,-500
08-04-2015,-500
09-04-2015,-500
10-04-2015,-500
13-04-2015,-500
14-04-2015,-500
15-04-2015,-500
16-04-2015,-500
17-04-2015,-500
20-04-2015,-500
21-04-2015,-500
22-04-2015,-500
23-04-2015,-500
24-04-2015,-500
27-04-2015,-500
28-04-2015,-500
29-04-2015,-500
30-04-2015,-500
01-05-2015,-500
04-05-2015,-500
05-05-2015,-500
06-05-2015,-500
07-05-2015,-500
08-05-2015,-500
11-05-2015,-500
12-05-2015,-500
13-05-2015,-500
14-05-2015,-500
15-05-2015,-500
19-05-2015,-500
20-05-2015,-500
21-05-2015,-500
22-05-2015,-500
25-05-2015,-500
26-05-2015,-500
27-05-2015,-500
28-05-2015,-500
29-05-2015,-500
01-06-2015,-500
02-06-2015,-500
03-06-2015,-500
04-06-2015,-500
05-06-2015,-500
08-06-2015,-500
09-06-2015,-500
10-06-2015,-500
11-06-2015,-500
12-06-2015,-500
15-06-2015,-500
16-06-2015,-500
17-06-2015,-500
18-06-2015,-500
19-06-2015,-500
22-06-2015,-500
23-06-2015,-500
24-06-2015,-500
25-06-2015,-500
26-06-2015,-500
29-06-2015,-500
30-06-2015,-500
02-07-2015,-500
03-07-2015,-500
06-07-2015,-500
07-07-2015,-500
08-07-2015,-500
09-07-2015,-500
10-07-2015,-500
13-07-2015,-500
14-07-2015,-500
15-07-2015,-500
16-07-2015,-500
17-07-2015,-500
20-07-2015,-500
21-07-2015,-500
22-07-2015,-500
23-07-2015,-500
24-07-2015,-500
27-07-2015,-500
28-07-2015,-500
29-07-2015,-500
30-07-2015,-500
31-07-2015,-500
04-08-2015,-500
05-08-2015,-500
06-08-2015,-500
07-08-2015,-500
10-08-2015,-500
11-08-2015,-500
12-08-2015,-500
13-08-2015,-500
14-08-2015,-500
17-08-2015,-500
18-08-2015,-500
19-08-2015,-500
20-08-2015,-500
21-08-2015,-500
24-08-2015,-500
25-08-2015,-500
26-08-2015,-500
27-08-2015,-500
28-08-2015,-500
31-08-2015,-500
01-09-2015,-500
02-09-2015,-500
03-09-2015,-500
04-09-2015,-500
08-09-2015,-500
09-09-2015,-500
10-09-2015,-500
11-09-2015,-500
14-09-2015,-500
15-09-2015,-500
16-09-2015,-500
17-09-2015,-500
18-09-2015,-500
21-09-2015,-500
22-09-2015,-500
23-09-2015,-500
24-09-2015,-500
25-09-2015,-500
28-09-2015,-500
29-09-2015,-500
30-09-2015,-500
01-10-2015,-500
02-10-2015,-500
05-10-2015,-500
06-10-2015,-500
07-10-2015,-500
08-10-2015,-500
09-10-2015,-500
13-10-2015,-500
14-10-2015,-500
15-10-2015,-500
16-10-2015,-500
19-10-2015,-500
20-10-2015,-500
21-10-2015,-500
22-10-2015,-500
23-10-2015,-500
26-10-2015,-500
27-10-2015,-500
28-10-2015,-500
29-10-2015,-500
30-10-2015,-500
02-11-2015,-500
03-11-2015,-500
04-11-2015,-500
05-11-2015,-500
06-11-2015,-500
09-11-2015,-500
10-11-2015,-500
12-11-2015,-500
13-11-2015,-500
16-11-2015,-500
17-11-2015,-500
18-11-2015,-500
19-11-2015,-500
20-11-2015,-500
23-11-2015,-500
24-11-2015,-500
25-11-2015,-500
26-11-2015,-500
27-11-2015,-500
30-11-2015,-500
01-12-2015,-500
02-12-2015,-500
03-12-2015,-500
04-12-2015,-500
07-12-2015,-500
08-12-2015,-500
09-12-2015,-500
10-12-2015,-500
11-12-2015,-500
14-12-2015,-500
15-12-2015,-500
16-12-2015,-500
17-12-2015,-500
18-12-2015,-500
21-12-2015,-500
22-12-2015,-500
23-12-2015,-500
24-12-2015,-500
29-12-2015,-500
30-12-2015,-500
31-12-2015,-500
04-01-2016,-500
05-01-2016,-500
06-01-2016,-500
07-01-2016,-500
08-01-2016,-500
11-01-2016,-500
12-01-2016,-500
13-01-2016,-500
14-01-2016,-500
15-01-2016,-500
18-01-2016,-500
19-01-2016,-500
20-01-2016,-500
21-01-2016,-500
22-01-2016,-500
25-01-2016,-500
26-01-2016,-500
27-01-2016,-500
28-01-2016,-500
29-01-2016,-500
01-02-2016,-500
02-02-2016,-500
03-02-2016,-500
04-02-2016,-500
05-02-2016,-500
08-02-2016,-500
09-02-2016,-500
10-02-2016,-500
11-02-2016,-500
12-02-2016,-500
16-02-2016,-500
17-02-2016,-500
18-02-2016,-500
19-02-2016,-500
22-02-2016,-500
23-02-2016,-500
24-02-2016,-500
25-02-2016,-500
26-02-2016,-500
29-02-2016,-500
01-03-2016,-500
02-03-2016,-500
03-03-2016,-500
04-03-2016,-500
07-03-2016,-500
08-03-2016,-500
09-03-2016,-500
10-03-2016,-500
11-03-2016,-500
14-03-2016,-500
15-03-2016,-500
16-03-2016,-500
17-03-2016,-500
18-03-2016,-500
21-03-2016,-500
22-03-2016,-500
23-03-2016,-500
24-03-2016,-500
28-03-2016,-500
29-03-2016,-500
30-03-2016,-500
31-03-2016,-500
01-04-2016,-500
04-04-2016,-500
05-04-2016,-500
06-04-2016,-500
07-04-2016,-500
08-04-2016,-500
11-04-2016,-500
12-04-2016,-500
13-04-2016,-500
14-04-2016,-500
15-04-2016,-500
18-04-2016,-500
19-04-2016,-500
20-04-2016,-500
21-04-2016,-500
22-04-2016,-500
25-04-2016,-500
26-04-2016,-500
27-04-2016,-500
28-04-2016,-500
29-04-2016,-500
02-05-2016,-500
03-05-2016,-500
04-05-2016,-500
05-05-2016,-500
06-05-2016,-500
09-05-2016,-500
10-05-2016,-500
11-05-2016,-500
12-05-2016,-500
13-05-2016,-500
16-05-2016,-500
17-05-2016,-500
18-05-2016,-500
19-05-2016,-500
20-05-2016,-500
24-05-2016,-500
25-05-2016,-500
26-05-2016,-500
27-05-2016,-500
30-05-2016,-500
31-05-2016,-500
01-06-2016,-500
02-06-2016,-500
03-06-2016,-500
06-06-2016,-500
07-06-2016,-500
08-06-2016,-500
09-06-2016,-500
10-06-2016,-500
13-06-2016,-500
14-06-2016,-500
15-06-2016,-500
16-06-2016,-500
17-06-2016,-500
20-06-2016,-500
21-06-2016,-500
22-06-2016,-500
23-06-2016,-500
24-06-2016,-500
27-06-2016,-500
28-06-2016,-500
29-06-2016,-500
30-06-2016,-500
04-07-2016,-500
05-07-2016,-500
06-07-2016,-500
07-07-2016,-500
08-07-2016,-500
11-07-2016,-500
12-07-2016,-500
13-07-2016,-500
14-07-2016,-500
15-07-2016,-500
18-07-2016,-500
19-07-2016,-500
20-07-2016,-500
21-07-2016,-500
22-07-2016,-500
25-07-2016,-500
26-07-2016,-500
27-07-2016,-500
28-07-2016,-500
29-07-2016,-500
02-08-2016,-500
03-08-2016,-500
04-08-2016,-500
05-08-2016,-500
08-08-2016,-500
09-08-2016,-500
10-08-2016,-500
11-08-2016,-500
12-08-2016,-500
15-08-2016,-500
16-08-2016,-500
17-08-2016,-500
18-08-2016,-500
19-08-2016,-500
22-08-2016,-500
23-08-2016,-500
24-08-2016,-500
25-08-2016,-500
26-08-2016,-500
29-08-2016,-500
30-08-2016,-500
31-08-2016,-500
01-09-2016,-500
02-09-2016,-500
06-09-2016,-500
07-09-2016,-500
08-09-2016,-500
09-09-2016,-500
12-09-2016,-500
13-09-2016,-500
14-09-2016,-500
15-09-2016,-500
16-09-2016,-500
19-09-2016,-500
20-09-2016,-500
21-09-2016,-500
22-09-2016,-500
23-09-2016,-500
26-09-2016,-500
27-09-2016,-500
28-09-2016,-500
29-09-2016,-500
30-09-2016,-500
03-10-2016,-500
04-10-2016,-500
05-10-2016,-500
06-10-2016,-500
07-10-2016,-500
11-10-2016,-500
12-10-2016,-500
13-10-2016,-500
14-10-2016,-500
17-10-2016,-500
18-10-2016,-500
19-10-2016,-500
20-10-2016,-500
21-10-2016,-500
24-10-2016,-500
25-10-2016,-500
26-10-2016,-500
27-10-2016,-500
28-10-2016,-500
31-10-2016,-500
01-11-2016,-500
02-11-2016,-500
03-11-2016,-500
04-11-2016,-500
07-11-2016,-500
08-11-2016,-500
09-11-2016,-500
10-11-2016,-500
14-11-2016,-500
15-11-2016,-500
16-11-2016,-500
17-11-2016,-500
18-11-2016,-500
21-11-2016,-500
22-11-2016,-500
23-11-2016,-500
24-11-2016,-500
25-11-2016,-500
28-11-2016,-500
29-11-2016,-500
30-11-2016,-500
01-12-2016,-500
02-12-2016,-500
05-12-2016,-500
06-12-2016,-500
07-12-2016,-500
08-12-2016,-500
09-12-2016,-500
12-12-2016,-500
13-12-2016,-500
14-12-2016,-500
15-12-2016,-500
16-12-2016,-500
19-12-2016,-500
20-12-2016,-500
21-12-2016,-500
22-12-2016,-500
23-12-2016,-500
28-12-2016,-500
29-12-2016,-500
30-12-2016,-500
03-01-2017,-500
04-01-2017,-500
05-01-2017,-500
06-01-2017,-500
09-01-2017,-500
10-01-2017,-500
11-01-2017,-500
12-01-2017,-500
13-01-2017,-500
16-01-2017,-500
17-01-2017,-500
18-01-2017,-500
19-01-2017,-500
20-01-2017,-500
23-01-2017,-500
24-01-2017,-500
25-01-2017,-500
26-01-2017,-500
27-01-2017,-500
30-01-2017,-500
31-01-2017,-500
01-02-2017,-500
02-02-2017,-500
03-02-2017,-500
06-02-2017,-500
07-02-2017,-500
08-02-2017,-500
09-02-2017,-500
10-02-2017,-500
13-02-2017,-500
14-02-2017,-500
15-02-2017,-500
16-02-2017,-500
17-02-2017,-500
21-02-2017,-500
22-02-2017,-500
23-02-2017,-500
24-02-2017,-500
27-02-2017,-500
28-02-2017,-500
01-03-2017,-500
02-03-2017,-500
03-03-2017,-500
06-03-2017,-500
07-03-2017,-500
08-03-2017,-500
09-03-2017,-500
10-03-2017,-500
13-03-2017,-500
14-03-2017,-500
15-03-2017,-500
16-03-2017,-500
17-03-2017,-500
20-03-2017,-500
21-03-2017,-500
22-03-2017,-500
23-03-2017,-500
24-03-2017,-500
27-03-2017,-500
28-03-2017,-500
29-03-2017,-500
30-03-2017,-500
31-03-2017,-500
03-04-2017,-500
04-04-2017,-500
05-04-2017,-500
06-04-2017,-500
07-04-2017,-500
10-04-2017,-500
11-04-2017,-500
12-04-2017,-500
13-04-2017,-500
17-04-2017,-500
18-04-2017,-500
19-04-2017,-500
20-04-2017,-500
21-04-2017,-500
24-04-2017,-500
25-04-2017,-500
26-04-2017,-500
27-04-2017,-500
28-04-2017,-500
01-05-2017,-500
02-05-2017,-500
03-05-2017,-500
04-05-2017,-500
05-05-2017,-500
08-05-2017,-500
09-05-2017,-500
10-05-2017,-500
11-05-2017,-500
12-05-2017,-500
15-05-2017,-500
16-05-2017,-500
17-05-2017,-500
18-05-2017,-500
19-05-2017,-500
23-05-2017,-500
24-05-2017,-500
25-05-2017,-500
26-05-2017,-500
29-05-2017,-500
30-05-2017,-500
31-05-2017,-500
01-06-2017,-500
02-06-2017,-500
05-06-2017,-500
06-06-2017,-500
07-06-2017,-500
08-06-2017,-500
09-06-2017,-500
12-06-2017,-500
13-06-2017,-500
14-06-2017,-500
15-06-2017,-500
16-06-2017,-500
19-06-2017,-500
20-06-2017,-500
21-06-2017,-500
22-06-2017,-500
23-06-2017,-500
26-06-2017,-500
27-06-2017,-500
28-06-2017,-500
29-06-2017,-500
30-06-2017,-500
04-07-2017,-500
05-07-2017,-500
06-07-2017,-500
07-07-2017,-500
10-07-2017,-500
11-07-2017,-500
12-07-2017,-500
13-07-2017,-500
14-07-2017,-500
17-07-2017,-500
18-07-2017,-500
19-07-2017,-500
20-07-2017,-500
21-07-2017,-500
24-07-2017,-500
25-07-2017,-500
26-07-2017,-500
27-07-2017,-500
28-07-2017,-500
31-07-2017,-500
01-08-2017,-500
02-08-2017,-500
03-08-2017,-500
04-08-2017,-500
08-08-2017,-500
09-08-2017,-500
10-08-2017,-500
11-08-2017,-500
14-08-2017,-500
15-08-2017,-500
16-08-2017,-500
17-08-2017,-500
18-08-2017,-500
21-08-2017,-500
22-08-2017,-500
23-08-2017,-500
24-08-2017,-500
25-08-2017,-500
28-08-2017,-500
29-08-2017,-500
30-08-2017,-500
31-08-2017,-500
01-09-2017,-500
05-09-2017,-500
06-09-2017,-500
07-09-2017,-500
08-09-2017,-500
11-09-2017,-500
12-09-2017,-500
13-09-2017,-500
14-09-2017,-500
15-09-2017,-500
18-09-2017,-500
19-09-2017,-500
20-09-2017,-500
21-09-2017,-500
22-09-2017,-500
25-09-2017,-500
26-09-2017,-500
27-09-2017,-500
28-09-2017,-500
29-09-2017,-500
02-10-2017,-500
03-10-2017,-500
04-10-2017,-500
05-10-2017,-500
06-10-2017,-500
10-10-2017,-500
11-10-2017,-500
12-10-2017,-500
13-10-2017,-500
16-10-2017,-500
17-10-2017,-500
18-10-2017,-500
19-10-2017,-500
20-10-2017,-500
23-10-2017,-500
24-10-2017,-500
25-10-2017,-500
26-10-2017,-500
27-10-2017,-500
30-10-2017,-500
31-10-2017,-500
01-11-2017,-500
02-11-2017,-500
03-11-2017,-500
06-11-2017,-500
07-11-2017,-500
08-11-2017,-500
09-11-2017,-500
10-11-2017,-500
14-11-2017,-500
15-11-2017,-500
16-11-2017,-500
17-11-2017,-500
20-11-2017,-500
21-11-2017,-500
22-11-2017,-500
23-11-2017,-500
24-11-2017,-500
27-11-2017,-500
28-11-2017,-500
29-11-2017,-500
30-11-2017,-500
01-12-2017,-500
04-12-2017,-500
05-12-2017,-500
06-12-2017,-500
07-12-2017,-500
08-12-2017,-500
11-12-2017,-500
12-12-2017,-500
13-12-2017,-500
14-12-2017,-500
15-12-2017,-500
18-12-2017,-500
19-12-2017,-500
20-12-2017,-500
21-12-2017,-500
22-12-2017,-500
27-12-2017,-500
28-12-2017,-500
29-12-2017,-500
02-01-2018,-500
03-01-2018,-500
04-01-2018,-500
05-01-2018,-500
08-01-2018,-500
09-01-2018,-500
10-01-2018,-500
11-01-2018,-500
12-01-2018,-500
15-01-2018,-500
16-01-2018,-500
17-01-2018,-500
18-01-2018,-500
19-01-2018,-500
22-01-2018,-500
23-01-2018,-500
24-01-2018,-500
25-01-2018,-500
26-01-2018,-500
29-01-2018,-500
30-01-2018,-500
31-01-2018,-500
01-02-2018,-500
02-02-2018,-500
05-02-2018,-500
06-02-2018,-500
07-02-2018,-500
08-02-2018,-500
09-02-2018,-500
12-02-2018,-500
13-02-2018,-500
14-02-2018,-500
15-02-2018,-500
16-02-2018,-500
20-02-2018,-500
21-02-2018,-500
22-02-2018,-500
23-02-2018,-500
26-02-2018,-500
27-02-2018,-500
28-02-2018,-500
01-03-2018,-500
02-03-2018,-500
05-03-2018,-500
06-03-2018,-500
07-03-2018,-500
08-03-2018,-500
09-03-2018,-500
12-03-2018,-500
13-03-2018,-500
14-03-2018,-500
15-03-2018,-500
16-03-2018,-500
19-03-2018,-500
20-03-2018,-500
21-03-2018,-500
22-03-2018,-500
23-03-2018,-500
26-03-2018,-500
27-03-2018,-500
28-03-2018,-500
29-03-2018,-500
02-04-2018,-500
03-04-2018,-500
04-04-2018,-500
05-04-2018,-500
06-04-2018,-500
09-04-2018,-500
10-04-2018,-500
11-04-2018,-500
12-04-2018,-500
13-04-2018,-500
16-04-2018,-500
17-04-2018,-500
18-04-2018,-500
19-04-2018,-500
20-04-2018,-500
23-04-2018,-500
24-04-2018,-500
25-04-2018,-500
26-04-2018,-500
27-04-2018,-500
30-04-2018,-500
01-05-2018,-500
02-05-2018,-500
03-05-2018,-500
04-05-2018,-500
07-05-2018,-500
08-05-2018,-500
09-05-2018,-500
10-05-2018,-500
11-05-2018,-500
14-05-2018,-500
15-05-2018,-500
16-05-2018,-500
17-05-2018,-500
18-05-2018,-500
22-05-2018,-500
23-05-2018,-500
24-05-2018,-500
25-05-2018,-500
28-05-2018,-500
29-05-2018,-500
30-05-2018,-500
31-05-2018,-500
01-06-2018,-500
04-06-2018,-500
05-06-2018,-500
06-06-2018,-500
07-06-2018,-500
08-06-2018,-500
11-06-2018,-500
12-06-2018,-500
13-06-2018,-500
14-06-2018,-500
15-06-2018,-500
18-06-2018,-500
19-06-2018,-500
20-06-2018,-500
21-06-2018,-500
22-06-2018,-500
25-06-2018,-500
26-06-2018,-500
27-06-2018,-500
28-06-2018,-500
29-06-2018,-500
03-07-2018,-500
04-07-2018,-500
05-07-2018,-500
06-07-2018,-500
09-07-2018,-500
10-07-2018,-500
11-07-2018,-500
12-07-2018,-500
13-07-2018,-500
16-07-2018,-500
17-07-2018,-500
18-07-2018,-500
19-07-2018,-500
20-07-2018,-500
23-07-2018,-500
24-07-2018,-500
25-07-2018,-500
26-07-2018,-500
27-07-2018,-500
30-07-2018,-500
31-07-2018,-500
01-08-2018,-500
02-08-2018,-500
03-08-2018,-500
07-08-2018,-500
08-08-2018,-500
09-08-2018,-500
10-08-2018,-500
13-08-2018,-500
14-08-2018,-500
15-08-2018,-500
16-08-2018,-500
17-08-2018,-500
20-08-2018,-500
21-08-2018,-500
22-08-2018,-500
23-08-2018,-500
24-08-2018,-500
27-08-2018,-500
28-08-2018,-500
29-08-2018,-500
30-08-2018,-500
31-08-2018,-500
04-09-2018,-500
05-09-2018,-500
06-09-2018,-500
07-09-2018,-500
10-09-2018,-500
11-09-2018,-500
12-09-2018,-500
13-09-2018,-500
14-09-2018,-500
17-09-2018,-500
18-09-2018,-500
19-09-2018,-500
20-09-2018,-500
21-09-2018,-500
24-09-2018,-500
25-09-2018,-500
26-09-2018,-500
27-09-2018,-500
28-09-2018,-500
01-10-2018,-500
02-10-2018,-500
03-10-2018,-500
04-10-2018,-500
05-10-2018,-500
09-10-2018,-500
10-10-2018,-500
11-10-2018,-500
12-10-2018,-500
15-10-2018,-500
16-10-2018,-500
17-10-2018,-500
18-10-2018,-500
19-10-2018,-500
22-10-2018,-500
23-10-2018,-500
24-10-2018,-500
25-10-2018,-500
26-10-2018,-500
29-10-2018,-500
30-10-2018,-500
31-10-2018,-500
01-11-2018,-500
02-11-2018,-500
05-11-2018,-500
06-11-2018,-500
07-11-2018,-500
08-11-2018,-500
09-11-2018,-500
13-11-2018,-500
14-11-2018,-500
15-11-2018,-500
16-11-2018,-500
19-11-2018,-500
20-11-2018,-500
21-11-2018,-500
22-11-2018,-500
23-11-2018,-500
26-11-2018,-500
27-11-2018,-500
28-11-2018,-500
29-11-2018,-500
30-11-2018,-500
03-12-2018,-500
04-12-2018,-500
05-12-2018,-500
06-12-2018,-500
07-12-2018,-500
10-12-2018,-500
11-12-2018,-500
12-12-2018,-500
13-12-2018,-500
14-12-2018,-500
17-12-2018,-500
18-12-2018,-500
19-12-2018,-500
20-12-2018,-500
21-12-2018,-500
24-12-2018,-500
27-12-2018,-500
28-12-2018,-500
31-12-2018,-500
02-01-2019,-500
03-01-2019,-500
04-01-2019,-500
07-01-2019,-500
08-01-2019,-500
09-01-2019,-500
10-01-2019,-500
11-01-2019,-500
14-01-2019,-500
15-01-2019,-500
16-01-2019,-500
17-01-2019,-500
18-01-2019,-500
21-01-2019,-500
22-01-2019,-500
23-01-2019,-500
24-01-2019,-500
25-01-2019,-500
28-01-2019,-500
29-01-2019,-500
30-01-2019,-500
31-01-2019,-500
01-02-2019,-500
04-02-2019,-500
05-02-2019,-500
06-02-2019,-500
07-02-2019,-500
08-02-2019,-500
11-02-2019,-500
12-02-2019,-500
13-02-2019,-500
14-02-2019,-500
15-02-2019,-500
19-02-2019,-500
20-02-2019,-500
21-02-2019,-500
22-02-2019,-500
25-02-2019,-500
26-02-2019,-500
27-02-2019,-500
28-02-2019,-500
01-03-2019,-500
04-03-2019,-500
05-03-2019,-500
06-03-2019,-500
07-03-2019,-500
08-03-2019,-500
11-03-2019,-500
12-03-2019,-500
13-03-2019,-500
14-03-2019,-500
15-03-2019,-500
18-03-2019,-500
19-03-2019,-500
20-03-2019,-500
21-03-2019,-500
22-03-2019,-500
25-03-2019,-500
26-03-2019,-500
27-03-2019,-500
28-03-2019,-500
29-03-2019,-500
01-04-2019,-500
02-04-2019,-500
03-04-2019,-500
04-04-2019,-500
05-04-2019,-500
08-04-2019,-500
09-04-2019,-500
10-04-2019,-500
11-04-2019,-500
12-04-2019,-500
15-04-2019,-500
16-04-2019,-500
17-04-2019,-500
18-04-2019,-500
22-04-2019,-500
23-04-2019,-500
24-04-2019,-500
25-04-2019,-500
26-04-2019,-500
29-04-2019,-500
30-04-2019,-500
01-05-2019,-500
02-05-2019,-500
03-05-2019,-500
06-05-2019,-500
07-05-2019,-500
08-05-2019,-500
09-05-2019,-500
10-05-2019,-500
13-05-2019,-500
14-05-2019,-500
15-05-2019,-500
16-05-2019,-500
17-05-2019,-500
21-05-2019,-500
22-05-2019,-500
23-05-2019,-500
24-05-2019,-500
27-05-2019,-500
28-05-2019,-500
29-05-2019,-500
30-05-2019,-500
31-05-2019,-500
03-06-2019,-500
04-06-2019,-500
05-06-2019,-500
06-06-2019,-500
07-06-2019,-500
10-06-2019,-500
11-06-2019,-500
12-06-2019,-500
13-06-2019,-500
14-06-2019,-500
17-06-2019,-500
18-06-2019,-500
19-06-2019,-500
20-06-2019,-500
21-06-2019,-500
24-06-2019,-500
25-06-2019,-500
26-06-2019,-500
27-06-2019,-500
28-06-2019,-500
02-07-2019,-500
03-07-2019,-500
04-07-2019,-500
05-07-2019,-500
08-07-2019,-500
09-07-2019,-500
10-07-2019,-500
11-07-2019,-500
12-07-2019,-500
15-07-2019,-500
16-07-2019,-500
17-07-2019,-500
18-07-2019,-500
19-07-2019,-500
22-07-2019,-500
23-07-2019,-500
24-07-2019,-500
25-07-2019,-500
26-07-2019,-500
29-07-2019,-500
30-07-2019,-500
31-07-2019,-500
01-08-2019,-500
02-08-2019,-500
06-08-2019,-500
07-08-2019,-500
08-08-2019,-500
09-08-2019,-500
12-08-2019,-500
13-08-2019,-500
14-08-2019,-500
15-08-2019,-500
16-08-2019,-500
19-08-2019,-500
20-08-2019,-500
21-08-2019,-500
22-08-2019,-500
23-08-2019,-500
26-08-2019,-500
27-08-2019,-500
28-08-2019,-500
29-08-2019,-500
30-08-2019,-500
03-09-2019,-500
04-09-2019,-500
05-09-2019,-500
06-09-2019,-500
09-09-2019,-500
10-09-2019,-500
11-09-2019,-500
12-09-2019,-500
13-09-2019,-500
16-09-2019,-500
17-09-2019,-500
18-09-2019,-500
19-09-2019,-500
20-09-2019,-500
23-09-2019,-500
24-09-2019,-500
25-09-2019,-500
26-09-2019,-500
27-09-2019,-500
30-09-2019,-500
01-10-2019,-500
02-10-2019,-500
03-10-2019,-500
04-10-2019,-500
07-10-2019,-500
08-10-2019,-500
09-10-2019,-500
10-10-2019,-500
11-10-2019,-500
15-10-2019,-500
16-10-2019,-500
17-10-2019,-500
18-10-2019,-500
21-10-2019,-500
22-10-2019,-500
23-10-2019,-500
24-10-2019,-500
25-10-2019,-500
28-10-2019,-500
29-10-2019,-500
30-10-2019,-500
31-10-2019,-500
01-11-2019,-500
04-11-2019,-500
05-11-2019,-500
06-11-2019,-500
07-11-2019,-500
08-11-2019,-500
12-11-2019,-500
13-11-2019,-500
14-11-2019,-500
15-11-2019,-500
18-11-2019,-500
19-11-2019,-500
20-11-2019,-500
21-11-2019,-500
22-11-2019,-500
25-11-2019,-500
26-11-2019,-500
27-11-2019,-500
28-11-2019,-500
29-11-2019,-500
02-12-2019,-500
03-12-2019,-500
04-12-2019,-500
05-12-2019,-500
06-12-2019,-500
09-12-2019,-500
10-12-2019,-500
11-12-2019,-500
12-12-2019,-500
13-12-2019,-500
16-12-2019,-500
17-12-2019,-500
18-12-2019,-500
19-12-2019,-500
20-12-2019,-500
23-12-2019,-500
24-12-2019,-500
27-12-2019,-500
30-12-2019,-500
31-12-2019,-500
02-01-2020,-500
03-01-2020,-500
06-01-2020,-500
07-01-2020,-500
08-01-2020,-500
09-01-2020,-500
10-01-2020,-500
13-01-2020,-500
14-01-2020,-500
15-01-2020,-500
16-01-2020,-500
17-01-2020,-500
20-01-2020,-500
21-01-2020,-500
22-01-2020,-500
23-01-2020,-500
24-01-2020,-500
27-01-2020,-500
28-01-2020,-500
29-01-2020,-500
30-01-2020,-500
31-01-2020,-500
03-02-2020,-500
04-02-2020,-500
05-02-2020,-500
06-02-2020,-500
07-02-2020,-500
10-02-2020,-500
11-02-2020,-500
12-02-2020,-500
13-02-2020,-500
14-02-2020,-500
18-02-2020,-500
19-02-2020,-500
20-02-2020,-500
21-02-2020,-500
24-02-2020,-500
25-02-2020,-500
26-02-2020,-500
27-02-2020,-500
28-02-2020,-500
02-03-2020,-500
03-03-2020,-500
04-03-2020,-500
05-03-2020,-500
06-03-2020,-500
09-03-2020,-500
10-03-2020,-500
11-03-2020,-500
12-03-2020,-500
13-03-2020,-500
16-03-2020,-500
17-03-2020,-500
18-03-2020,-500
19-03-2020,-500
20-03-2020,-500
23-03-2020,-500
24-03-2020,-500
25-03-2020,-500
26-03-2020,-500
27-03-2020,-500
30-03-2020,-500
31-03-2020,-500
01-04-2020,-500
02-04-2020,-500
03-04-2020,-500
06-04-2020,-500
07-04-2020,-500
08-04-2020,-500
09-04-2020,-500
13-04-2020,-500
14-04-2020,-500
15-04-2020,-500
16-04-2020,-500
17-04-2020,-500
20-04-2020,-500
21-04-2020,-500
22-04-2020,-500
23-04-2020,-500
24-04-2020,-500
27-04-2020,-500
28-04-2020,-500
29-04-2020,-500
30-04-2020,-500
01-05-2020,-500
04-05-2020,-500
05-05-2020,-500
06-05-2020,-500
07-05-2020,-500
08-05-2020,-500
11-05-2020,-500
12-05-2020,-500
13-05-2020,-500
14-05-2020,-500
15-05-2020,-500
19-05-2020,-500
20-05-2020,-500
21-05-2020,-500
22-05-2020,-500
25-05-2020,-500
26-05-2020,-500
27-05-2020,-500
28-05-2020,-500
29-05-2020,-500
01-06-2020,-500
02-06-2020,-500
03-06-2020,-500
04-06-2020,-500
05-06-2020,-500
08-06-2020,-500
09-06-2020,-500
10-06-2020,-500
11-06-2020,-500
12-06-2020,-500
15-06-2020,-500
16-06-2020,-500
17-06-2020,-500
18-06-2020,-500
19-06-2020,-500
22-06-2020,-500
23-06-2020,-500
24-06-2020,-500
25-06-2020,-500
26-06-2020,-500
29-06-2020,-500
30-06-2020,-500
02-07-2020,-500
03-07-2020,-500
06-07-2020,-500
07-07-2020,-500
08-07-2020,-500
09-07-2020,-500
10-07-2020,-500
13-07-2020,-500
14-07-2020,-500
15-07-2020,-500
16-07-2020,-500
17-07-2020,-500
20-07-2020,-500
21-07-2020,-500
22-07-2020,-500
23-07-2020,-500
24-07-2020,-500
27-07-2020,-500
28-07-2020,-500
29-07-2020,-500
30-07-2020,-500
31-07-2020,-500
04-08-2020,-500
05-08-2020,-500
06-08-2020,-500
07-08-2020,-500
10-08-2020,-500
11-08-2020,-500
12-08-2020,-500
13-08-2020,-500
14-08-2020,-500
17-08-2020,-500
18-08-2020,-500
19-08-2020,-500
20-08-2020,-500
21-08-2020,-500
24-08-2020,-500
25-08-2020,-500
26-08-2020,-500
27-08-2020,-500
28-08-2020,-500
31-08-2020,-500
01-09-2020,-500
02-09-2020,-500
03-09-2020,-500
04-09-2020,-500
08-09-2020,-500
09-09-2020,-500
10-09-2020,-500
11-09-2020,-500
14-09-2020,-500
15-09-2020,-500
16-09-2020,-500
17-09-2020,-500
18-09-2020,-500
21-09-2020,-500
22-09-2020,-500
23-09-2020,-500
24-09-2020,-500
25-09-2020,-500
28-09-2020,-500
29-09-2020,-500
30-09-2020,-500
01-10-2020,-500
02-10-2020,-500
05-10-2020,-500
06-10-2020,-500
07-10-2020,-500
08-10-2020,-500
09-10-2020,-500
13-10-2020,-500
14-10-2020,-500
15-10-2020,-500
16-10-2020,-500
19-10-2020,-500
20-10-2020,-500
21-10-2020,-500
22-10-2020,-500
23-10-2020,-500
26-10-2020,-500
27-10-2020,-500
28-10-2020,-500
29-10-2020,-500
30-10-2020,-500
02-11-2020,-500
03-11-2020,-500
04-11-2020,-500
05-11-2020,-500
06-11-2020,-500
09-11-2020,-500
10-11-2020,-500
12-11-2020,-500
13-11-2020,-500
16-11-2020,-500
17-11-2020,-500
18-11-2020,-500
19-11-2020,-500
20-11-2020,-500
23-11-2020,-500
24-11-2020,-500
25-11-2020,-500
26-11-2020,-500
27-11-2020,-500
30-11-2020,-500
01-12-2020,-500
02-12-2020,-500
03-12-2020,-500
04-12-2020,-500
07-12-2020,-500
08-12-2020,-500
09-12-2020,-500
10-12-2020,-500
11-12-2020,-500
14-12-2020,-500
15-12-2020,-500
16-12-2020,-500
17-12-2020,-500
18-12-2020,-500
21-12-2020,-500
22-12-2020,-500
23-12-2020,-500
24-12-2020,-500
29-12-2020,-500
30-12-2020,-500
31-12-2020,-500
04-01-2021,-500
05-01-2021,-500
06-01-2021,-500
07-01-2021,-500
08-01-2021,-500
11-01-2021,-500
12-01-2021,-500
13-01-2021,-500
14-01-2021,-500
15-01-2021,-500
18-01-2021,-500
19-01-2021,-500
20-01-2021,-500
21-01-2021,-500
22-01-2021,-500
25-01-2021,-500
26-01-2021,-500
27-01-2021,-500
28-01-2021,-500
29-01-2021,-500
01-02-2021,-500
02-02-2021,-500
03-02-2021,-500
04-02-2021,-500
05-02-2021,-500
08-02-2021,-500
09-02-2021,-500
10-02-2021,-500
11-02-2021,-500
12-02-2021,-500
16-02-2021,-500
17-02-2021,-500
18-02-2021,-500
19-02-2021,-500
22-02-2021,-500
23-02-2021,-500
24-02-2021,-500
25-02-2021,-500
26-02-2021,-500
01-03-2021,-500
02-03-2021,-500
03-03-2021,-500
04-03-2021,-500
05-03-2021,-500
08-03-2021,-500
09-03-2021,-500
10-03-2021,-500
11-03-2021,-500
12-03-2021,-500
15-03-2021,-500
16-03-2021,-500
17-03-2021,-500
18-03-2021,-500
19-03-2021,-500
22-03-2021,-500
23-03-2021,-500
24-03-2021,-500
25-03-2021,-500
26-03-2021,-500
29-03-2021,-500
30-03-2021,-500
31-03-2021,-500
01-04-2021,-500
05-04-2021,-500
06-04-2021,-500
07-04-2021,-500
08-04-2021,-500
09-04-2021,-500
12-04-2021,-500
13-04-2021,-500
14-04-2021,-500
15-04-2021,-500
16-04-2021,-500
19-04-2021,-500
20-04-2021,-500
21-04-2021,-500
22-04-2021,-500
23-04-2021,-500
26-04-2021,-500
27-04-2021,-500
28-04-2021,-500
29-04-2021,-500
30-04-2021,-500
03-05-2021,-500
04-05-2021,-500
05-05-2021,-500
06-05-2021,-500
07-05-2021,-500
10-05-2021,-500
11-05-2021,-500
12-05-2021,-500
13-05-2021,-500
14-05-2021,-500
17-05-2021,-500
18-05-2021,-500
19-05-2021,-500
20-05-2021,-500
21-05-2021,-500
25-05-2021,-500
26-05-2021,-500
27-05-2021,-500
28-05-2021,-500
31-05-2021,-500
01-06-2021,-500
02-06-2021,-500
03-06-2021,-500
04-06-2021,-500
07-06-2021,-500
08-06-2021,-500
09-06-2021,-500
10-06-2021,-500
11-06-2021,-500
14-06-2021,-500
15-06-2021,-500
16-06-2021,-500
17-06-2021,-500
18-06-2021,-500
21-06-2021,-500
22-06-2021,-500
23-06-2021,-500
24-06-2021,-500
25-06-2021,-500
28-06-2021,-500
29-06-2021,-500
30-06-2021,-500
02-07-2021,-500
05-07-2021,-500
06-07-2021,-500
07-07-2021,-500
08-07-2021,-500
09-07-2021,-500
12-07-2021,-500
13-07-2021,-500
14-07-2021,-500
15-07-2021,-500
16-07-2021,-500
19-07-2021,-500
20-07-2021,-500
21-07-2021,-500
22-07-2021,-500
23-07-2021,-500
26-07-2021,-500
27-07-2021,-500
28-07-2021,-500
29-07-2021,-500
30-07-2021,-500
03-08-2021,-500
04-08-2021,-500
05-08-2021,-500
06-08-2021,-500
09-08-2021,-500
10-08-2021,-500
11-08-2021,-500
12-08-2021,-500
13-08-2021,-500
16-08-2021,-500
17-08-2021,-500
18-08-2021,-500
19-08-2021,-500
20-08-2021,-500
23-08-2021,-500
24-08-2021,-500
25-08-2021,-500
26-08-2021,-500
27-08-2021,-500
30-08-2021,-500
31-08-2021,-500
01-09-2021,-500
02-09-2021,-500
03-09-2021,-500
07-09-2021,-500
08-09-2021,-500
09-09-2021,-500
10-09-2021,-500
13-09-2021,-500
14-09-2021,-500
15-09-2021,-500
16-09-2021,-500
17-09-2021,-500
20-09-2021,-500
21-09-2021,-500
22-09-2021,-500
23-09-2021,-500
24-09-2021,-500
27-09-2021,-500
28-09-2021,-500
29-09-2021,-500
01-10-2021,-500
04-10-2021,-500
05-10-2021,-500
06-10-2021,-500
07-10-2021,-500
08-10-2021,-500
12-10-2021,-500
13-10-2021,-500
14-10-2021,-500
15-10-2021,-500
18-10-2021,-500
19-10-2021,-500
20-10-2021,-500
21-10-2021,-500
22-10-2021,-500
25-10-2021,-500
26-10-2021,-500
27-10-2021,-500
28-10-2021,-500
29-10-2021,-500
01-11-2021,-500
02-11-2021,-500
03-11-2021,-500
04-11-2021,-500
05-11-2021,-500
08-11-2021,-500
09-11-2021,-500
10-11-2021,-500
12-11-2021,-500
15-11-2021,-500
16-11-2021,-500
17-11-2021,-500
18-11-2021,-500
19-11-2021,-500
22-11-2021,-500
23-11-2021,-500
24-11-2021,-500
25-11-2021,-500
26-11-2021,-500
29-11-2021,-500
30-11-2021,-500
01-12-2021,-500
02-12-2021,-500
03-12-2021,-500
06-12-2021,-500
07-12-2021,-500
08-12-2021,-500
09-12-2021,-500
10-12-2021,-500
13-12-2021,-500
14-12-2021,-500
15-12-2021,-500
16-12-2021,-500
17-12-2021,-500
20-12-2021,-500
21-12-2021,-500
22-12-2021,-500
23-12-2021,-500
24-12-2021,-500
29-12-2021,-500
30-12-2021,-500
31-12-2021,-500
04-01-2022,-500
05-01-2022,-500
06-01-2022,-500
07-01-2022,-500
10-01-2022,-500
11-01-2022,-500
12-01-2022,-500
13-01-2022,-500
14-01-2022,-500
17-01-2022,-500
18-01-2022,-500
19-01-2022,-500
20-01-2022,-500
21-01-2022,-500
24-01-2022,-500
25-01-2022,-500
26-01-2022,-500
27-01-2022,-500
28-01-2022,-500
31-01-2022,-500
01-02-2022,-500
02-02-2022,-500
03-02-2022,-500
04-02-2022,-500
07-02-2022,-500
08-02-2022,-500
09-02-2022,-500
10-02-2022,-500
11-02-2022,-500
14-02-2022,-500
15-02-2022,-500
16-02-2022,-500
17-02-2022,-500
18-02-2022,-500
22-02-2022,-500
23-02-2022,-500
24-02-2022,-500
25-02-2022,-500
28-02-2022,-500
01-03-2022,-500
02-03-2022,-500
03-03-2022,-500
04-03-2022,-500
07-03-2022,-500
08-03-2022,-500
09-03-2022,-500
10-03-2022,-500
11-03-2022,-500
14-03-2022,-500
15-03-2022,-500
16-03-2022,-500
17-03-2022,-500
18-03-2022,-500
21-03-2022,-500
22-03-2022,-500
23-03-2022,-500
24-03-2022,-500
25-03-2022,-500
28-03-2022,-500
29-03-2022,-500
30-03-2022,-500
31-03-2022,-500
01-04-2022,-500
04-04-2022,-500
05-04-2022,-500
06-04-2022,-500
07-04-2022,-500
08-04-2022,-500
11-04-2022,-500
12-04-2022,-500
13-04-2022,-500
14-04-2022,-500
18-04-2022,-500
19-04-2022,-500
20-04-2022,-500
21-04-2022,-500
22-04-2022,-500
25-04-2022,-500
26-04-2022,-500
27-04-2022,-500
28-04-2022,-500
29-04-2022,-500
02-05-2022,-500
03-05-2022,-500
04-05-2022,-500
05-05-2022,-500
06-05-2022,-500
09-05-2022,-500
10-05-2022,-500
11-05-2022,-500
12-05-2022,-500
13-05-2022,-500
16-05-2022,-500
17-05-2022,-500
18-05-2022,-500
19-05-2022,-500
20-05-2022,-500
24-05-2022,-500
25-05-2022,-500
26-05-2022,-500
27-05-2022,-500
30-05-2022,-500
31-05-2022,-500
01-06-2022,-500
02-06-2022,-500
03-06-2022,-500
06-06-2022,-500
07-06-2022,-500
08-06-2022,-500
09-06-2022,-500
10-06-2022,-500
13-06-2022,-500
14-06-2022,-500
15-06-2022,-500
16-06-2022,-500
17-06-2022,-500
20-06-2022,-500
21-06-2022,-500
22-06-2022,-500
23-06-2022,-500
24-06-2022,-500
27-06-2022,-500
28-06-2022,-500
29-06-2022,-500
30-06-2022,-500
04-07-2022,-500
05-07-2022,-500
06-07-2022,-500
07-07-2022,-500
08-07-2022,-500
11-07-2022,-500
12-07-2022,-500
13-07-2022,-500
14-07-2022,-500
15-07-2022,-500
18-07-2022,-500
19-07-2022,-500
20-07-2022,-500
21-07-2022,-500
22-07-2022,-500
25-07-2022,-500
26-07-2022,-500
27-07-2022,-500
28-07-2022,-500
29-07-2022,-500
02-08-2022,-500
03-08-2022,-500
04-08-2022,-500
05-08-2022,-500
08-08-2022,-500
09-08-2022,-500
10-08-2022,-500
11-08-2022,-500
12-08-2022,-500
15-08-2022,-500
16-08-2022,-500
17-08-2022,-500
18-08-2022,-500
19-08-2022,-500
22-08-2022,-500
23-08-2022,-500
24-08-2022,-500
25-08-2022,-500
26-08-2022,-500
29-08-2022,-500
30-08-2022,-500
31-08-2022,-500
01-09-2022,-500
02-09-2022,-500
06-09-2022,-500
07-09-2022,-500
08-09-2022,-500
09-09-2022,-500
12-09-2022,-500
13-09-2022,-500
14-09-2022,-500
15-09-2022,-500
16-09-2022,-500
19-09-2022,-500
20-09-2022,-500
21-09-2022,-500
22-09-2022,-500
23-09-2022,-500
26-09-2022,-500
27-09-2022,-500
28-09-2022,-500
29-09-2022,-500
03-10-2022,-500
04-10-2022,-500
05-10-2022,-500
06-10-2022,-500
07-10-2022,-500
11-10-2022,-500
12-10-2022,-500
13-10-2022,-500
14-10-2022,-500
17-10-2022,-500
18-10-2022,-500
19-10-2022,-500
20-10-2022,-500
21-10-2022,-500
24-10-2022,-500
25-10-2022,-500
26-10-2022,-500
27-10-2022,-500
28-10-2022,-500
31-10-2022,-500
01-11-2022,-500
02-11-2022,-500
03-11-2022,-500
04-11-2022,-500
07-11-2022,-500
08-11-2022,-500
09-11-2022,-500
10-11-2022,-500
14-11-2022,-500
15-11-2022,-500
16-11-2022,-500
17-11-2022,-500
18-11-2022,-500
21-11-2022,-500
22-11-2022,-500
23-11-2022,-500
24-11-2022,-500
25-11-2022,-500
28-11-2022,-500
29-11-2022,-500
30-11-2022,-500
01-12-2022,-500
02-12-2022,-500
05-12-2022,-500
06-12-2022,-500
07-12-2022,-500
08-12-2022,-500
09-12-2022,-500
12-12-2022,-500
13-12-2022,-500
14-12-2022,-500
15-12-2022,-500
16-12-2022,-500
19-12-2022,-500
20-12-2022,-500
21-12-2022,-500
22-12-2022,-500
23-12-2022,-500
28-12-2022,-500
29-12-2022,-500
30-12-2022,-500
03-01-2023,-500
04-01-2023,-500
05-01-2023,-500
06-01-2023,-500
09-01-2023,-500
10-01-2023,-500
11-01-2023,-500
12-01-2023,-500
13-01-2023,-500
16-01-2023,-500
17-01-2023,-500
18-01-2023,-500
19-01-2023,-500
20-01-2023,-500
23-01-2023,-500
24-01-2023,-500
25-01-2023,-500
26-01-2023,-500
27-01-2023,-500
30-01-2023,-500
31-01-2023,-500
01-02-2023,-500
02-02-2023,-500
03-02-2023,-500
06-02-2023,-500
07-02-2023,-500
08-02-2023,-500
09-02-2023,-500
10-02-2023,-500
13-02-2023,-500
14-02-2023,-500
15-02-2023,-500
16-02-2023,-500
17-02-2023,-500
21-02-2023,-500
22-02-2023,-500
23-02-2023,-500
24-02-2023,-500
27-02-2023,-500
28-02-2023,-500
01-03-2023,-500
02-03-2023,-500
03-03-2023,-500
06-03-2023,-500
07-03-2023,-500
08-03-2023,-500
09-03-2023,-500
10-03-2023,-500
13-03-2023,-500
14-03-2023,-500
15-03-2023,-500
16-03-2023,-500
17-03-2023,-500
20-03-2023,-500
21-03-2023,-500
22-03-2023,-500
23-03-2023,-500
24-03-2023,-500
27-03-2023,-500
28-03-2023,-500
29-03-2023,-500
30-03-2023,-500
31-03-2023,-500
03-04-2023,-500
04-04-2023,-500
05-04-2023,-500
06-04-2023,-500
10-04-2023,-500
11-04-2023,-500
12-04-2023,-500
13-04-2023,-500
14-04-2023,-500
17-04-2023,-500
18-04-2023,-500
19-04-2023,-500
20-04-2023,-500
21-04-2023,-500
24-04-2023,-500
25-04-2023,-500
26-04-2023,-500
27-04-2023,-500
28-04-2023,-500
01-05-2023,-500
02-05-2023,-500
03-05-2023,-500
04-05-2023,-500
05-05-2023,-500
08-05-2023,-500
09-05-2023,-500
10-05-2023,-500
11-05-2023,-500
12-05-2023,-500
15-05-2023,-500
16-05-2023,-500
17-05-2023,-500
18-05-2023,-500
19-05-2023,-500
23-05-2023,-500
24-05-2023,-500
25-05-2023,-500
26-05-2023,-500
29-05-2023,-500
30-05-2023,-500
31-05-2023,-500
01-06-2023,-500
02-06-2023,-500
05-06-2023,-500
06-06-2023,-500
07-06-2023,-500
08-06-2023,-500
09-06-2023,-500
12-06-2023,-500
13-06-2023,-500
14-06-2023,-500
15-06-2023,-500
16-06-2023,-500
19-06-2023,-500
20-06-2023,-500
21-06-2023,-500
22-06-2023,-500
23-06-2023,-500
26-06-2023,-500
27-06-2023,-500
28-06-2023,-500
29-06-2023,-500
30-06-2023,-500
04-07-2023,-500
05-07-2023,-500
06-07-2023,-500
07-07-2023,-500
10-07-2023,-500
11-07-2023,-500
12-07-2023,-500
13-07-2023,-500
14-07-2023,-500
17-07-2023,-500
18-07-2023,-500
19-07-2023,-500
20-07-2023,-500
21-07-2023,-500
24-07-2023,-500
25-07-2023,-500
26-07-2023,-500
27-07-2023,-500
28-07-2023,-500
31-07-2023,-500
01-08-2023,-500
02-08-2023,-500
03-08-2023,-500
04-08-2023,-500
08-08-2023,-500
09-08-2023,-500
10-08-2023,-500
11-08-2023,-500
14-08-2023,-500
15-08-2023,-500
16-08-2023,-500
17-08-2023,-500
18-08-2023,-500
21-08-2023,-500
22-08-2023,-500
23-08-2023,-500
24-08-2023,-500
25-08-2023,-500
28-08-2023,-500
29-08-2023,-500
30-08-2023,-500
31-08-2023,-500
01-09-2023,-500
05-09-2023,-500
06-09-2023,-500
07-09-2023,-500
================================================
FILE: python/rateslib/data/historical/estr.csv
================================================
reference_date,rate
01-10-2019,-500
02-10-2019,-500
03-10-2019,-500
04-10-2019,-500
07-10-2019,-500
08-10-2019,-500
09-10-2019,-500
10-10-2019,-500
11-10-2019,-500
14-10-2019,-500
15-10-2019,-500
16-10-2019,-500
17-10-2019,-500
18-10-2019,-500
21-10-2019,-500
22-10-2019,-500
23-10-2019,-500
24-10-2019,-500
25-10-2019,-500
28-10-2019,-500
29-10-2019,-500
30-10-2019,-500
31-10-2019,-500
01-11-2019,-500
04-11-2019,-500
05-11-2019,-500
06-11-2019,-500
07-11-2019,-500
08-11-2019,-500
11-11-2019,-500
12-11-2019,-500
13-11-2019,-500
14-11-2019,-500
15-11-2019,-500
18-11-2019,-500
19-11-2019,-500
20-11-2019,-500
21-11-2019,-500
22-11-2019,-500
25-11-2019,-500
26-11-2019,-500
27-11-2019,-500
28-11-2019,-500
29-11-2019,-500
02-12-2019,-500
03-12-2019,-500
04-12-2019,-500
05-12-2019,-500
06-12-2019,-500
09-12-2019,-500
10-12-2019,-500
11-12-2019,-500
12-12-2019,-500
13-12-2019,-500
16-12-2019,-500
17-12-2019,-500
18-12-2019,-500
19-12-2019,-500
20-12-2019,-500
23-12-2019,-500
24-12-2019,-500
27-12-2019,-500
30-12-2019,-500
31-12-2019,-500
02-01-2020,-500
03-01-2020,-500
06-01-2020,-500
07-01-2020,-500
08-01-2020,-500
09-01-2020,-500
10-01-2020,-500
13-01-2020,-500
14-01-2020,-500
15-01-2020,-500
16-01-2020,-500
17-01-2020,-500
20-01-2020,-500
21-01-2020,-500
22-01-2020,-500
23-01-2020,-500
24-01-2020,-500
27-01-2020,-500
28-01-2020,-500
29-01-2020,-500
30-01-2020,-500
31-01-2020,-500
03-02-2020,-500
04-02-2020,-500
05-02-2020,-500
06-02-2020,-500
07-02-2020,-500
10-02-2020,-500
11-02-2020,-500
12-02-2020,-500
13-02-2020,-500
14-02-2020,-500
17-02-2020,-500
18-02-2020,-500
19-02-2020,-500
20-02-2020,-500
21-02-2020,-500
24-02-2020,-500
25-02-2020,-500
26-02-2020,-500
27-02-2020,-500
28-02-2020,-500
02-03-2020,-500
03-03-2020,-500
04-03-2020,-500
05-03-2020,-500
06-03-2020,-500
09-03-2020,-500
10-03-2020,-500
11-03-2020,-500
12-03-2020,-500
13-03-2020,-500
16-03-2020,-500
17-03-2020,-500
18-03-2020,-500
19-03-2020,-500
20-03-2020,-500
23-03-2020,-500
24-03-2020,-500
25-03-2020,-500
26-03-2020,-500
27-03-2020,-500
30-03-2020,-500
31-03-2020,-500
01-04-2020,-500
02-04-2020,-500
03-04-2020,-500
06-04-2020,-500
07-04-2020,-500
08-04-2020,-500
09-04-2020,-500
14-04-2020,-500
15-04-2020,-500
16-04-2020,-500
17-04-2020,-500
20-04-2020,-500
21-04-2020,-500
22-04-2020,-500
23-04-2020,-500
24-04-2020,-500
27-04-2020,-500
28-04-2020,-500
29-04-2020,-500
30-04-2020,-500
04-05-2020,-500
05-05-2020,-500
06-05-2020,-500
07-05-2020,-500
08-05-2020,-500
11-05-2020,-500
12-05-2020,-500
13-05-2020,-500
14-05-2020,-500
15-05-2020,-500
18-05-2020,-500
19-05-2020,-500
20-05-2020,-500
21-05-2020,-500
22-05-2020,-500
25-05-2020,-500
26-05-2020,-500
27-05-2020,-500
28-05-2020,-500
29-05-2020,-500
01-06-2020,-500
02-06-2020,-500
03-06-2020,-500
04-06-2020,-500
05-06-2020,-500
08-06-2020,-500
09-06-2020,-500
10-06-2020,-500
11-06-2020,-500
12-06-2020,-500
15-06-2020,-500
16-06-2020,-500
17-06-2020,-500
18-06-2020,-500
19-06-2020,-500
22-06-2020,-500
23-06-2020,-500
24-06-2020,-500
25-06-2020,-500
26-06-2020,-500
29-06-2020,-500
30-06-2020,-500
01-07-2020,-500
02-07-2020,-500
03-07-2020,-500
06-07-2020,-500
07-07-2020,-500
08-07-2020,-500
09-07-2020,-500
10-07-2020,-500
13-07-2020,-500
14-07-2020,-500
15-07-2020,-500
16-07-2020,-500
17-07-2020,-500
20-07-2020,-500
21-07-2020,-500
22-07-2020,-500
23-07-2020,-500
24-07-2020,-500
27-07-2020,-500
28-07-2020,-500
29-07-2020,-500
30-07-2020,-500
31-07-2020,-500
03-08-2020,-500
04-08-2020,-500
05-08-2020,-500
06-08-2020,-500
07-08-2020,-500
10-08-2020,-500
11-08-2020,-500
12-08-2020,-500
13-08-2020,-500
14-08-2020,-500
17-08-2020,-500
18-08-2020,-500
19-08-2020,-500
20-08-2020,-500
21-08-2020,-500
24-08-2020,-500
25-08-2020,-500
26-08-2020,-500
27-08-2020,-500
28-08-2020,-500
31-08-2020,-500
01-09-2020,-500
02-09-2020,-500
03-09-2020,-500
04-09-2020,-500
07-09-2020,-500
08-09-2020,-500
09-09-2020,-500
10-09-2020,-500
11-09-2020,-500
14-09-2020,-500
15-09-2020,-500
16-09-2020,-500
17-09-2020,-500
18-09-2020,-500
21-09-2020,-500
22-09-2020,-500
23-09-2020,-500
24-09-2020,-500
25-09-2020,-500
28-09-2020,-500
29-09-2020,-500
30-09-2020,-500
01-10-2020,-500
02-10-2020,-500
05-10-2020,-500
06-10-2020,-500
07-10-2020,-500
08-10-2020,-500
09-10-2020,-500
12-10-2020,-500
13-10-2020,-500
14-10-2020,-500
15-10-2020,-500
16-10-2020,-500
19-10-2020,-500
20-10-2020,-500
21-10-2020,-500
22-10-2020,-500
23-10-2020,-500
26-10-2020,-500
27-10-2020,-500
28-10-2020,-500
29-10-2020,-500
30-10-2020,-500
02-11-2020,-500
03-11-2020,-500
04-11-2020,-500
05-11-2020,-500
06-11-2020,-500
09-11-2020,-500
10-11-2020,-500
11-11-2020,-500
12-11-2020,-500
13-11-2020,-500
16-11-2020,-500
17-11-2020,-500
18-11-2020,-500
19-11-2020,-500
20-11-2020,-500
23-11-2020,-500
24-11-2020,-500
25-11-2020,-500
26-11-2020,-500
27-11-2020,-500
30-11-2020,-500
01-12-2020,-500
02-12-2020,-500
03-12-2020,-500
04-12-2020,-500
07-12-2020,-500
08-12-2020,-500
09-12-2020,-500
10-12-2020,-500
11-12-2020,-500
14-12-2020,-500
15-12-2020,-500
16-12-2020,-500
17-12-2020,-500
18-12-2020,-500
21-12-2020,-500
22-12-2020,-500
23-12-2020,-500
24-12-2020,-500
28-12-2020,-500
29-12-2020,-500
30-12-2020,-500
31-12-2020,-500
04-01-2021,-500
05-01-2021,-500
06-01-2021,-500
07-01-2021,-500
08-01-2021,-500
11-01-2021,-500
12-01-2021,-500
13-01-2021,-500
14-01-2021,-500
15-01-2021,-500
18-01-2021,-500
19-01-2021,-500
20-01-2021,-500
21-01-2021,-500
22-01-2021,-500
25-01-2021,-500
26-01-2021,-500
27-01-2021,-500
28-01-2021,-500
29-01-2021,-500
01-02-2021,-500
02-02-2021,-500
03-02-2021,-500
04-02-2021,-500
05-02-2021,-500
08-02-2021,-500
09-02-2021,-500
10-02-2021,-500
11-02-2021,-500
12-02-2021,-500
15-02-2021,-500
16-02-2021,-500
17-02-2021,-500
18-02-2021,-500
19-02-2021,-500
22-02-2021,-500
23-02-2021,-500
24-02-2021,-500
25-02-2021,-500
26-02-2021,-500
01-03-2021,-500
02-03-2021,-500
03-03-2021,-500
04-03-2021,-500
05-03-2021,-500
08-03-2021,-500
09-03-2021,-500
10-03-2021,-500
11-03-2021,-500
12-03-2021,-500
15-03-2021,-500
16-03-2021,-500
17-03-2021,-500
18-03-2021,-500
19-03-2021,-500
22-03-2021,-500
23-03-2021,-500
24-03-2021,-500
25-03-2021,-500
26-03-2021,-500
29-03-2021,-500
30-03-2021,-500
31-03-2021,-500
01-04-2021,-500
06-04-2021,-500
07-04-2021,-500
08-04-2021,-500
09-04-2021,-500
12-04-2021,-500
13-04-2021,-500
14-04-2021,-500
15-04-2021,-500
16-04-2021,-500
19-04-2021,-500
20-04-2021,-500
21-04-2021,-500
22-04-2021,-500
23-04-2021,-500
26-04-2021,-500
27-04-2021,-500
28-04-2021,-500
29-04-2021,-500
30-04-2021,-500
03-05-2021,-500
04-05-2021,-500
05-05-2021,-500
06-05-2021,-500
07-05-2021,-500
10-05-2021,-500
11-05-2021,-500
12-05-2021,-500
13-05-2021,-500
14-05-2021,-500
17-05-2021,-500
18-05-2021,-500
19-05-2021,-500
20-05-2021,-500
21-05-2021,-500
24-05-2021,-500
25-05-2021,-500
26-05-2021,-500
27-05-2021,-500
28-05-2021,-500
31-05-2021,-500
01-06-2021,-500
02-06-2021,-500
03-06-2021,-500
04-06-2021,-500
07-06-2021,-500
08-06-2021,-500
09-06-2021,-500
10-06-2021,-500
11-06-2021,-500
14-06-2021,-500
15-06-2021,-500
16-06-2021,-500
17-06-2021,-500
18-06-2021,-500
21-06-2021,-500
22-06-2021,-500
23-06-2021,-500
24-06-2021,-500
25-06-2021,-500
28-06-2021,-500
29-06-2021,-500
30-06-2021,-500
01-07-2021,-500
02-07-2021,-500
05-07-2021,-500
06-07-2021,-500
07-07-2021,-500
08-07-2021,-500
09-07-2021,-500
12-07-2021,-500
13-07-2021,-500
14-07-2021,-500
15-07-2021,-500
16-07-2021,-500
19-07-2021,-500
20-07-2021,-500
21-07-2021,-500
22-07-2021,-500
23-07-2021,-500
26-07-2021,-500
27-07-2021,-500
28-07-2021,-500
29-07-2021,-500
30-07-2021,-500
02-08-2021,-500
03-08-2021,-500
04-08-2021,-500
05-08-2021,-500
06-08-2021,-500
09-08-2021,-500
10-08-2021,-500
11-08-2021,-500
12-08-2021,-500
13-08-2021,-500
16-08-2021,-500
17-08-2021,-500
18-08-2021,-500
19-08-2021,-500
20-08-2021,-500
23-08-2021,-500
24-08-2021,-500
25-08-2021,-500
26-08-2021,-500
27-08-2021,-500
30-08-2021,-500
31-08-2021,-500
01-09-2021,-500
02-09-2021,-500
03-09-2021,-500
06-09-2021,-500
07-09-2021,-500
08-09-2021,-500
09-09-2021,-500
10-09-2021,-500
13-09-2021,-500
14-09-2021,-500
15-09-2021,-500
16-09-2021,-500
17-09-2021,-500
20-09-2021,-500
21-09-2021,-500
22-09-2021,-500
23-09-2021,-500
24-09-2021,-500
27-09-2021,-500
28-09-2021,-500
29-09-2021,-500
30-09-2021,-500
01-10-2021,-500
04-10-2021,-500
05-10-2021,-500
06-10-2021,-500
07-10-2021,-500
08-10-2021,-500
11-10-2021,-500
12-10-2021,-500
13-10-2021,-500
14-10-2021,-500
15-10-2021,-500
18-10-2021,-500
19-10-2021,-500
20-10-2021,-500
21-10-2021,-500
22-10-2021,-500
25-10-2021,-500
26-10-2021,-500
27-10-2021,-500
28-10-2021,-500
29-10-2021,-500
01-11-2021,-500
02-11-2021,-500
03-11-2021,-500
04-11-2021,-500
05-11-2021,-500
08-11-2021,-500
09-11-2021,-500
10-11-2021,-500
11-11-2021,-500
12-11-2021,-500
15-11-2021,-500
16-11-2021,-500
17-11-2021,-500
18-11-2021,-500
19-11-2021,-500
22-11-2021,-500
23-11-2021,-500
24-11-2021,-500
25-11-2021,-500
26-11-2021,-500
29-11-2021,-500
30-11-2021,-500
01-12-2021,-500
02-12-2021,-500
03-12-2021,-500
06-12-2021,-500
07-12-2021,-500
08-12-2021,-500
09-12-2021,-500
10-12-2021,-500
13-12-2021,-500
14-12-2021,-500
15-12-2021,-500
16-12-2021,-500
17-12-2021,-500
20-12-2021,-500
21-12-2021,-500
22-12-2021,-500
23-12-2021,-500
24-12-2021,-500
27-12-2021,-500
28-12-2021,-500
29-12-2021,-500
30-12-2021,-500
31-12-2021,-500
03-01-2022,-500
04-01-2022,-500
05-01-2022,-500
06-01-2022,-500
07-01-2022,-500
10-01-2022,-500
11-01-2022,-500
12-01-2022,-500
13-01-2022,-500
14-01-2022,-500
17-01-2022,-500
18-01-2022,-500
19-01-2022,-500
20-01-2022,-500
21-01-2022,-500
24-01-2022,-500
25-01-2022,-500
26-01-2022,-500
27-01-2022,-500
28-01-2022,-500
31-01-2022,-500
01-02-2022,-500
02-02-2022,-500
03-02-2022,-500
04-02-2022,-500
07-02-2022,-500
08-02-2022,-500
09-02-2022,-500
10-02-2022,-500
11-02-2022,-500
14-02-2022,-500
15-02-2022,-500
16-02-2022,-500
17-02-2022,-500
18-02-2022,-500
21-02-2022,-500
22-02-2022,-500
23-02-2022,-500
24-02-2022,-500
25-02-2022,-500
28-02-2022,-500
01-03-2022,-500
02-03-2022,-500
03-03-2022,-500
04-03-2022,-500
07-03-2022,-500
08-03-2022,-500
09-03-2022,-500
10-03-2022,-500
11-03-2022,-500
14-03-2022,-500
15-03-2022,-500
16-03-2022,-500
17-03-2022,-500
18-03-2022,-500
21-03-2022,-500
22-03-2022,-500
23-03-2022,-500
24-03-2022,-500
25-03-2022,-500
28-03-2022,-500
29-03-2022,-500
30-03-2022,-500
31-03-2022,-500
01-04-2022,-500
04-04-2022,-500
05-04-2022,-500
06-04-2022,-500
07-04-2022,-500
08-04-2022,-500
11-04-2022,-500
12-04-2022,-500
13-04-2022,-500
14-04-2022,-500
19-04-2022,-500
20-04-2022,-500
21-04-2022,-500
22-04-2022,-500
25-04-2022,-500
26-04-2022,-500
27-04-2022,-500
28-04-2022,-500
29-04-2022,-500
02-05-2022,-500
03-05-2022,-500
04-05-2022,-500
05-05-2022,-500
06-05-2022,-500
09-05-2022,-500
10-05-2022,-500
11-05-2022,-500
12-05-2022,-500
13-05-2022,-500
16-05-2022,-500
17-05-2022,-500
18-05-2022,-500
19-05-2022,-500
20-05-2022,-500
23-05-2022,-500
24-05-2022,-500
25-05-2022,-500
26-05-2022,-500
27-05-2022,-500
30-05-2022,-500
31-05-2022,-500
01-06-2022,-500
02-06-2022,-500
03-06-2022,-500
06-06-2022,-500
07-06-2022,-500
08-06-2022,-500
09-06-2022,-500
10-06-2022,-500
13-06-2022,-500
14-06-2022,-500
15-06-2022,-500
16-06-2022,-500
17-06-2022,-500
20-06-2022,-500
21-06-2022,-500
22-06-2022,-500
23-06-2022,-500
24-06-2022,-500
27-06-2022,-500
28-06-2022,-500
29-06-2022,-500
30-06-2022,-500
01-07-2022,-500
04-07-2022,-500
05-07-2022,-500
06-07-2022,-500
07-07-2022,-500
08-07-2022,-500
11-07-2022,-500
12-07-2022,-500
13-07-2022,-500
14-07-2022,-500
15-07-2022,-500
18-07-2022,-500
19-07-2022,-500
20-07-2022,-500
21-07-2022,-500
22-07-2022,-500
25-07-2022,-500
26-07-2022,-500
27-07-2022,-500
28-07-2022,-500
29-07-2022,-500
01-08-2022,-500
02-08-2022,-500
03-08-2022,-500
04-08-2022,-500
05-08-2022,-500
08-08-2022,-500
09-08-2022,-500
10-08-2022,-500
11-08-2022,-500
12-08-2022,-500
15-08-2022,-500
16-08-2022,-500
17-08-2022,-500
18-08-2022,-500
19-08-2022,-500
22-08-2022,-500
23-08-2022,-500
24-08-2022,-500
25-08-2022,-500
26-08-2022,-500
29-08-2022,-500
30-08-2022,-500
31-08-2022,-500
01-09-2022,-500
02-09-2022,-500
05-09-2022,-500
06-09-2022,-500
07-09-2022,-500
08-09-2022,-500
09-09-2022,-500
12-09-2022,-500
13-09-2022,-500
14-09-2022,-500
15-09-2022,-500
16-09-2022,-500
19-09-2022,-500
20-09-2022,-500
21-09-2022,-500
22-09-2022,-500
23-09-2022,-500
26-09-2022,-500
27-09-2022,-500
28-09-2022,-500
29-09-2022,-500
30-09-2022,-500
03-10-2022,-500
04-10-2022,-500
05-10-2022,-500
06-10-2022,-500
07-10-2022,-500
10-10-2022,-500
11-10-2022,-500
12-10-2022,-500
13-10-2022,-500
14-10-2022,-500
17-10-2022,-500
18-10-2022,-500
19-10-2022,-500
20-10-2022,-500
21-10-2022,-500
24-10-2022,-500
25-10-2022,-500
26-10-2022,-500
27-10-2022,-500
28-10-2022,-500
31-10-2022,-500
01-11-2022,-500
02-11-2022,-500
03-11-2022,-500
04-11-2022,-500
07-11-2022,-500
08-11-2022,-500
09-11-2022,-500
10-11-2022,-500
11-11-2022,-500
14-11-2022,-500
15-11-2022,-500
16-11-2022,-500
17-11-2022,-500
18-11-2022,-500
21-11-2022,-500
22-11-2022,-500
23-11-2022,-500
24-11-2022,-500
25-11-2022,-500
28-11-2022,-500
29-11-2022,-500
30-11-2022,-500
01-12-2022,-500
02-12-2022,-500
05-12-2022,-500
06-12-2022,-500
07-12-2022,-500
08-12-2022,-500
09-12-2022,-500
12-12-2022,-500
13-12-2022,-500
14-12-2022,-500
15-12-2022,-500
16-12-2022,-500
19-12-2022,-500
20-12-2022,-500
21-12-2022,-500
22-12-2022,-500
23-12-2022,-500
27-12-2022,-500
28-12-2022,-500
29-12-2022,-500
30-12-2022,-500
02-01-2023,-500
03-01-2023,-500
04-01-2023,-500
05-01-2023,-500
06-01-2023,-500
09-01-2023,-500
10-01-2023,-500
11-01-2023,-500
12-01-2023,-500
13-01-2023,-500
16-01-2023,-500
17-01-2023,-500
18-01-2023,-500
19-01-2023,-500
20-01-2023,-500
23-01-2023,-500
24-01-2023,-500
25-01-2023,-500
26-01-2023,-500
27-01-2023,-500
30-01-2023,-500
31-01-2023,-500
01-02-2023,-500
02-02-2023,-500
03-02-2023,-500
06-02-2023,-500
07-02-2023,-500
08-02-2023,-500
09-02-2023,-500
10-02-2023,-500
13-02-2023,-500
14-02-2023,-500
15-02-2023,-500
16-02-2023,-500
17-02-2023,-500
20-02-2023,-500
21-02-2023,-500
22-02-2023,-500
23-02-2023,-500
24-02-2023,-500
27-02-2023,-500
28-02-2023,-500
01-03-2023,-500
02-03-2023,-500
03-03-2023,-500
06-03-2023,-500
07-03-2023,-500
08-03-2023,-500
09-03-2023,-500
10-03-2023,-500
13-03-2023,-500
14-03-2023,-500
15-03-2023,-500
16-03-2023,-500
17-03-2023,-500
20-03-2023,-500
21-03-2023,-500
22-03-2023,-500
23-03-2023,-500
24-03-2023,-500
27-03-2023,-500
28-03-2023,-500
29-03-2023,-500
30-03-2023,-500
31-03-2023,-500
03-04-2023,-500
04-04-2023,-500
05-04-2023,-500
06-04-2023,-500
11-04-2023,-500
12-04-2023,-500
13-04-2023,-500
14-04-2023,-500
17-04-2023,-500
18-04-2023,-500
19-04-2023,-500
20-04-2023,-500
21-04-2023,-500
24-04-2023,-500
25-04-2023,-500
26-04-2023,-500
27-04-2023,-500
28-04-2023,-500
02-05-2023,-500
03-05-2023,-500
04-05-2023,-500
05-05-2023,-500
08-05-2023,-500
09-05-2023,-500
10-05-2023,-500
11-05-2023,-500
12-05-2023,-500
15-05-2023,-500
16-05-2023,-500
17-05-2023,-500
18-05-2023,-500
19-05-2023,-500
22-05-2023,-500
23-05-2023,-500
24-05-2023,-500
25-05-2023,-500
26-05-2023,-500
29-05-2023,-500
30-05-2023,-500
31-05-2023,-500
01-06-2023,-500
02-06-2023,-500
05-06-2023,-500
06-06-2023,-500
07-06-2023,-500
08-06-2023,-500
09-06-2023,-500
12-06-2023,-500
13-06-2023,-500
14-06-2023,-500
15-06-2023,-500
16-06-2023,-500
19-06-2023,-500
20-06-2023,-500
21-06-2023,-500
22-06-2023,-500
23-06-2023,-500
26-06-2023,-500
27-06-2023,-500
28-06-2023,-500
29-06-2023,-500
30-06-2023,-500
03-07-2023,-500
04-07-2023,-500
05-07-2023,-500
06-07-2023,-500
07-07-2023,-500
10-07-2023,-500
11-07-2023,-500
12-07-2023,-500
13-07-2023,-500
14-07-2023,-500
17-07-2023,-500
18-07-2023,-500
19-07-2023,-500
20-07-2023,-500
21-07-2023,-500
24-07-2023,-500
25-07-2023,-500
26-07-2023,-500
27-07-2023,-500
28-07-2023,-500
31-07-2023,-500
01-08-2023,-500
02-08-2023,-500
================================================
FILE: python/rateslib/data/historical/eur_rfr.csv
================================================
reference_date,rate
01-10-2019,-500
02-10-2019,-500
03-10-2019,-500
04-10-2019,-500
07-10-2019,-500
08-10-2019,-500
09-10-2019,-500
10-10-2019,-500
11-10-2019,-500
14-10-2019,-500
15-10-2019,-500
16-10-2019,-500
17-10-2019,-500
18-10-2019,-500
21-10-2019,-500
22-10-2019,-500
23-10-2019,-500
24-10-2019,-500
25-10-2019,-500
28-10-2019,-500
29-10-2019,-500
30-10-2019,-500
31-10-2019,-500
01-11-2019,-500
04-11-2019,-500
05-11-2019,-500
06-11-2019,-500
07-11-2019,-500
08-11-2019,-500
11-11-2019,-500
12-11-2019,-500
13-11-2019,-500
14-11-2019,-500
15-11-2019,-500
18-11-2019,-500
19-11-2019,-500
20-11-2019,-500
21-11-2019,-500
22-11-2019,-500
25-11-2019,-500
26-11-2019,-500
27-11-2019,-500
28-11-2019,-500
29-11-2019,-500
02-12-2019,-500
03-12-2019,-500
04-12-2019,-500
05-12-2019,-500
06-12-2019,-500
09-12-2019,-500
10-12-2019,-500
11-12-2019,-500
12-12-2019,-500
13-12-2019,-500
16-12-2019,-500
17-12-2019,-500
18-12-2019,-500
19-12-2019,-500
20-12-2019,-500
23-12-2019,-500
24-12-2019,-500
27-12-2019,-500
30-12-2019,-500
31-12-2019,-500
02-01-2020,-500
03-01-2020,-500
06-01-2020,-500
07-01-2020,-500
08-01-2020,-500
09-01-2020,-500
10-01-2020,-500
13-01-2020,-500
14-01-2020,-500
15-01-2020,-500
16-01-2020,-500
17-01-2020,-500
20-01-2020,-500
21-01-2020,-500
22-01-2020,-500
23-01-2020,-500
24-01-2020,-500
27-01-2020,-500
28-01-2020,-500
29-01-2020,-500
30-01-2020,-500
31-01-2020,-500
03-02-2020,-500
04-02-2020,-500
05-02-2020,-500
06-02-2020,-500
07-02-2020,-500
10-02-2020,-500
11-02-2020,-500
12-02-2020,-500
13-02-2020,-500
14-02-2020,-500
17-02-2020,-500
18-02-2020,-500
19-02-2020,-500
20-02-2020,-500
21-02-2020,-500
24-02-2020,-500
25-02-2020,-500
26-02-2020,-500
27-02-2020,-500
28-02-2020,-500
02-03-2020,-500
03-03-2020,-500
04-03-2020,-500
05-03-2020,-500
06-03-2020,-500
09-03-2020,-500
10-03-2020,-500
11-03-2020,-500
12-03-2020,-500
13-03-2020,-500
16-03-2020,-500
17-03-2020,-500
18-03-2020,-500
19-03-2020,-500
20-03-2020,-500
23-03-2020,-500
24-03-2020,-500
25-03-2020,-500
26-03-2020,-500
27-03-2020,-500
30-03-2020,-500
31-03-2020,-500
01-04-2020,-500
02-04-2020,-500
03-04-2020,-500
06-04-2020,-500
07-04-2020,-500
08-04-2020,-500
09-04-2020,-500
14-04-2020,-500
15-04-2020,-500
16-04-2020,-500
17-04-2020,-500
20-04-2020,-500
21-04-2020,-500
22-04-2020,-500
23-04-2020,-500
24-04-2020,-500
27-04-2020,-500
28-04-2020,-500
29-04-2020,-500
30-04-2020,-500
04-05-2020,-500
05-05-2020,-500
06-05-2020,-500
07-05-2020,-500
08-05-2020,-500
11-05-2020,-500
12-05-2020,-500
13-05-2020,-500
14-05-2020,-500
15-05-2020,-500
18-05-2020,-500
19-05-2020,-500
20-05-2020,-500
21-05-2020,-500
22-05-2020,-500
25-05-2020,-500
26-05-2020,-500
27-05-2020,-500
28-05-2020,-500
29-05-2020,-500
01-06-2020,-500
02-06-2020,-500
03-06-2020,-500
04-06-2020,-500
05-06-2020,-500
08-06-2020,-500
09-06-2020,-500
10-06-2020,-500
11-06-2020,-500
12-06-2020,-500
15-06-2020,-500
16-06-2020,-500
17-06-2020,-500
18-06-2020,-500
19-06-2020,-500
22-06-2020,-500
23-06-2020,-500
24-06-2020,-500
25-06-2020,-500
26-06-2020,-500
29-06-2020,-500
30-06-2020,-500
01-07-2020,-500
02-07-2020,-500
03-07-2020,-500
06-07-2020,-500
07-07-2020,-500
08-07-2020,-500
09-07-2020,-500
10-07-2020,-500
13-07-2020,-500
14-07-2020,-500
15-07-2020,-500
16-07-2020,-500
17-07-2020,-500
20-07-2020,-500
21-07-2020,-500
22-07-2020,-500
23-07-2020,-500
24-07-2020,-500
27-07-2020,-500
28-07-2020,-500
29-07-2020,-500
30-07-2020,-500
31-07-2020,-500
03-08-2020,-500
04-08-2020,-500
05-08-2020,-500
06-08-2020,-500
07-08-2020,-500
10-08-2020,-500
11-08-2020,-500
12-08-2020,-500
13-08-2020,-500
14-08-2020,-500
17-08-2020,-500
18-08-2020,-500
19-08-2020,-500
20-08-2020,-500
21-08-2020,-500
24-08-2020,-500
25-08-2020,-500
26-08-2020,-500
27-08-2020,-500
28-08-2020,-500
31-08-2020,-500
01-09-2020,-500
02-09-2020,-500
03-09-2020,-500
04-09-2020,-500
07-09-2020,-500
08-09-2020,-500
09-09-2020,-500
10-09-2020,-500
11-09-2020,-500
14-09-2020,-500
15-09-2020,-500
16-09-2020,-500
17-09-2020,-500
18-09-2020,-500
21-09-2020,-500
22-09-2020,-500
23-09-2020,-500
24-09-2020,-500
25-09-2020,-500
28-09-2020,-500
29-09-2020,-500
30-09-2020,-500
01-10-2020,-500
02-10-2020,-500
05-10-2020,-500
06-10-2020,-500
07-10-2020,-500
08-10-2020,-500
09-10-2020,-500
12-10-2020,-500
13-10-2020,-500
14-10-2020,-500
15-10-2020,-500
16-10-2020,-500
19-10-2020,-500
20-10-2020,-500
21-10-2020,-500
22-10-2020,-500
23-10-2020,-500
26-10-2020,-500
27-10-2020,-500
28-10-2020,-500
29-10-2020,-500
30-10-2020,-500
02-11-2020,-500
03-11-2020,-500
04-11-2020,-500
05-11-2020,-500
06-11-2020,-500
09-11-2020,-500
10-11-2020,-500
11-11-2020,-500
12-11-2020,-500
13-11-2020,-500
16-11-2020,-500
17-11-2020,-500
18-11-2020,-500
19-11-2020,-500
20-11-2020,-500
23-11-2020,-500
24-11-2020,-500
25-11-2020,-500
26-11-2020,-500
27-11-2020,-500
30-11-2020,-500
01-12-2020,-500
02-12-2020,-500
03-12-2020,-500
04-12-2020,-500
07-12-2020,-500
08-12-2020,-500
09-12-2020,-500
10-12-2020,-500
11-12-2020,-500
14-12-2020,-500
15-12-2020,-500
16-12-2020,-500
17-12-2020,-500
18-12-2020,-500
21-12-2020,-500
22-12-2020,-500
23-12-2020,-500
24-12-2020,-500
28-12-2020,-500
29-12-2020,-500
30-12-2020,-500
31-12-2020,-500
04-01-2021,-500
05-01-2021,-500
06-01-2021,-500
07-01-2021,-500
08-01-2021,-500
11-01-2021,-500
12-01-2021,-500
13-01-2021,-500
14-01-2021,-500
15-01-2021,-500
18-01-2021,-500
19-01-2021,-500
20-01-2021,-500
21-01-2021,-500
22-01-2021,-500
25-01-2021,-500
26-01-2021,-500
27-01-2021,-500
28-01-2021,-500
29-01-2021,-500
01-02-2021,-500
02-02-2021,-500
03-02-2021,-500
04-02-2021,-500
05-02-2021,-500
08-02-2021,-500
09-02-2021,-500
10-02-2021,-500
11-02-2021,-500
12-02-2021,-500
15-02-2021,-500
16-02-2021,-500
17-02-2021,-500
18-02-2021,-500
19-02-2021,-500
22-02-2021,-500
23-02-2021,-500
24-02-2021,-500
25-02-2021,-500
26-02-2021,-500
01-03-2021,-500
02-03-2021,-500
03-03-2021,-500
04-03-2021,-500
05-03-2021,-500
08-03-2021,-500
09-03-2021,-500
10-03-2021,-500
11-03-2021,-500
12-03-2021,-500
15-03-2021,-500
16-03-2021,-500
17-03-2021,-500
18-03-2021,-500
19-03-2021,-500
22-03-2021,-500
23-03-2021,-500
24-03-2021,-500
25-03-2021,-500
26-03-2021,-500
29-03-2021,-500
30-03-2021,-500
31-03-2021,-500
01-04-2021,-500
06-04-2021,-500
07-04-2021,-500
08-04-2021,-500
09-04-2021,-500
12-04-2021,-500
13-04-2021,-500
14-04-2021,-500
15-04-2021,-500
16-04-2021,-500
19-04-2021,-500
20-04-2021,-500
21-04-2021,-500
22-04-2021,-500
23-04-2021,-500
26-04-2021,-500
27-04-2021,-500
28-04-2021,-500
29-04-2021,-500
30-04-2021,-500
03-05-2021,-500
04-05-2021,-500
05-05-2021,-500
06-05-2021,-500
07-05-2021,-500
10-05-2021,-500
11-05-2021,-500
12-05-2021,-500
13-05-2021,-500
14-05-2021,-500
17-05-2021,-500
18-05-2021,-500
19-05-2021,-500
20-05-2021,-500
21-05-2021,-500
24-05-2021,-500
25-05-2021,-500
26-05-2021,-500
27-05-2021,-500
28-05-2021,-500
31-05-2021,-500
01-06-2021,-500
02-06-2021,-500
03-06-2021,-500
04-06-2021,-500
07-06-2021,-500
08-06-2021,-500
09-06-2021,-500
10-06-2021,-500
11-06-2021,-500
14-06-2021,-500
15-06-2021,-500
16-06-2021,-500
17-06-2021,-500
18-06-2021,-500
21-06-2021,-500
22-06-2021,-500
23-06-2021,-500
24-06-2021,-500
25-06-2021,-500
28-06-2021,-500
29-06-2021,-500
30-06-2021,-500
01-07-2021,-500
02-07-2021,-500
05-07-2021,-500
06-07-2021,-500
07-07-2021,-500
08-07-2021,-500
09-07-2021,-500
12-07-2021,-500
13-07-2021,-500
14-07-2021,-500
15-07-2021,-500
16-07-2021,-500
19-07-2021,-500
20-07-2021,-500
21-07-2021,-500
22-07-2021,-500
23-07-2021,-500
26-07-2021,-500
27-07-2021,-500
28-07-2021,-500
29-07-2021,-500
30-07-2021,-500
02-08-2021,-500
03-08-2021,-500
04-08-2021,-500
05-08-2021,-500
06-08-2021,-500
09-08-2021,-500
10-08-2021,-500
11-08-2021,-500
12-08-2021,-500
13-08-2021,-500
16-08-2021,-500
17-08-2021,-500
18-08-2021,-500
19-08-2021,-500
20-08-2021,-500
23-08-2021,-500
24-08-2021,-500
25-08-2021,-500
26-08-2021,-500
27-08-2021,-500
30-08-2021,-500
31-08-2021,-500
01-09-2021,-500
02-09-2021,-500
03-09-2021,-500
06-09-2021,-500
07-09-2021,-500
08-09-2021,-500
09-09-2021,-500
10-09-2021,-500
13-09-2021,-500
14-09-2021,-500
15-09-2021,-500
16-09-2021,-500
17-09-2021,-500
20-09-2021,-500
21-09-2021,-500
22-09-2021,-500
23-09-2021,-500
24-09-2021,-500
27-09-2021,-500
28-09-2021,-500
29-09-2021,-500
30-09-2021,-500
01-10-2021,-500
04-10-2021,-500
05-10-2021,-500
06-10-2021,-500
07-10-2021,-500
08-10-2021,-500
11-10-2021,-500
12-10-2021,-500
13-10-2021,-500
14-10-2021,-500
15-10-2021,-500
18-10-2021,-500
19-10-2021,-500
20-10-2021,-500
21-10-2021,-500
22-10-2021,-500
25-10-2021,-500
26-10-2021,-500
27-10-2021,-500
28-10-2021,-500
29-10-2021,-500
01-11-2021,-500
02-11-2021,-500
03-11-2021,-500
04-11-2021,-500
05-11-2021,-500
08-11-2021,-500
09-11-2021,-500
10-11-2021,-500
11-11-2021,-500
12-11-2021,-500
15-11-2021,-500
16-11-2021,-500
17-11-2021,-500
18-11-2021,-500
19-11-2021,-500
22-11-2021,-500
23-11-2021,-500
24-11-2021,-500
25-11-2021,-500
26-11-2021,-500
29-11-2021,-500
30-11-2021,-500
01-12-2021,-500
02-12-2021,-500
03-12-2021,-500
06-12-2021,-500
07-12-2021,-500
08-12-2021,-500
09-12-2021,-500
10-12-2021,-500
13-12-2021,-500
14-12-2021,-500
15-12-2021,-500
16-12-2021,-500
17-12-2021,-500
20-12-2021,-500
21-12-2021,-500
22-12-2021,-500
23-12-2021,-500
24-12-2021,-500
27-12-2021,-500
28-12-2021,-500
29-12-2021,-500
30-12-2021,-500
31-12-2021,-500
03-01-2022,-500
04-01-2022,-500
05-01-2022,-500
06-01-2022,-500
07-01-2022,-500
10-01-2022,-500
11-01-2022,-500
12-01-2022,-500
13-01-2022,-500
14-01-2022,-500
17-01-2022,-500
18-01-2022,-500
19-01-2022,-500
20-01-2022,-500
21-01-2022,-500
24-01-2022,-500
25-01-2022,-500
26-01-2022,-500
27-01-2022,-500
28-01-2022,-500
31-01-2022,-500
01-02-2022,-500
02-02-2022,-500
03-02-2022,-500
04-02-2022,-500
07-02-2022,-500
08-02-2022,-500
09-02-2022,-500
10-02-2022,-500
11-02-2022,-500
14-02-2022,-500
15-02-2022,-500
16-02-2022,-500
17-02-2022,-500
18-02-2022,-500
21-02-2022,-500
22-02-2022,-500
23-02-2022,-500
24-02-2022,-500
25-02-2022,-500
28-02-2022,-500
01-03-2022,-500
02-03-2022,-500
03-03-2022,-500
04-03-2022,-500
07-03-2022,-500
08-03-2022,-500
09-03-2022,-500
10-03-2022,-500
11-03-2022,-500
14-03-2022,-500
15-03-2022,-500
16-03-2022,-500
17-03-2022,-500
18-03-2022,-500
21-03-2022,-500
22-03-2022,-500
23-03-2022,-500
24-03-2022,-500
25-03-2022,-500
28-03-2022,-500
29-03-2022,-500
30-03-2022,-500
31-03-2022,-500
01-04-2022,-500
04-04-2022,-500
05-04-2022,-500
06-04-2022,-500
07-04-2022,-500
08-04-2022,-500
11-04-2022,-500
12-04-2022,-500
13-04-2022,-500
14-04-2022,-500
19-04-2022,-500
20-04-2022,-500
21-04-2022,-500
22-04-2022,-500
25-04-2022,-500
26-04-2022,-500
27-04-2022,-500
28-04-2022,-500
29-04-2022,-500
02-05-2022,-500
03-05-2022,-500
04-05-2022,-500
05-05-2022,-500
06-05-2022,-500
09-05-2022,-500
10-05-2022,-500
11-05-2022,-500
12-05-2022,-500
13-05-2022,-500
16-05-2022,-500
17-05-2022,-500
18-05-2022,-500
19-05-2022,-500
20-05-2022,-500
23-05-2022,-500
24-05-2022,-500
25-05-2022,-500
26-05-2022,-500
27-05-2022,-500
30-05-2022,-500
31-05-2022,-500
01-06-2022,-500
02-06-2022,-500
03-06-2022,-500
06-06-2022,-500
07-06-2022,-500
08-06-2022,-500
09-06-2022,-500
10-06-2022,-500
13-06-2022,-500
14-06-2022,-500
15-06-2022,-500
16-06-2022,-500
17-06-2022,-500
20-06-2022,-500
21-06-2022,-500
22-06-2022,-500
23-06-2022,-500
24-06-2022,-500
27-06-2022,-500
28-06-2022,-500
29-06-2022,-500
30-06-2022,-500
01-07-2022,-500
04-07-2022,-500
05-07-2022,-500
06-07-2022,-500
07-07-2022,-500
08-07-2022,-500
11-07-2022,-500
12-07-2022,-500
13-07-2022,-500
14-07-2022,-500
15-07-2022,-500
18-07-2022,-500
19-07-2022,-500
20-07-2022,-500
21-07-2022,-500
22-07-2022,-500
25-07-2022,-500
26-07-2022,-500
27-07-2022,-500
28-07-2022,-500
29-07-2022,-500
01-08-2022,-500
02-08-2022,-500
03-08-2022,-500
04-08-2022,-500
05-08-2022,-500
08-08-2022,-500
09-08-2022,-500
10-08-2022,-500
11-08-2022,-500
12-08-2022,-500
15-08-2022,-500
16-08-2022,-500
17-08-2022,-500
18-08-2022,-500
19-08-2022,-500
22-08-2022,-500
23-08-2022,-500
24-08-2022,-500
25-08-2022,-500
26-08-2022,-500
29-08-2022,-500
30-08-2022,-500
31-08-2022,-500
01-09-2022,-500
02-09-2022,-500
05-09-2022,-500
06-09-2022,-500
07-09-2022,-500
08-09-2022,-500
09-09-2022,-500
12-09-2022,-500
13-09-2022,-500
14-09-2022,-500
15-09-2022,-500
16-09-2022,-500
19-09-2022,-500
20-09-2022,-500
21-09-2022,-500
22-09-2022,-500
23-09-2022,-500
26-09-2022,-500
27-09-2022,-500
28-09-2022,-500
29-09-2022,-500
30-09-2022,-500
03-10-2022,-500
04-10-2022,-500
05-10-2022,-500
06-10-2022,-500
07-10-2022,-500
10-10-2022,-500
11-10-2022,-500
12-10-2022,-500
13-10-2022,-500
14-10-2022,-500
17-10-2022,-500
18-10-2022,-500
19-10-2022,-500
20-10-2022,-500
21-10-2022,-500
24-10-2022,-500
25-10-2022,-500
26-10-2022,-500
27-10-2022,-500
28-10-2022,-500
31-10-2022,-500
01-11-2022,-500
02-11-2022,-500
03-11-2022,-500
04-11-2022,-500
07-11-2022,-500
08-11-2022,-500
09-11-2022,-500
10-11-2022,-500
11-11-2022,-500
14-11-2022,-500
15-11-2022,-500
16-11-2022,-500
17-11-2022,-500
18-11-2022,-500
21-11-2022,-500
22-11-2022,-500
23-11-2022,-500
24-11-2022,-500
25-11-2022,-500
28-11-2022,-500
29-11-2022,-500
30-11-2022,-500
01-12-2022,-500
02-12-2022,-500
05-12-2022,-500
06-12-2022,-500
07-12-2022,-500
08-12-2022,-500
09-12-2022,-500
12-12-2022,-500
13-12-2022,-500
14-12-2022,-500
15-12-2022,-500
16-12-2022,-500
19-12-2022,-500
20-12-2022,-500
21-12-2022,-500
22-12-2022,-500
23-12-2022,-500
27-12-2022,-500
28-12-2022,-500
29-12-2022,-500
30-12-2022,-500
02-01-2023,-500
03-01-2023,-500
04-01-2023,-500
05-01-2023,-500
06-01-2023,-500
09-01-2023,-500
10-01-2023,-500
11-01-2023,-500
12-01-2023,-500
13-01-2023,-500
16-01-2023,-500
17-01-2023,-500
18-01-2023,-500
19-01-2023,-500
20-01-2023,-500
23-01-2023,-500
24-01-2023,-500
25-01-2023,-500
26-01-2023,-500
27-01-2023,-500
30-01-2023,-500
31-01-2023,-500
01-02-2023,-500
02-02-2023,-500
03-02-2023,-500
06-02-2023,-500
07-02-2023,-500
08-02-2023,-500
09-02-2023,-500
10-02-2023,-500
13-02-2023,-500
14-02-2023,-500
15-02-2023,-500
16-02-2023,-500
17-02-2023,-500
20-02-2023,-500
21-02-2023,-500
22-02-2023,-500
23-02-2023,-500
24-02-2023,-500
27-02-2023,-500
28-02-2023,-500
01-03-2023,-500
02-03-2023,-500
03-03-2023,-500
06-03-2023,-500
07-03-2023,-500
08-03-2023,-500
09-03-2023,-500
10-03-2023,-500
13-03-2023,-500
14-03-2023,-500
15-03-2023,-500
16-03-2023,-500
17-03-2023,-500
20-03-2023,-500
21-03-2023,-500
22-03-2023,-500
23-03-2023,-500
24-03-2023,-500
27-03-2023,-500
28-03-2023,-500
29-03-2023,-500
30-03-2023,-500
31-03-2023,-500
03-04-2023,-500
04-04-2023,-500
05-04-2023,-500
06-04-2023,-500
11-04-2023,-500
12-04-2023,-500
13-04-2023,-500
14-04-2023,-500
17-04-2023,-500
18-04-2023,-500
19-04-2023,-500
20-04-2023,-500
21-04-2023,-500
24-04-2023,-500
25-04-2023,-500
26-04-2023,-500
27-04-2023,-500
28-04-2023,-500
02-05-2023,-500
03-05-2023,-500
04-05-2023,-500
05-05-2023,-500
08-05-2023,-500
09-05-2023,-500
10-05-2023,-500
11-05-2023,-500
12-05-2023,-500
15-05-2023,-500
16-05-2023,-500
17-05-2023,-500
18-05-2023,-500
19-05-2023,-500
22-05-2023,-500
23-05-2023,-500
24-05-2023,-500
25-05-2023,-500
26-05-2023,-500
29-05-2023,-500
30-05-2023,-500
31-05-2023,-500
01-06-2023,-500
02-06-2023,-500
05-06-2023,-500
06-06-2023,-500
07-06-2023,-500
08-06-2023,-500
09-06-2023,-500
12-06-2023,-500
13-06-2023,-500
14-06-2023,-500
15-06-2023,-500
16-06-2023,-500
19-06-2023,-500
20-06-2023,-500
21-06-2023,-500
22-06-2023,-500
23-06-2023,-500
26-06-2023,-500
27-06-2023,-500
28-06-2023,-500
29-06-2023,-500
30-06-2023,-500
03-07-2023,-500
04-07-2023,-500
05-07-2023,-500
06-07-2023,-500
07-07-2023,-500
10-07-2023,-500
11-07-2023,-500
12-07-2023,-500
13-07-2023,-500
14-07-2023,-500
17-07-2023,-500
18-07-2023,-500
19-07-2023,-500
20-07-2023,-500
21-07-2023,-500
24-07-2023,-500
25-07-2023,-500
26-07-2023,-500
27-07-2023,-500
28-07-2023,-500
31-07-2023,-500
01-08-2023,-500
02-08-2023,-500
================================================
FILE: python/rateslib/data/historical/gbp_rfr.csv
================================================
reference_date,rate
01-08-2023,-500
31-07-2023,-500
28-07-2023,-500
27-07-2023,-500
26-07-2023,-500
25-07-2023,-500
24-07-2023,-500
21-07-2023,-500
20-07-2023,-500
19-07-2023,-500
18-07-2023,-500
17-07-2023,-500
14-07-2023,-500
13-07-2023,-500
12-07-2023,-500
11-07-2023,-500
10-07-2023,-500
07-07-2023,-500
06-07-2023,-500
05-07-2023,-500
04-07-2023,-500
03-07-2023,-500
30-06-2023,-500
29-06-2023,-500
28-06-2023,-500
27-06-2023,-500
26-06-2023,-500
23-06-2023,-500
22-06-2023,-500
21-06-2023,-500
20-06-2023,-500
19-06-2023,-500
16-06-2023,-500
15-06-2023,-500
14-06-2023,-500
13-06-2023,-500
12-06-2023,-500
09-06-2023,-500
08-06-2023,-500
07-06-2023,-500
06-06-2023,-500
05-06-2023,-500
02-06-2023,-500
01-06-2023,-500
31-05-2023,-500
30-05-2023,-500
26-05-2023,-500
25-05-2023,-500
24-05-2023,-500
23-05-2023,-500
22-05-2023,-500
19-05-2023,-500
18-05-2023,-500
17-05-2023,-500
16-05-2023,-500
15-05-2023,-500
12-05-2023,-500
11-05-2023,-500
10-05-2023,-500
09-05-2023,-500
05-05-2023,-500
04-05-2023,-500
03-05-2023,-500
02-05-2023,-500
28-04-2023,-500
27-04-2023,-500
26-04-2023,-500
25-04-2023,-500
24-04-2023,-500
21-04-2023,-500
20-04-2023,-500
19-04-2023,-500
18-04-2023,-500
17-04-2023,-500
14-04-2023,-500
13-04-2023,-500
12-04-2023,-500
11-04-2023,-500
06-04-2023,-500
05-04-2023,-500
04-04-2023,-500
03-04-2023,-500
31-03-2023,-500
30-03-2023,-500
29-03-2023,-500
28-03-2023,-500
27-03-2023,-500
24-03-2023,-500
23-03-2023,-500
22-03-2023,-500
21-03-2023,-500
20-03-2023,-500
17-03-2023,-500
16-03-2023,-500
15-03-2023,-500
14-03-2023,-500
13-03-2023,-500
10-03-2023,-500
09-03-2023,-500
08-03-2023,-500
07-03-2023,-500
06-03-2023,-500
03-03-2023,-500
02-03-2023,-500
01-03-2023,-500
28-02-2023,-500
27-02-2023,-500
24-02-2023,-500
23-02-2023,-500
22-02-2023,-500
21-02-2023,-500
20-02-2023,-500
17-02-2023,-500
16-02-2023,-500
15-02-2023,-500
14-02-2023,-500
13-02-2023,-500
10-02-2023,-500
09-02-2023,-500
08-02-2023,-500
07-02-2023,-500
06-02-2023,-500
03-02-2023,-500
02-02-2023,-500
01-02-2023,-500
31-01-2023,-500
30-01-2023,-500
27-01-2023,-500
26-01-2023,-500
25-01-2023,-500
24-01-2023,-500
23-01-2023,-500
20-01-2023,-500
19-01-2023,-500
18-01-2023,-500
17-01-2023,-500
16-01-2023,-500
13-01-2023,-500
12-01-2023,-500
11-01-2023,-500
10-01-2023,-500
09-01-2023,-500
06-01-2023,-500
05-01-2023,-500
04-01-2023,-500
03-01-2023,-500
30-12-2022,-500
29-12-2022,-500
28-12-2022,-500
23-12-2022,-500
22-12-2022,-500
21-12-2022,-500
20-12-2022,-500
19-12-2022,-500
16-12-2022,-500
15-12-2022,-500
14-12-2022,-500
13-12-2022,-500
12-12-2022,-500
09-12-2022,-500
08-12-2022,-500
07-12-2022,-500
06-12-2022,-500
05-12-2022,-500
02-12-2022,-500
01-12-2022,-500
30-11-2022,-500
29-11-2022,-500
28-11-2022,-500
25-11-2022,-500
24-11-2022,-500
23-11-2022,-500
22-11-2022,-500
21-11-2022,-500
18-11-2022,-500
17-11-2022,-500
16-11-2022,-500
15-11-2022,-500
14-11-2022,-500
11-11-2022,-500
10-11-2022,-500
09-11-2022,-500
08-11-2022,-500
07-11-2022,-500
04-11-2022,-500
03-11-2022,-500
02-11-2022,-500
01-11-2022,-500
31-10-2022,-500
28-10-2022,-500
27-10-2022,-500
26-10-2022,-500
25-10-2022,-500
24-10-2022,-500
21-10-2022,-500
20-10-2022,-500
19-10-2022,-500
18-10-2022,-500
17-10-2022,-500
14-10-2022,-500
13-10-2022,-500
12-10-2022,-500
11-10-2022,-500
10-10-2022,-500
07-10-2022,-500
06-10-2022,-500
05-10-2022,-500
04-10-2022,-500
03-10-2022,-500
30-09-2022,-500
29-09-2022,-500
28-09-2022,-500
27-09-2022,-500
26-09-2022,-500
23-09-2022,-500
22-09-2022,-500
21-09-2022,-500
20-09-2022,-500
16-09-2022,-500
15-09-2022,-500
14-09-2022,-500
13-09-2022,-500
12-09-2022,-500
09-09-2022,-500
08-09-2022,-500
07-09-2022,-500
06-09-2022,-500
05-09-2022,-500
02-09-2022,-500
01-09-2022,-500
31-08-2022,-500
30-08-2022,-500
26-08-2022,-500
25-08-2022,-500
24-08-2022,-500
23-08-2022,-500
22-08-2022,-500
19-08-2022,-500
18-08-2022,-500
17-08-2022,-500
16-08-2022,-500
15-08-2022,-500
12-08-2022,-500
11-08-2022,-500
10-08-2022,-500
09-08-2022,-500
08-08-2022,-500
05-08-2022,-500
04-08-2022,-500
03-08-2022,-500
02-08-2022,-500
01-08-2022,-500
29-07-2022,-500
28-07-2022,-500
27-07-2022,-500
26-07-2022,-500
25-07-2022,-500
22-07-2022,-500
21-07-2022,-500
20-07-2022,-500
19-07-2022,-500
18-07-2022,-500
15-07-2022,-500
14-07-2022,-500
13-07-2022,-500
12-07-2022,-500
11-07-2022,-500
08-07-2022,-500
07-07-2022,-500
06-07-2022,-500
05-07-2022,-500
04-07-2022,-500
01-07-2022,-500
30-06-2022,-500
29-06-2022,-500
28-06-2022,-500
27-06-2022,-500
24-06-2022,-500
23-06-2022,-500
22-06-2022,-500
21-06-2022,-500
20-06-2022,-500
17-06-2022,-500
16-06-2022,-500
15-06-2022,-500
14-06-2022,-500
13-06-2022,-500
10-06-2022,-500
09-06-2022,-500
08-06-2022,-500
07-06-2022,-500
06-06-2022,-500
01-06-2022,-500
31-05-2022,-500
30-05-2022,-500
27-05-2022,-500
26-05-2022,-500
25-05-2022,-500
24-05-2022,-500
23-05-2022,-500
20-05-2022,-500
19-05-2022,-500
18-05-2022,-500
17-05-2022,-500
16-05-2022,-500
13-05-2022,-500
12-05-2022,-500
11-05-2022,-500
10-05-2022,-500
09-05-2022,-500
06-05-2022,-500
05-05-2022,-500
04-05-2022,-500
03-05-2022,-500
29-04-2022,-500
28-04-2022,-500
27-04-2022,-500
26-04-2022,-500
25-04-2022,-500
22-04-2022,-500
21-04-2022,-500
20-04-2022,-500
19-04-2022,-500
14-04-2022,-500
13-04-2022,-500
12-04-2022,-500
11-04-2022,-500
08-04-2022,-500
07-04-2022,-500
06-04-2022,-500
05-04-2022,-500
04-04-2022,-500
01-04-2022,-500
31-03-2022,-500
30-03-2022,-500
29-03-2022,-500
28-03-2022,-500
25-03-2022,-500
24-03-2022,-500
23-03-2022,-500
22-03-2022,-500
21-03-2022,-500
18-03-2022,-500
17-03-2022,-500
16-03-2022,-500
15-03-2022,-500
14-03-2022,-500
11-03-2022,-500
10-03-2022,-500
09-03-2022,-500
08-03-2022,-500
07-03-2022,-500
04-03-2022,-500
03-03-2022,-500
02-03-2022,-500
01-03-2022,-500
28-02-2022,-500
25-02-2022,-500
24-02-2022,-500
23-02-2022,-500
22-02-2022,-500
21-02-2022,-500
18-02-2022,-500
17-02-2022,-500
16-02-2022,-500
15-02-2022,-500
14-02-2022,-500
11-02-2022,-500
10-02-2022,-500
09-02-2022,-500
08-02-2022,-500
07-02-2022,-500
04-02-2022,-500
03-02-2022,-500
02-02-2022,-500
01-02-2022,-500
31-01-2022,-500
28-01-2022,-500
27-01-2022,-500
26-01-2022,-500
25-01-2022,-500
24-01-2022,-500
21-01-2022,-500
20-01-2022,-500
19-01-2022,-500
18-01-2022,-500
17-01-2022,-500
14-01-2022,-500
13-01-2022,-500
12-01-2022,-500
11-01-2022,-500
10-01-2022,-500
07-01-2022,-500
06-01-2022,-500
05-01-2022,-500
04-01-2022,-500
31-12-2021,-500
30-12-2021,-500
29-12-2021,-500
24-12-2021,-500
23-12-2021,-500
22-12-2021,-500
21-12-2021,-500
20-12-2021,-500
17-12-2021,-500
16-12-2021,-500
15-12-2021,-500
14-12-2021,-500
13-12-2021,-500
10-12-2021,-500
09-12-2021,-500
08-12-2021,-500
07-12-2021,-500
06-12-2021,-500
03-12-2021,-500
02-12-2021,-500
01-12-2021,-500
30-11-2021,-500
29-11-2021,-500
26-11-2021,-500
25-11-2021,-500
24-11-2021,-500
23-11-2021,-500
22-11-2021,-500
19-11-2021,-500
18-11-2021,-500
17-11-2021,-500
16-11-2021,-500
15-11-2021,-500
12-11-2021,-500
11-11-2021,-500
10-11-2021,-500
09-11-2021,-500
08-11-2021,-500
05-11-2021,-500
04-11-2021,-500
03-11-2021,-500
02-11-2021,-500
01-11-2021,-500
29-10-2021,-500
28-10-2021,-500
27-10-2021,-500
26-10-2021,-500
25-10-2021,-500
22-10-2021,-500
21-10-2021,-500
20-10-2021,-500
19-10-2021,-500
18-10-2021,-500
15-10-2021,-500
14-10-2021,-500
13-10-2021,-500
12-10-2021,-500
11-10-2021,-500
08-10-2021,-500
07-10-2021,-500
06-10-2021,-500
05-10-2021,-500
04-10-2021,-500
01-10-2021,-500
30-09-2021,-500
29-09-2021,-500
28-09-2021,-500
27-09-2021,-500
24-09-2021,-500
23-09-2021,-500
22-09-2021,-500
21-09-2021,-500
20-09-2021,-500
17-09-2021,-500
16-09-2021,-500
15-09-2021,-500
14-09-2021,-500
13-09-2021,-500
10-09-2021,-500
09-09-2021,-500
08-09-2021,-500
07-09-2021,-500
06-09-2021,-500
03-09-2021,-500
02-09-2021,-500
01-09-2021,-500
31-08-2021,-500
27-08-2021,-500
26-08-2021,-500
25-08-2021,-500
24-08-2021,-500
23-08-2021,-500
20-08-2021,-500
19-08-2021,-500
18-08-2021,-500
17-08-2021,-500
16-08-2021,-500
13-08-2021,-500
12-08-2021,-500
11-08-2021,-500
10-08-2021,-500
09-08-2021,-500
06-08-2021,-500
05-08-2021,-500
04-08-2021,-500
03-08-2021,-500
02-08-2021,-500
30-07-2021,-500
29-07-2021,-500
28-07-2021,-500
27-07-2021,-500
26-07-2021,-500
23-07-2021,-500
22-07-2021,-500
21-07-2021,-500
20-07-2021,-500
19-07-2021,-500
16-07-2021,-500
15-07-2021,-500
14-07-2021,-500
13-07-2021,-500
12-07-2021,-500
09-07-2021,-500
08-07-2021,-500
07-07-2021,-500
06-07-2021,-500
05-07-2021,-500
02-07-2021,-500
01-07-2021,-500
30-06-2021,-500
29-06-2021,-500
28-06-2021,-500
25-06-2021,-500
24-06-2021,-500
23-06-2021,-500
22-06-2021,-500
21-06-2021,-500
18-06-2021,-500
17-06-2021,-500
16-06-2021,-500
15-06-2021,-500
14-06-2021,-500
11-06-2021,-500
10-06-2021,-500
09-06-2021,-500
08-06-2021,-500
07-06-2021,-500
04-06-2021,-500
03-06-2021,-500
02-06-2021,-500
01-06-2021,-500
28-05-2021,-500
27-05-2021,-500
26-05-2021,-500
25-05-2021,-500
24-05-2021,-500
21-05-2021,-500
20-05-2021,-500
19-05-2021,-500
18-05-2021,-500
17-05-2021,-500
14-05-2021,-500
13-05-2021,-500
12-05-2021,-500
11-05-2021,-500
10-05-2021,-500
07-05-2021,-500
06-05-2021,-500
05-05-2021,-500
04-05-2021,-500
30-04-2021,-500
29-04-2021,-500
28-04-2021,-500
27-04-2021,-500
26-04-2021,-500
23-04-2021,-500
22-04-2021,-500
21-04-2021,-500
20-04-2021,-500
19-04-2021,-500
16-04-2021,-500
15-04-2021,-500
14-04-2021,-500
13-04-2021,-500
12-04-2021,-500
09-04-2021,-500
08-04-2021,-500
07-04-2021,-500
06-04-2021,-500
01-04-2021,-500
31-03-2021,-500
30-03-2021,-500
29-03-2021,-500
26-03-2021,-500
25-03-2021,-500
24-03-2021,-500
23-03-2021,-500
22-03-2021,-500
19-03-2021,-500
18-03-2021,-500
17-03-2021,-500
16-03-2021,-500
15-03-2021,-500
12-03-2021,-500
11-03-2021,-500
10-03-2021,-500
09-03-2021,-500
08-03-2021,-500
05-03-2021,-500
04-03-2021,-500
03-03-2021,-500
02-03-2021,-500
01-03-2021,-500
26-02-2021,-500
25-02-2021,-500
24-02-2021,-500
23-02-2021,-500
22-02-2021,-500
19-02-2021,-500
18-02-2021,-500
17-02-2021,-500
16-02-2021,-500
15-02-2021,-500
12-02-2021,-500
11-02-2021,-500
10-02-2021,-500
09-02-2021,-500
08-02-2021,-500
05-02-2021,-500
04-02-2021,-500
03-02-2021,-500
02-02-2021,-500
01-02-2021,-500
29-01-2021,-500
28-01-2021,-500
27-01-2021,-500
26-01-2021,-500
25-01-2021,-500
22-01-2021,-500
21-01-2021,-500
20-01-2021,-500
19-01-2021,-500
18-01-2021,-500
15-01-2021,-500
14-01-2021,-500
13-01-2021,-500
12-01-2021,-500
11-01-2021,-500
08-01-2021,-500
07-01-2021,-500
06-01-2021,-500
05-01-2021,-500
04-01-2021,-500
31-12-2020,-500
30-12-2020,-500
29-12-2020,-500
24-12-2020,-500
23-12-2020,-500
22-12-2020,-500
21-12-2020,-500
18-12-2020,-500
17-12-2020,-500
16-12-2020,-500
15-12-2020,-500
14-12-2020,-500
11-12-2020,-500
10-12-2020,-500
09-12-2020,-500
08-12-2020,-500
07-12-2020,-500
04-12-2020,-500
03-12-2020,-500
02-12-2020,-500
01-12-2020,-500
30-11-2020,-500
27-11-2020,-500
26-11-2020,-500
25-11-2020,-500
24-11-2020,-500
23-11-2020,-500
20-11-2020,-500
19-11-2020,-500
18-11-2020,-500
17-11-2020,-500
16-11-2020,-500
13-11-2020,-500
12-11-2020,-500
11-11-2020,-500
10-11-2020,-500
09-11-2020,-500
06-11-2020,-500
05-11-2020,-500
04-11-2020,-500
03-11-2020,-500
02-11-2020,-500
30-10-2020,-500
29-10-2020,-500
28-10-2020,-500
27-10-2020,-500
26-10-2020,-500
23-10-2020,-500
22-10-2020,-500
21-10-2020,-500
20-10-2020,-500
19-10-2020,-500
16-10-2020,-500
15-10-2020,-500
14-10-2020,-500
13-10-2020,-500
12-10-2020,-500
09-10-2020,-500
08-10-2020,-500
07-10-2020,-500
06-10-2020,-500
05-10-2020,-500
02-10-2020,-500
01-10-2020,-500
30-09-2020,-500
29-09-2020,-500
28-09-2020,-500
25-09-2020,-500
24-09-2020,-500
23-09-2020,-500
22-09-2020,-500
21-09-2020,-500
18-09-2020,-500
17-09-2020,-500
16-09-2020,-500
15-09-2020,-500
14-09-2020,-500
11-09-2020,-500
10-09-2020,-500
09-09-2020,-500
08-09-2020,-500
07-09-2020,-500
04-09-2020,-500
03-09-2020,-500
02-09-2020,-500
01-09-2020,-500
28-08-2020,-500
27-08-2020,-500
26-08-2020,-500
25-08-2020,-500
24-08-2020,-500
21-08-2020,-500
20-08-2020,-500
19-08-2020,-500
18-08-2020,-500
17-08-2020,-500
14-08-2020,-500
13-08-2020,-500
12-08-2020,-500
11-08-2020,-500
10-08-2020,-500
07-08-2020,-500
06-08-2020,-500
05-08-2020,-500
04-08-2020,-500
03-08-2020,-500
31-07-2020,-500
30-07-2020,-500
29-07-2020,-500
28-07-2020,-500
27-07-2020,-500
24-07-2020,-500
23-07-2020,-500
22-07-2020,-500
21-07-2020,-500
20-07-2020,-500
17-07-2020,-500
16-07-2020,-500
15-07-2020,-500
14-07-2020,-500
13-07-2020,-500
10-07-2020,-500
09-07-2020,-500
08-07-2020,-500
07-07-2020,-500
06-07-2020,-500
03-07-2020,-500
02-07-2020,-500
01-07-2020,-500
30-06-2020,-500
29-06-2020,-500
26-06-2020,-500
25-06-2020,-500
24-06-2020,-500
23-06-2020,-500
22-06-2020,-500
19-06-2020,-500
18-06-2020,-500
17-06-2020,-500
16-06-2020,-500
15-06-2020,-500
12-06-2020,-500
11-06-2020,-500
10-06-2020,-500
09-06-2020,-500
08-06-2020,-500
05-06-2020,-500
04-06-2020,-500
03-06-2020,-500
02-06-2020,-500
01-06-2020,-500
29-05-2020,-500
28-05-2020,-500
27-05-2020,-500
26-05-2020,-500
22-05-2020,-500
21-05-2020,-500
20-05-2020,-500
19-05-2020,-500
18-05-2020,-500
15-05-2020,-500
14-05-2020,-500
13-05-2020,-500
12-05-2020,-500
11-05-2020,-500
07-05-2020,-500
06-05-2020,-500
05-05-2020,-500
04-05-2020,-500
01-05-2020,-500
30-04-2020,-500
29-04-2020,-500
28-04-2020,-500
27-04-2020,-500
24-04-2020,-500
23-04-2020,-500
22-04-2020,-500
21-04-2020,-500
20-04-2020,-500
17-04-2020,-500
16-04-2020,-500
15-04-2020,-500
14-04-2020,-500
09-04-2020,-500
08-04-2020,-500
07-04-2020,-500
06-04-2020,-500
03-04-2020,-500
02-04-2020,-500
01-04-2020,-500
31-03-2020,-500
30-03-2020,-500
27-03-2020,-500
26-03-2020,-500
25-03-2020,-500
24-03-2020,-500
23-03-2020,-500
20-03-2020,-500
19-03-2020,-500
18-03-2020,-500
17-03-2020,-500
16-03-2020,-500
13-03-2020,-500
12-03-2020,-500
11-03-2020,-500
10-03-2020,-500
09-03-2020,-500
06-03-2020,-500
05-03-2020,-500
04-03-2020,-500
03-03-2020,-500
02-03-2020,-500
28-02-2020,-500
27-02-2020,-500
26-02-2020,-500
25-02-2020,-500
24-02-2020,-500
21-02-2020,-500
20-02-2020,-500
19-02-2020,-500
18-02-2020,-500
17-02-2020,-500
14-02-2020,-500
13-02-2020,-500
12-02-2020,-500
11-02-2020,-500
10-02-2020,-500
07-02-2020,-500
06-02-2020,-500
05-02-2020,-500
04-02-2020,-500
03-02-2020,-500
31-01-2020,-500
30-01-2020,-500
29-01-2020,-500
28-01-2020,-500
27-01-2020,-500
24-01-2020,-500
23-01-2020,-500
22-01-2020,-500
21-01-2020,-500
20-01-2020,-500
17-01-2020,-500
16-01-2020,-500
15-01-2020,-500
14-01-2020,-500
13-01-2020,-500
10-01-2020,-500
09-01-2020,-500
08-01-2020,-500
07-01-2020,-500
06-01-2020,-500
03-01-2020,-500
02-01-2020,-500
31-12-2019,-500
30-12-2019,-500
27-12-2019,-500
24-12-2019,-500
23-12-2019,-500
20-12-2019,-500
19-12-2019,-500
18-12-2019,-500
17-12-2019,-500
16-12-2019,-500
13-12-2019,-500
12-12-2019,-500
11-12-2019,-500
10-12-2019,-500
09-12-2019,-500
06-12-2019,-500
05-12-2019,-500
04-12-2019,-500
03-12-2019,-500
02-12-2019,-500
29-11-2019,-500
28-11-2019,-500
27-11-2019,-500
26-11-2019,-500
25-11-2019,-500
22-11-2019,-500
21-11-2019,-500
20-11-2019,-500
19-11-2019,-500
18-11-2019,-500
15-11-2019,-500
14-11-2019,-500
13-11-2019,-500
12-11-2019,-500
11-11-2019,-500
08-11-2019,-500
07-11-2019,-500
06-11-2019,-500
05-11-2019,-500
04-11-2019,-500
01-11-2019,-500
31-10-2019,-500
30-10-2019,-500
29-10-2019,-500
28-10-2019,-500
25-10-2019,-500
24-10-2019,-500
23-10-2019,-500
22-10-2019,-500
21-10-2019,-500
18-10-2019,-500
17-10-2019,-500
16-10-2019,-500
15-10-2019,-500
14-10-2019,-500
11-10-2019,-500
10-10-2019,-500
09-10-2019,-500
08-10-2019,-500
07-10-2019,-500
04-10-2019,-500
03-10-2019,-500
02-10-2019,-500
01-10-2019,-500
30-09-2019,-500
27-09-2019,-500
26-09-2019,-500
25-09-2019,-500
24-09-2019,-500
23-09-2019,-500
20-09-2019,-500
19-09-2019,-500
18-09-2019,-500
17-09-2019,-500
16-09-2019,-500
13-09-2019,-500
12-09-2019,-500
11-09-2019,-500
10-09-2019,-500
09-09-2019,-500
06-09-2019,-500
05-09-2019,-500
04-09-2019,-500
03-09-2019,-500
02-09-2019,-500
30-08-2019,-500
29-08-2019,-500
28-08-2019,-500
27-08-2019,-500
23-08-2019,-500
22-08-2019,-500
21-08-2019,-500
20-08-2019,-500
19-08-2019,-500
16-08-2019,-500
15-08-2019,-500
14-08-2019,-500
13-08-2019,-500
12-08-2019,-500
09-08-2019,-500
08-08-2019,-500
07-08-2019,-500
06-08-2019,-500
05-08-2019,-500
02-08-2019,-500
01-08-2019,-500
31-07-2019,-500
30-07-2019,-500
29-07-2019,-500
26-07-2019,-500
25-07-2019,-500
24-07-2019,-500
23-07-2019,-500
22-07-2019,-500
19-07-2019,-500
18-07-2019,-500
17-07-2019,-500
16-07-2019,-500
15-07-2019,-500
12-07-2019,-500
11-07-2019,-500
10-07-2019,-500
09-07-2019,-500
08-07-2019,-500
05-07-2019,-500
04-07-2019,-500
03-07-2019,-500
02-07-2019,-500
01-07-2019,-500
28-06-2019,-500
27-06-2019,-500
26-06-2019,-500
25-06-2019,-500
24-06-2019,-500
21-06-2019,-500
20-06-2019,-500
19-06-2019,-500
18-06-2019,-500
17-06-2019,-500
14-06-2019,-500
13-06-2019,-500
12-06-2019,-500
11-06-2019,-500
10-06-2019,-500
07-06-2019,-500
06-06-2019,-500
05-06-2019,-500
04-06-2019,-500
03-06-2019,-500
31-05-2019,-500
30-05-2019,-500
29-05-2019,-500
28-05-2019,-500
24-05-2019,-500
23-05-2019,-500
22-05-2019,-500
21-05-2019,-500
20-05-2019,-500
17-05-2019,-500
16-05-2019,-500
15-05-2019,-500
14-05-2019,-500
13-05-2019,-500
10-05-2019,-500
09-05-2019,-500
08-05-2019,-500
07-05-2019,-500
03-05-2019,-500
02-05-2019,-500
01-05-2019,-500
30-04-2019,-500
29-04-2019,-500
26-04-2019,-500
25-04-2019,-500
24-04-2019,-500
23-04-2019,-500
18-04-2019,-500
17-04-2019,-500
16-04-2019,-500
15-04-2019,-500
12-04-2019,-500
11-04-2019,-500
10-04-2019,-500
09-04-2019,-500
08-04-2019,-500
05-04-2019,-500
04-04-2019,-500
03-04-2019,-500
02-04-2019,-500
01-04-2019,-500
29-03-2019,-500
28-03-2019,-500
27-03-2019,-500
26-03-2019,-500
25-03-2019,-500
22-03-2019,-500
21-03-2019,-500
20-03-2019,-500
19-03-2019,-500
18-03-2019,-500
15-03-2019,-500
14-03-2019,-500
13-03-2019,-500
12-03-2019,-500
11-03-2019,-500
08-03-2019,-500
07-03-2019,-500
06-03-2019,-500
05-03-2019,-500
04-03-2019,-500
01-03-2019,-500
28-02-2019,-500
27-02-2019,-500
26-02-2019,-500
25-02-2019,-500
22-02-2019,-500
21-02-2019,-500
20-02-2019,-500
19-02-2019,-500
18-02-2019,-500
15-02-2019,-500
14-02-2019,-500
13-02-2019,-500
12-02-2019,-500
11-02-2019,-500
08-02-2019,-500
07-02-2019,-500
06-02-2019,-500
05-02-2019,-500
04-02-2019,-500
01-02-2019,-500
31-01-2019,-500
30-01-2019,-500
29-01-2019,-500
28-01-2019,-500
25-01-2019,-500
24-01-2019,-500
23-01-2019,-500
22-01-2019,-500
21-01-2019,-500
18-01-2019,-500
17-01-2019,-500
16-01-2019,-500
15-01-2019,-500
14-01-2019,-500
11-01-2019,-500
10-01-2019,-500
09-01-2019,-500
08-01-2019,-500
07-01-2019,-500
04-01-2019,-500
03-01-2019,-500
02-01-2019,-500
31-12-2018,-500
28-12-2018,-500
27-12-2018,-500
24-12-2018,-500
21-12-2018,-500
20-12-2018,-500
19-12-2018,-500
18-12-2018,-500
17-12-2018,-500
14-12-2018,-500
13-12-2018,-500
12-12-2018,-500
11-12-2018,-500
10-12-2018,-500
07-12-2018,-500
06-12-2018,-500
05-12-2018,-500
04-12-2018,-500
03-12-2018,-500
30-11-2018,-500
29-11-2018,-500
28-11-2018,-500
27-11-2018,-500
26-11-2018,-500
23-11-2018,-500
22-11-2018,-500
21-11-2018,-500
20-11-2018,-500
19-11-2018,-500
16-11-2018,-500
15-11-2018,-500
14-11-2018,-500
13-11-2018,-500
12-11-2018,-500
09-11-2018,-500
08-11-2018,-500
07-11-2018,-500
06-11-2018,-500
05-11-2018,-500
02-11-2018,-500
01-11-2018,-500
31-10-2018,-500
30-10-2018,-500
29-10-2018,-500
26-10-2018,-500
25-10-2018,-500
24-10-2018,-500
23-10-2018,-500
22-10-2018,-500
19-10-2018,-500
18-10-2018,-500
17-10-2018,-500
16-10-2018,-500
15-10-2018,-500
12-10-2018,-500
11-10-2018,-500
10-10-2018,-500
09-10-2018,-500
08-10-2018,-500
05-10-2018,-500
04-10-2018,-500
03-10-2018,-500
02-10-2018,-500
01-10-2018,-500
28-09-2018,-500
27-09-2018,-500
26-09-2018,-500
25-09-2018,-500
24-09-2018,-500
21-09-2018,-500
20-09-2018,-500
19-09-2018,-500
18-09-2018,-500
17-09-2018,-500
14-09-2018,-500
13-09-2018,-500
12-09-2018,-500
11-09-2018,-500
10-09-2018,-500
07-09-2018,-500
06-09-2018,-500
05-09-2018,-500
04-09-2018,-500
03-09-2018,-500
31-08-2018,-500
30-08-2018,-500
29-08-2018,-500
28-08-2018,-500
24-08-2018,-500
23-08-2018,-500
22-08-2018,-500
21-08-2018,-500
20-08-2018,-500
17-08-2018,-500
16-08-2018,-500
15-08-2018,-500
14-08-2018,-500
13-08-2018,-500
10-08-2018,-500
09-08-2018,-500
08-08-2018,-500
07-08-2018,-500
06-08-2018,-500
03-08-2018,-500
02-08-2018,-500
01-08-2018,-500
31-07-2018,-500
30-07-2018,-500
27-07-2018,-500
26-07-2018,-500
25-07-2018,-500
24-07-2018,-500
23-07-2018,-500
20-07-2018,-500
19-07-2018,-500
18-07-2018,-500
17-07-2018,-500
16-07-2018,-500
13-07-2018,-500
12-07-2018,-500
11-07-2018,-500
10-07-2018,-500
09-07-2018,-500
06-07-2018,-500
05-07-2018,-500
04-07-2018,-500
03-07-2018,-500
02-07-2018,-500
29-06-2018,-500
28-06-2018,-500
27-06-2018,-500
26-06-2018,-500
25-06-2018,-500
22-06-2018,-500
21-06-2018,-500
20-06-2018,-500
19-06-2018,-500
18-06-2018,-500
15-06-2018,-500
14-06-2018,-500
13-06-2018,-500
12-06-2018,-500
11-06-2018,-500
08-06-2018,-500
07-06-2018,-500
06-06-2018,-500
05-06-2018,-500
04-06-2018,-500
01-06-2018,-500
31-05-2018,-500
30-05-2018,-500
29-05-2018,-500
25-05-2018,-500
24-05-2018,-500
23-05-2018,-500
22-05-2018,-500
21-05-2018,-500
18-05-2018,-500
17-05-2018,-500
16-05-2018,-500
15-05-2018,-500
14-05-2018,-500
11-05-2018,-500
10-05-2018,-500
09-05-2018,-500
08-05-2018,-500
04-05-2018,-500
03-05-2018,-500
02-05-2018,-500
01-05-2018,-500
30-04-2018,-500
27-04-2018,-500
26-04-2018,-500
25-04-2018,-500
24-04-2018,-500
23-04-2018,-500
20-04-2018,-500
19-04-2018,-500
18-04-2018,-500
17-04-2018,-500
16-04-2018,-500
13-04-2018,-500
12-04-2018,-500
11-04-2018,-500
10-04-2018,-500
09-04-2018,-500
06-04-2018,-500
05-04-2018,-500
04-04-2018,-500
03-04-2018,-500
29-03-2018,-500
28-03-2018,-500
27-03-2018,-500
26-03-2018,-500
23-03-2018,-500
22-03-2018,-500
21-03-2018,-500
20-03-2018,-500
19-03-2018,-500
16-03-2018,-500
15-03-2018,-500
14-03-2018,-500
13-03-2018,-500
12-03-2018,-500
09-03-2018,-500
08-03-2018,-500
07-03-2018,-500
06-03-2018,-500
05-03-2018,-500
02-03-2018,-500
01-03-2018,-500
28-02-2018,-500
27-02-2018,-500
26-02-2018,-500
23-02-2018,-500
22-02-2018,-500
21-02-2018,-500
20-02-2018,-500
19-02-2018,-500
16-02-2018,-500
15-02-2018,-500
14-02-2018,-500
13-02-2018,-500
12-02-2018,-500
09-02-2018,-500
08-02-2018,-500
07-02-2018,-500
06-02-2018,-500
05-02-2018,-500
02-02-2018,-500
01-02-2018,-500
31-01-2018,-500
30-01-2018,-500
29-01-2018,-500
26-01-2018,-500
25-01-2018,-500
24-01-2018,-500
23-01-2018,-500
22-01-2018,-500
19-01-2018,-500
18-01-2018,-500
17-01-2018,-500
16-01-2018,-500
15-01-2018,-500
12-01-2018,-500
11-01-2018,-500
10-01-2018,-500
09-01-2018,-500
08-01-2018,-500
05-01-2018,-500
04-01-2018,-500
03-01-2018,-500
02-01-2018,-500
29-12-2017,-500
28-12-2017,-500
27-12-2017,-500
22-12-2017,-500
21-12-2017,-500
20-12-2017,-500
19-12-2017,-500
18-12-2017,-500
15-12-2017,-500
14-12-2017,-500
13-12-2017,-500
12-12-2017,-500
11-12-2017,-500
08-12-2017,-500
07-12-2017,-500
06-12-2017,-500
05-12-2017,-500
04-12-2017,-500
01-12-2017,-500
30-11-2017,-500
29-11-2017,-500
28-11-2017,-500
27-11-2017,-500
24-11-2017,-500
23-11-2017,-500
22-11-2017,-500
21-11-2017,-500
20-11-2017,-500
17-11-2017,-500
16-11-2017,-500
15-11-2017,-500
14-11-2017,-500
13-11-2017,-500
10-11-2017,-500
09-11-2017,-500
08-11-2017,-500
07-11-2017,-500
06-11-2017,-500
03-11-2017,-500
02-11-2017,-500
01-11-2017,-500
31-10-2017,-500
30-10-2017,-500
27-10-2017,-500
26-10-2017,-500
25-10-2017,-500
24-10-2017,-500
23-10-2017,-500
20-10-2017,-500
19-10-2017,-500
18-10-2017,-500
17-10-2017,-500
16-10-2017,-500
13-10-2017,-500
12-10-2017,-500
11-10-2017,-500
10-10-2017,-500
09-10-2017,-500
06-10-2017,-500
05-10-2017,-500
04-10-2017,-500
03-10-2017,-500
02-10-2017,-500
29-09-2017,-500
28-09-2017,-500
27-09-2017,-500
26-09-2017,-500
25-09-2017,-500
22-09-2017,-500
21-09-2017,-500
20-09-2017,-500
19-09-2017,-500
18-09-2017,-500
15-09-2017,-500
14-09-2017,-500
13-09-2017,-500
12-09-2017,-500
11-09-2017,-500
08-09-2017,-500
07-09-2017,-500
06-09-2017,-500
05-09-2017,-500
04-09-2017,-500
01-09-2017,-500
31-08-2017,-500
30-08-2017,-500
29-08-2017,-500
25-08-2017,-500
24-08-2017,-500
23-08-2017,-500
22-08-2017,-500
21-08-2017,-500
18-08-2017,-500
17-08-2017,-500
16-08-2017,-500
15-08-2017,-500
14-08-2017,-500
11-08-2017,-500
10-08-2017,-500
09-08-2017,-500
08-08-2017,-500
07-08-2017,-500
04-08-2017,-500
03-08-2017,-500
02-08-2017,-500
01-08-2017,-500
31-07-2017,-500
28-07-2017,-500
27-07-2017,-500
26-07-2017,-500
25-07-2017,-500
24-07-2017,-500
21-07-2017,-500
20-07-2017,-500
19-07-2017,-500
18-07-2017,-500
17-07-2017,-500
14-07-2017,-500
13-07-2017,-500
12-07-2017,-500
11-07-2017,-500
10-07-2017,-500
07-07-2017,-500
06-07-2017,-500
05-07-2017,-500
04-07-2017,-500
03-07-2017,-500
30-06-2017,-500
29-06-2017,-500
28-06-2017,-500
27-06-2017,-500
26-06-2017,-500
23-06-2017,-500
22-06-2017,-500
21-06-2017,-500
20-06-2017,-500
19-06-2017,-500
16-06-2017,-500
15-06-2017,-500
14-06-2017,-500
13-06-2017,-500
12-06-2017,-500
09-06-2017,-500
08-06-2017,-500
07-06-2017,-500
06-06-2017,-500
05-06-2017,-500
02-06-2017,-500
01-06-2017,-500
31-05-2017,-500
30-05-2017,-500
26-05-2017,-500
25-05-2017,-500
24-05-2017,-500
23-05-2017,-500
22-05-2017,-500
19-05-2017,-500
18-05-2017,-500
17-05-2017,-500
16-05-2017,-500
15-05-2017,-500
12-05-2017,-500
11-05-2017,-500
10-05-2017,-500
09-05-2017,-500
08-05-2017,-500
05-05-2017,-500
04-05-2017,-500
03-05-2017,-500
02-05-2017,-500
28-04-2017,-500
27-04-2017,-500
26-04-2017,-500
25-04-2017,-500
24-04-2017,-500
21-04-2017,-500
20-04-2017,-500
19-04-2017,-500
18-04-2017,-500
13-04-2017,-500
12-04-2017,-500
11-04-2017,-500
10-04-2017,-500
07-04-2017,-500
06-04-2017,-500
05-04-2017,-500
04-04-2017,-500
03-04-2017,-500
31-03-2017,-500
30-03-2017,-500
29-03-2017,-500
28-03-2017,-500
27-03-2017,-500
24-03-2017,-500
23-03-2017,-500
22-03-2017,-500
21-03-2017,-500
20-03-2017,-500
17-03-2017,-500
16-03-2017,-500
15-03-2017,-500
14-03-2017,-500
13-03-2017,-500
10-03-2017,-500
09-03-2017,-500
08-03-2017,-500
07-03-2017,-500
06-03-2017,-500
03-03-2017,-500
02-03-2017,-500
01-03-2017,-500
28-02-2017,-500
27-02-2017,-500
24-02-2017,-500
23-02-2017,-500
22-02-2017,-500
21-02-2017,-500
20-02-2017,-500
17-02-2017,-500
16-02-2017,-500
15-02-2017,-500
14-02-2017,-500
13-02-2017,-500
10-02-2017,-500
09-02-2017,-500
08-02-2017,-500
07-02-2017,-500
06-02-2017,-500
03-02-2017,-500
02-02-2017,-500
01-02-2017,-500
31-01-2017,-500
30-01-2017,-500
27-01-2017,-500
26-01-2017,-500
25-01-2017,-500
24-01-2017,-500
23-01-2017,-500
20-01-2017,-500
19-01-2017,-500
18-01-2017,-500
17-01-2017,-500
16-01-2017,-500
13-01-2017,-500
12-01-2017,-500
11-01-2017,-500
10-01-2017,-500
09-01-2017,-500
06-01-2017,-500
05-01-2017,-500
04-01-2017,-500
03-01-2017,-500
30-12-2016,-500
29-12-2016,-500
28-12-2016,-500
23-12-2016,-500
22-12-2016,-500
21-12-2016,-500
20-12-2016,-500
19-12-2016,-500
16-12-2016,-500
15-12-2016,-500
14-12-2016,-500
13-12-2016,-500
12-12-2016,-500
09-12-2016,-500
08-12-2016,-500
07-12-2016,-500
06-12-2016,-500
05-12-2016,-500
02-12-2016,-500
01-12-2016,-500
30-11-2016,-500
29-11-2016,-500
28-11-2016,-500
25-11-2016,-500
24-11-2016,-500
23-11-2016,-500
22-11-2016,-500
21-11-2016,-500
18-11-2016,-500
17-11-2016,-500
16-11-2016,-500
15-11-2016,-500
14-11-2016,-500
11-11-2016,-500
10-11-2016,-500
09-11-2016,-500
08-11-2016,-500
07-11-2016,-500
04-11-2016,-500
03-11-2016,-500
02-11-2016,-500
01-11-2016,-500
31-10-2016,-500
28-10-2016,-500
27-10-2016,-500
26-10-2016,-500
25-10-2016,-500
24-10-2016,-500
21-10-2016,-500
20-10-2016,-500
19-10-2016,-500
18-10-2016,-500
17-10-2016,-500
14-10-2016,-500
13-10-2016,-500
12-10-2016,-500
11-10-2016,-500
10-10-2016,-500
07-10-2016,-500
06-10-2016,-500
05-10-2016,-500
04-10-2016,-500
03-10-2016,-500
30-09-2016,-500
29-09-2016,-500
28-09-2016,-500
27-09-2016,-500
26-09-2016,-500
23-09-2016,-500
22-09-2016,-500
21-09-2016,-500
20-09-2016,-500
19-09-2016,-500
16-09-2016,-500
15-09-2016,-500
14-09-2016,-500
13-09-2016,-500
12-09-2016,-500
09-09-2016,-500
08-09-2016,-500
07-09-2016,-500
06-09-2016,-500
05-09-2016,-500
02-09-2016,-500
01-09-2016,-500
31-08-2016,-500
30-08-2016,-500
26-08-2016,-500
25-08-2016,-500
24-08-2016,-500
23-08-2016,-500
22-08-2016,-500
19-08-2016,-500
18-08-2016,-500
17-08-2016,-500
16-08-2016,-500
15-08-2016,-500
12-08-2016,-500
11-08-2016,-500
10-08-2016,-500
09-08-2016,-500
08-08-2016,-500
05-08-2016,-500
04-08-2016,-500
03-08-2016,-500
02-08-2016,-500
01-08-2016,-500
29-07-2016,-500
28-07-2016,-500
27-07-2016,-500
26-07-2016,-500
25-07-2016,-500
22-07-2016,-500
21-07-2016,-500
20-07-2016,-500
19-07-2016,-500
18-07-2016,-500
15-07-2016,-500
14-07-2016,-500
13-07-2016,-500
12-07-2016,-500
11-07-2016,-500
08-07-2016,-500
07-07-2016,-500
06-07-2016,-500
05-07-2016,-500
04-07-2016,-500
01-07-2016,-500
30-06-2016,-500
29-06-2016,-500
28-06-2016,-500
27-06-2016,-500
24-06-2016,-500
23-06-2016,-500
22-06-2016,-500
21-06-2016,-500
20-06-2016,-500
17-06-2016,-500
16-06-2016,-500
15-06-2016,-500
14-06-2016,-500
13-06-2016,-500
10-06-2016,-500
09-06-2016,-500
08-06-2016,-500
07-06-2016,-500
06-06-2016,-500
03-06-2016,-500
02-06-2016,-500
01-06-2016,-500
31-05-2016,-500
27-05-2016,-500
26-05-2016,-500
25-05-2016,-500
24-05-2016,-500
23-05-2016,-500
20-05-2016,-500
19-05-2016,-500
18-05-2016,-500
17-05-2016,-500
16-05-2016,-500
13-05-2016,-500
12-05-2016,-500
11-05-2016,-500
10-05-2016,-500
09-05-2016,-500
06-05-2016,-500
05-05-2016,-500
04-05-2016,-500
03-05-2016,-500
29-04-2016,-500
28-04-2016,-500
27-04-2016,-500
26-04-2016,-500
25-04-2016,-500
22-04-2016,-500
21-04-2016,-500
20-04-2016,-500
19-04-2016,-500
18-04-2016,-500
15-04-2016,-500
14-04-2016,-500
13-04-2016,-500
12-04-2016,-500
11-04-2016,-500
08-04-2016,-500
07-04-2016,-500
06-04-2016,-500
05-04-2016,-500
04-04-2016,-500
01-04-2016,-500
31-03-2016,-500
30-03-2016,-500
29-03-2016,-500
24-03-2016,-500
23-03-2016,-500
22-03-2016,-500
21-03-2016,-500
18-03-2016,-500
17-03-2016,-500
16-03-2016,-500
15-03-2016,-500
14-03-2016,-500
11-03-2016,-500
10-03-2016,-500
09-03-2016,-500
08-03-2016,-500
07-03-2016,-500
04-03-2016,-500
03-03-2016,-500
02-03-2016,-500
01-03-2016,-500
29-02-2016,-500
26-02-2016,-500
25-02-2016,-500
24-02-2016,-500
23-02-2016,-500
22-02-2016,-500
19-02-2016,-500
18-02-2016,-500
17-02-2016,-500
16-02-2016,-500
15-02-2016,-500
12-02-2016,-500
11-02-2016,-500
10-02-2016,-500
09-02-2016,-500
08-02-2016,-500
05-02-2016,-500
04-02-2016,-500
03-02-2016,-500
02-02-2016,-500
01-02-2016,-500
29-01-2016,-500
28-01-2016,-500
27-01-2016,-500
26-01-2016,-500
25-01-2016,-500
22-01-2016,-500
21-01-2016,-500
20-01-2016,-500
19-01-2016,-500
18-01-2016,-500
15-01-2016,-500
14-01-2016,-500
13-01-2016,-500
12-01-2016,-500
11-01-2016,-500
08-01-2016,-500
07-01-2016,-500
06-01-2016,-500
05-01-2016,-500
04-01-2016,-500
31-12-2015,-500
30-12-2015,-500
29-12-2015,-500
24-12-2015,-500
23-12-2015,-500
22-12-2015,-500
21-12-2015,-500
18-12-2015,-500
17-12-2015,-500
16-12-2015,-500
15-12-2015,-500
14-12-2015,-500
11-12-2015,-500
10-12-2015,-500
09-12-2015,-500
08-12-2015,-500
07-12-2015,-500
04-12-2015,-500
03-12-2015,-500
02-12-2015,-500
01-12-2015,-500
30-11-2015,-500
27-11-2015,-500
26-11-2015,-500
25-11-2015,-500
24-11-2015,-500
23-11-2015,-500
20-11-2015,-500
19-11-2015,-500
18-11-2015,-500
17-11-2015,-500
16-11-2015,-500
13-11-2015,-500
12-11-2015,-500
11-11-2015,-500
10-11-2015,-500
09-11-2015,-500
06-11-2015,-500
05-11-2015,-500
04-11-2015,-500
03-11-2015,-500
02-11-2015,-500
30-10-2015,-500
29-10-2015,-500
28-10-2015,-500
27-10-2015,-500
26-10-2015,-500
23-10-2015,-500
22-10-2015,-500
21-10-2015,-500
20-10-2015,-500
19-10-2015,-500
16-10-2015,-500
15-10-2015,-500
14-10-2015,-500
13-10-2015,-500
12-10-2015,-500
09-10-2015,-500
08-10-2015,-500
07-10-2015,-500
06-10-2015,-500
05-10-2015,-500
02-10-2015,-500
01-10-2015,-500
30-09-2015,-500
29-09-2015,-500
28-09-2015,-500
25-09-2015,-500
24-09-2015,-500
23-09-2015,-500
22-09-2015,-500
21-09-2015,-500
18-09-2015,-500
17-09-2015,-500
16-09-2015,-500
15-09-2015,-500
14-09-2015,-500
11-09-2015,-500
10-09-2015,-500
09-09-2015,-500
08-09-2015,-500
07-09-2015,-500
04-09-2015,-500
03-09-2015,-500
02-09-2015,-500
01-09-2015,-500
28-08-2015,-500
27-08-2015,-500
26-08-2015,-500
25-08-2015,-500
24-08-2015,-500
21-08-2015,-500
20-08-2015,-500
19-08-2015,-500
18-08-2015,-500
17-08-2015,-500
14-08-2015,-500
13-08-2015,-500
12-08-2015,-500
11-08-2015,-500
10-08-2015,-500
07-08-2015,-500
06-08-2015,-500
05-08-2015,-500
04-08-2015,-500
03-08-2015,-500
31-07-2015,-500
30-07-2015,-500
29-07-2015,-500
28-07-2015,-500
27-07-2015,-500
24-07-2015,-500
23-07-2015,-500
22-07-2015,-500
21-07-2015,-500
20-07-2015,-500
17-07-2015,-500
16-07-2015,-500
15-07-2015,-500
14-07-2015,-500
13-07-2015,-500
10-07-2015,-500
09-07-2015,-500
08-07-2015,-500
07-07-2015,-500
06-07-2015,-500
03-07-2015,-500
02-07-2015,-500
01-07-2015,-500
30-06-2015,-500
29-06-2015,-500
26-06-2015,-500
25-06-2015,-500
24-06-2015,-500
23-06-2015,-500
22-06-2015,-500
19-06-2015,-500
18-06-2015,-500
17-06-2015,-500
16-06-2015,-500
15-06-2015,-500
12-06-2015,-500
11-06-2015,-500
10-06-2015,-500
09-06-2015,-500
08-06-2015,-500
05-06-2015,-500
04-06-2015,-500
03-06-2015,-500
02-06-2015,-500
01-06-2015,-500
29-05-2015,-500
28-05-2015,-500
27-05-2015,-500
26-05-2015,-500
22-05-2015,-500
21-05-2015,-500
20-05-2015,-500
19-05-2015,-500
18-05-2015,-500
15-05-2015,-500
14-05-2015,-500
13-05-2015,-500
12-05-2015,-500
11-05-2015,-500
08-05-2015,-500
07-05-2015,-500
06-05-2015,-500
05-05-2015,-500
01-05-2015,-500
30-04-2015,-500
29-04-2015,-500
28-04-2015,-500
27-04-2015,-500
24-04-2015,-500
23-04-2015,-500
22-04-2015,-500
21-04-2015,-500
20-04-2015,-500
17-04-2015,-500
16-04-2015,-500
15-04-2015,-500
14-04-2015,-500
13-04-2015,-500
10-04-2015,-500
09-04-2015,-500
08-04-2015,-500
07-04-2015,-500
02-04-2015,-500
01-04-2015,-500
31-03-2015,-500
30-03-2015,-500
27-03-2015,-500
26-03-2015,-500
25-03-2015,-500
24-03-2015,-500
23-03-2015,-500
20-03-2015,-500
19-03-2015,-500
18-03-2015,-500
17-03-2015,-500
16-03-2015,-500
13-03-2015,-500
12-03-2015,-500
11-03-2015,-500
10-03-2015,-500
09-03-2015,-500
06-03-2015,-500
05-03-2015,-500
04-03-2015,-500
03-03-2015,-500
02-03-2015,-500
27-02-2015,-500
26-02-2015,-500
25-02-2015,-500
24-02-2015,-500
23-02-2015,-500
20-02-2015,-500
19-02-2015,-500
18-02-2015,-500
17-02-2015,-500
16-02-2015,-500
13-02-2015,-500
12-02-2015,-500
11-02-2015,-500
10-02-2015,-500
09-02-2015,-500
06-02-2015,-500
05-02-2015,-500
04-02-2015,-500
03-02-2015,-500
02-02-2015,-500
30-01-2015,-500
29-01-2015,-500
28-01-2015,-500
27-01-2015,-500
26-01-2015,-500
23-01-2015,-500
22-01-2015,-500
21-01-2015,-500
20-01-2015,-500
19-01-2015,-500
16-01-2015,-500
15-01-2015,-500
14-01-2015,-500
13-01-2015,-500
12-01-2015,-500
09-01-2015,-500
08-01-2015,-500
07-01-2015,-500
06-01-2015,-500
05-01-2015,-500
02-01-2015,-500
31-12-2014,-500
30-12-2014,-500
29-12-2014,-500
24-12-2014,-500
23-12-2014,-500
22-12-2014,-500
19-12-2014,-500
18-12-2014,-500
17-12-2014,-500
16-12-2014,-500
15-12-2014,-500
12-12-2014,-500
11-12-2014,-500
10-12-2014,-500
09-12-2014,-500
08-12-2014,-500
05-12-2014,-500
04-12-2014,-500
03-12-2014,-500
02-12-2014,-500
01-12-2014,-500
28-11-2014,-500
27-11-2014,-500
26-11-2014,-500
25-11-2014,-500
24-11-2014,-500
21-11-2014,-500
20-11-2014,-500
19-11-2014,-500
18-11-2014,-500
17-11-2014,-500
14-11-2014,-500
13-11-2014,-500
12-11-2014,-500
11-11-2014,-500
10-11-2014,-500
07-11-2014,-500
06-11-2014,-500
05-11-2014,-500
04-11-2014,-500
03-11-2014,-500
31-10-2014,-500
30-10-2014,-500
29-10-2014,-500
28-10-2014,-500
27-10-2014,-500
24-10-2014,-500
23-10-2014,-500
22-10-2014,-500
21-10-2014,-500
20-10-2014,-500
17-10-2014,-500
16-10-2014,-500
15-10-2014,-500
14-10-2014,-500
13-10-2014,-500
10-10-2014,-500
09-10-2014,-500
08-10-2014,-500
07-10-2014,-500
06-10-2014,-500
03-10-2014,-500
02-10-2014,-500
01-10-2014,-500
30-09-2014,-500
29-09-2014,-500
26-09-2014,-500
25-09-2014,-500
24-09-2014,-500
23-09-2014,-500
22-09-2014,-500
19-09-2014,-500
18-09-2014,-500
17-09-2014,-500
16-09-2014,-500
15-09-2014,-500
12-09-2014,-500
11-09-2014,-500
10-09-2014,-500
09-09-2014,-500
08-09-2014,-500
05-09-2014,-500
04-09-2014,-500
03-09-2014,-500
02-09-2014,-500
01-09-2014,-500
29-08-2014,-500
28-08-2014,-500
27-08-2014,-500
26-08-2014,-500
22-08-2014,-500
21-08-2014,-500
20-08-2014,-500
19-08-2014,-500
18-08-2014,-500
15-08-2014,-500
14-08-2014,-500
13-08-2014,-500
12-08-2014,-500
11-08-2014,-500
08-08-2014,-500
07-08-2014,-500
06-08-2014,-500
05-08-2014,-500
04-08-2014,-500
01-08-2014,-500
31-07-2014,-500
30-07-2014,-500
29-07-2014,-500
28-07-2014,-500
25-07-2014,-500
24-07-2014,-500
23-07-2014,-500
22-07-2014,-500
21-07-2014,-500
18-07-2014,-500
17-07-2014,-500
16-07-2014,-500
15-07-2014,-500
14-07-2014,-500
11-07-2014,-500
10-07-2014,-500
09-07-2014,-500
08-07-2014,-500
07-07-2014,-500
04-07-2014,-500
03-07-2014,-500
02-07-2014,-500
01-07-2014,-500
30-06-2014,-500
27-06-2014,-500
26-06-2014,-500
25-06-2014,-500
24-06-2014,-500
23-06-2014,-500
20-06-2014,-500
19-06-2014,-500
18-06-2014,-500
17-06-2014,-500
16-06-2014,-500
13-06-2014,-500
12-06-2014,-500
11-06-2014,-500
10-06-2014,-500
09-06-2014,-500
06-06-2014,-500
05-06-2014,-500
04-06-2014,-500
03-06-2014,-500
02-06-2014,-500
30-05-2014,-500
29-05-2014,-500
28-05-2014,-500
27-05-2014,-500
23-05-2014,-500
22-05-2014,-500
21-05-2014,-500
20-05-2014,-500
19-05-2014,-500
16-05-2014,-500
15-05-2014,-500
14-05-2014,-500
13-05-2014,-500
12-05-2014,-500
09-05-2014,-500
08-05-2014,-500
07-05-2014,-500
06-05-2014,-500
02-05-2014,-500
01-05-2014,-500
30-04-2014,-500
29-04-2014,-500
28-04-2014,-500
25-04-2014,-500
24-04-2014,-500
23-04-2014,-500
22-04-2014,-500
17-04-2014,-500
16-04-2014,-500
15-04-2014,-500
14-04-2014,-500
11-04-2014,-500
10-04-2014,-500
09-04-2014,-500
08-04-2014,-500
07-04-2014,-500
04-04-2014,-500
03-04-2014,-500
02-04-2014,-500
01-04-2014,-500
31-03-2014,-500
28-03-2014,-500
27-03-2014,-500
26-03-2014,-500
25-03-2014,-500
24-03-2014,-500
21-03-2014,-500
20-03-2014,-500
19-03-2014,-500
18-03-2014,-500
17-03-2014,-500
14-03-2014,-500
13-03-2014,-500
12-03-2014,-500
11-03-2014,-500
10-03-2014,-500
07-03-2014,-500
06-03-2014,-500
05-03-2014,-500
04-03-2014,-500
03-03-2014,-500
28-02-2014,-500
27-02-2014,-500
26-02-2014,-500
25-02-2014,-500
24-02-2014,-500
21-02-2014,-500
20-02-2014,-500
19-02-2014,-500
18-02-2014,-500
17-02-2014,-500
14-02-2014,-500
13-02-2014,-500
12-02-2014,-500
11-02-2014,-500
10-02-2014,-500
07-02-2014,-500
06-02-2014,-500
05-02-2014,-500
04-02-2014,-500
03-02-2014,-500
31-01-2014,-500
30-01-2014,-500
29-01-2014,-500
28-01-2014,-500
27-01-2014,-500
24-01-2014,-500
23-01-2014,-500
22-01-2014,-500
21-01-2014,-500
20-01-2014,-500
17-01-2014,-500
16-01-2014,-500
15-01-2014,-500
14-01-2014,-500
13-01-2014,-500
10-01-2014,-500
09-01-2014,-500
08-01-2014,-500
07-01-2014,-500
06-01-2014,-500
03-01-2014,-500
02-01-2014,-500
31-12-2013,-500
30-12-2013,-500
27-12-2013,-500
24-12-2013,-500
23-12-2013,-500
20-12-2013,-500
19-12-2013,-500
18-12-2013,-500
17-12-2013,-500
16-12-2013,-500
13-12-2013,-500
12-12-2013,-500
11-12-2013,-500
10-12-2013,-500
09-12-2013,-500
06-12-2013,-500
05-12-2013,-500
04-12-2013,-500
03-12-2013,-500
02-12-2013,-500
29-11-2013,-500
28-11-2013,-500
27-11-2013,-500
26-11-2013,-500
25-11-2013,-500
22-11-2013,-500
21-11-2013,-500
20-11-2013,-500
19-11-2013,-500
18-11-2013,-500
15-11-2013,-500
14-11-2013,-500
13-11-2013,-500
12-11-2013,-500
11-11-2013,-500
08-11-2013,-500
07-11-2013,-500
06-11-2013,-500
05-11-2013,-500
04-11-2013,-500
01-11-2013,-500
31-10-2013,-500
30-10-2013,-500
29-10-2013,-500
28-10-2013,-500
25-10-2013,-500
24-10-2013,-500
23-10-2013,-500
22-10-2013,-500
21-10-2013,-500
18-10-2013,-500
17-10-2013,-500
16-10-2013,-500
15-10-2013,-500
14-10-2013,-500
11-10-2013,-500
10-10-2013,-500
09-10-2013,-500
08-10-2013,-500
07-10-2013,-500
04-10-2013,-500
03-10-2013,-500
02-10-2013,-500
01-10-2013,-500
30-09-2013,-500
27-09-2013,-500
26-09-2013,-500
25-09-2013,-500
24-09-2013,-500
23-09-2013,-500
20-09-2013,-500
19-09-2013,-500
18-09-2013,-500
17-09-2013,-500
16-09-2013,-500
13-09-2013,-500
12-09-2013,-500
11-09-2013,-500
10-09-2013,-500
09-09-2013,-500
06-09-2013,-500
05-09-2013,-500
04-09-2013,-500
03-09-2013,-500
02-09-2013,-500
30-08-2013,-500
29-08-2013,-500
28-08-2013,-500
27-08-2013,-500
23-08-2013,-500
22-08-2013,-500
21-08-2013,-500
20-08-2013,-500
19-08-2013,-500
16-08-2013,-500
15-08-2013,-500
14-08-2013,-500
13-08-2013,-500
12-08-2013,-500
09-08-2013,-500
08-08-2013,-500
07-08-2013,-500
06-08-2013,-500
05-08-2013,-500
02-08-2013,-500
01-08-2013,-500
31-07-2013,-500
30-07-2013,-500
29-07-2013,-500
26-07-2013,-500
25-07-2013,-500
24-07-2013,-500
23-07-2013,-500
22-07-2013,-500
19-07-2013,-500
18-07-2013,-500
17-07-2013,-500
16-07-2013,-500
15-07-2013,-500
12-07-2013,-500
11-07-2013,-500
10-07-2013,-500
09-07-2013,-500
08-07-2013,-500
05-07-2013,-500
04-07-2013,-500
03-07-2013,-500
02-07-2013,-500
01-07-2013,-500
28-06-2013,-500
27-06-2013,-500
26-06-2013,-500
25-06-2013,-500
24-06-2013,-500
21-06-2013,-500
20-06-2013,-500
19-06-2013,-500
18-06-2013,-500
17-06-2013,-500
14-06-2013,-500
13-06-2013,-500
12-06-2013,-500
11-06-2013,-500
10-06-2013,-500
07-06-2013,-500
06-06-2013,-500
05-06-2013,-500
04-06-2013,-500
03-06-2013,-500
31-05-2013,-500
30-05-2013,-500
29-05-2013,-500
28-05-2013,-500
24-05-2013,-500
23-05-2013,-500
22-05-2013,-500
21-05-2013,-500
20-05-2013,-500
17-05-2013,-500
16-05-2013,-500
15-05-2013,-500
14-05-2013,-500
13-05-2013,-500
10-05-2013,-500
09-05-2013,-500
08-05-2013,-500
07-05-2013,-500
03-05-2013,-500
02-05-2013,-500
01-05-2013,-500
30-04-2013,-500
29-04-2013,-500
26-04-2013,-500
25-04-2013,-500
24-04-2013,-500
23-04-2013,-500
22-04-2013,-500
19-04-2013,-500
18-04-2013,-500
17-04-2013,-500
16-04-2013,-500
15-04-2013,-500
12-04-2013,-500
11-04-2013,-500
10-04-2013,-500
09-04-2013,-500
08-04-2013,-500
05-04-2013,-500
04-04-2013,-500
03-04-2013,-500
02-04-2013,-500
28-03-2013,-500
27-03-2013,-500
26-03-2013,-500
25-03-2013,-500
22-03-2013,-500
21-03-2013,-500
20-03-2013,-500
19-03-2013,-500
18-03-2013,-500
15-03-2013,-500
14-03-2013,-500
13-03-2013,-500
12-03-2013,-500
11-03-2013,-500
08-03-2013,-500
07-03-2013,-500
06-03-2013,-500
05-03-2013,-500
04-03-2013,-500
01-03-2013,-500
28-02-2013,-500
27-02-2013,-500
26-02-2013,-500
25-02-2013,-500
22-02-2013,-500
21-02-2013,-500
20-02-2013,-500
19-02-2013,-500
18-02-2013,-500
15-02-2013,-500
14-02-2013,-500
13-02-2013,-500
12-02-2013,-500
11-02-2013,-500
08-02-2013,-500
07-02-2013,-500
06-02-2013,-500
05-02-2013,-500
04-02-2013,-500
01-02-2013,-500
31-01-2013,-500
30-01-2013,-500
29-01-2013,-500
28-01-2013,-500
25-01-2013,-500
24-01-2013,-500
23-01-2013,-500
22-01-2013,-500
21-01-2013,-500
18-01-2013,-500
17-01-2013,-500
16-01-2013,-500
15-01-2013,-500
14-01-2013,-500
11-01-2013,-500
10-01-2013,-500
09-01-2013,-500
08-01-2013,-500
07-01-2013,-500
04-01-2013,-500
03-01-2013,-500
02-01-2013,-500
================================================
FILE: python/rateslib/data/historical/inr_rfr.csv
================================================
reference_date,rate
01-01-2025,-500
31-12-2024,-500
30-12-2024,-500
27-12-2024,-500
26-12-2024,-500
24-12-2024,-500
23-12-2024,-500
20-12-2024,-500
19-12-2024,-500
18-12-2024,-500
17-12-2024,-500
16-12-2024,-500
13-12-2024,-500
12-12-2024,-500
11-12-2024,-500
10-12-2024,-500
09-12-2024,-500
06-12-2024,-500
05-12-2024,-500
04-12-2024,-500
03-12-2024,-500
02-12-2024,-500
29-11-2024,-500
28-11-2024,-500
27-11-2024,-500
26-11-2024,-500
25-11-2024,-500
22-11-2024,-500
21-11-2024,-500
19-11-2024,-500
18-11-2024,-500
14-11-2024,-500
13-11-2024,-500
12-11-2024,-500
11-11-2024,-500
08-11-2024,-500
07-11-2024,-500
06-11-2024,-500
05-11-2024,-500
04-11-2024,-500
31-10-2024,-500
30-10-2024,-500
29-10-2024,-500
28-10-2024,-500
25-10-2024,-500
24-10-2024,-500
23-10-2024,-500
22-10-2024,-500
21-10-2024,-500
18-10-2024,-500
17-10-2024,-500
16-10-2024,-500
15-10-2024,-500
14-10-2024,-500
11-10-2024,-500
10-10-2024,-500
09-10-2024,-500
08-10-2024,-500
07-10-2024,-500
04-10-2024,-500
03-10-2024,-500
01-10-2024,-500
30-09-2024,-500
27-09-2024,-500
26-09-2024,-500
25-09-2024,-500
24-09-2024,-500
23-09-2024,-500
20-09-2024,-500
19-09-2024,-500
17-09-2024,-500
16-09-2024,-500
13-09-2024,-500
12-09-2024,-500
11-09-2024,-500
10-09-2024,-500
09-09-2024,-500
06-09-2024,-500
05-09-2024,-500
04-09-2024,-500
03-09-2024,-500
02-09-2024,-500
30-08-2024,-500
29-08-2024,-500
28-08-2024,-500
27-08-2024,-500
26-08-2024,-500
23-08-2024,-500
22-08-2024,-500
21-08-2024,-500
20-08-2024,-500
19-08-2024,-500
16-08-2024,-500
14-08-2024,-500
13-08-2024,-500
12-08-2024,-500
09-08-2024,-500
08-08-2024,-500
07-08-2024,-500
06-08-2024,-500
05-08-2024,-500
02-08-2024,-500
01-08-2024,-500
31-07-2024,-500
30-07-2024,-500
29-07-2024,-500
26-07-2024,-500
25-07-2024,-500
24-07-2024,-500
23-07-2024,-500
22-07-2024,-500
19-07-2024,-500
18-07-2024,-500
16-07-2024,-500
15-07-2024,-500
12-07-2024,-500
11-07-2024,-500
10-07-2024,-500
09-07-2024,-500
08-07-2024,-500
05-07-2024,-500
04-07-2024,-500
03-07-2024,-500
02-07-2024,-500
01-07-2024,-500
28-06-2024,-500
27-06-2024,-500
26-06-2024,-500
25-06-2024,-500
24-06-2024,-500
21-06-2024,-500
20-06-2024,-500
19-06-2024,-500
18-06-2024,-500
14-06-2024,-500
13-06-2024,-500
12-06-2024,-500
11-06-2024,-500
10-06-2024,-500
07-06-2024,-500
06-06-2024,-500
05-06-2024,-500
04-06-2024,-500
03-06-2024,-500
31-05-2024,-500
30-05-2024,-500
29-05-2024,-500
28-05-2024,-500
27-05-2024,-500
24-05-2024,-500
22-05-2024,-500
21-05-2024,-500
17-05-2024,-500
16-05-2024,-500
15-05-2024,-500
14-05-2024,-500
13-05-2024,-500
10-05-2024,-500
09-05-2024,-500
08-05-2024,-500
07-05-2024,-500
06-05-2024,-500
03-05-2024,-500
02-05-2024,-500
30-04-2024,-500
29-04-2024,-500
26-04-2024,-500
25-04-2024,-500
24-04-2024,-500
23-04-2024,-500
22-04-2024,-500
19-04-2024,-500
18-04-2024,-500
16-04-2024,-500
15-04-2024,-500
12-04-2024,-500
10-04-2024,-500
08-04-2024,-500
05-04-2024,-500
04-04-2024,-500
03-04-2024,-500
02-04-2024,-500
28-03-2024,-500
27-03-2024,-500
26-03-2024,-500
22-03-2024,-500
21-03-2024,-500
20-03-2024,-500
19-03-2024,-500
18-03-2024,-500
15-03-2024,-500
14-03-2024,-500
13-03-2024,-500
12-03-2024,-500
11-03-2024,-500
07-03-2024,-500
06-03-2024,-500
05-03-2024,-500
04-03-2024,-500
01-03-2024,-500
29-02-2024,-500
28-02-2024,-500
27-02-2024,-500
26-02-2024,-500
23-02-2024,-500
22-02-2024,-500
21-02-2024,-500
20-02-2024,-500
16-02-2024,-500
15-02-2024,-500
14-02-2024,-500
13-02-2024,-500
12-02-2024,-500
09-02-2024,-500
08-02-2024,-500
07-02-2024,-500
06-02-2024,-500
05-02-2024,-500
02-02-2024,-500
01-02-2024,-500
31-01-2024,-500
30-01-2024,-500
29-01-2024,-500
25-01-2024,-500
24-01-2024,-500
23-01-2024,-500
19-01-2024,-500
18-01-2024,-500
17-01-2024,-500
16-01-2024,-500
15-01-2024,-500
12-01-2024,-500
11-01-2024,-500
10-01-2024,-500
09-01-2024,-500
08-01-2024,-500
05-01-2024,-500
04-01-2024,-500
03-01-2024,-500
02-01-2024,-500
01-01-2024,-500
29-12-2023,-500
28-12-2023,-500
27-12-2023,-500
26-12-2023,-500
22-12-2023,-500
21-12-2023,-500
20-12-2023,-500
19-12-2023,-500
18-12-2023,-500
15-12-2023,-500
14-12-2023,-500
13-12-2023,-500
12-12-2023,-500
11-12-2023,-500
08-12-2023,-500
07-12-2023,-500
06-12-2023,-500
05-12-2023,-500
04-12-2023,-500
01-12-2023,-500
30-11-2023,-500
29-11-2023,-500
28-11-2023,-500
24-11-2023,-500
23-11-2023,-500
22-11-2023,-500
21-11-2023,-500
20-11-2023,-500
17-11-2023,-500
16-11-2023,-500
15-11-2023,-500
13-11-2023,-500
10-11-2023,-500
09-11-2023,-500
08-11-2023,-500
07-11-2023,-500
06-11-2023,-500
03-11-2023,-500
02-11-2023,-500
01-11-2023,-500
31-10-2023,-500
30-10-2023,-500
27-10-2023,-500
26-10-2023,-500
25-10-2023,-500
23-10-2023,-500
20-10-2023,-500
19-10-2023,-500
18-10-2023,-500
17-10-2023,-500
16-10-2023,-500
13-10-2023,-500
12-10-2023,-500
11-10-2023,-500
10-10-2023,-500
09-10-2023,-500
06-10-2023,-500
05-10-2023,-500
04-10-2023,-500
03-10-2023,-500
29-09-2023,-500
28-09-2023,-500
27-09-2023,-500
26-09-2023,-500
25-09-2023,-500
22-09-2023,-500
21-09-2023,-500
20-09-2023,-500
18-09-2023,-500
15-09-2023,-500
14-09-2023,-500
13-09-2023,-500
12-09-2023,-500
11-09-2023,-500
08-09-2023,-500
07-09-2023,-500
06-09-2023,-500
05-09-2023,-500
04-09-2023,-500
01-09-2023,-500
31-08-2023,-500
30-08-2023,-500
29-08-2023,-500
28-08-2023,-500
25-08-2023,-500
24-08-2023,-500
23-08-2023,-500
22-08-2023,-500
21-08-2023,-500
18-08-2023,-500
17-08-2023,-500
14-08-2023,-500
11-08-2023,-500
10-08-2023,-500
09-08-2023,-500
08-08-2023,-500
07-08-2023,-500
04-08-2023,-500
03-08-2023,-500
02-08-2023,-500
01-08-2023,-500
31-07-2023,-500
28-07-2023,-500
27-07-2023,-500
26-07-2023,-500
25-07-2023,-500
24-07-2023,-500
21-07-2023,-500
20-07-2023,-500
19-07-2023,-500
18-07-2023,-500
17-07-2023,-500
14-07-2023,-500
13-07-2023,-500
12-07-2023,-500
11-07-2023,-500
10-07-2023,-500
07-07-2023,-500
06-07-2023,-500
05-07-2023,-500
04-07-2023,-500
03-07-2023,-500
30-06-2023,-500
28-06-2023,-500
27-06-2023,-500
26-06-2023,-500
23-06-2023,-500
22-06-2023,-500
21-06-2023,-500
20-06-2023,-500
19-06-2023,-500
16-06-2023,-500
15-06-2023,-500
14-06-2023,-500
13-06-2023,-500
12-06-2023,-500
09-06-2023,-500
08-06-2023,-500
07-06-2023,-500
06-06-2023,-500
05-06-2023,-500
02-06-2023,-500
01-06-2023,-500
31-05-2023,-500
30-05-2023,-500
29-05-2023,-500
26-05-2023,-500
25-05-2023,-500
24-05-2023,-500
23-05-2023,-500
22-05-2023,-500
19-05-2023,-500
18-05-2023,-500
17-05-2023,-500
16-05-2023,-500
15-05-2023,-500
12-05-2023,-500
11-05-2023,-500
10-05-2023,-500
09-05-2023,-500
08-05-2023,-500
04-05-2023,-500
03-05-2023,-500
02-05-2023,-500
28-04-2023,-500
27-04-2023,-500
26-04-2023,-500
25-04-2023,-500
24-04-2023,-500
21-04-2023,-500
20-04-2023,-500
19-04-2023,-500
18-04-2023,-500
17-04-2023,-500
13-04-2023,-500
12-04-2023,-500
11-04-2023,-500
10-04-2023,-500
06-04-2023,-500
05-04-2023,-500
03-04-2023,-500
31-03-2023,-500
29-03-2023,-500
28-03-2023,-500
27-03-2023,-500
24-03-2023,-500
23-03-2023,-500
21-03-2023,-500
20-03-2023,-500
17-03-2023,-500
16-03-2023,-500
15-03-2023,-500
14-03-2023,-500
13-03-2023,-500
10-03-2023,-500
09-03-2023,-500
08-03-2023,-500
06-03-2023,-500
03-03-2023,-500
02-03-2023,-500
01-03-2023,-500
28-02-2023,-500
27-02-2023,-500
24-02-2023,-500
23-02-2023,-500
22-02-2023,-500
21-02-2023,-500
20-02-2023,-500
17-02-2023,-500
16-02-2023,-500
15-02-2023,-500
14-02-2023,-500
13-02-2023,-500
10-02-2023,-500
09-02-2023,-500
08-02-2023,-500
07-02-2023,-500
06-02-2023,-500
03-02-2023,-500
02-02-2023,-500
01-02-2023,-500
31-01-2023,-500
30-01-2023,-500
27-01-2023,-500
25-01-2023,-500
24-01-2023,-500
23-01-2023,-500
20-01-2023,-500
19-01-2023,-500
18-01-2023,-500
17-01-2023,-500
16-01-2023,-500
13-01-2023,-500
12-01-2023,-500
11-01-2023,-500
10-01-2023,-500
09-01-2023,-500
06-01-2023,-500
05-01-2023,-500
04-01-2023,-500
03-01-2023,-500
02-01-2023,-500
30-12-2022,-500
29-12-2022,-500
28-12-2022,-500
27-12-2022,-500
26-12-2022,-500
23-12-2022,-500
22-12-2022,-500
21-12-2022,-500
20-12-2022,-500
19-12-2022,-500
16-12-2022,-500
15-12-2022,-500
14-12-2022,-500
13-12-2022,-500
12-12-2022,-500
09-12-2022,-500
08-12-2022,-500
07-12-2022,-500
06-12-2022,-500
05-12-2022,-500
02-12-2022,-500
01-12-2022,-500
30-11-2022,-500
29-11-2022,-500
28-11-2022,-500
25-11-2022,-500
24-11-2022,-500
23-11-2022,-500
22-11-2022,-500
21-11-2022,-500
18-11-2022,-500
17-11-2022,-500
16-11-2022,-500
15-11-2022,-500
14-11-2022,-500
11-11-2022,-500
10-11-2022,-500
09-11-2022,-500
07-11-2022,-500
04-11-2022,-500
03-11-2022,-500
02-11-2022,-500
01-11-2022,-500
31-10-2022,-500
28-10-2022,-500
27-10-2022,-500
25-10-2022,-500
21-10-2022,-500
20-10-2022,-500
19-10-2022,-500
18-10-2022,-500
17-10-2022,-500
14-10-2022,-500
13-10-2022,-500
12-10-2022,-500
11-10-2022,-500
10-10-2022,-500
07-10-2022,-500
06-10-2022,-500
04-10-2022,-500
03-10-2022,-500
30-09-2022,-500
29-09-2022,-500
28-09-2022,-500
27-09-2022,-500
26-09-2022,-500
23-09-2022,-500
22-09-2022,-500
21-09-2022,-500
20-09-2022,-500
19-09-2022,-500
16-09-2022,-500
15-09-2022,-500
14-09-2022,-500
13-09-2022,-500
12-09-2022,-500
09-09-2022,-500
08-09-2022,-500
07-09-2022,-500
06-09-2022,-500
05-09-2022,-500
02-09-2022,-500
01-09-2022,-500
30-08-2022,-500
29-08-2022,-500
26-08-2022,-500
25-08-2022,-500
24-08-2022,-500
23-08-2022,-500
22-08-2022,-500
19-08-2022,-500
18-08-2022,-500
17-08-2022,-500
12-08-2022,-500
11-08-2022,-500
10-08-2022,-500
08-08-2022,-500
05-08-2022,-500
04-08-2022,-500
03-08-2022,-500
02-08-2022,-500
01-08-2022,-500
29-07-2022,-500
28-07-2022,-500
27-07-2022,-500
26-07-2022,-500
25-07-2022,-500
22-07-2022,-500
21-07-2022,-500
20-07-2022,-500
19-07-2022,-500
18-07-2022,-500
15-07-2022,-500
14-07-2022,-500
13-07-2022,-500
12-07-2022,-500
11-07-2022,-500
08-07-2022,-500
07-07-2022,-500
06-07-2022,-500
05-07-2022,-500
04-07-2022,-500
01-07-2022,-500
30-06-2022,-500
29-06-2022,-500
28-06-2022,-500
27-06-2022,-500
24-06-2022,-500
23-06-2022,-500
22-06-2022,-500
21-06-2022,-500
20-06-2022,-500
17-06-2022,-500
16-06-2022,-500
15-06-2022,-500
14-06-2022,-500
13-06-2022,-500
10-06-2022,-500
09-06-2022,-500
08-06-2022,-500
07-06-2022,-500
06-06-2022,-500
03-06-2022,-500
02-06-2022,-500
01-06-2022,-500
31-05-2022,-500
30-05-2022,-500
27-05-2022,-500
26-05-2022,-500
25-05-2022,-500
24-05-2022,-500
23-05-2022,-500
20-05-2022,-500
19-05-2022,-500
18-05-2022,-500
17-05-2022,-500
13-05-2022,-500
12-05-2022,-500
11-05-2022,-500
10-05-2022,-500
09-05-2022,-500
06-05-2022,-500
05-05-2022,-500
04-05-2022,-500
02-05-2022,-500
29-04-2022,-500
28-04-2022,-500
27-04-2022,-500
26-04-2022,-500
25-04-2022,-500
22-04-2022,-500
21-04-2022,-500
20-04-2022,-500
19-04-2022,-500
18-04-2022,-500
13-04-2022,-500
12-04-2022,-500
11-04-2022,-500
08-04-2022,-500
07-04-2022,-500
06-04-2022,-500
05-04-2022,-500
04-04-2022,-500
31-03-2022,-500
30-03-2022,-500
29-03-2022,-500
28-03-2022,-500
25-03-2022,-500
24-03-2022,-500
23-03-2022,-500
22-03-2022,-500
21-03-2022,-500
17-03-2022,-500
16-03-2022,-500
15-03-2022,-500
14-03-2022,-500
11-03-2022,-500
10-03-2022,-500
09-03-2022,-500
08-03-2022,-500
07-03-2022,-500
04-03-2022,-500
03-03-2022,-500
02-03-2022,-500
28-02-2022,-500
25-02-2022,-500
24-02-2022,-500
23-02-2022,-500
22-02-2022,-500
21-02-2022,-500
18-02-2022,-500
17-02-2022,-500
16-02-2022,-500
15-02-2022,-500
14-02-2022,-500
11-02-2022,-500
10-02-2022,-500
09-02-2022,-500
08-02-2022,-500
04-02-2022,-500
03-02-2022,-500
02-02-2022,-500
01-02-2022,-500
31-01-2022,-500
28-01-2022,-500
27-01-2022,-500
25-01-2022,-500
24-01-2022,-500
21-01-2022,-500
20-01-2022,-500
19-01-2022,-500
18-01-2022,-500
17-01-2022,-500
14-01-2022,-500
13-01-2022,-500
12-01-2022,-500
11-01-2022,-500
10-01-2022,-500
07-01-2022,-500
06-01-2022,-500
05-01-2022,-500
04-01-2022,-500
03-01-2022,-500
31-12-2021,-500
30-12-2021,-500
29-12-2021,-500
28-12-2021,-500
27-12-2021,-500
24-12-2021,-500
23-12-2021,-500
22-12-2021,-500
21-12-2021,-500
20-12-2021,-500
17-12-2021,-500
16-12-2021,-500
15-12-2021,-500
14-12-2021,-500
13-12-2021,-500
10-12-2021,-500
09-12-2021,-500
08-12-2021,-500
07-12-2021,-500
06-12-2021,-500
03-12-2021,-500
02-12-2021,-500
01-12-2021,-500
30-11-2021,-500
29-11-2021,-500
26-11-2021,-500
25-11-2021,-500
24-11-2021,-500
23-11-2021,-500
22-11-2021,-500
18-11-2021,-500
17-11-2021,-500
16-11-2021,-500
15-11-2021,-500
12-11-2021,-500
11-11-2021,-500
10-11-2021,-500
09-11-2021,-500
08-11-2021,-500
03-11-2021,-500
02-11-2021,-500
01-11-2021,-500
29-10-2021,-500
28-10-2021,-500
27-10-2021,-500
26-10-2021,-500
25-10-2021,-500
22-10-2021,-500
21-10-2021,-500
20-10-2021,-500
18-10-2021,-500
14-10-2021,-500
13-10-2021,-500
12-10-2021,-500
11-10-2021,-500
08-10-2021,-500
07-10-2021,-500
06-10-2021,-500
05-10-2021,-500
04-10-2021,-500
01-10-2021,-500
30-09-2021,-500
29-09-2021,-500
28-09-2021,-500
27-09-2021,-500
24-09-2021,-500
23-09-2021,-500
22-09-2021,-500
21-09-2021,-500
20-09-2021,-500
17-09-2021,-500
16-09-2021,-500
15-09-2021,-500
14-09-2021,-500
13-09-2021,-500
09-09-2021,-500
08-09-2021,-500
07-09-2021,-500
06-09-2021,-500
03-09-2021,-500
02-09-2021,-500
01-09-2021,-500
31-08-2021,-500
30-08-2021,-500
27-08-2021,-500
26-08-2021,-500
25-08-2021,-500
24-08-2021,-500
23-08-2021,-500
20-08-2021,-500
18-08-2021,-500
17-08-2021,-500
13-08-2021,-500
12-08-2021,-500
11-08-2021,-500
10-08-2021,-500
09-08-2021,-500
06-08-2021,-500
05-08-2021,-500
04-08-2021,-500
03-08-2021,-500
02-08-2021,-500
30-07-2021,-500
29-07-2021,-500
28-07-2021,-500
27-07-2021,-500
26-07-2021,-500
23-07-2021,-500
22-07-2021,-500
20-07-2021,-500
19-07-2021,-500
16-07-2021,-500
15-07-2021,-500
14-07-2021,-500
13-07-2021,-500
12-07-2021,-500
09-07-2021,-500
08-07-2021,-500
07-07-2021,-500
06-07-2021,-500
05-07-2021,-500
02-07-2021,-500
01-07-2021,-500
30-06-2021,-500
29-06-2021,-500
28-06-2021,-500
25-06-2021,-500
24-06-2021,-500
23-06-2021,-500
22-06-2021,-500
21-06-2021,-500
18-06-2021,-500
17-06-2021,-500
16-06-2021,-500
15-06-2021,-500
14-06-2021,-500
11-06-2021,-500
10-06-2021,-500
09-06-2021,-500
08-06-2021,-500
07-06-2021,-500
04-06-2021,-500
03-06-2021,-500
02-06-2021,-500
01-06-2021,-500
31-05-2021,-500
28-05-2021,-500
27-05-2021,-500
25-05-2021,-500
24-05-2021,-500
21-05-2021,-500
20-05-2021,-500
19-05-2021,-500
18-05-2021,-500
17-05-2021,-500
14-05-2021,-500
12-05-2021,-500
11-05-2021,-500
10-05-2021,-500
07-05-2021,-500
06-05-2021,-500
05-05-2021,-500
04-05-2021,-500
03-05-2021,-500
30-04-2021,-500
29-04-2021,-500
28-04-2021,-500
27-04-2021,-500
26-04-2021,-500
23-04-2021,-500
22-04-2021,-500
20-04-2021,-500
19-04-2021,-500
16-04-2021,-500
15-04-2021,-500
12-04-2021,-500
09-04-2021,-500
08-04-2021,-500
07-04-2021,-500
06-04-2021,-500
05-04-2021,-500
31-03-2021,-500
30-03-2021,-500
26-03-2021,-500
25-03-2021,-500
24-03-2021,-500
23-03-2021,-500
22-03-2021,-500
19-03-2021,-500
18-03-2021,-500
17-03-2021,-500
16-03-2021,-500
15-03-2021,-500
12-03-2021,-500
10-03-2021,-500
09-03-2021,-500
08-03-2021,-500
05-03-2021,-500
04-03-2021,-500
03-03-2021,-500
02-03-2021,-500
01-03-2021,-500
26-02-2021,-500
25-02-2021,-500
24-02-2021,-500
23-02-2021,-500
22-02-2021,-500
18-02-2021,-500
17-02-2021,-500
16-02-2021,-500
15-02-2021,-500
12-02-2021,-500
11-02-2021,-500
10-02-2021,-500
09-02-2021,-500
08-02-2021,-500
05-02-2021,-500
04-02-2021,-500
03-02-2021,-500
02-02-2021,-500
01-02-2021,-500
29-01-2021,-500
28-01-2021,-500
27-01-2021,-500
25-01-2021,-500
22-01-2021,-500
21-01-2021,-500
20-01-2021,-500
19-01-2021,-500
18-01-2021,-500
15-01-2021,-500
14-01-2021,-500
13-01-2021,-500
12-01-2021,-500
11-01-2021,-500
08-01-2021,-500
07-01-2021,-500
06-01-2021,-500
05-01-2021,-500
04-01-2021,-500
01-01-2021,-500
31-12-2020,-500
30-12-2020,-500
29-12-2020,-500
28-12-2020,-500
24-12-2020,-500
23-12-2020,-500
22-12-2020,-500
21-12-2020,-500
18-12-2020,-500
17-12-2020,-500
16-12-2020,-500
15-12-2020,-500
14-12-2020,-500
11-12-2020,-500
10-12-2020,-500
09-12-2020,-500
08-12-2020,-500
07-12-2020,-500
04-12-2020,-500
03-12-2020,-500
02-12-2020,-500
01-12-2020,-500
27-11-2020,-500
26-11-2020,-500
25-11-2020,-500
24-11-2020,-500
23-11-2020,-500
20-11-2020,-500
19-11-2020,-500
18-11-2020,-500
17-11-2020,-500
13-11-2020,-500
12-11-2020,-500
11-11-2020,-500
10-11-2020,-500
09-11-2020,-500
06-11-2020,-500
05-11-2020,-500
04-11-2020,-500
03-11-2020,-500
02-11-2020,-500
29-10-2020,-500
28-10-2020,-500
27-10-2020,-500
26-10-2020,-500
23-10-2020,-500
22-10-2020,-500
21-10-2020,-500
20-10-2020,-500
19-10-2020,-500
16-10-2020,-500
15-10-2020,-500
14-10-2020,-500
13-10-2020,-500
12-10-2020,-500
09-10-2020,-500
08-10-2020,-500
07-10-2020,-500
06-10-2020,-500
05-10-2020,-500
01-10-2020,-500
30-09-2020,-500
29-09-2020,-500
28-09-2020,-500
25-09-2020,-500
24-09-2020,-500
23-09-2020,-500
22-09-2020,-500
21-09-2020,-500
18-09-2020,-500
17-09-2020,-500
16-09-2020,-500
15-09-2020,-500
14-09-2020,-500
11-09-2020,-500
10-09-2020,-500
09-09-2020,-500
08-09-2020,-500
07-09-2020,-500
04-09-2020,-500
03-09-2020,-500
02-09-2020,-500
01-09-2020,-500
31-08-2020,-500
28-08-2020,-500
27-08-2020,-500
26-08-2020,-500
25-08-2020,-500
24-08-2020,-500
21-08-2020,-500
20-08-2020,-500
19-08-2020,-500
18-08-2020,-500
17-08-2020,-500
14-08-2020,-500
13-08-2020,-500
12-08-2020,-500
11-08-2020,-500
10-08-2020,-500
07-08-2020,-500
06-08-2020,-500
05-08-2020,-500
04-08-2020,-500
03-08-2020,-500
31-07-2020,-500
30-07-2020,-500
29-07-2020,-500
28-07-2020,-500
27-07-2020,-500
24-07-2020,-500
23-07-2020,-500
22-07-2020,-500
21-07-2020,-500
20-07-2020,-500
17-07-2020,-500
16-07-2020,-500
15-07-2020,-500
14-07-2020,-500
13-07-2020,-500
10-07-2020,-500
09-07-2020,-500
08-07-2020,-500
07-07-2020,-500
06-07-2020,-500
03-07-2020,-500
02-07-2020,-500
01-07-2020,-500
30-06-2020,-500
29-06-2020,-500
26-06-2020,-500
25-06-2020,-500
24-06-2020,-500
23-06-2020,-500
22-06-2020,-500
19-06-2020,-500
18-06-2020,-500
17-06-2020,-500
16-06-2020,-500
15-06-2020,-500
12-06-2020,-500
11-06-2020,-500
10-06-2020,-500
09-06-2020,-500
08-06-2020,-500
05-06-2020,-500
04-06-2020,-500
03-06-2020,-500
02-06-2020,-500
01-06-2020,-500
29-05-2020,-500
28-05-2020,-500
27-05-2020,-500
26-05-2020,-500
22-05-2020,-500
21-05-2020,-500
20-05-2020,-500
19-05-2020,-500
18-05-2020,-500
15-05-2020,-500
14-05-2020,-500
13-05-2020,-500
12-05-2020,-500
11-05-2020,-500
08-05-2020,-500
06-05-2020,-500
05-05-2020,-500
04-05-2020,-500
30-04-2020,-500
29-04-2020,-500
28-04-2020,-500
27-04-2020,-500
24-04-2020,-500
23-04-2020,-500
22-04-2020,-500
21-04-2020,-500
20-04-2020,-500
17-04-2020,-500
16-04-2020,-500
15-04-2020,-500
13-04-2020,-500
09-04-2020,-500
08-04-2020,-500
07-04-2020,-500
03-04-2020,-500
31-03-2020,-500
30-03-2020,-500
27-03-2020,-500
26-03-2020,-500
24-03-2020,-500
23-03-2020,-500
20-03-2020,-500
19-03-2020,-500
18-03-2020,-500
17-03-2020,-500
16-03-2020,-500
13-03-2020,-500
12-03-2020,-500
11-03-2020,-500
09-03-2020,-500
06-03-2020,-500
05-03-2020,-500
04-03-2020,-500
03-03-2020,-500
02-03-2020,-500
28-02-2020,-500
27-02-2020,-500
26-02-2020,-500
25-02-2020,-500
24-02-2020,-500
20-02-2020,-500
18-02-2020,-500
17-02-2020,-500
14-02-2020,-500
13-02-2020,-500
12-02-2020,-500
11-02-2020,-500
10-02-2020,-500
07-02-2020,-500
06-02-2020,-500
05-02-2020,-500
04-02-2020,-500
03-02-2020,-500
31-01-2020,-500
30-01-2020,-500
29-01-2020,-500
28-01-2020,-500
27-01-2020,-500
24-01-2020,-500
23-01-2020,-500
22-01-2020,-500
21-01-2020,-500
20-01-2020,-500
17-01-2020,-500
16-01-2020,-500
15-01-2020,-500
14-01-2020,-500
13-01-2020,-500
10-01-2020,-500
09-01-2020,-500
08-01-2020,-500
07-01-2020,-500
06-01-2020,-500
03-01-2020,-500
02-01-2020,-500
01-01-2020,-500
31-12-2019,-500
30-12-2019,-500
27-12-2019,-500
26-12-2019,-500
24-12-2019,-500
23-12-2019,-500
20-12-2019,-500
19-12-2019,-500
18-12-2019,-500
17-12-2019,-500
16-12-2019,-500
13-12-2019,-500
12-12-2019,-500
11-12-2019,-500
10-12-2019,-500
09-12-2019,-500
06-12-2019,-500
05-12-2019,-500
04-12-2019,-500
03-12-2019,-500
02-12-2019,-500
29-11-2019,-500
28-11-2019,-500
27-11-2019,-500
26-11-2019,-500
25-11-2019,-500
22-11-2019,-500
21-11-2019,-500
20-11-2019,-500
19-11-2019,-500
18-11-2019,-500
15-11-2019,-500
14-11-2019,-500
13-11-2019,-500
11-11-2019,-500
08-11-2019,-500
07-11-2019,-500
06-11-2019,-500
05-11-2019,-500
04-11-2019,-500
01-11-2019,-500
31-10-2019,-500
30-10-2019,-500
29-10-2019,-500
25-10-2019,-500
24-10-2019,-500
23-10-2019,-500
22-10-2019,-500
18-10-2019,-500
17-10-2019,-500
16-10-2019,-500
15-10-2019,-500
14-10-2019,-500
11-10-2019,-500
10-10-2019,-500
09-10-2019,-500
07-10-2019,-500
04-10-2019,-500
03-10-2019,-500
01-10-2019,-500
30-09-2019,-500
27-09-2019,-500
26-09-2019,-500
25-09-2019,-500
24-09-2019,-500
23-09-2019,-500
20-09-2019,-500
19-09-2019,-500
18-09-2019,-500
17-09-2019,-500
16-09-2019,-500
13-09-2019,-500
12-09-2019,-500
11-09-2019,-500
09-09-2019,-500
06-09-2019,-500
05-09-2019,-500
04-09-2019,-500
03-09-2019,-500
30-08-2019,-500
29-08-2019,-500
28-08-2019,-500
27-08-2019,-500
26-08-2019,-500
23-08-2019,-500
22-08-2019,-500
21-08-2019,-500
20-08-2019,-500
19-08-2019,-500
16-08-2019,-500
14-08-2019,-500
13-08-2019,-500
09-08-2019,-500
08-08-2019,-500
07-08-2019,-500
06-08-2019,-500
05-08-2019,-500
02-08-2019,-500
01-08-2019,-500
31-07-2019,-500
30-07-2019,-500
29-07-2019,-500
26-07-2019,-500
25-07-2019,-500
24-07-2019,-500
23-07-2019,-500
22-07-2019,-500
19-07-2019,-500
18-07-2019,-500
17-07-2019,-500
16-07-2019,-500
15-07-2019,-500
12-07-2019,-500
11-07-2019,-500
10-07-2019,-500
09-07-2019,-500
08-07-2019,-500
05-07-2019,-500
04-07-2019,-500
03-07-2019,-500
02-07-2019,-500
01-07-2019,-500
28-06-2019,-500
27-06-2019,-500
26-06-2019,-500
25-06-2019,-500
24-06-2019,-500
21-06-2019,-500
20-06-2019,-500
19-06-2019,-500
18-06-2019,-500
17-06-2019,-500
14-06-2019,-500
13-06-2019,-500
12-06-2019,-500
11-06-2019,-500
10-06-2019,-500
07-06-2019,-500
06-06-2019,-500
04-06-2019,-500
03-06-2019,-500
31-05-2019,-500
30-05-2019,-500
29-05-2019,-500
28-05-2019,-500
27-05-2019,-500
24-05-2019,-500
23-05-2019,-500
22-05-2019,-500
21-05-2019,-500
20-05-2019,-500
17-05-2019,-500
16-05-2019,-500
15-05-2019,-500
14-05-2019,-500
13-05-2019,-500
10-05-2019,-500
09-05-2019,-500
08-05-2019,-500
07-05-2019,-500
06-05-2019,-500
03-05-2019,-500
02-05-2019,-500
30-04-2019,-500
26-04-2019,-500
25-04-2019,-500
24-04-2019,-500
23-04-2019,-500
22-04-2019,-500
18-04-2019,-500
16-04-2019,-500
15-04-2019,-500
12-04-2019,-500
11-04-2019,-500
10-04-2019,-500
09-04-2019,-500
08-04-2019,-500
05-04-2019,-500
04-04-2019,-500
03-04-2019,-500
02-04-2019,-500
29-03-2019,-500
28-03-2019,-500
27-03-2019,-500
26-03-2019,-500
25-03-2019,-500
22-03-2019,-500
20-03-2019,-500
19-03-2019,-500
18-03-2019,-500
15-03-2019,-500
14-03-2019,-500
13-03-2019,-500
12-03-2019,-500
11-03-2019,-500
08-03-2019,-500
07-03-2019,-500
06-03-2019,-500
05-03-2019,-500
01-03-2019,-500
28-02-2019,-500
27-02-2019,-500
26-02-2019,-500
25-02-2019,-500
22-02-2019,-500
21-02-2019,-500
20-02-2019,-500
18-02-2019,-500
15-02-2019,-500
14-02-2019,-500
13-02-2019,-500
12-02-2019,-500
11-02-2019,-500
08-02-2019,-500
07-02-2019,-500
06-02-2019,-500
05-02-2019,-500
04-02-2019,-500
01-02-2019,-500
31-01-2019,-500
30-01-2019,-500
29-01-2019,-500
28-01-2019,-500
25-01-2019,-500
24-01-2019,-500
23-01-2019,-500
22-01-2019,-500
21-01-2019,-500
18-01-2019,-500
17-01-2019,-500
16-01-2019,-500
15-01-2019,-500
14-01-2019,-500
11-01-2019,-500
10-01-2019,-500
09-01-2019,-500
08-01-2019,-500
07-01-2019,-500
04-01-2019,-500
03-01-2019,-500
02-01-2019,-500
01-01-2019,-500
================================================
FILE: python/rateslib/data/historical/jpy_rfr.csv
================================================
reference_date,rate
09-06-2015,-500
10-06-2015,-500
11-06-2015,-500
12-06-2015,-500
15-06-2015,-500
16-06-2015,-500
17-06-2015,-500
18-06-2015,-500
19-06-2015,-500
22-06-2015,-500
23-06-2015,-500
24-06-2015,-500
25-06-2015,-500
26-06-2015,-500
29-06-2015,-500
30-06-2015,-500
01-07-2015,-500
02-07-2015,-500
03-07-2015,-500
06-07-2015,-500
07-07-2015,-500
08-07-2015,-500
09-07-2015,-500
10-07-2015,-500
13-07-2015,-500
14-07-2015,-500
15-07-2015,-500
16-07-2015,-500
17-07-2015,-500
21-07-2015,-500
22-07-2015,-500
23-07-2015,-500
24-07-2015,-500
27-07-2015,-500
28-07-2015,-500
29-07-2015,-500
30-07-2015,-500
31-07-2015,-500
03-08-2015,-500
04-08-2015,-500
05-08-2015,-500
06-08-2015,-500
07-08-2015,-500
10-08-2015,-500
11-08-2015,-500
12-08-2015,-500
13-08-2015,-500
14-08-2015,-500
17-08-2015,-500
18-08-2015,-500
19-08-2015,-500
20-08-2015,-500
21-08-2015,-500
24-08-2015,-500
25-08-2015,-500
26-08-2015,-500
27-08-2015,-500
28-08-2015,-500
31-08-2015,-500
01-09-2015,-500
02-09-2015,-500
03-09-2015,-500
04-09-2015,-500
07-09-2015,-500
08-09-2015,-500
09-09-2015,-500
10-09-2015,-500
11-09-2015,-500
14-09-2015,-500
15-09-2015,-500
16-09-2015,-500
17-09-2015,-500
18-09-2015,-500
24-09-2015,-500
25-09-2015,-500
28-09-2015,-500
29-09-2015,-500
30-09-2015,-500
01-10-2015,-500
02-10-2015,-500
05-10-2015,-500
06-10-2015,-500
07-10-2015,-500
08-10-2015,-500
09-10-2015,-500
13-10-2015,-500
14-10-2015,-500
15-10-2015,-500
16-10-2015,-500
19-10-2015,-500
20-10-2015,-500
21-10-2015,-500
22-10-2015,-500
23-10-2015,-500
26-10-2015,-500
27-10-2015,-500
28-10-2015,-500
29-10-2015,-500
30-10-2015,-500
02-11-2015,-500
04-11-2015,-500
05-11-2015,-500
06-11-2015,-500
09-11-2015,-500
10-11-2015,-500
11-11-2015,-500
12-11-2015,-500
13-11-2015,-500
16-11-2015,-500
17-11-2015,-500
18-11-2015,-500
19-11-2015,-500
20-11-2015,-500
24-11-2015,-500
25-11-2015,-500
26-11-2015,-500
27-11-2015,-500
30-11-2015,-500
01-12-2015,-500
02-12-2015,-500
03-12-2015,-500
04-12-2015,-500
07-12-2015,-500
08-12-2015,-500
09-12-2015,-500
10-12-2015,-500
11-12-2015,-500
14-12-2015,-500
15-12-2015,-500
16-12-2015,-500
17-12-2015,-500
18-12-2015,-500
21-12-2015,-500
22-12-2015,-500
24-12-2015,-500
25-12-2015,-500
28-12-2015,-500
29-12-2015,-500
30-12-2015,-500
04-01-2016,-500
05-01-2016,-500
06-01-2016,-500
07-01-2016,-500
08-01-2016,-500
12-01-2016,-500
13-01-2016,-500
14-01-2016,-500
15-01-2016,-500
18-01-2016,-500
19-01-2016,-500
20-01-2016,-500
21-01-2016,-500
22-01-2016,-500
25-01-2016,-500
26-01-2016,-500
27-01-2016,-500
28-01-2016,-500
29-01-2016,-500
01-02-2016,-500
02-02-2016,-500
03-02-2016,-500
04-02-2016,-500
05-02-2016,-500
08-02-2016,-500
09-02-2016,-500
10-02-2016,-500
12-02-2016,-500
15-02-2016,-500
16-02-2016,-500
17-02-2016,-500
18-02-2016,-500
19-02-2016,-500
22-02-2016,-500
23-02-2016,-500
24-02-2016,-500
25-02-2016,-500
26-02-2016,-500
29-02-2016,-500
01-03-2016,-500
02-03-2016,-500
03-03-2016,-500
04-03-2016,-500
07-03-2016,-500
08-03-2016,-500
09-03-2016,-500
10-03-2016,-500
11-03-2016,-500
14-03-2016,-500
15-03-2016,-500
16-03-2016,-500
17-03-2016,-500
18-03-2016,-500
22-03-2016,-500
23-03-2016,-500
24-03-2016,-500
25-03-2016,-500
28-03-2016,-500
29-03-2016,-500
30-03-2016,-500
31-03-2016,-500
01-04-2016,-500
04-04-2016,-500
05-04-2016,-500
06-04-2016,-500
07-04-2016,-500
08-04-2016,-500
11-04-2016,-500
12-04-2016,-500
13-04-2016,-500
14-04-2016,-500
15-04-2016,-500
18-04-2016,-500
19-04-2016,-500
20-04-2016,-500
21-04-2016,-500
22-04-2016,-500
25-04-2016,-500
26-04-2016,-500
27-04-2016,-500
28-04-2016,-500
02-05-2016,-500
06-05-2016,-500
09-05-2016,-500
10-05-2016,-500
11-05-2016,-500
12-05-2016,-500
13-05-2016,-500
16-05-2016,-500
17-05-2016,-500
18-05-2016,-500
19-05-2016,-500
20-05-2016,-500
23-05-2016,-500
24-05-2016,-500
25-05-2016,-500
26-05-2016,-500
27-05-2016,-500
30-05-2016,-500
31-05-2016,-500
01-06-2016,-500
02-06-2016,-500
03-06-2016,-500
06-06-2016,-500
07-06-2016,-500
08-06-2016,-500
09-06-2016,-500
10-06-2016,-500
13-06-2016,-500
14-06-2016,-500
15-06-2016,-500
16-06-2016,-500
17-06-2016,-500
20-06-2016,-500
21-06-2016,-500
22-06-2016,-500
23-06-2016,-500
24-06-2016,-500
27-06-2016,-500
28-06-2016,-500
29-06-2016,-500
30-06-2016,-500
01-07-2016,-500
04-07-2016,-500
05-07-2016,-500
06-07-2016,-500
07-07-2016,-500
08-07-2016,-500
11-07-2016,-500
12-07-2016,-500
13-07-2016,-500
14-07-2016,-500
15-07-2016,-500
19-07-2016,-500
20-07-2016,-500
21-07-2016,-500
22-07-2016,-500
25-07-2016,-500
26-07-2016,-500
27-07-2016,-500
28-07-2016,-500
29-07-2016,-500
01-08-2016,-500
02-08-2016,-500
03-08-2016,-500
04-08-2016,-500
05-08-2016,-500
08-08-2016,-500
09-08-2016,-500
10-08-2016,-500
12-08-2016,-500
15-08-2016,-500
16-08-2016,-500
17-08-2016,-500
18-08-2016,-500
19-08-2016,-500
22-08-2016,-500
23-08-2016,-500
24-08-2016,-500
25-08-2016,-500
26-08-2016,-500
29-08-2016,-500
30-08-2016,-500
31-08-2016,-500
01-09-2016,-500
02-09-2016,-500
05-09-2016,-500
06-09-2016,-500
07-09-2016,-500
08-09-2016,-500
09-09-2016,-500
12-09-2016,-500
13-09-2016,-500
14-09-2016,-500
15-09-2016,-500
16-09-2016,-500
20-09-2016,-500
21-09-2016,-500
23-09-2016,-500
26-09-2016,-500
27-09-2016,-500
28-09-2016,-500
29-09-2016,-500
30-09-2016,-500
03-10-2016,-500
04-10-2016,-500
05-10-2016,-500
06-10-2016,-500
07-10-2016,-500
11-10-2016,-500
12-10-2016,-500
13-10-2016,-500
14-10-2016,-500
17-10-2016,-500
18-10-2016,-500
19-10-2016,-500
20-10-2016,-500
21-10-2016,-500
24-10-2016,-500
25-10-2016,-500
26-10-2016,-500
27-10-2016,-500
28-10-2016,-500
31-10-2016,-500
01-11-2016,-500
02-11-2016,-500
04-11-2016,-500
07-11-2016,-500
08-11-2016,-500
09-11-2016,-500
10-11-2016,-500
11-11-2016,-500
14-11-2016,-500
15-11-2016,-500
16-11-2016,-500
17-11-2016,-500
18-11-2016,-500
21-11-2016,-500
22-11-2016,-500
24-11-2016,-500
25-11-2016,-500
28-11-2016,-500
29-11-2016,-500
30-11-2016,-500
01-12-2016,-500
02-12-2016,-500
05-12-2016,-500
06-12-2016,-500
07-12-2016,-500
08-12-2016,-500
09-12-2016,-500
12-12-2016,-500
13-12-2016,-500
14-12-2016,-500
15-12-2016,-500
16-12-2016,-500
19-12-2016,-500
20-12-2016,-500
21-12-2016,-500
22-12-2016,-500
26-12-2016,-500
27-12-2016,-500
28-12-2016,-500
29-12-2016,-500
30-12-2016,-500
04-01-2017,-500
05-01-2017,-500
06-01-2017,-500
10-01-2017,-500
11-01-2017,-500
12-01-2017,-500
13-01-2017,-500
16-01-2017,-500
17-01-2017,-500
18-01-2017,-500
19-01-2017,-500
20-01-2017,-500
23-01-2017,-500
24-01-2017,-500
25-01-2017,-500
26-01-2017,-500
27-01-2017,-500
30-01-2017,-500
31-01-2017,-500
01-02-2017,-500
02-02-2017,-500
03-02-2017,-500
06-02-2017,-500
07-02-2017,-500
08-02-2017,-500
09-02-2017,-500
10-02-2017,-500
13-02-2017,-500
14-02-2017,-500
15-02-2017,-500
16-02-2017,-500
17-02-2017,-500
20-02-2017,-500
21-02-2017,-500
22-02-2017,-500
23-02-2017,-500
24-02-2017,-500
27-02-2017,-500
28-02-2017,-500
01-03-2017,-500
02-03-2017,-500
03-03-2017,-500
06-03-2017,-500
07-03-2017,-500
08-03-2017,-500
09-03-2017,-500
10-03-2017,-500
13-03-2017,-500
14-03-2017,-500
15-03-2017,-500
16-03-2017,-500
17-03-2017,-500
21-03-2017,-500
22-03-2017,-500
23-03-2017,-500
24-03-2017,-500
27-03-2017,-500
28-03-2017,-500
29-03-2017,-500
30-03-2017,-500
31-03-2017,-500
03-04-2017,-500
04-04-2017,-500
05-04-2017,-500
06-04-2017,-500
07-04-2017,-500
10-04-2017,-500
11-04-2017,-500
12-04-2017,-500
13-04-2017,-500
14-04-2017,-500
17-04-2017,-500
18-04-2017,-500
19-04-2017,-500
20-04-2017,-500
21-04-2017,-500
24-04-2017,-500
25-04-2017,-500
26-04-2017,-500
27-04-2017,-500
28-04-2017,-500
01-05-2017,-500
02-05-2017,-500
08-05-2017,-500
09-05-2017,-500
10-05-2017,-500
11-05-2017,-500
12-05-2017,-500
15-05-2017,-500
16-05-2017,-500
17-05-2017,-500
18-05-2017,-500
19-05-2017,-500
22-05-2017,-500
23-05-2017,-500
24-05-2017,-500
25-05-2017,-500
26-05-2017,-500
29-05-2017,-500
30-05-2017,-500
31-05-2017,-500
01-06-2017,-500
02-06-2017,-500
05-06-2017,-500
06-06-2017,-500
07-06-2017,-500
08-06-2017,-500
09-06-2017,-500
12-06-2017,-500
13-06-2017,-500
14-06-2017,-500
15-06-2017,-500
16-06-2017,-500
19-06-2017,-500
20-06-2017,-500
21-06-2017,-500
22-06-2017,-500
23-06-2017,-500
26-06-2017,-500
27-06-2017,-500
28-06-2017,-500
29-06-2017,-500
30-06-2017,-500
03-07-2017,-500
04-07-2017,-500
05-07-2017,-500
06-07-2017,-500
07-07-2017,-500
10-07-2017,-500
11-07-2017,-500
12-07-2017,-500
13-07-2017,-500
14-07-2017,-500
18-07-2017,-500
19-07-2017,-500
20-07-2017,-500
21-07-2017,-500
24-07-2017,-500
25-07-2017,-500
26-07-2017,-500
27-07-2017,-500
28-07-2017,-500
31-07-2017,-500
01-08-2017,-500
02-08-2017,-500
03-08-2017,-500
04-08-2017,-500
07-08-2017,-500
08-08-2017,-500
09-08-2017,-500
10-08-2017,-500
14-08-2017,-500
15-08-2017,-500
16-08-2017,-500
17-08-2017,-500
18-08-2017,-500
21-08-2017,-500
22-08-2017,-500
23-08-2017,-500
24-08-2017,-500
25-08-2017,-500
28-08-2017,-500
29-08-2017,-500
30-08-2017,-500
31-08-2017,-500
01-09-2017,-500
04-09-2017,-500
05-09-2017,-500
06-09-2017,-500
07-09-2017,-500
08-09-2017,-500
11-09-2017,-500
12-09-2017,-500
13-09-2017,-500
14-09-2017,-500
15-09-2017,-500
19-09-2017,-500
20-09-2017,-500
21-09-2017,-500
22-09-2017,-500
25-09-2017,-500
26-09-2017,-500
27-09-2017,-500
28-09-2017,-500
29-09-2017,-500
02-10-2017,-500
03-10-2017,-500
04-10-2017,-500
05-10-2017,-500
06-10-2017,-500
10-10-2017,-500
11-10-2017,-500
12-10-2017,-500
13-10-2017,-500
16-10-2017,-500
17-10-2017,-500
18-10-2017,-500
19-10-2017,-500
20-10-2017,-500
23-10-2017,-500
24-10-2017,-500
25-10-2017,-500
26-10-2017,-500
27-10-2017,-500
30-10-2017,-500
31-10-2017,-500
01-11-2017,-500
02-11-2017,-500
06-11-2017,-500
07-11-2017,-500
08-11-2017,-500
09-11-2017,-500
10-11-2017,-500
13-11-2017,-500
14-11-2017,-500
15-11-2017,-500
16-11-2017,-500
17-11-2017,-500
20-11-2017,-500
21-11-2017,-500
22-11-2017,-500
24-11-2017,-500
27-11-2017,-500
28-11-2017,-500
29-11-2017,-500
30-11-2017,-500
01-12-2017,-500
04-12-2017,-500
05-12-2017,-500
06-12-2017,-500
07-12-2017,-500
08-12-2017,-500
11-12-2017,-500
12-12-2017,-500
13-12-2017,-500
14-12-2017,-500
15-12-2017,-500
18-12-2017,-500
19-12-2017,-500
20-12-2017,-500
21-12-2017,-500
22-12-2017,-500
25-12-2017,-500
26-12-2017,-500
27-12-2017,-500
28-12-2017,-500
29-12-2017,-500
04-01-2018,-500
05-01-2018,-500
09-01-2018,-500
10-01-2018,-500
11-01-2018,-500
12-01-2018,-500
15-01-2018,-500
16-01-2018,-500
17-01-2018,-500
18-01-2018,-500
19-01-2018,-500
22-01-2018,-500
23-01-2018,-500
24-01-2018,-500
25-01-2018,-500
26-01-2018,-500
29-01-2018,-500
30-01-2018,-500
31-01-2018,-500
01-02-2018,-500
02-02-2018,-500
05-02-2018,-500
06-02-2018,-500
07-02-2018,-500
08-02-2018,-500
09-02-2018,-500
13-02-2018,-500
14-02-2018,-500
15-02-2018,-500
16-02-2018,-500
19-02-2018,-500
20-02-2018,-500
21-02-2018,-500
22-02-2018,-500
23-02-2018,-500
26-02-2018,-500
27-02-2018,-500
28-02-2018,-500
01-03-2018,-500
02-03-2018,-500
05-03-2018,-500
06-03-2018,-500
07-03-2018,-500
08-03-2018,-500
09-03-2018,-500
12-03-2018,-500
13-03-2018,-500
14-03-2018,-500
15-03-2018,-500
16-03-2018,-500
19-03-2018,-500
20-03-2018,-500
22-03-2018,-500
23-03-2018,-500
26-03-2018,-500
27-03-2018,-500
28-03-2018,-500
29-03-2018,-500
30-03-2018,-500
02-04-2018,-500
03-04-2018,-500
04-04-2018,-500
05-04-2018,-500
06-04-2018,-500
09-04-2018,-500
10-04-2018,-500
11-04-2018,-500
12-04-2018,-500
13-04-2018,-500
16-04-2018,-500
17-04-2018,-500
18-04-2018,-500
19-04-2018,-500
20-04-2018,-500
23-04-2018,-500
24-04-2018,-500
25-04-2018,-500
26-04-2018,-500
27-04-2018,-500
01-05-2018,-500
02-05-2018,-500
07-05-2018,-500
08-05-2018,-500
09-05-2018,-500
10-05-2018,-500
11-05-2018,-500
14-05-2018,-500
15-05-2018,-500
16-05-2018,-500
17-05-2018,-500
18-05-2018,-500
21-05-2018,-500
22-05-2018,-500
23-05-2018,-500
24-05-2018,-500
25-05-2018,-500
28-05-2018,-500
29-05-2018,-500
30-05-2018,-500
31-05-2018,-500
01-06-2018,-500
04-06-2018,-500
05-06-2018,-500
06-06-2018,-500
07-06-2018,-500
08-06-2018,-500
11-06-2018,-500
12-06-2018,-500
13-06-2018,-500
14-06-2018,-500
15-06-2018,-500
18-06-2018,-500
19-06-2018,-500
20-06-2018,-500
21-06-2018,-500
22-06-2018,-500
25-06-2018,-500
26-06-2018,-500
27-06-2018,-500
28-06-2018,-500
29-06-2018,-500
02-07-2018,-500
03-07-2018,-500
04-07-2018,-500
05-07-2018,-500
06-07-2018,-500
09-07-2018,-500
10-07-2018,-500
11-07-2018,-500
12-07-2018,-500
13-07-2018,-500
17-07-2018,-500
18-07-2018,-500
19-07-2018,-500
20-07-2018,-500
23-07-2018,-500
24-07-2018,-500
25-07-2018,-500
26-07-2018,-500
27-07-2018,-500
30-07-2018,-500
31-07-2018,-500
01-08-2018,-500
02-08-2018,-500
03-08-2018,-500
06-08-2018,-500
07-08-2018,-500
08-08-2018,-500
09-08-2018,-500
10-08-2018,-500
13-08-2018,-500
14-08-2018,-500
15-08-2018,-500
16-08-2018,-500
17-08-2018,-500
20-08-2018,-500
21-08-2018,-500
22-08-2018,-500
23-08-2018,-500
24-08-2018,-500
27-08-2018,-500
28-08-2018,-500
29-08-2018,-500
30-08-2018,-500
31-08-2018,-500
03-09-2018,-500
04-09-2018,-500
05-09-2018,-500
06-09-2018,-500
07-09-2018,-500
10-09-2018,-500
11-09-2018,-500
12-09-2018,-500
13-09-2018,-500
14-09-2018,-500
18-09-2018,-500
19-09-2018,-500
20-09-2018,-500
21-09-2018,-500
25-09-2018,-500
26-09-2018,-500
27-09-2018,-500
28-09-2018,-500
01-10-2018,-500
02-10-2018,-500
03-10-2018,-500
04-10-2018,-500
05-10-2018,-500
09-10-2018,-500
10-10-2018,-500
11-10-2018,-500
12-10-2018,-500
15-10-2018,-500
16-10-2018,-500
17-10-2018,-500
18-10-2018,-500
19-10-2018,-500
22-10-2018,-500
23-10-2018,-500
24-10-2018,-500
25-10-2018,-500
26-10-2018,-500
29-10-2018,-500
30-10-2018,-500
31-10-2018,-500
01-11-2018,-500
02-11-2018,-500
05-11-2018,-500
06-11-2018,-500
07-11-2018,-500
08-11-2018,-500
09-11-2018,-500
12-11-2018,-500
13-11-2018,-500
14-11-2018,-500
15-11-2018,-500
16-11-2018,-500
19-11-2018,-500
20-11-2018,-500
21-11-2018,-500
22-11-2018,-500
26-11-2018,-500
27-11-2018,-500
28-11-2018,-500
29-11-2018,-500
30-11-2018,-500
03-12-2018,-500
04-12-2018,-500
05-12-2018,-500
06-12-2018,-500
07-12-2018,-500
10-12-2018,-500
11-12-2018,-500
12-12-2018,-500
13-12-2018,-500
14-12-2018,-500
17-12-2018,-500
18-12-2018,-500
19-12-2018,-500
20-12-2018,-500
21-12-2018,-500
25-12-2018,-500
26-12-2018,-500
27-12-2018,-500
28-12-2018,-500
04-01-2019,-500
07-01-2019,-500
08-01-2019,-500
09-01-2019,-500
10-01-2019,-500
11-01-2019,-500
15-01-2019,-500
16-01-2019,-500
17-01-2019,-500
18-01-2019,-500
21-01-2019,-500
22-01-2019,-500
23-01-2019,-500
24-01-2019,-500
25-01-2019,-500
28-01-2019,-500
29-01-2019,-500
30-01-2019,-500
31-01-2019,-500
01-02-2019,-500
04-02-2019,-500
05-02-2019,-500
06-02-2019,-500
07-02-2019,-500
08-02-2019,-500
12-02-2019,-500
13-02-2019,-500
14-02-2019,-500
15-02-2019,-500
18-02-2019,-500
19-02-2019,-500
20-02-2019,-500
21-02-2019,-500
22-02-2019,-500
25-02-2019,-500
26-02-2019,-500
27-02-2019,-500
28-02-2019,-500
01-03-2019,-500
04-03-2019,-500
05-03-2019,-500
06-03-2019,-500
07-03-2019,-500
08-03-2019,-500
11-03-2019,-500
12-03-2019,-500
13-03-2019,-500
14-03-2019,-500
15-03-2019,-500
18-03-2019,-500
19-03-2019,-500
20-03-2019,-500
22-03-2019,-500
25-03-2019,-500
26-03-2019,-500
27-03-2019,-500
28-03-2019,-500
29-03-2019,-500
01-04-2019,-500
02-04-2019,-500
03-04-2019,-500
04-04-2019,-500
05-04-2019,-500
08-04-2019,-500
09-04-2019,-500
10-04-2019,-500
11-04-2019,-500
12-04-2019,-500
15-04-2019,-500
16-04-2019,-500
17-04-2019,-500
18-04-2019,-500
19-04-2019,-500
22-04-2019,-500
23-04-2019,-500
24-04-2019,-500
25-04-2019,-500
26-04-2019,-500
07-05-2019,-500
08-05-2019,-500
09-05-2019,-500
10-05-2019,-500
13-05-2019,-500
14-05-2019,-500
15-05-2019,-500
16-05-2019,-500
17-05-2019,-500
20-05-2019,-500
21-05-2019,-500
22-05-2019,-500
23-05-2019,-500
24-05-2019,-500
27-05-2019,-500
28-05-2019,-500
29-05-2019,-500
30-05-2019,-500
31-05-2019,-500
03-06-2019,-500
04-06-2019,-500
05-06-2019,-500
06-06-2019,-500
07-06-2019,-500
10-06-2019,-500
11-06-2019,-500
12-06-2019,-500
13-06-2019,-500
14-06-2019,-500
17-06-2019,-500
18-06-2019,-500
19-06-2019,-500
20-06-2019,-500
21-06-2019,-500
24-06-2019,-500
25-06-2019,-500
26-06-2019,-500
27-06-2019,-500
28-06-2019,-500
01-07-2019,-500
02-07-2019,-500
03-07-2019,-500
04-07-2019,-500
05-07-2019,-500
08-07-2019,-500
09-07-2019,-500
10-07-2019,-500
11-07-2019,-500
12-07-2019,-500
16-07-2019,-500
17-07-2019,-500
18-07-2019,-500
19-07-2019,-500
22-07-2019,-500
23-07-2019,-500
24-07-2019,-500
25-07-2019,-500
26-07-2019,-500
29-07-2019,-500
30-07-2019,-500
31-07-2019,-500
01-08-2019,-500
02-08-2019,-500
05-08-2019,-500
06-08-2019,-500
07-08-2019,-500
08-08-2019,-500
09-08-2019,-500
13-08-2019,-500
14-08-2019,-500
15-08-2019,-500
16-08-2019,-500
19-08-2019,-500
20-08-2019,-500
21-08-2019,-500
22-08-2019,-500
23-08-2019,-500
26-08-2019,-500
27-08-2019,-500
28-08-2019,-500
29-08-2019,-500
30-08-2019,-500
02-09-2019,-500
03-09-2019,-500
04-09-2019,-500
05-09-2019,-500
06-09-2019,-500
09-09-2019,-500
10-09-2019,-500
11-09-2019,-500
12-09-2019,-500
13-09-2019,-500
17-09-2019,-500
18-09-2019,-500
19-09-2019,-500
20-09-2019,-500
24-09-2019,-500
25-09-2019,-500
26-09-2019,-500
27-09-2019,-500
30-09-2019,-500
01-10-2019,-500
02-10-2019,-500
03-10-2019,-500
04-10-2019,-500
07-10-2019,-500
08-10-2019,-500
09-10-2019,-500
10-10-2019,-500
11-10-2019,-500
15-10-2019,-500
16-10-2019,-500
17-10-2019,-500
18-10-2019,-500
21-10-2019,-500
23-10-2019,-500
24-10-2019,-500
25-10-2019,-500
28-10-2019,-500
29-10-2019,-500
30-10-2019,-500
31-10-2019,-500
01-11-2019,-500
05-11-2019,-500
06-11-2019,-500
07-11-2019,-500
08-11-2019,-500
11-11-2019,-500
12-11-2019,-500
13-11-2019,-500
14-11-2019,-500
15-11-2019,-500
18-11-2019,-500
19-11-2019,-500
20-11-2019,-500
21-11-2019,-500
22-11-2019,-500
25-11-2019,-500
26-11-2019,-500
27-11-2019,-500
28-11-2019,-500
29-11-2019,-500
02-12-2019,-500
03-12-2019,-500
04-12-2019,-500
05-12-2019,-500
06-12-2019,-500
09-12-2019,-500
10-12-2019,-500
11-12-2019,-500
12-12-2019,-500
13-12-2019,-500
16-12-2019,-500
17-12-2019,-500
18-12-2019,-500
19-12-2019,-500
20-12-2019,-500
23-12-2019,-500
24-12-2019,-500
25-12-2019,-500
26-12-2019,-500
27-12-2019,-500
30-12-2019,-500
06-01-2020,-500
07-01-2020,-500
08-01-2020,-500
09-01-2020,-500
10-01-2020,-500
14-01-2020,-500
15-01-2020,-500
16-01-2020,-500
17-01-2020,-500
20-01-2020,-500
21-01-2020,-500
22-01-2020,-500
23-01-2020,-500
24-01-2020,-500
27-01-2020,-500
28-01-2020,-500
29-01-2020,-500
30-01-2020,-500
31-01-2020,-500
03-02-2020,-500
04-02-2020,-500
05-02-2020,-500
06-02-2020,-500
07-02-2020,-500
10-02-2020,-500
12-02-2020,-500
13-02-2020,-500
14-02-2020,-500
17-02-2020,-500
18-02-2020,-500
19-02-2020,-500
20-02-2020,-500
21-02-2020,-500
25-02-2020,-500
26-02-2020,-500
27-02-2020,-500
28-02-2020,-500
02-03-2020,-500
03-03-2020,-500
04-03-2020,-500
05-03-2020,-500
06-03-2020,-500
09-03-2020,-500
10-03-2020,-500
11-03-2020,-500
12-03-2020,-500
13-03-2020,-500
16-03-2020,-500
17-03-2020,-500
18-03-2020,-500
19-03-2020,-500
23-03-2020,-500
24-03-2020,-500
25-03-2020,-500
26-03-2020,-500
27-03-2020,-500
30-03-2020,-500
31-03-2020,-500
01-04-2020,-500
02-04-2020,-500
03-04-2020,-500
06-04-2020,-500
07-04-2020,-500
08-04-2020,-500
09-04-2020,-500
10-04-2020,-500
13-04-2020,-500
14-04-2020,-500
15-04-2020,-500
16-04-2020,-500
17-04-2020,-500
20-04-2020,-500
21-04-2020,-500
22-04-2020,-500
23-04-2020,-500
24-04-2020,-500
27-04-2020,-500
28-04-2020,-500
30-04-2020,-500
01-05-2020,-500
07-05-2020,-500
08-05-2020,-500
11-05-2020,-500
12-05-2020,-500
13-05-2020,-500
14-05-2020,-500
15-05-2020,-500
18-05-2020,-500
19-05-2020,-500
20-05-2020,-500
21-05-2020,-500
22-05-2020,-500
25-05-2020,-500
26-05-2020,-500
27-05-2020,-500
28-05-2020,-500
29-05-2020,-500
01-06-2020,-500
02-06-2020,-500
03-06-2020,-500
04-06-2020,-500
05-06-2020,-500
08-06-2020,-500
09-06-2020,-500
10-06-2020,-500
11-06-2020,-500
12-06-2020,-500
15-06-2020,-500
16-06-2020,-500
17-06-2020,-500
18-06-2020,-500
19-06-2020,-500
22-06-2020,-500
23-06-2020,-500
24-06-2020,-500
25-06-2020,-500
26-06-2020,-500
29-06-2020,-500
30-06-2020,-500
01-07-2020,-500
02-07-2020,-500
03-07-2020,-500
06-07-2020,-500
07-07-2020,-500
08-07-2020,-500
09-07-2020,-500
10-07-2020,-500
13-07-2020,-500
14-07-2020,-500
15-07-2020,-500
16-07-2020,-500
17-07-2020,-500
20-07-2020,-500
21-07-2020,-500
22-07-2020,-500
27-07-2020,-500
28-07-2020,-500
29-07-2020,-500
30-07-2020,-500
31-07-2020,-500
03-08-2020,-500
04-08-2020,-500
05-08-2020,-500
06-08-2020,-500
07-08-2020,-500
11-08-2020,-500
12-08-2020,-500
13-08-2020,-500
14-08-2020,-500
17-08-2020,-500
18-08-2020,-500
19-08-2020,-500
20-08-2020,-500
21-08-2020,-500
24-08-2020,-500
25-08-2020,-500
26-08-2020,-500
27-08-2020,-500
28-08-2020,-500
31-08-2020,-500
01-09-2020,-500
02-09-2020,-500
03-09-2020,-500
04-09-2020,-500
07-09-2020,-500
08-09-2020,-500
09-09-2020,-500
10-09-2020,-500
11-09-2020,-500
14-09-2020,-500
15-09-2020,-500
16-09-2020,-500
17-09-2020,-500
18-09-2020,-500
23-09-2020,-500
24-09-2020,-500
25-09-2020,-500
28-09-2020,-500
29-09-2020,-500
30-09-2020,-500
01-10-2020,-500
02-10-2020,-500
05-10-2020,-500
06-10-2020,-500
07-10-2020,-500
08-10-2020,-500
09-10-2020,-500
12-10-2020,-500
13-10-2020,-500
14-10-2020,-500
15-10-2020,-500
16-10-2020,-500
19-10-2020,-500
20-10-2020,-500
21-10-2020,-500
22-10-2020,-500
23-10-2020,-500
26-10-2020,-500
27-10-2020,-500
28-10-2020,-500
29-10-2020,-500
30-10-2020,-500
02-11-2020,-500
04-11-2020,-500
05-11-2020,-500
06-11-2020,-500
09-11-2020,-500
10-11-2020,-500
11-11-2020,-500
12-11-2020,-500
13-11-2020,-500
16-11-2020,-500
17-11-2020,-500
18-11-2020,-500
19-11-2020,-500
20-11-2020,-500
24-11-2020,-500
25-11-2020,-500
26-11-2020,-500
27-11-2020,-500
30-11-2020,-500
01-12-2020,-500
02-12-2020,-500
03-12-2020,-500
04-12-2020,-500
07-12-2020,-500
08-12-2020,-500
09-12-2020,-500
10-12-2020,-500
11-12-2020,-500
14-12-2020,-500
15-12-2020,-500
16-12-2020,-500
17-12-2020,-500
18-12-2020,-500
21-12-2020,-500
22-12-2020,-500
23-12-2020,-500
24-12-2020,-500
25-12-2020,-500
28-12-2020,-500
29-12-2020,-500
30-12-2020,-500
04-01-2021,-500
05-01-2021,-500
06-01-2021,-500
07-01-2021,-500
08-01-2021,-500
12-01-2021,-500
13-01-2021,-500
14-01-2021,-500
15-01-2021,-500
18-01-2021,-500
19-01-2021,-500
20-01-2021,-500
21-01-2021,-500
22-01-2021,-500
25-01-2021,-500
26-01-2021,-500
27-01-2021,-500
28-01-2021,-500
29-01-2021,-500
01-02-2021,-500
02-02-2021,-500
03-02-2021,-500
04-02-2021,-500
05-02-2021,-500
08-02-2021,-500
09-02-2021,-500
10-02-2021,-500
12-02-2021,-500
15-02-2021,-500
16-02-2021,-500
17-02-2021,-500
18-02-2021,-500
19-02-2021,-500
22-02-2021,-500
24-02-2021,-500
25-02-2021,-500
26-02-2021,-500
01-03-2021,-500
02-03-2021,-500
03-03-2021,-500
04-03-2021,-500
05-03-2021,-500
08-03-2021,-500
09-03-2021,-500
10-03-2021,-500
11-03-2021,-500
12-03-2021,-500
15-03-2021,-500
16-03-2021,-500
17-03-2021,-500
18-03-2021,-500
19-03-2021,-500
22-03-2021,-500
23-03-2021,-500
24-03-2021,-500
25-03-2021,-500
26-03-2021,-500
29-03-2021,-500
30-03-2021,-500
31-03-2021,-500
01-04-2021,-500
02-04-2021,-500
05-04-2021,-500
06-04-2021,-500
07-04-2021,-500
08-04-2021,-500
09-04-2021,-500
12-04-2021,-500
13-04-2021,-500
14-04-2021,-500
15-04-2021,-500
16-04-2021,-500
19-04-2021,-500
20-04-2021,-500
21-04-2021,-500
22-04-2021,-500
23-04-2021,-500
26-04-2021,-500
27-04-2021,-500
28-04-2021,-500
30-04-2021,-500
06-05-2021,-500
07-05-2021,-500
10-05-2021,-500
11-05-2021,-500
12-05-2021,-500
13-05-2021,-500
14-05-2021,-500
17-05-2021,-500
18-05-2021,-500
19-05-2021,-500
20-05-2021,-500
21-05-2021,-500
24-05-2021,-500
25-05-2021,-500
26-05-2021,-500
27-05-2021,-500
28-05-2021,-500
31-05-2021,-500
01-06-2021,-500
02-06-2021,-500
03-06-2021,-500
04-06-2021,-500
07-06-2021,-500
08-06-2021,-500
09-06-2021,-500
10-06-2021,-500
11-06-2021,-500
14-06-2021,-500
15-06-2021,-500
16-06-2021,-500
17-06-2021,-500
18-06-2021,-500
21-06-2021,-500
22-06-2021,-500
23-06-2021,-500
24-06-2021,-500
25-06-2021,-500
28-06-2021,-500
29-06-2021,-500
30-06-2021,-500
01-07-2021,-500
02-07-2021,-500
05-07-2021,-500
06-07-2021,-500
07-07-2021,-500
08-07-2021,-500
09-07-2021,-500
12-07-2021,-500
13-07-2021,-500
14-07-2021,-500
15-07-2021,-500
16-07-2021,-500
19-07-2021,-500
20-07-2021,-500
21-07-2021,-500
26-07-2021,-500
27-07-2021,-500
28-07-2021,-500
29-07-2021,-500
30-07-2021,-500
02-08-2021,-500
03-08-2021,-500
04-08-2021,-500
05-08-2021,-500
06-08-2021,-500
10-08-2021,-500
11-08-2021,-500
12-08-2021,-500
13-08-2021,-500
16-08-2021,-500
17-08-2021,-500
18-08-2021,-500
19-08-2021,-500
20-08-2021,-500
23-08-2021,-500
24-08-2021,-500
25-08-2021,-500
26-08-2021,-500
27-08-2021,-500
30-08-2021,-500
31-08-2021,-500
01-09-2021,-500
02-09-2021,-500
03-09-2021,-500
06-09-2021,-500
07-09-2021,-500
08-09-2021,-500
09-09-2021,-500
10-09-2021,-500
13-09-2021,-500
14-09-2021,-500
15-09-2021,-500
16-09-2021,-500
17-09-2021,-500
21-09-2021,-500
22-09-2021,-500
24-09-2021,-500
27-09-2021,-500
28-09-2021,-500
29-09-2021,-500
30-09-2021,-500
01-10-2021,-500
04-10-2021,-500
05-10-2021,-500
06-10-2021,-500
07-10-2021,-500
08-10-2021,-500
11-10-2021,-500
12-10-2021,-500
13-10-2021,-500
14-10-2021,-500
15-10-2021,-500
18-10-2021,-500
19-10-2021,-500
20-10-2021,-500
21-10-2021,-500
22-10-2021,-500
25-10-2021,-500
26-10-2021,-500
27-10-2021,-500
28-10-2021,-500
29-10-2021,-500
01-11-2021,-500
02-11-2021,-500
04-11-2021,-500
05-11-2021,-500
08-11-2021,-500
09-11-2021,-500
10-11-2021,-500
11-11-2021,-500
12-11-2021,-500
15-11-2021,-500
16-11-2021,-500
17-11-2021,-500
18-11-2021,-500
19-11-2021,-500
22-11-2021,-500
24-11-2021,-500
25-11-2021,-500
26-11-2021,-500
29-11-2021,-500
30-11-2021,-500
01-12-2021,-500
02-12-2021,-500
03-12-2021,-500
06-12-2021,-500
07-12-2021,-500
08-12-2021,-500
09-12-2021,-500
10-12-2021,-500
13-12-2021,-500
14-12-2021,-500
15-12-2021,-500
16-12-2021,-500
17-12-2021,-500
20-12-2021,-500
21-12-2021,-500
22-12-2021,-500
23-12-2021,-500
24-12-2021,-500
27-12-2021,-500
28-12-2021,-500
29-12-2021,-500
30-12-2021,-500
04-01-2022,-500
05-01-2022,-500
06-01-2022,-500
07-01-2022,-500
11-01-2022,-500
12-01-2022,-500
13-01-2022,-500
14-01-2022,-500
17-01-2022,-500
18-01-2022,-500
19-01-2022,-500
20-01-2022,-500
21-01-2022,-500
24-01-2022,-500
25-01-2022,-500
26-01-2022,-500
27-01-2022,-500
28-01-2022,-500
31-01-2022,-500
01-02-2022,-500
02-02-2022,-500
03-02-2022,-500
04-02-2022,-500
07-02-2022,-500
08-02-2022,-500
09-02-2022,-500
10-02-2022,-500
14-02-2022,-500
15-02-2022,-500
16-02-2022,-500
17-02-2022,-500
18-02-2022,-500
21-02-2022,-500
22-02-2022,-500
24-02-2022,-500
25-02-2022,-500
28-02-2022,-500
01-03-2022,-500
02-03-2022,-500
03-03-2022,-500
04-03-2022,-500
07-03-2022,-500
08-03-2022,-500
09-03-2022,-500
10-03-2022,-500
11-03-2022,-500
14-03-2022,-500
15-03-2022,-500
16-03-2022,-500
17-03-2022,-500
18-03-2022,-500
22-03-2022,-500
23-03-2022,-500
24-03-2022,-500
25-03-2022,-500
28-03-2022,-500
29-03-2022,-500
30-03-2022,-500
31-03-2022,-500
01-04-2022,-500
04-04-2022,-500
05-04-2022,-500
06-04-2022,-500
07-04-2022,-500
08-04-2022,-500
11-04-2022,-500
12-04-2022,-500
13-04-2022,-500
14-04-2022,-500
15-04-2022,-500
18-04-2022,-500
19-04-2022,-500
20-04-2022,-500
21-04-2022,-500
22-04-2022,-500
25-04-2022,-500
26-04-2022,-500
27-04-2022,-500
28-04-2022,-500
02-05-2022,-500
06-05-2022,-500
09-05-2022,-500
10-05-2022,-500
11-05-2022,-500
12-05-2022,-500
13-05-2022,-500
16-05-2022,-500
17-05-2022,-500
18-05-2022,-500
19-05-2022,-500
20-05-2022,-500
23-05-2022,-500
24-05-2022,-500
25-05-2022,-500
26-05-2022,-500
27-05-2022,-500
30-05-2022,-500
31-05-2022,-500
01-06-2022,-500
02-06-2022,-500
03-06-2022,-500
06-06-2022,-500
07-06-2022,-500
08-06-2022,-500
09-06-2022,-500
10-06-2022,-500
13-06-2022,-500
14-06-2022,-500
15-06-2022,-500
16-06-2022,-500
17-06-2022,-500
20-06-2022,-500
21-06-2022,-500
22-06-2022,-500
23-06-2022,-500
24-06-2022,-500
27-06-2022,-500
28-06-2022,-500
29-06-2022,-500
30-06-2022,-500
01-07-2022,-500
04-07-2022,-500
05-07-2022,-500
06-07-2022,-500
07-07-2022,-500
08-07-2022,-500
11-07-2022,-500
12-07-2022,-500
13-07-2022,-500
14-07-2022,-500
15-07-2022,-500
19-07-2022,-500
20-07-2022,-500
21-07-2022,-500
22-07-2022,-500
25-07-2022,-500
26-07-2022,-500
27-07-2022,-500
28-07-2022,-500
29-07-2022,-500
01-08-2022,-500
02-08-2022,-500
03-08-2022,-500
04-08-2022,-500
05-08-2022,-500
08-08-2022,-500
09-08-2022,-500
10-08-2022,-500
12-08-2022,-500
15-08-2022,-500
16-08-2022,-500
17-08-2022,-500
18-08-2022,-500
19-08-2022,-500
22-08-2022,-500
23-08-2022,-500
24-08-2022,-500
25-08-2022,-500
26-08-2022,-500
29-08-2022,-500
30-08-2022,-500
31-08-2022,-500
01-09-2022,-500
02-09-2022,-500
05-09-2022,-500
06-09-2022,-500
07-09-2022,-500
08-09-2022,-500
09-09-2022,-500
12-09-2022,-500
13-09-2022,-500
14-09-2022,-500
15-09-2022,-500
16-09-2022,-500
20-09-2022,-500
21-09-2022,-500
22-09-2022,-500
26-09-2022,-500
27-09-2022,-500
28-09-2022,-500
29-09-2022,-500
30-09-2022,-500
03-10-2022,-500
04-10-2022,-500
05-10-2022,-500
06-10-2022,-500
07-10-2022,-500
11-10-2022,-500
12-10-2022,-500
13-10-2022,-500
14-10-2022,-500
17-10-2022,-500
18-10-2022,-500
19-10-2022,-500
20-10-2022,-500
21-10-2022,-500
24-10-2022,-500
25-10-2022,-500
26-10-2022,-500
27-10-2022,-500
28-10-2022,-500
31-10-2022,-500
01-11-2022,-500
02-11-2022,-500
04-11-2022,-500
07-11-2022,-500
08-11-2022,-500
09-11-2022,-500
10-11-2022,-500
11-11-2022,-500
14-11-2022,-500
15-11-2022,-500
16-11-2022,-500
17-11-2022,-500
18-11-2022,-500
21-11-2022,-500
22-11-2022,-500
24-11-2022,-500
25-11-2022,-500
28-11-2022,-500
29-11-2022,-500
30-11-2022,-500
01-12-2022,-500
02-12-2022,-500
05-12-2022,-500
06-12-2022,-500
07-12-2022,-500
08-12-2022,-500
09-12-2022,-500
12-12-2022,-500
13-12-2022,-500
14-12-2022,-500
15-12-2022,-500
16-12-2022,-500
19-12-2022,-500
20-12-2022,-500
21-12-2022,-500
22-12-2022,-500
23-12-2022,-500
26-12-2022,-500
27-12-2022,-500
28-12-2022,-500
29-12-2022,-500
30-12-2022,-500
04-01-2023,-500
05-01-2023,-500
06-01-2023,-500
10-01-2023,-500
11-01-2023,-500
12-01-2023,-500
13-01-2023,-500
16-01-2023,-500
17-01-2023,-500
18-01-2023,-500
19-01-2023,-500
20-01-2023,-500
23-01-2023,-500
24-01-2023,-500
25-01-2023,-500
26-01-2023,-500
27-01-2023,-500
30-01-2023,-500
31-01-2023,-500
01-02-2023,-500
02-02-2023,-500
03-02-2023,-500
06-02-2023,-500
07-02-2023,-500
08-02-2023,-500
09-02-2023,-500
10-02-2023,-500
13-02-2023,-500
14-02-2023,-500
15-02-2023,-500
16-02-2023,-500
17-02-2023,-500
20-02-2023,-500
21-02-2023,-500
22-02-2023,-500
24-02-2023,-500
27-02-2023,-500
28-02-2023,-500
01-03-2023,-500
02-03-2023,-500
03-03-2023,-500
06-03-2023,-500
07-03-2023,-500
08-03-2023,-500
09-03-2023,-500
10-03-2023,-500
13-03-2023,-500
14-03-2023,-500
15-03-2023,-500
16-03-2023,-500
17-03-2023,-500
20-03-2023,-500
22-03-2023,-500
23-03-2023,-500
24-03-2023,-500
27-03-2023,-500
28-03-2023,-500
29-03-2023,-500
30-03-2023,-500
31-03-2023,-500
03-04-2023,-500
04-04-2023,-500
05-04-2023,-500
06-04-2023,-500
07-04-2023,-500
10-04-2023,-500
11-04-2023,-500
12-04-2023,-500
13-04-2023,-500
14-04-2023,-500
17-04-2023,-500
18-04-2023,-500
19-04-2023,-500
20-04-2023,-500
21-04-2023,-500
24-04-2023,-500
25-04-2023,-500
26-04-2023,-500
27-04-2023,-500
28-04-2023,-500
01-05-2023,-500
02-05-2023,-500
08-05-2023,-500
09-05-2023,-500
10-05-2023,-500
11-05-2023,-500
12-05-2023,-500
15-05-2023,-500
16-05-2023,-500
17-05-2023,-500
18-05-2023,-500
19-05-2023,-500
22-05-2023,-500
23-05-2023,-500
24-05-2023,-500
25-05-2023,-500
26-05-2023,-500
29-05-2023,-500
30-05-2023,-500
31-05-2023,-500
01-06-2023,-500
02-06-2023,-500
05-06-2023,-500
06-06-2023,-500
07-06-2023,-500
08-06-2023,-500
09-06-2023,-500
12-06-2023,-500
13-06-2023,-500
14-06-2023,-500
15-06-2023,-500
16-06-2023,-500
19-06-2023,-500
20-06-2023,-500
21-06-2023,-500
22-06-2023,-500
23-06-2023,-500
26-06-2023,-500
27-06-2023,-500
28-06-2023,-500
29-06-2023,-500
30-06-2023,-500
03-07-2023,-500
04-07-2023,-500
05-07-2023,-500
06-07-2023,-500
07-07-2023,-500
10-07-2023,-500
11-07-2023,-500
12-07-2023,-500
13-07-2023,-500
14-07-2023,-500
18-07-2023,-500
19-07-2023,-500
20-07-2023,-500
21-07-2023,-500
24-07-2023,-500
25-07-2023,-500
26-07-2023,-500
27-07-2023,-500
28-07-2023,-500
31-07-2023,-500
01-08-2023,-500
02-08-2023,-500
03-08-2023,-500
04-08-2023,-500
07-08-2023,-500
08-08-2023,-500
09-08-2023,-500
10-08-2023,-500
14-08-2023,-500
15-08-2023,-500
16-08-2023,-500
17-08-2023,-500
18-08-2023,-500
21-08-2023,-500
22-08-2023,-500
23-08-2023,-500
24-08-2023,-500
25-08-2023,-500
28-08-2023,-500
29-08-2023,-500
30-08-2023,-500
31-08-2023,-500
01-09-2023,-500
04-09-2023,-500
05-09-2023,-500
06-09-2023,-500
07-09-2023,-500
08-09-2023,-500
11-09-2023,-500
12-09-2023,-500
13-09-2023,-500
14-09-2023,-500
15-09-2023,-500
19-09-2023,-500
20-09-2023,-500
21-09-2023,-500
22-09-2023,-500
25-09-2023,-500
26-09-2023,-500
27-09-2023,-500
28-09-2023,-500
29-09-2023,-500
02-10-2023,-500
03-10-2023,-500
04-10-2023,-500
05-10-2023,-500
06-10-2023,-500
10-10-2023,-500
11-10-2023,-500
12-10-2023,-500
13-10-2023,-500
16-10-2023,-500
17-10-2023,-500
18-10-2023,-500
19-10-2023,-500
20-10-2023,-500
23-10-2023,-500
24-10-2023,-500
25-10-2023,-500
26-10-2023,-500
27-10-2023,-500
30-10-2023,-500
31-10-2023,-500
01-11-2023,-500
02-11-2023,-500
06-11-2023,-500
07-11-2023,-500
08-11-2023,-500
09-11-2023,-500
10-11-2023,-500
13-11-2023,-500
14-11-2023,-500
15-11-2023,-500
16-11-2023,-500
17-11-2023,-500
20-11-2023,-500
21-11-2023,-500
22-11-2023,-500
24-11-2023,-500
27-11-2023,-500
28-11-2023,-500
29-11-2023,-500
30-11-2023,-500
01-12-2023,-500
04-12-2023,-500
05-12-2023,-500
06-12-2023,-500
07-12-2023,-500
08-12-2023,-500
11-12-2023,-500
12-12-2023,-500
13-12-2023,-500
14-12-2023,-500
15-12-2023,-500
18-12-2023,-500
19-12-2023,-500
20-12-2023,-500
21-12-2023,-500
22-12-2023,-500
25-12-2023,-500
26-12-2023,-500
27-12-2023,-500
28-12-2023,-500
29-12-2023,-500
04-01-2024,-500
05-01-2024,-500
09-01-2024,-500
10-01-2024,-500
11-01-2024,-500
12-01-2024,-500
15-01-2024,-500
16-01-2024,-500
17-01-2024,-500
18-01-2024,-500
19-01-2024,-500
22-01-2024,-500
23-01-2024,-500
24-01-2024,-500
25-01-2024,-500
26-01-2024,-500
29-01-2024,-500
30-01-2024,-500
31-01-2024,-500
01-02-2024,-500
02-02-2024,-500
05-02-2024,-500
06-02-2024,-500
07-02-2024,-500
08-02-2024,-500
09-02-2024,-500
13-02-2024,-500
14-02-2024,-500
15-02-2024,-500
16-02-2024,-500
19-02-2024,-500
20-02-2024,-500
21-02-2024,-500
22-02-2024,-500
26-02-2024,-500
27-02-2024,-500
28-02-2024,-500
29-02-2024,-500
01-03-2024,-500
04-03-2024,-500
05-03-2024,-500
06-03-2024,-500
07-03-2024,-500
08-03-2024,-500
11-03-2024,-500
12-03-2024,-500
13-03-2024,-500
14-03-2024,-500
15-03-2024,-500
18-03-2024,-500
19-03-2024,-500
21-03-2024,-500
22-03-2024,-500
25-03-2024,-500
26-03-2024,-500
27-03-2024,-500
28-03-2024,-500
29-03-2024,-500
01-04-2024,-500
02-04-2024,-500
03-04-2024,-500
04-04-2024,-500
05-04-2024,-500
08-04-2024,-500
09-04-2024,-500
10-04-2024,-500
11-04-2024,-500
12-04-2024,-500
15-04-2024,-500
16-04-2024,-500
17-04-2024,-500
18-04-2024,-500
19-04-2024,-500
22-04-2024,-500
23-04-2024,-500
24-04-2024,-500
25-04-2024,-500
26-04-2024,-500
30-04-2024,-500
01-05-2024,-500
02-05-2024,-500
07-05-2024,-500
08-05-2024,-500
09-05-2024,-500
10-05-2024,-500
13-05-2024,-500
14-05-2024,-500
15-05-2024,-500
16-05-2024,-500
17-05-2024,-500
20-05-2024,-500
21-05-2024,-500
22-05-2024,-500
23-05-2024,-500
24-05-2024,-500
27-05-2024,-500
28-05-2024,-500
29-05-2024,-500
30-05-2024,-500
31-05-2024,-500
03-06-2024,-500
04-06-2024,-500
05-06-2024,-500
06-06-2024,-500
================================================
FILE: python/rateslib/data/historical/nok_rfr.csv
================================================
reference_date,rate
02-01-2020,1.49
03-01-2020,1.49
06-01-2020,1.49
07-01-2020,1.49
08-01-2020,1.49
09-01-2020,1.49
10-01-2020,1.49
13-01-2020,1.49
14-01-2020,1.49
15-01-2020,1.49
16-01-2020,1.49
17-01-2020,1.49
20-01-2020,1.49
21-01-2020,1.49
22-01-2020,1.49
23-01-2020,1.49
24-01-2020,1.49
27-01-2020,1.49
28-01-2020,1.49
29-01-2020,1.49
30-01-2020,1.49
31-01-2020,1.49
03-02-2020,1.49
04-02-2020,1.49
05-02-2020,1.49
06-02-2020,1.49
07-02-2020,1.49
10-02-2020,1.49
11-02-2020,1.49
12-02-2020,1.49
13-02-2020,1.49
14-02-2020,1.49
17-02-2020,1.49
18-02-2020,1.49
19-02-2020,1.49
20-02-2020,1.49
21-02-2020,1.49
24-02-2020,1.49
25-02-2020,1.49
26-02-2020,1.49
27-02-2020,1.49
28-02-2020,1.51
02-03-2020,1.49
03-03-2020,1.49
04-03-2020,1.49
05-03-2020,1.49
06-03-2020,1.49
09-03-2020,1.49
10-03-2020,1.49
11-03-2020,1.49
12-03-2020,1.49
13-03-2020,1.49
16-03-2020,0.99
17-03-2020,0.99
18-03-2020,0.99
19-03-2020,0.99
20-03-2020,0.99
23-03-2020,0.24
24-03-2020,0.24
25-03-2020,0.24
26-03-2020,0.24
27-03-2020,0.24
30-03-2020,0.24
31-03-2020,0.24
01-04-2020,0.25
02-04-2020,0.25
03-04-2020,0.25
06-04-2020,0.24
07-04-2020,0.25
08-04-2020,0.25
14-04-2020,0.24
15-04-2020,0.24
16-04-2020,0.24
17-04-2020,0.24
20-04-2020,0.23
21-04-2020,0.24
22-04-2020,0.24
23-04-2020,0.25
24-04-2020,0.25
27-04-2020,0.24
28-04-2020,0.24
29-04-2020,0.24
30-04-2020,0.24
04-05-2020,0.24
05-05-2020,0.24
06-05-2020,0.24
07-05-2020,0.24
08-05-2020,0
11-05-2020,0
12-05-2020,0
13-05-2020,0
14-05-2020,0
15-05-2020,0
18-05-2020,0
19-05-2020,0
20-05-2020,0
22-05-2020,0
25-05-2020,0
26-05-2020,0
27-05-2020,0
28-05-2020,0
29-05-2020,0
02-06-2020,0
03-06-2020,0
04-06-2020,0
05-06-2020,0
08-06-2020,0
09-06-2020,0
10-06-2020,0
11-06-2020,0
12-06-2020,0
15-06-2020,0
16-06-2020,0
17-06-2020,0
18-06-2020,0
19-06-2020,0
22-06-2020,0
23-06-2020,0.01
24-06-2020,-0.01
25-06-2020,-0.01
26-06-2020,-0.01
29-06-2020,0
30-06-2020,-0.01
01-07-2020,0
02-07-2020,0
03-07-2020,0
06-07-2020,0
07-07-2020,0
08-07-2020,-0.01
09-07-2020,0
10-07-2020,-0.01
13-07-2020,0
14-07-2020,0
15-07-2020,0
16-07-2020,0
17-07-2020,0
20-07-2020,0
21-07-2020,0
22-07-2020,0
23-07-2020,0
24-07-2020,0
27-07-2020,0
28-07-2020,0
29-07-2020,-0.01
30-07-2020,0
31-07-2020,-0.01
03-08-2020,-0.01
04-08-2020,-0.01
05-08-2020,-0.01
06-08-2020,-0.01
07-08-2020,-0.01
10-08-2020,-0.01
11-08-2020,-0.01
12-08-2020,0
13-08-2020,0
14-08-2020,0
17-08-2020,-0.01
18-08-2020,-0.01
19-08-2020,-0.01
20-08-2020,-0.01
21-08-2020,-0.01
24-08-2020,-0.01
25-08-2020,-0.01
26-08-2020,-0.01
27-08-2020,-0.01
28-08-2020,-0.01
31-08-2020,0.04
01-09-2020,-0.01
02-09-2020,0
03-09-2020,-0.01
04-09-2020,-0.01
07-09-2020,-0.01
08-09-2020,0
09-09-2020,0
10-09-2020,0
11-09-2020,-0.01
14-09-2020,0
15-09-2020,0
16-09-2020,0
17-09-2020,0
18-09-2020,0
21-09-2020,0
22-09-2020,0
23-09-2020,0
24-09-2020,0
25-09-2020,0
28-09-2020,0
29-09-2020,0
30-09-2020,0
01-10-2020,0
02-10-2020,0
05-10-2020,0
06-10-2020,0
07-10-2020,0
08-10-2020,0
09-10-2020,0
12-10-2020,0
13-10-2020,0
14-10-2020,0
15-10-2020,0
16-10-2020,0
19-10-2020,0
20-10-2020,0
21-10-2020,0
22-10-2020,0
23-10-2020,0
26-10-2020,0
27-10-2020,0
28-10-2020,0
29-10-2020,0
30-10-2020,0
02-11-2020,0
03-11-2020,0
04-11-2020,0
05-11-2020,0
06-11-2020,0
09-11-2020,0
10-11-2020,0
11-11-2020,0
12-11-2020,0
13-11-2020,0
16-11-2020,0
17-11-2020,0
18-11-2020,0
19-11-2020,0
20-11-2020,0
23-11-2020,0
24-11-2020,0
25-11-2020,0
26-11-2020,0
27-11-2020,0
30-11-2020,0
01-12-2020,0
02-12-2020,0
03-12-2020,0
04-12-2020,0
07-12-2020,0
08-12-2020,0
09-12-2020,0
10-12-2020,0
11-12-2020,0
14-12-2020,0
15-12-2020,0
16-12-2020,0
17-12-2020,0
18-12-2020,0
21-12-2020,0
22-12-2020,0
23-12-2020,0
28-12-2020,0
29-12-2020,0
30-12-2020,0
31-12-2020,0
04-01-2021,0
05-01-2021,-0.01
06-01-2021,-0.01
07-01-2021,-0.01
08-01-2021,0
11-01-2021,0
12-01-2021,0
13-01-2021,0
14-01-2021,0
15-01-2021,0
18-01-2021,0
19-01-2021,0
20-01-2021,0
21-01-2021,0
22-01-2021,0
25-01-2021,0
26-01-2021,0
27-01-2021,0
28-01-2021,0
29-01-2021,0
01-02-2021,0
02-02-2021,0
03-02-2021,0
04-02-2021,0
05-02-2021,0
08-02-2021,0
09-02-2021,0
10-02-2021,0
11-02-2021,0
12-02-2021,0
15-02-2021,0
16-02-2021,0
17-02-2021,0
18-02-2021,0
19-02-2021,0
22-02-2021,0
23-02-2021,0
24-02-2021,0
25-02-2021,0
26-02-2021,0
01-03-2021,0
02-03-2021,0
03-03-2021,0
04-03-2021,0
05-03-2021,0
08-03-2021,0
09-03-2021,0
10-03-2021,0
11-03-2021,0
12-03-2021,0
15-03-2021,0
16-03-2021,0
17-03-2021,0
18-03-2021,0
19-03-2021,0
22-03-2021,0
23-03-2021,0
24-03-2021,0
25-03-2021,0
26-03-2021,0
29-03-2021,0
30-03-2021,0
31-03-2021,0
06-04-2021,0
07-04-2021,0
08-04-2021,0
09-04-2021,0
12-04-2021,0
13-04-2021,0
14-04-2021,0
15-04-2021,0
16-04-2021,0
19-04-2021,0
20-04-2021,0
21-04-2021,0
22-04-2021,0
23-04-2021,0
26-04-2021,0
27-04-2021,0
28-04-2021,0
29-04-2021,0
30-04-2021,0
03-05-2021,0
04-05-2021,0
05-05-2021,0
06-05-2021,0
07-05-2021,0
10-05-2021,0
11-05-2021,0
12-05-2021,0
14-05-2021,0
18-05-2021,0
19-05-2021,0
20-05-2021,0
21-05-2021,0
25-05-2021,0
26-05-2021,0
27-05-2021,0
28-05-2021,0
31-05-2021,0
01-06-2021,0
02-06-2021,0
03-06-2021,0
04-06-2021,-0.01
07-06-2021,0
08-06-2021,0
09-06-2021,0
10-06-2021,0
11-06-2021,0
14-06-2021,0
15-06-2021,0
16-06-2021,0
17-06-2021,0
18-06-2021,0
21-06-2021,0
22-06-2021,0
23-06-2021,0
24-06-2021,0
25-06-2021,0
28-06-2021,0
29-06-2021,0
30-06-2021,-0.01
01-07-2021,0
02-07-2021,0
05-07-2021,0
06-07-2021,0
07-07-2021,0
08-07-2021,0
09-07-2021,0
12-07-2021,0
13-07-2021,0
14-07-2021,0
15-07-2021,0
16-07-2021,0
19-07-2021,0
20-07-2021,0
21-07-2021,0
22-07-2021,0
23-07-2021,0
26-07-2021,0
27-07-2021,0
28-07-2021,0
29-07-2021,0
30-07-2021,0
02-08-2021,0
03-08-2021,0
04-08-2021,0
05-08-2021,0
06-08-2021,0
09-08-2021,0
10-08-2021,0
11-08-2021,0
12-08-2021,0
13-08-2021,0
16-08-2021,0
17-08-2021,0
18-08-2021,0
19-08-2021,0
20-08-2021,0
23-08-2021,0
24-08-2021,0
25-08-2021,0
26-08-2021,0
27-08-2021,0
30-08-2021,0
31-08-2021,0
01-09-2021,0
02-09-2021,0
03-09-2021,0
06-09-2021,0
07-09-2021,0
08-09-2021,0
09-09-2021,0
10-09-2021,0
13-09-2021,0
14-09-2021,0
15-09-2021,0
16-09-2021,0
17-09-2021,0
20-09-2021,0
21-09-2021,0
22-09-2021,0
23-09-2021,0
24-09-2021,0.25
27-09-2021,0.25
28-09-2021,0.25
29-09-2021,0.25
30-09-2021,0.25
01-10-2021,0.25
04-10-2021,0.25
05-10-2021,0.25
06-10-2021,0.25
07-10-2021,0.25
08-10-2021,0.25
11-10-2021,0.25
12-10-2021,0.25
13-10-2021,0.25
14-10-2021,0.25
15-10-2021,0.25
18-10-2021,0.25
19-10-2021,0.25
20-10-2021,0.25
21-10-2021,0.25
22-10-2021,0.25
25-10-2021,0.25
26-10-2021,0.25
27-10-2021,0.25
28-10-2021,0.25
29-10-2021,0.25
01-11-2021,0.25
02-11-2021,0.25
03-11-2021,0.25
04-11-2021,0.25
05-11-2021,0.25
08-11-2021,0.25
09-11-2021,0.25
10-11-2021,0.25
11-11-2021,0.25
12-11-2021,0.25
15-11-2021,0.25
16-11-2021,0.25
17-11-2021,0.25
18-11-2021,0.25
19-11-2021,0.25
22-11-2021,0.25
23-11-2021,0.25
24-11-2021,0.25
25-11-2021,0.25
26-11-2021,0.25
29-11-2021,0.25
30-11-2021,0.25
01-12-2021,0.25
02-12-2021,0.25
03-12-2021,0.25
06-12-2021,0.25
07-12-2021,0.25
08-12-2021,0.25
09-12-2021,0.25
10-12-2021,0.25
13-12-2021,0.25
14-12-2021,0.25
15-12-2021,0.25
16-12-2021,0.25
17-12-2021,0.5
20-12-2021,0.5
21-12-2021,0.5
22-12-2021,0.5
23-12-2021,0.5
27-12-2021,0.5
28-12-2021,0.5
29-12-2021,0.5
30-12-2021,0.5
31-12-2021,0.5
03-01-2022,0.5
04-01-2022,0.5
05-01-2022,0.5
06-01-2022,0.5
07-01-2022,0.5
10-01-2022,0.5
11-01-2022,0.5
12-01-2022,0.5
13-01-2022,0.5
14-01-2022,0.5
17-01-2022,0.5
18-01-2022,0.5
19-01-2022,0.5
20-01-2022,0.5
21-01-2022,0.5
24-01-2022,0.5
25-01-2022,0.5
26-01-2022,0.5
27-01-2022,0.5
28-01-2022,0.5
31-01-2022,0.5
01-02-2022,0.5
02-02-2022,0.5
03-02-2022,0.5
04-02-2022,0.5
07-02-2022,0.5
08-02-2022,0.5
09-02-2022,0.5
10-02-2022,0.5
11-02-2022,0.5
14-02-2022,0.5
15-02-2022,0.5
16-02-2022,0.5
17-02-2022,0.5
18-02-2022,0.5
21-02-2022,0.5
22-02-2022,0.5
23-02-2022,0.5
24-02-2022,0.5
25-02-2022,0.5
28-02-2022,0.5
01-03-2022,0.5
02-03-2022,0.5
03-03-2022,0.5
04-03-2022,0.5
07-03-2022,0.5
08-03-2022,0.5
09-03-2022,0.5
10-03-2022,0.5
11-03-2022,0.5
14-03-2022,0.5
15-03-2022,0.5
16-03-2022,0.5
17-03-2022,0.5
18-03-2022,0.5
21-03-2022,0.5
22-03-2022,0.5
23-03-2022,0.5
24-03-2022,0.5
25-03-2022,0.75
28-03-2022,0.75
29-03-2022,0.75
30-03-2022,0.75
31-03-2022,0.75
01-04-2022,0.75
04-04-2022,0.75
05-04-2022,0.75
06-04-2022,0.75
07-04-2022,0.75
08-04-2022,0.75
11-04-2022,0.75
12-04-2022,0.75
13-04-2022,0.75
19-04-2022,0.75
20-04-2022,0.75
21-04-2022,0.75
22-04-2022,0.75
25-04-2022,0.75
26-04-2022,0.75
27-04-2022,0.75
28-04-2022,0.75
29-04-2022,0.75
02-05-2022,0.75
03-05-2022,0.75
04-05-2022,0.75
05-05-2022,0.75
06-05-2022,0.75
09-05-2022,0.75
10-05-2022,0.75
11-05-2022,0.75
12-05-2022,0.75
13-05-2022,0.75
16-05-2022,0.75
18-05-2022,0.75
19-05-2022,0.75
20-05-2022,0.75
23-05-2022,0.75
24-05-2022,0.75
25-05-2022,0.75
27-05-2022,0.75
30-05-2022,0.75
31-05-2022,0.75
01-06-2022,0.75
02-06-2022,0.75
03-06-2022,0.75
07-06-2022,0.75
08-06-2022,0.75
09-06-2022,0.75
10-06-2022,0.75
13-06-2022,0.75
14-06-2022,0.75
15-06-2022,0.75
16-06-2022,0.75
17-06-2022,0.75
20-06-2022,0.75
21-06-2022,0.75
22-06-2022,0.75
23-06-2022,0.75
24-06-2022,1.25
27-06-2022,1.25
28-06-2022,1.25
29-06-2022,1.25
30-06-2022,1.25
01-07-2022,1.25
04-07-2022,1.25
05-07-2022,1.25
06-07-2022,1.25
07-07-2022,1.25
08-07-2022,1.25
11-07-2022,1.25
12-07-2022,1.25
13-07-2022,1.25
14-07-2022,1.25
15-07-2022,1.25
18-07-2022,1.25
19-07-2022,1.25
20-07-2022,1.25
21-07-2022,1.25
22-07-2022,1.25
25-07-2022,1.25
26-07-2022,1.25
27-07-2022,1.25
28-07-2022,1.25
29-07-2022,1.25
01-08-2022,1.25
02-08-2022,1.25
03-08-2022,1.25
04-08-2022,1.25
05-08-2022,1.25
08-08-2022,1.25
09-08-2022,1.25
10-08-2022,1.25
11-08-2022,1.25
12-08-2022,1.25
15-08-2022,1.25
16-08-2022,1.25
17-08-2022,1.25
18-08-2022,1.25
19-08-2022,1.75
22-08-2022,1.75
23-08-2022,1.75
24-08-2022,1.75
25-08-2022,1.75
26-08-2022,1.75
29-08-2022,1.75
30-08-2022,1.75
31-08-2022,1.75
01-09-2022,1.75
02-09-2022,1.75
05-09-2022,1.75
06-09-2022,1.75
07-09-2022,1.75
08-09-2022,1.75
09-09-2022,1.75
12-09-2022,1.75
13-09-2022,1.75
14-09-2022,1.75
15-09-2022,1.75
16-09-2022,1.75
19-09-2022,1.75
20-09-2022,1.75
21-09-2022,1.75
22-09-2022,1.75
23-09-2022,2.25
26-09-2022,2.25
27-09-2022,2.25
28-09-2022,2.25
29-09-2022,2.25
30-09-2022,2.27
03-10-2022,2.3
04-10-2022,2.25
05-10-2022,2.25
06-10-2022,2.25
07-10-2022,2.25
10-10-2022,2.25
11-10-2022,2.25
12-10-2022,2.25
13-10-2022,2.25
14-10-2022,2.25
17-10-2022,2.25
18-10-2022,2.25
19-10-2022,2.25
20-10-2022,2.25
21-10-2022,2.25
24-10-2022,2.25
25-10-2022,2.25
26-10-2022,2.25
27-10-2022,2.25
28-10-2022,2.25
31-10-2022,2.25
01-11-2022,2.25
02-11-2022,2.25
03-11-2022,2.25
04-11-2022,2.5
07-11-2022,2.5
08-11-2022,2.5
09-11-2022,2.5
10-11-2022,2.5
11-11-2022,2.5
14-11-2022,2.5
15-11-2022,2.5
16-11-2022,2.5
17-11-2022,2.5
18-11-2022,2.5
21-11-2022,2.5
22-11-2022,2.5
23-11-2022,2.5
24-11-2022,2.5
25-11-2022,2.5
28-11-2022,2.5
29-11-2022,2.5
30-11-2022,2.5
01-12-2022,2.5
02-12-2022,2.5
05-12-2022,2.5
06-12-2022,2.5
07-12-2022,2.5
08-12-2022,2.5
09-12-2022,2.5
12-12-2022,2.5
13-12-2022,2.5
14-12-2022,2.5
15-12-2022,2.5
16-12-2022,2.75
19-12-2022,2.75
20-12-2022,2.75
21-12-2022,2.75
22-12-2022,2.75
23-12-2022,2.75
27-12-2022,2.75
28-12-2022,2.75
29-12-2022,2.75
30-12-2022,2.75
02-01-2023,2.75
03-01-2023,2.75
04-01-2023,2.75
05-01-2023,2.75
06-01-2023,2.75
09-01-2023,2.75
10-01-2023,2.75
11-01-2023,2.75
12-01-2023,2.75
13-01-2023,2.75
16-01-2023,2.75
17-01-2023,2.75
18-01-2023,2.75
19-01-2023,2.75
20-01-2023,2.75
23-01-2023,2.75
24-01-2023,2.75
25-01-2023,2.75
26-01-2023,2.75
27-01-2023,2.75
30-01-2023,2.75
31-01-2023,2.75
01-02-2023,2.75
02-02-2023,2.75
03-02-2023,2.75
06-02-2023,2.75
07-02-2023,2.75
08-02-2023,2.75
09-02-2023,2.75
10-02-2023,2.75
13-02-2023,2.75
14-02-2023,2.75
15-02-2023,2.75
16-02-2023,2.75
17-02-2023,2.75
20-02-2023,2.75
21-02-2023,2.75
22-02-2023,2.75
23-02-2023,2.75
24-02-2023,2.75
27-02-2023,2.75
28-02-2023,2.75
01-03-2023,2.75
02-03-2023,2.75
03-03-2023,2.75
06-03-2023,2.75
07-03-2023,2.75
08-03-2023,2.75
09-03-2023,2.75
10-03-2023,2.75
13-03-2023,2.75
14-03-2023,2.75
15-03-2023,2.75
16-03-2023,2.75
17-03-2023,2.75
20-03-2023,2.75
21-03-2023,2.75
22-03-2023,2.75
23-03-2023,2.75
24-03-2023,3
27-03-2023,3
28-03-2023,3
29-03-2023,3
30-03-2023,3
31-03-2023,3
03-04-2023,3
04-04-2023,3
05-04-2023,3
11-04-2023,3
12-04-2023,3
13-04-2023,3
14-04-2023,3
17-04-2023,3
18-04-2023,3
19-04-2023,3
20-04-2023,3
21-04-2023,3
24-04-2023,3
25-04-2023,3
26-04-2023,3
27-04-2023,3
28-04-2023,3
02-05-2023,3
03-05-2023,3
04-05-2023,3
05-05-2023,3.25
08-05-2023,3.25
09-05-2023,3.25
10-05-2023,3.25
11-05-2023,3.25
12-05-2023,3.25
15-05-2023,3.25
16-05-2023,3.25
19-05-2023,3.25
22-05-2023,3.25
23-05-2023,3.25
24-05-2023,3.25
25-05-2023,3.25
26-05-2023,3.25
30-05-2023,3.25
31-05-2023,3.25
01-06-2023,3.25
02-06-2023,3.25
05-06-2023,3.25
06-06-2023,3.25
07-06-2023,3.25
08-06-2023,3.25
09-06-2023,3.25
12-06-2023,3.25
13-06-2023,3.25
14-06-2023,3.25
15-06-2023,3.25
16-06-2023,3.25
19-06-2023,3.25
20-06-2023,3.25
21-06-2023,3.25
22-06-2023,3.25
23-06-2023,3.75
26-06-2023,3.75
27-06-2023,3.75
28-06-2023,3.75
29-06-2023,3.75
30-06-2023,3.96
03-07-2023,3.75
04-07-2023,3.75
05-07-2023,3.75
06-07-2023,3.75
07-07-2023,3.75
10-07-2023,3.75
11-07-2023,3.75
12-07-2023,3.75
13-07-2023,3.75
14-07-2023,3.75
17-07-2023,3.75
18-07-2023,3.75
19-07-2023,3.75
20-07-2023,3.75
21-07-2023,3.75
24-07-2023,3.75
25-07-2023,3.75
26-07-2023,3.75
27-07-2023,3.75
28-07-2023,3.75
31-07-2023,3.75
01-08-2023,3.75
02-08-2023,3.75
================================================
FILE: python/rateslib/data/historical/nowa.csv
================================================
reference_date,rate
02-01-2020,1.49
03-01-2020,1.49
06-01-2020,1.49
07-01-2020,1.49
08-01-2020,1.49
09-01-2020,1.49
10-01-2020,1.49
13-01-2020,1.49
14-01-2020,1.49
15-01-2020,1.49
16-01-2020,1.49
17-01-2020,1.49
20-01-2020,1.49
21-01-2020,1.49
22-01-2020,1.49
23-01-2020,1.49
24-01-2020,1.49
27-01-2020,1.49
28-01-2020,1.49
29-01-2020,1.49
30-01-2020,1.49
31-01-2020,1.49
03-02-2020,1.49
04-02-2020,1.49
05-02-2020,1.49
06-02-2020,1.49
07-02-2020,1.49
10-02-2020,1.49
11-02-2020,1.49
12-02-2020,1.49
13-02-2020,1.49
14-02-2020,1.49
17-02-2020,1.49
18-02-2020,1.49
19-02-2020,1.49
20-02-2020,1.49
21-02-2020,1.49
24-02-2020,1.49
25-02-2020,1.49
26-02-2020,1.49
27-02-2020,1.49
28-02-2020,1.51
02-03-2020,1.49
03-03-2020,1.49
04-03-2020,1.49
05-03-2020,1.49
06-03-2020,1.49
09-03-2020,1.49
10-03-2020,1.49
11-03-2020,1.49
12-03-2020,1.49
13-03-2020,1.49
16-03-2020,0.99
17-03-2020,0.99
18-03-2020,0.99
19-03-2020,0.99
20-03-2020,0.99
23-03-2020,0.24
24-03-2020,0.24
25-03-2020,0.24
26-03-2020,0.24
27-03-2020,0.24
30-03-2020,0.24
31-03-2020,0.24
01-04-2020,0.25
02-04-2020,0.25
03-04-2020,0.25
06-04-2020,0.24
07-04-2020,0.25
08-04-2020,0.25
14-04-2020,0.24
15-04-2020,0.24
16-04-2020,0.24
17-04-2020,0.24
20-04-2020,0.23
21-04-2020,0.24
22-04-2020,0.24
23-04-2020,0.25
24-04-2020,0.25
27-04-2020,0.24
28-04-2020,0.24
29-04-2020,0.24
30-04-2020,0.24
04-05-2020,0.24
05-05-2020,0.24
06-05-2020,0.24
07-05-2020,0.24
08-05-2020,0
11-05-2020,0
12-05-2020,0
13-05-2020,0
14-05-2020,0
15-05-2020,0
18-05-2020,0
19-05-2020,0
20-05-2020,0
22-05-2020,0
25-05-2020,0
26-05-2020,0
27-05-2020,0
28-05-2020,0
29-05-2020,0
02-06-2020,0
03-06-2020,0
04-06-2020,0
05-06-2020,0
08-06-2020,0
09-06-2020,0
10-06-2020,0
11-06-2020,0
12-06-2020,0
15-06-2020,0
16-06-2020,0
17-06-2020,0
18-06-2020,0
19-06-2020,0
22-06-2020,0
23-06-2020,0.01
24-06-2020,-0.01
25-06-2020,-0.01
26-06-2020,-0.01
29-06-2020,0
30-06-2020,-0.01
01-07-2020,0
02-07-2020,0
03-07-2020,0
06-07-2020,0
07-07-2020,0
08-07-2020,-0.01
09-07-2020,0
10-07-2020,-0.01
13-07-2020,0
14-07-2020,0
15-07-2020,0
16-07-2020,0
17-07-2020,0
20-07-2020,0
21-07-2020,0
22-07-2020,0
23-07-2020,0
24-07-2020,0
27-07-2020,0
28-07-2020,0
29-07-2020,-0.01
30-07-2020,0
31-07-2020,-0.01
03-08-2020,-0.01
04-08-2020,-0.01
05-08-2020,-0.01
06-08-2020,-0.01
07-08-2020,-0.01
10-08-2020,-0.01
11-08-2020,-0.01
12-08-2020,0
13-08-2020,0
14-08-2020,0
17-08-2020,-0.01
18-08-2020,-0.01
19-08-2020,-0.01
20-08-2020,-0.01
21-08-2020,-0.01
24-08-2020,-0.01
25-08-2020,-0.01
26-08-2020,-0.01
27-08-2020,-0.01
28-08-2020,-0.01
31-08-2020,0.04
01-09-2020,-0.01
02-09-2020,0
03-09-2020,-0.01
04-09-2020,-0.01
07-09-2020,-0.01
08-09-2020,0
09-09-2020,0
10-09-2020,0
11-09-2020,-0.01
14-09-2020,0
15-09-2020,0
16-09-2020,0
17-09-2020,0
18-09-2020,0
21-09-2020,0
22-09-2020,0
23-09-2020,0
24-09-2020,0
25-09-2020,0
28-09-2020,0
29-09-2020,0
30-09-2020,0
01-10-2020,0
02-10-2020,0
05-10-2020,0
06-10-2020,0
07-10-2020,0
08-10-2020,0
09-10-2020,0
12-10-2020,0
13-10-2020,0
14-10-2020,0
15-10-2020,0
16-10-2020,0
19-10-2020,0
20-10-2020,0
21-10-2020,0
22-10-2020,0
23-10-2020,0
26-10-2020,0
27-10-2020,0
28-10-2020,0
29-10-2020,0
30-10-2020,0
02-11-2020,0
03-11-2020,0
04-11-2020,0
05-11-2020,0
06-11-2020,0
09-11-2020,0
10-11-2020,0
11-11-2020,0
12-11-2020,0
13-11-2020,0
16-11-2020,0
17-11-2020,0
18-11-2020,0
19-11-2020,0
20-11-2020,0
23-11-2020,0
24-11-2020,0
25-11-2020,0
26-11-2020,0
27-11-2020,0
30-11-2020,0
01-12-2020,0
02-12-2020,0
03-12-2020,0
04-12-2020,0
07-12-2020,0
08-12-2020,0
09-12-2020,0
10-12-2020,0
11-12-2020,0
14-12-2020,0
15-12-2020,0
16-12-2020,0
17-12-2020,0
18-12-2020,0
21-12-2020,0
22-12-2020,0
23-12-2020,0
28-12-2020,0
29-12-2020,0
30-12-2020,0
31-12-2020,0
04-01-2021,0
05-01-2021,-0.01
06-01-2021,-0.01
07-01-2021,-0.01
08-01-2021,0
11-01-2021,0
12-01-2021,0
13-01-2021,0
14-01-2021,0
15-01-2021,0
18-01-2021,0
19-01-2021,0
20-01-2021,0
21-01-2021,0
22-01-2021,0
25-01-2021,0
26-01-2021,0
27-01-2021,0
28-01-2021,0
29-01-2021,0
01-02-2021,0
02-02-2021,0
03-02-2021,0
04-02-2021,0
05-02-2021,0
08-02-2021,0
09-02-2021,0
10-02-2021,0
11-02-2021,0
12-02-2021,0
15-02-2021,0
16-02-2021,0
17-02-2021,0
18-02-2021,0
19-02-2021,0
22-02-2021,0
23-02-2021,0
24-02-2021,0
25-02-2021,0
26-02-2021,0
01-03-2021,0
02-03-2021,0
03-03-2021,0
04-03-2021,0
05-03-2021,0
08-03-2021,0
09-03-2021,0
10-03-2021,0
11-03-2021,0
12-03-2021,0
15-03-2021,0
16-03-2021,0
17-03-2021,0
18-03-2021,0
19-03-2021,0
22-03-2021,0
23-03-2021,0
24-03-2021,0
25-03-2021,0
26-03-2021,0
29-03-2021,0
30-03-2021,0
31-03-2021,0
06-04-2021,0
07-04-2021,0
08-04-2021,0
09-04-2021,0
12-04-2021,0
13-04-2021,0
14-04-2021,0
15-04-2021,0
16-04-2021,0
19-04-2021,0
20-04-2021,0
21-04-2021,0
22-04-2021,0
23-04-2021,0
26-04-2021,0
27-04-2021,0
28-04-2021,0
29-04-2021,0
30-04-2021,0
03-05-2021,0
04-05-2021,0
05-05-2021,0
06-05-2021,0
07-05-2021,0
10-05-2021,0
11-05-2021,0
12-05-2021,0
14-05-2021,0
18-05-2021,0
19-05-2021,0
20-05-2021,0
21-05-2021,0
25-05-2021,0
26-05-2021,0
27-05-2021,0
28-05-2021,0
31-05-2021,0
01-06-2021,0
02-06-2021,0
03-06-2021,0
04-06-2021,-0.01
07-06-2021,0
08-06-2021,0
09-06-2021,0
10-06-2021,0
11-06-2021,0
14-06-2021,0
15-06-2021,0
16-06-2021,0
17-06-2021,0
18-06-2021,0
21-06-2021,0
22-06-2021,0
23-06-2021,0
24-06-2021,0
25-06-2021,0
28-06-2021,0
29-06-2021,0
30-06-2021,-0.01
01-07-2021,0
02-07-2021,0
05-07-2021,0
06-07-2021,0
07-07-2021,0
08-07-2021,0
09-07-2021,0
12-07-2021,0
13-07-2021,0
14-07-2021,0
15-07-2021,0
16-07-2021,0
19-07-2021,0
20-07-2021,0
21-07-2021,0
22-07-2021,0
23-07-2021,0
26-07-2021,0
27-07-2021,0
28-07-2021,0
29-07-2021,0
30-07-2021,0
02-08-2021,0
03-08-2021,0
04-08-2021,0
05-08-2021,0
06-08-2021,0
09-08-2021,0
10-08-2021,0
11-08-2021,0
12-08-2021,0
13-08-2021,0
16-08-2021,0
17-08-2021,0
18-08-2021,0
19-08-2021,0
20-08-2021,0
23-08-2021,0
24-08-2021,0
25-08-2021,0
26-08-2021,0
27-08-2021,0
30-08-2021,0
31-08-2021,0
01-09-2021,0
02-09-2021,0
03-09-2021,0
06-09-2021,0
07-09-2021,0
08-09-2021,0
09-09-2021,0
10-09-2021,0
13-09-2021,0
14-09-2021,0
15-09-2021,0
16-09-2021,0
17-09-2021,0
20-09-2021,0
21-09-2021,0
22-09-2021,0
23-09-2021,0
24-09-2021,0.25
27-09-2021,0.25
28-09-2021,0.25
29-09-2021,0.25
30-09-2021,0.25
01-10-2021,0.25
04-10-2021,0.25
05-10-2021,0.25
06-10-2021,0.25
07-10-2021,0.25
08-10-2021,0.25
11-10-2021,0.25
12-10-2021,0.25
13-10-2021,0.25
14-10-2021,0.25
15-10-2021,0.25
18-10-2021,0.25
19-10-2021,0.25
20-10-2021,0.25
21-10-2021,0.25
22-10-2021,0.25
25-10-2021,0.25
26-10-2021,0.25
27-10-2021,0.25
28-10-2021,0.25
29-10-2021,0.25
01-11-2021,0.25
02-11-2021,0.25
03-11-2021,0.25
04-11-2021,0.25
05-11-2021,0.25
08-11-2021,0.25
09-11-2021,0.25
10-11-2021,0.25
11-11-2021,0.25
12-11-2021,0.25
15-11-2021,0.25
16-11-2021,0.25
17-11-2021,0.25
18-11-2021,0.25
19-11-2021,0.25
22-11-2021,0.25
23-11-2021,0.25
24-11-2021,0.25
25-11-2021,0.25
26-11-2021,0.25
29-11-2021,0.25
30-11-2021,0.25
01-12-2021,0.25
02-12-2021,0.25
03-12-2021,0.25
06-12-2021,0.25
07-12-2021,0.25
08-12-2021,0.25
09-12-2021,0.25
10-12-2021,0.25
13-12-2021,0.25
14-12-2021,0.25
15-12-2021,0.25
16-12-2021,0.25
17-12-2021,0.5
20-12-2021,0.5
21-12-2021,0.5
22-12-2021,0.5
23-12-2021,0.5
27-12-2021,0.5
28-12-2021,0.5
29-12-2021,0.5
30-12-2021,0.5
31-12-2021,0.5
03-01-2022,0.5
04-01-2022,0.5
05-01-2022,0.5
06-01-2022,0.5
07-01-2022,0.5
10-01-2022,0.5
11-01-2022,0.5
12-01-2022,0.5
13-01-2022,0.5
14-01-2022,0.5
17-01-2022,0.5
18-01-2022,0.5
19-01-2022,0.5
20-01-2022,0.5
21-01-2022,0.5
24-01-2022,0.5
25-01-2022,0.5
26-01-2022,0.5
27-01-2022,0.5
28-01-2022,0.5
31-01-2022,0.5
01-02-2022,0.5
02-02-2022,0.5
03-02-2022,0.5
04-02-2022,0.5
07-02-2022,0.5
08-02-2022,0.5
09-02-2022,0.5
10-02-2022,0.5
11-02-2022,0.5
14-02-2022,0.5
15-02-2022,0.5
16-02-2022,0.5
17-02-2022,0.5
18-02-2022,0.5
21-02-2022,0.5
22-02-2022,0.5
23-02-2022,0.5
24-02-2022,0.5
25-02-2022,0.5
28-02-2022,0.5
01-03-2022,0.5
02-03-2022,0.5
03-03-2022,0.5
04-03-2022,0.5
07-03-2022,0.5
08-03-2022,0.5
09-03-2022,0.5
10-03-2022,0.5
11-03-2022,0.5
14-03-2022,0.5
15-03-2022,0.5
16-03-2022,0.5
17-03-2022,0.5
18-03-2022,0.5
21-03-2022,0.5
22-03-2022,0.5
23-03-2022,0.5
24-03-2022,0.5
25-03-2022,0.75
28-03-2022,0.75
29-03-2022,0.75
30-03-2022,0.75
31-03-2022,0.75
01-04-2022,0.75
04-04-2022,0.75
05-04-2022,0.75
06-04-2022,0.75
07-04-2022,0.75
08-04-2022,0.75
11-04-2022,0.75
12-04-2022,0.75
13-04-2022,0.75
19-04-2022,0.75
20-04-2022,0.75
21-04-2022,0.75
22-04-2022,0.75
25-04-2022,0.75
26-04-2022,0.75
27-04-2022,0.75
28-04-2022,0.75
29-04-2022,0.75
02-05-2022,0.75
03-05-2022,0.75
04-05-2022,0.75
05-05-2022,0.75
06-05-2022,0.75
09-05-2022,0.75
10-05-2022,0.75
11-05-2022,0.75
12-05-2022,0.75
13-05-2022,0.75
16-05-2022,0.75
18-05-2022,0.75
19-05-2022,0.75
20-05-2022,0.75
23-05-2022,0.75
24-05-2022,0.75
25-05-2022,0.75
27-05-2022,0.75
30-05-2022,0.75
31-05-2022,0.75
01-06-2022,0.75
02-06-2022,0.75
03-06-2022,0.75
07-06-2022,0.75
08-06-2022,0.75
09-06-2022,0.75
10-06-2022,0.75
13-06-2022,0.75
14-06-2022,0.75
15-06-2022,0.75
16-06-2022,0.75
17-06-2022,0.75
20-06-2022,0.75
21-06-2022,0.75
22-06-2022,0.75
23-06-2022,0.75
24-06-2022,1.25
27-06-2022,1.25
28-06-2022,1.25
29-06-2022,1.25
30-06-2022,1.25
01-07-2022,1.25
04-07-2022,1.25
05-07-2022,1.25
06-07-2022,1.25
07-07-2022,1.25
08-07-2022,1.25
11-07-2022,1.25
12-07-2022,1.25
13-07-2022,1.25
14-07-2022,1.25
15-07-2022,1.25
18-07-2022,1.25
19-07-2022,1.25
20-07-2022,1.25
21-07-2022,1.25
22-07-2022,1.25
25-07-2022,1.25
26-07-2022,1.25
27-07-2022,1.25
28-07-2022,1.25
29-07-2022,1.25
01-08-2022,1.25
02-08-2022,1.25
03-08-2022,1.25
04-08-2022,1.25
05-08-2022,1.25
08-08-2022,1.25
09-08-2022,1.25
10-08-2022,1.25
11-08-2022,1.25
12-08-2022,1.25
15-08-2022,1.25
16-08-2022,1.25
17-08-2022,1.25
18-08-2022,1.25
19-08-2022,1.75
22-08-2022,1.75
23-08-2022,1.75
24-08-2022,1.75
25-08-2022,1.75
26-08-2022,1.75
29-08-2022,1.75
30-08-2022,1.75
31-08-2022,1.75
01-09-2022,1.75
02-09-2022,1.75
05-09-2022,1.75
06-09-2022,1.75
07-09-2022,1.75
08-09-2022,1.75
09-09-2022,1.75
12-09-2022,1.75
13-09-2022,1.75
14-09-2022,1.75
15-09-2022,1.75
16-09-2022,1.75
19-09-2022,1.75
20-09-2022,1.75
21-09-2022,1.75
22-09-2022,1.75
23-09-2022,2.25
26-09-2022,2.25
27-09-2022,2.25
28-09-2022,2.25
29-09-2022,2.25
30-09-2022,2.27
03-10-2022,2.3
04-10-2022,2.25
05-10-2022,2.25
06-10-2022,2.25
07-10-2022,2.25
10-10-2022,2.25
11-10-2022,2.25
12-10-2022,2.25
13-10-2022,2.25
14-10-2022,2.25
17-10-2022,2.25
18-10-2022,2.25
19-10-2022,2.25
20-10-2022,2.25
21-10-2022,2.25
24-10-2022,2.25
25-10-2022,2.25
26-10-2022,2.25
27-10-2022,2.25
28-10-2022,2.25
31-10-2022,2.25
01-11-2022,2.25
02-11-2022,2.25
03-11-2022,2.25
04-11-2022,2.5
07-11-2022,2.5
08-11-2022,2.5
09-11-2022,2.5
10-11-2022,2.5
11-11-2022,2.5
14-11-2022,2.5
15-11-2022,2.5
16-11-2022,2.5
17-11-2022,2.5
18-11-2022,2.5
21-11-2022,2.5
22-11-2022,2.5
23-11-2022,2.5
24-11-2022,2.5
25-11-2022,2.5
28-11-2022,2.5
29-11-2022,2.5
30-11-2022,2.5
01-12-2022,2.5
02-12-2022,2.5
05-12-2022,2.5
06-12-2022,2.5
07-12-2022,2.5
08-12-2022,2.5
09-12-2022,2.5
12-12-2022,2.5
13-12-2022,2.5
14-12-2022,2.5
15-12-2022,2.5
16-12-2022,2.75
19-12-2022,2.75
20-12-2022,2.75
21-12-2022,2.75
22-12-2022,2.75
23-12-2022,2.75
27-12-2022,2.75
28-12-2022,2.75
29-12-2022,2.75
30-12-2022,2.75
02-01-2023,2.75
03-01-2023,2.75
04-01-2023,2.75
05-01-2023,2.75
06-01-2023,2.75
09-01-2023,2.75
10-01-2023,2.75
11-01-2023,2.75
12-01-2023,2.75
13-01-2023,2.75
16-01-2023,2.75
17-01-2023,2.75
18-01-2023,2.75
19-01-2023,2.75
20-01-2023,2.75
23-01-2023,2.75
24-01-2023,2.75
25-01-2023,2.75
26-01-2023,2.75
27-01-2023,2.75
30-01-2023,2.75
31-01-2023,2.75
01-02-2023,2.75
02-02-2023,2.75
03-02-2023,2.75
06-02-2023,2.75
07-02-2023,2.75
08-02-2023,2.75
09-02-2023,2.75
10-02-2023,2.75
13-02-2023,2.75
14-02-2023,2.75
15-02-2023,2.75
16-02-2023,2.75
17-02-2023,2.75
20-02-2023,2.75
21-02-2023,2.75
22-02-2023,2.75
23-02-2023,2.75
24-02-2023,2.75
27-02-2023,2.75
28-02-2023,2.75
01-03-2023,2.75
02-03-2023,2.75
03-03-2023,2.75
06-03-2023,2.75
07-03-2023,2.75
08-03-2023,2.75
09-03-2023,2.75
10-03-2023,2.75
13-03-2023,2.75
14-03-2023,2.75
15-03-2023,2.75
16-03-2023,2.75
17-03-2023,2.75
20-03-2023,2.75
21-03-2023,2.75
22-03-2023,2.75
23-03-2023,2.75
24-03-2023,3
27-03-2023,3
28-03-2023,3
29-03-2023,3
30-03-2023,3
31-03-2023,3
03-04-2023,3
04-04-2023,3
05-04-2023,3
11-04-2023,3
12-04-2023,3
13-04-2023,3
14-04-2023,3
17-04-2023,3
18-04-2023,3
19-04-2023,3
20-04-2023,3
21-04-2023,3
24-04-2023,3
25-04-2023,3
26-04-2023,3
27-04-2023,3
28-04-2023,3
02-05-2023,3
03-05-2023,3
04-05-2023,3
05-05-2023,3.25
08-05-2023,3.25
09-05-2023,3.25
10-05-2023,3.25
11-05-2023,3.25
12-05-2023,3.25
15-05-2023,3.25
16-05-2023,3.25
19-05-2023,3.25
22-05-2023,3.25
23-05-2023,3.25
24-05-2023,3.25
25-05-2023,3.25
26-05-2023,3.25
30-05-2023,3.25
31-05-2023,3.25
01-06-2023,3.25
02-06-2023,3.25
05-06-2023,3.25
06-06-2023,3.25
07-06-2023,3.25
08-06-2023,3.25
09-06-2023,3.25
12-06-2023,3.25
13-06-2023,3.25
14-06-2023,3.25
15-06-2023,3.25
16-06-2023,3.25
19-06-2023,3.25
20-06-2023,3.25
21-06-2023,3.25
22-06-2023,3.25
23-06-2023,3.75
26-06-2023,3.75
27-06-2023,3.75
28-06-2023,3.75
29-06-2023,3.75
30-06-2023,3.96
03-07-2023,3.75
04-07-2023,3.75
05-07-2023,3.75
06-07-2023,3.75
07-07-2023,3.75
10-07-2023,3.75
11-07-2023,3.75
12-07-2023,3.75
13-07-2023,3.75
14-07-2023,3.75
17-07-2023,3.75
18-07-2023,3.75
19-07-2023,3.75
20-07-2023,3.75
21-07-2023,3.75
24-07-2023,3.75
25-07-2023,3.75
26-07-2023,3.75
27-07-2023,3.75
28-07-2023,3.75
31-07-2023,3.75
01-08-2023,3.75
02-08-2023,3.75
================================================
FILE: python/rateslib/data/historical/sek_rfr.csv
================================================
reference_date,rate
01-09-2021,-500
02-09-2021,-500
03-09-2021,-500
06-09-2021,-500
07-09-2021,-500
08-09-2021,-500
09-09-2021,-500
10-09-2021,-500
13-09-2021,-500
14-09-2021,-500
15-09-2021,-500
16-09-2021,-500
17-09-2021,-500
20-09-2021,-500
21-09-2021,-500
22-09-2021,-500
23-09-2021,-500
24-09-2021,-500
27-09-2021,-500
28-09-2021,-500
29-09-2021,-500
30-09-2021,-500
01-10-2021,-500
04-10-2021,-500
05-10-2021,-500
06-10-2021,-500
07-10-2021,-500
08-10-2021,-500
11-10-2021,-500
12-10-2021,-500
13-10-2021,-500
14-10-2021,-500
15-10-2021,-500
18-10-2021,-500
19-10-2021,-500
20-10-2021,-500
21-10-2021,-500
22-10-2021,-500
25-10-2021,-500
26-10-2021,-500
27-10-2021,-500
28-10-2021,-500
29-10-2021,-500
01-11-2021,-500
02-11-2021,-500
03-11-2021,-500
04-11-2021,-500
05-11-2021,-500
08-11-2021,-500
09-11-2021,-500
10-11-2021,-500
11-11-2021,-500
12-11-2021,-500
15-11-2021,-500
16-11-2021,-500
17-11-2021,-500
18-11-2021,-500
19-11-2021,-500
22-11-2021,-500
23-11-2021,-500
24-11-2021,-500
25-11-2021,-500
26-11-2021,-500
29-11-2021,-500
30-11-2021,-500
01-12-2021,-500
02-12-2021,-500
03-12-2021,-500
06-12-2021,-500
07-12-2021,-500
08-12-2021,-500
09-12-2021,-500
10-12-2021,-500
13-12-2021,-500
14-12-2021,-500
15-12-2021,-500
16-12-2021,-500
17-12-2021,-500
20-12-2021,-500
21-12-2021,-500
22-12-2021,-500
23-12-2021,-500
27-12-2021,-500
28-12-2021,-500
29-12-2021,-500
30-12-2021,-500
03-01-2022,-500
04-01-2022,-500
05-01-2022,-500
07-01-2022,-500
10-01-2022,-500
11-01-2022,-500
12-01-2022,-500
13-01-2022,-500
14-01-2022,-500
17-01-2022,-500
18-01-2022,-500
19-01-2022,-500
20-01-2022,-500
21-01-2022,-500
24-01-2022,-500
25-01-2022,-500
26-01-2022,-500
27-01-2022,-500
28-01-2022,-500
31-01-2022,-500
01-02-2022,-500
02-02-2022,-500
03-02-2022,-500
04-02-2022,-500
07-02-2022,-500
08-02-2022,-500
09-02-2022,-500
10-02-2022,-500
11-02-2022,-500
14-02-2022,-500
15-02-2022,-500
16-02-2022,-500
17-02-2022,-500
18-02-2022,-500
21-02-2022,-500
22-02-2022,-500
23-02-2022,-500
24-02-2022,-500
25-02-2022,-500
28-02-2022,-500
01-03-2022,-500
02-03-2022,-500
03-03-2022,-500
04-03-2022,-500
07-03-2022,-500
08-03-2022,-500
09-03-2022,-500
10-03-2022,-500
11-03-2022,-500
14-03-2022,-500
15-03-2022,-500
16-03-2022,-500
17-03-2022,-500
18-03-2022,-500
21-03-2022,-500
22-03-2022,-500
23-03-2022,-500
24-03-2022,-500
25-03-2022,-500
28-03-2022,-500
29-03-2022,-500
30-03-2022,-500
31-03-2022,-500
01-04-2022,-500
04-04-2022,-500
05-04-2022,-500
06-04-2022,-500
07-04-2022,-500
08-04-2022,-500
11-04-2022,-500
12-04-2022,-500
13-04-2022,-500
14-04-2022,-500
19-04-2022,-500
20-04-2022,-500
21-04-2022,-500
22-04-2022,-500
25-04-2022,-500
26-04-2022,-500
27-04-2022,-500
28-04-2022,-500
29-04-2022,-500
02-05-2022,-500
03-05-2022,-500
04-05-2022,-500
05-05-2022,-500
06-05-2022,-500
09-05-2022,-500
10-05-2022,-500
11-05-2022,-500
12-05-2022,-500
13-05-2022,-500
16-05-2022,-500
17-05-2022,-500
18-05-2022,-500
19-05-2022,-500
20-05-2022,-500
23-05-2022,-500
24-05-2022,-500
25-05-2022,-500
27-05-2022,-500
30-05-2022,-500
31-05-2022,-500
01-06-2022,-500
02-06-2022,-500
03-06-2022,-500
07-06-2022,-500
08-06-2022,-500
09-06-2022,-500
10-06-2022,-500
13-06-2022,-500
14-06-2022,-500
15-06-2022,-500
16-06-2022,-500
17-06-2022,-500
20-06-2022,-500
21-06-2022,-500
22-06-2022,-500
23-06-2022,-500
27-06-2022,-500
28-06-2022,-500
29-06-2022,-500
30-06-2022,-500
01-07-2022,-500
04-07-2022,-500
05-07-2022,-500
06-07-2022,-500
07-07-2022,-500
08-07-2022,-500
11-07-2022,-500
12-07-2022,-500
13-07-2022,-500
14-07-2022,-500
15-07-2022,-500
18-07-2022,-500
19-07-2022,-500
20-07-2022,-500
21-07-2022,-500
22-07-2022,-500
25-07-2022,-500
26-07-2022,-500
27-07-2022,-500
28-07-2022,-500
29-07-2022,-500
01-08-2022,-500
02-08-2022,-500
03-08-2022,-500
04-08-2022,-500
05-08-2022,-500
08-08-2022,-500
09-08-2022,-500
10-08-2022,-500
11-08-2022,-500
12-08-2022,-500
15-08-2022,-500
16-08-2022,-500
17-08-2022,-500
18-08-2022,-500
19-08-2022,-500
22-08-2022,-500
23-08-2022,-500
24-08-2022,-500
25-08-2022,-500
26-08-2022,-500
29-08-2022,-500
30-08-2022,-500
31-08-2022,-500
01-09-2022,-500
02-09-2022,-500
05-09-2022,-500
06-09-2022,-500
07-09-2022,-500
08-09-2022,-500
09-09-2022,-500
12-09-2022,-500
13-09-2022,-500
14-09-2022,-500
15-09-2022,-500
16-09-2022,-500
19-09-2022,-500
20-09-2022,-500
21-09-2022,-500
22-09-2022,-500
23-09-2022,-500
26-09-2022,-500
27-09-2022,-500
28-09-2022,-500
29-09-2022,-500
30-09-2022,-500
03-10-2022,-500
04-10-2022,-500
05-10-2022,-500
06-10-2022,-500
07-10-2022,-500
10-10-2022,-500
11-10-2022,-500
12-10-2022,-500
13-10-2022,-500
14-10-2022,-500
17-10-2022,-500
18-10-2022,-500
19-10-2022,-500
20-10-2022,-500
21-10-2022,-500
24-10-2022,-500
25-10-2022,-500
26-10-2022,-500
27-10-2022,-500
28-10-2022,-500
31-10-2022,-500
01-11-2022,-500
02-11-2022,-500
03-11-2022,-500
04-11-2022,-500
07-11-2022,-500
08-11-2022,-500
09-11-2022,-500
10-11-2022,-500
11-11-2022,-500
14-11-2022,-500
15-11-2022,-500
16-11-2022,-500
17-11-2022,-500
18-11-2022,-500
21-11-2022,-500
22-11-2022,-500
23-11-2022,-500
24-11-2022,-500
25-11-2022,-500
28-11-2022,-500
29-11-2022,-500
30-11-2022,-500
01-12-2022,-500
02-12-2022,-500
05-12-2022,-500
06-12-2022,-500
07-12-2022,-500
08-12-2022,-500
09-12-2022,-500
12-12-2022,-500
13-12-2022,-500
14-12-2022,-500
15-12-2022,-500
16-12-2022,-500
19-12-2022,-500
20-12-2022,-500
21-12-2022,-500
22-12-2022,-500
23-12-2022,-500
27-12-2022,-500
28-12-2022,-500
29-12-2022,-500
30-12-2022,-500
02-01-2023,-500
03-01-2023,-500
04-01-2023,-500
05-01-2023,-500
09-01-2023,-500
10-01-2023,-500
11-01-2023,-500
12-01-2023,-500
13-01-2023,-500
16-01-2023,-500
17-01-2023,-500
18-01-2023,-500
19-01-2023,-500
20-01-2023,-500
23-01-2023,-500
24-01-2023,-500
25-01-2023,-500
26-01-2023,-500
27-01-2023,-500
30-01-2023,-500
31-01-2023,-500
01-02-2023,-500
02-02-2023,-500
03-02-2023,-500
06-02-2023,-500
07-02-2023,-500
08-02-2023,-500
09-02-2023,-500
10-02-2023,-500
13-02-2023,-500
14-02-2023,-500
15-02-2023,-500
16-02-2023,-500
17-02-2023,-500
20-02-2023,-500
21-02-2023,-500
22-02-2023,-500
23-02-2023,-500
24-02-2023,-500
27-02-2023,-500
28-02-2023,-500
01-03-2023,-500
02-03-2023,-500
03-03-2023,-500
06-03-2023,-500
07-03-2023,-500
08-03-2023,-500
09-03-2023,-500
10-03-2023,-500
13-03-2023,-500
14-03-2023,-500
15-03-2023,-500
16-03-2023,-500
17-03-2023,-500
20-03-2023,-500
21-03-2023,-500
22-03-2023,-500
23-03-2023,-500
24-03-2023,-500
27-03-2023,-500
28-03-2023,-500
29-03-2023,-500
30-03-2023,-500
31-03-2023,-500
03-04-2023,-500
04-04-2023,-500
05-04-2023,-500
06-04-2023,-500
11-04-2023,-500
12-04-2023,-500
13-04-2023,-500
14-04-2023,-500
17-04-2023,-500
18-04-2023,-500
19-04-2023,-500
20-04-2023,-500
21-04-2023,-500
24-04-2023,-500
25-04-2023,-500
26-04-2023,-500
27-04-2023,-500
28-04-2023,-500
02-05-2023,-500
03-05-2023,-500
04-05-2023,-500
05-05-2023,-500
08-05-2023,-500
09-05-2023,-500
10-05-2023,-500
11-05-2023,-500
12-05-2023,-500
15-05-2023,-500
16-05-2023,-500
17-05-2023,-500
19-05-2023,-500
22-05-2023,-500
23-05-2023,-500
24-05-2023,-500
25-05-2023,-500
26-05-2023,-500
29-05-2023,-500
30-05-2023,-500
31-05-2023,-500
01-06-2023,-500
02-06-2023,-500
05-06-2023,-500
07-06-2023,-500
08-06-2023,-500
09-06-2023,-500
12-06-2023,-500
13-06-2023,-500
14-06-2023,-500
15-06-2023,-500
16-06-2023,-500
19-06-2023,-500
20-06-2023,-500
21-06-2023,-500
22-06-2023,-500
26-06-2023,-500
27-06-2023,-500
28-06-2023,-500
29-06-2023,-500
30-06-2023,-500
03-07-2023,-500
04-07-2023,-500
05-07-2023,-500
06-07-2023,-500
07-07-2023,-500
10-07-2023,-500
11-07-2023,-500
12-07-2023,-500
13-07-2023,-500
14-07-2023,-500
17-07-2023,-500
18-07-2023,-500
19-07-2023,-500
20-07-2023,-500
21-07-2023,-500
24-07-2023,-500
25-07-2023,-500
26-07-2023,-500
27-07-2023,-500
28-07-2023,-500
31-07-2023,-500
01-08-2023,-500
02-08-2023,-500
================================================
FILE: python/rateslib/data/historical/sofr.csv
================================================
reference_date,rate
01-08-2023,-500
31-07-2023,-500
28-07-2023,-500
27-07-2023,-500
26-07-2023,-500
25-07-2023,-500
24-07-2023,-500
21-07-2023,-500
20-07-2023,-500
19-07-2023,-500
18-07-2023,-500
17-07-2023,-500
14-07-2023,-500
13-07-2023,-500
12-07-2023,-500
11-07-2023,-500
10-07-2023,-500
07-07-2023,-500
06-07-2023,-500
05-07-2023,-500
03-07-2023,-500
30-06-2023,-500
29-06-2023,-500
28-06-2023,-500
27-06-2023,-500
26-06-2023,-500
23-06-2023,-500
22-06-2023,-500
21-06-2023,-500
20-06-2023,-500
16-06-2023,-500
15-06-2023,-500
14-06-2023,-500
13-06-2023,-500
12-06-2023,-500
09-06-2023,-500
08-06-2023,-500
07-06-2023,-500
06-06-2023,-500
05-06-2023,-500
02-06-2023,-500
01-06-2023,-500
31-05-2023,-500
30-05-2023,-500
26-05-2023,-500
25-05-2023,-500
24-05-2023,-500
23-05-2023,-500
22-05-2023,-500
19-05-2023,-500
18-05-2023,-500
17-05-2023,-500
16-05-2023,-500
15-05-2023,-500
12-05-2023,-500
11-05-2023,-500
10-05-2023,-500
09-05-2023,-500
08-05-2023,-500
05-05-2023,-500
04-05-2023,-500
03-05-2023,-500
02-05-2023,-500
01-05-2023,-500
28-04-2023,-500
27-04-2023,-500
26-04-2023,-500
25-04-2023,-500
24-04-2023,-500
21-04-2023,-500
20-04-2023,-500
19-04-2023,-500
18-04-2023,-500
17-04-2023,-500
14-04-2023,-500
13-04-2023,-500
12-04-2023,-500
11-04-2023,-500
10-04-2023,-500
06-04-2023,-500
05-04-2023,-500
04-04-2023,-500
03-04-2023,-500
31-03-2023,-500
30-03-2023,-500
29-03-2023,-500
28-03-2023,-500
27-03-2023,-500
24-03-2023,-500
23-03-2023,-500
22-03-2023,-500
21-03-2023,-500
20-03-2023,-500
17-03-2023,-500
16-03-2023,-500
15-03-2023,-500
14-03-2023,-500
13-03-2023,-500
10-03-2023,-500
09-03-2023,-500
08-03-2023,-500
07-03-2023,-500
06-03-2023,-500
03-03-2023,-500
02-03-2023,-500
01-03-2023,-500
28-02-2023,-500
27-02-2023,-500
24-02-2023,-500
23-02-2023,-500
22-02-2023,-500
21-02-2023,-500
17-02-2023,-500
16-02-2023,-500
15-02-2023,-500
14-02-2023,-500
13-02-2023,-500
10-02-2023,-500
09-02-2023,-500
08-02-2023,-500
07-02-2023,-500
06-02-2023,-500
03-02-2023,-500
02-02-2023,-500
01-02-2023,-500
31-01-2023,-500
30-01-2023,-500
27-01-2023,-500
26-01-2023,-500
25-01-2023,-500
24-01-2023,-500
23-01-2023,-500
20-01-2023,-500
19-01-2023,-500
18-01-2023,-500
17-01-2023,-500
13-01-2023,-500
12-01-2023,-500
11-01-2023,-500
10-01-2023,-500
09-01-2023,-500
06-01-2023,-500
05-01-2023,-500
04-01-2023,-500
03-01-2023,-500
30-12-2022,-500
29-12-2022,-500
28-12-2022,-500
27-12-2022,-500
23-12-2022,-500
22-12-2022,-500
21-12-2022,-500
20-12-2022,-500
19-12-2022,-500
16-12-2022,-500
15-12-2022,-500
14-12-2022,-500
13-12-2022,-500
12-12-2022,-500
09-12-2022,-500
08-12-2022,-500
07-12-2022,-500
06-12-2022,-500
05-12-2022,-500
02-12-2022,-500
01-12-2022,-500
30-11-2022,-500
29-11-2022,-500
28-11-2022,-500
25-11-2022,-500
23-11-2022,-500
22-11-2022,-500
21-11-2022,-500
18-11-2022,-500
17-11-2022,-500
16-11-2022,-500
15-11-2022,-500
14-11-2022,-500
10-11-2022,-500
09-11-2022,-500
08-11-2022,-500
07-11-2022,-500
04-11-2022,-500
03-11-2022,-500
02-11-2022,-500
01-11-2022,-500
31-10-2022,-500
28-10-2022,-500
27-10-2022,-500
26-10-2022,-500
25-10-2022,-500
24-10-2022,-500
21-10-2022,-500
20-10-2022,-500
19-10-2022,-500
18-10-2022,-500
17-10-2022,-500
14-10-2022,-500
13-10-2022,-500
12-10-2022,-500
11-10-2022,-500
07-10-2022,-500
06-10-2022,-500
05-10-2022,-500
04-10-2022,-500
03-10-2022,-500
30-09-2022,-500
29-09-2022,-500
28-09-2022,-500
27-09-2022,-500
26-09-2022,-500
23-09-2022,-500
22-09-2022,-500
21-09-2022,-500
20-09-2022,-500
19-09-2022,-500
16-09-2022,-500
15-09-2022,-500
14-09-2022,-500
13-09-2022,-500
12-09-2022,-500
09-09-2022,-500
08-09-2022,-500
07-09-2022,-500
06-09-2022,-500
02-09-2022,-500
01-09-2022,-500
31-08-2022,-500
30-08-2022,-500
29-08-2022,-500
26-08-2022,-500
25-08-2022,-500
24-08-2022,-500
23-08-2022,-500
22-08-2022,-500
19-08-2022,-500
18-08-2022,-500
17-08-2022,-500
16-08-2022,-500
15-08-2022,-500
12-08-2022,-500
11-08-2022,-500
10-08-2022,-500
09-08-2022,-500
08-08-2022,-500
05-08-2022,-500
04-08-2022,-500
03-08-2022,-500
02-08-2022,-500
01-08-2022,-500
29-07-2022,-500
28-07-2022,-500
27-07-2022,-500
26-07-2022,-500
25-07-2022,-500
22-07-2022,-500
21-07-2022,-500
20-07-2022,-500
19-07-2022,-500
18-07-2022,-500
15-07-2022,-500
14-07-2022,-500
13-07-2022,-500
12-07-2022,-500
11-07-2022,-500
08-07-2022,-500
07-07-2022,-500
06-07-2022,-500
05-07-2022,-500
01-07-2022,-500
30-06-2022,-500
29-06-2022,-500
28-06-2022,-500
27-06-2022,-500
24-06-2022,-500
23-06-2022,-500
22-06-2022,-500
21-06-2022,-500
17-06-2022,-500
16-06-2022,-500
15-06-2022,-500
14-06-2022,-500
13-06-2022,-500
10-06-2022,-500
09-06-2022,-500
08-06-2022,-500
07-06-2022,-500
06-06-2022,-500
03-06-2022,-500
02-06-2022,-500
01-06-2022,-500
31-05-2022,-500
27-05-2022,-500
26-05-2022,-500
25-05-2022,-500
24-05-2022,-500
23-05-2022,-500
20-05-2022,-500
19-05-2022,-500
18-05-2022,-500
17-05-2022,-500
16-05-2022,-500
13-05-2022,-500
12-05-2022,-500
11-05-2022,-500
10-05-2022,-500
09-05-2022,-500
06-05-2022,-500
05-05-2022,-500
04-05-2022,-500
03-05-2022,-500
02-05-2022,-500
29-04-2022,-500
28-04-2022,-500
27-04-2022,-500
26-04-2022,-500
25-04-2022,-500
22-04-2022,-500
21-04-2022,-500
20-04-2022,-500
19-04-2022,-500
18-04-2022,-500
14-04-2022,-500
13-04-2022,-500
12-04-2022,-500
11-04-2022,-500
08-04-2022,-500
07-04-2022,-500
06-04-2022,-500
05-04-2022,-500
04-04-2022,-500
01-04-2022,-500
31-03-2022,-500
30-03-2022,-500
29-03-2022,-500
28-03-2022,-500
25-03-2022,-500
24-03-2022,-500
23-03-2022,-500
22-03-2022,-500
21-03-2022,-500
18-03-2022,-500
17-03-2022,-500
16-03-2022,-500
15-03-2022,-500
14-03-2022,-500
11-03-2022,-500
10-03-2022,-500
09-03-2022,-500
08-03-2022,-500
07-03-2022,-500
04-03-2022,-500
03-03-2022,-500
02-03-2022,-500
01-03-2022,-500
28-02-2022,-500
25-02-2022,-500
24-02-2022,-500
23-02-2022,-500
22-02-2022,-500
18-02-2022,-500
17-02-2022,-500
16-02-2022,-500
15-02-2022,-500
14-02-2022,-500
11-02-2022,-500
10-02-2022,-500
09-02-2022,-500
08-02-2022,-500
07-02-2022,-500
04-02-2022,-500
03-02-2022,-500
02-02-2022,-500
01-02-2022,-500
31-01-2022,-500
28-01-2022,-500
27-01-2022,-500
26-01-2022,-500
25-01-2022,-500
24-01-2022,-500
21-01-2022,-500
20-01-2022,-500
19-01-2022,-500
18-01-2022,-500
14-01-2022,-500
13-01-2022,-500
12-01-2022,-500
11-01-2022,-500
10-01-2022,-500
07-01-2022,-500
06-01-2022,-500
05-01-2022,-500
04-01-2022,-500
03-01-2022,-500
31-12-2021,-500
30-12-2021,-500
29-12-2021,-500
28-12-2021,-500
27-12-2021,-500
23-12-2021,-500
22-12-2021,-500
21-12-2021,-500
20-12-2021,-500
17-12-2021,-500
16-12-2021,-500
15-12-2021,-500
14-12-2021,-500
13-12-2021,-500
10-12-2021,-500
09-12-2021,-500
08-12-2021,-500
07-12-2021,-500
06-12-2021,-500
03-12-2021,-500
02-12-2021,-500
01-12-2021,-500
30-11-2021,-500
29-11-2021,-500
26-11-2021,-500
24-11-2021,-500
23-11-2021,-500
22-11-2021,-500
19-11-2021,-500
18-11-2021,-500
17-11-2021,-500
16-11-2021,-500
15-11-2021,-500
12-11-2021,-500
10-11-2021,-500
09-11-2021,-500
08-11-2021,-500
05-11-2021,-500
04-11-2021,-500
03-11-2021,-500
02-11-2021,-500
01-11-2021,-500
29-10-2021,-500
28-10-2021,-500
27-10-2021,-500
26-10-2021,-500
25-10-2021,-500
22-10-2021,-500
21-10-2021,-500
20-10-2021,-500
19-10-2021,-500
18-10-2021,-500
15-10-2021,-500
14-10-2021,-500
13-10-2021,-500
12-10-2021,-500
08-10-2021,-500
07-10-2021,-500
06-10-2021,-500
05-10-2021,-500
04-10-2021,-500
01-10-2021,-500
30-09-2021,-500
29-09-2021,-500
28-09-2021,-500
27-09-2021,-500
24-09-2021,-500
23-09-2021,-500
22-09-2021,-500
21-09-2021,-500
20-09-2021,-500
17-09-2021,-500
16-09-2021,-500
15-09-2021,-500
14-09-2021,-500
13-09-2021,-500
10-09-2021,-500
09-09-2021,-500
08-09-2021,-500
07-09-2021,-500
03-09-2021,-500
02-09-2021,-500
01-09-2021,-500
31-08-2021,-500
30-08-2021,-500
27-08-2021,-500
26-08-2021,-500
25-08-2021,-500
24-08-2021,-500
23-08-2021,-500
20-08-2021,-500
19-08-2021,-500
18-08-2021,-500
17-08-2021,-500
16-08-2021,-500
13-08-2021,-500
12-08-2021,-500
11-08-2021,-500
10-08-2021,-500
09-08-2021,-500
06-08-2021,-500
05-08-2021,-500
04-08-2021,-500
03-08-2021,-500
02-08-2021,-500
30-07-2021,-500
29-07-2021,-500
28-07-2021,-500
27-07-2021,-500
26-07-2021,-500
23-07-2021,-500
22-07-2021,-500
21-07-2021,-500
20-07-2021,-500
19-07-2021,-500
16-07-2021,-500
15-07-2021,-500
14-07-2021,-500
13-07-2021,-500
12-07-2021,-500
09-07-2021,-500
08-07-2021,-500
07-07-2021,-500
06-07-2021,-500
02-07-2021,-500
01-07-2021,-500
30-06-2021,-500
29-06-2021,-500
28-06-2021,-500
25-06-2021,-500
24-06-2021,-500
23-06-2021,-500
22-06-2021,-500
21-06-2021,-500
18-06-2021,-500
17-06-2021,-500
16-06-2021,-500
15-06-2021,-500
14-06-2021,-500
11-06-2021,-500
10-06-2021,-500
09-06-2021,-500
08-06-2021,-500
07-06-2021,-500
04-06-2021,-500
03-06-2021,-500
02-06-2021,-500
01-06-2021,-500
28-05-2021,-500
27-05-2021,-500
26-05-2021,-500
25-05-2021,-500
24-05-2021,-500
21-05-2021,-500
20-05-2021,-500
19-05-2021,-500
18-05-2021,-500
17-05-2021,-500
14-05-2021,-500
13-05-2021,-500
12-05-2021,-500
11-05-2021,-500
10-05-2021,-500
07-05-2021,-500
06-05-2021,-500
05-05-2021,-500
04-05-2021,-500
03-05-2021,-500
30-04-2021,-500
29-04-2021,-500
28-04-2021,-500
27-04-2021,-500
26-04-2021,-500
23-04-2021,-500
22-04-2021,-500
21-04-2021,-500
20-04-2021,-500
19-04-2021,-500
16-04-2021,-500
15-04-2021,-500
14-04-2021,-500
13-04-2021,-500
12-04-2021,-500
09-04-2021,-500
08-04-2021,-500
07-04-2021,-500
06-04-2021,-500
05-04-2021,-500
01-04-2021,-500
31-03-2021,-500
30-03-2021,-500
29-03-2021,-500
26-03-2021,-500
25-03-2021,-500
24-03-2021,-500
23-03-2021,-500
22-03-2021,-500
19-03-2021,-500
18-03-2021,-500
17-03-2021,-500
16-03-2021,-500
15-03-2021,-500
12-03-2021,-500
11-03-2021,-500
10-03-2021,-500
09-03-2021,-500
08-03-2021,-500
05-03-2021,-500
04-03-2021,-500
03-03-2021,-500
02-03-2021,-500
01-03-2021,-500
26-02-2021,-500
25-02-2021,-500
24-02-2021,-500
23-02-2021,-500
22-02-2021,-500
19-02-2021,-500
18-02-2021,-500
17-02-2021,-500
16-02-2021,-500
12-02-2021,-500
11-02-2021,-500
10-02-2021,-500
09-02-2021,-500
08-02-2021,-500
05-02-2021,-500
04-02-2021,-500
03-02-2021,-500
02-02-2021,-500
01-02-2021,-500
29-01-2021,-500
28-01-2021,-500
27-01-2021,-500
26-01-2021,-500
25-01-2021,-500
22-01-2021,-500
21-01-2021,-500
20-01-2021,-500
19-01-2021,-500
15-01-2021,-500
14-01-2021,-500
13-01-2021,-500
12-01-2021,-500
11-01-2021,-500
08-01-2021,-500
07-01-2021,-500
06-01-2021,-500
05-01-2021,-500
04-01-2021,-500
31-12-2020,-500
30-12-2020,-500
29-12-2020,-500
28-12-2020,-500
24-12-2020,-500
23-12-2020,-500
22-12-2020,-500
21-12-2020,-500
18-12-2020,-500
17-12-2020,-500
16-12-2020,-500
15-12-2020,-500
14-12-2020,-500
11-12-2020,-500
10-12-2020,-500
09-12-2020,-500
08-12-2020,-500
07-12-2020,-500
04-12-2020,-500
03-12-2020,-500
02-12-2020,-500
01-12-2020,-500
30-11-2020,-500
27-11-2020,-500
25-11-2020,-500
24-11-2020,-500
23-11-2020,-500
20-11-2020,-500
19-11-2020,-500
18-11-2020,-500
17-11-2020,-500
16-11-2020,-500
13-11-2020,-500
12-11-2020,-500
10-11-2020,-500
09-11-2020,-500
06-11-2020,-500
05-11-2020,-500
04-11-2020,-500
03-11-2020,-500
02-11-2020,-500
30-10-2020,-500
29-10-2020,-500
28-10-2020,-500
27-10-2020,-500
26-10-2020,-500
23-10-2020,-500
22-10-2020,-500
21-10-2020,-500
20-10-2020,-500
19-10-2020,-500
16-10-2020,-500
15-10-2020,-500
14-10-2020,-500
13-10-2020,-500
09-10-2020,-500
08-10-2020,-500
07-10-2020,-500
06-10-2020,-500
05-10-2020,-500
02-10-2020,-500
01-10-2020,-500
30-09-2020,-500
29-09-2020,-500
28-09-2020,-500
25-09-2020,-500
24-09-2020,-500
23-09-2020,-500
22-09-2020,-500
21-09-2020,-500
18-09-2020,-500
17-09-2020,-500
16-09-2020,-500
15-09-2020,-500
14-09-2020,-500
11-09-2020,-500
10-09-2020,-500
09-09-2020,-500
08-09-2020,-500
04-09-2020,-500
03-09-2020,-500
02-09-2020,-500
01-09-2020,-500
31-08-2020,-500
28-08-2020,-500
27-08-2020,-500
26-08-2020,-500
25-08-2020,-500
24-08-2020,-500
21-08-2020,-500
20-08-2020,-500
19-08-2020,-500
18-08-2020,-500
17-08-2020,-500
14-08-2020,-500
13-08-2020,-500
12-08-2020,-500
11-08-2020,-500
10-08-2020,-500
07-08-2020,-500
06-08-2020,-500
05-08-2020,-500
04-08-2020,-500
03-08-2020,-500
31-07-2020,-500
30-07-2020,-500
29-07-2020,-500
28-07-2020,-500
27-07-2020,-500
24-07-2020,-500
23-07-2020,-500
22-07-2020,-500
21-07-2020,-500
20-07-2020,-500
17-07-2020,-500
16-07-2020,-500
15-07-2020,-500
14-07-2020,-500
13-07-2020,-500
10-07-2020,-500
09-07-2020,-500
08-07-2020,-500
07-07-2020,-500
06-07-2020,-500
02-07-2020,-500
01-07-2020,-500
30-06-2020,-500
29-06-2020,-500
26-06-2020,-500
25-06-2020,-500
24-06-2020,-500
23-06-2020,-500
22-06-2020,-500
19-06-2020,-500
18-06-2020,-500
17-06-2020,-500
16-06-2020,-500
15-06-2020,-500
12-06-2020,-500
11-06-2020,-500
10-06-2020,-500
09-06-2020,-500
08-06-2020,-500
05-06-2020,-500
04-06-2020,-500
03-06-2020,-500
02-06-2020,-500
01-06-2020,-500
29-05-2020,-500
28-05-2020,-500
27-05-2020,-500
26-05-2020,-500
22-05-2020,-500
21-05-2020,-500
20-05-2020,-500
19-05-2020,-500
18-05-2020,-500
15-05-2020,-500
14-05-2020,-500
13-05-2020,-500
12-05-2020,-500
11-05-2020,-500
08-05-2020,-500
07-05-2020,-500
06-05-2020,-500
05-05-2020,-500
04-05-2020,-500
01-05-2020,-500
30-04-2020,-500
29-04-2020,-500
28-04-2020,-500
27-04-2020,-500
24-04-2020,-500
23-04-2020,-500
22-04-2020,-500
21-04-2020,-500
20-04-2020,-500
17-04-2020,-500
16-04-2020,-500
15-04-2020,-500
14-04-2020,-500
13-04-2020,-500
09-04-2020,-500
08-04-2020,-500
07-04-2020,-500
06-04-2020,-500
03-04-2020,-500
02-04-2020,-500
01-04-2020,-500
31-03-2020,-500
30-03-2020,-500
27-03-2020,-500
26-03-2020,-500
25-03-2020,-500
24-03-2020,-500
23-03-2020,-500
20-03-2020,-500
19-03-2020,-500
18-03-2020,-500
17-03-2020,-500
16-03-2020,-500
13-03-2020,-500
12-03-2020,-500
11-03-2020,-500
10-03-2020,-500
09-03-2020,-500
06-03-2020,-500
05-03-2020,-500
04-03-2020,-500
03-03-2020,-500
02-03-2020,-500
28-02-2020,-500
27-02-2020,-500
26-02-2020,-500
25-02-2020,-500
24-02-2020,-500
21-02-2020,-500
20-02-2020,-500
19-02-2020,-500
18-02-2020,-500
14-02-2020,-500
13-02-2020,-500
12-02-2020,-500
11-02-2020,-500
10-02-2020,-500
07-02-2020,-500
06-02-2020,-500
05-02-2020,-500
04-02-2020,-500
03-02-2020,-500
31-01-2020,-500
30-01-2020,-500
29-01-2020,-500
28-01-2020,-500
27-01-2020,-500
24-01-2020,-500
23-01-2020,-500
22-01-2020,-500
21-01-2020,-500
17-01-2020,-500
16-01-2020,-500
15-01-2020,-500
14-01-2020,-500
13-01-2020,-500
10-01-2020,-500
09-01-2020,-500
08-01-2020,-500
07-01-2020,-500
06-01-2020,-500
03-01-2020,-500
02-01-2020,-500
31-12-2019,-500
30-12-2019,-500
27-12-2019,-500
26-12-2019,-500
24-12-2019,-500
23-12-2019,-500
20-12-2019,-500
19-12-2019,-500
18-12-2019,-500
17-12-2019,-500
16-12-2019,-500
13-12-2019,-500
12-12-2019,-500
11-12-2019,-500
10-12-2019,-500
09-12-2019,-500
06-12-2019,-500
05-12-2019,-500
04-12-2019,-500
03-12-2019,-500
02-12-2019,-500
29-11-2019,-500
27-11-2019,-500
26-11-2019,-500
25-11-2019,-500
22-11-2019,-500
21-11-2019,-500
20-11-2019,-500
19-11-2019,-500
18-11-2019,-500
15-11-2019,-500
14-11-2019,-500
13-11-2019,-500
12-11-2019,-500
08-11-2019,-500
07-11-2019,-500
06-11-2019,-500
05-11-2019,-500
04-11-2019,-500
01-11-2019,-500
31-10-2019,-500
30-10-2019,-500
29-10-2019,-500
28-10-2019,-500
25-10-2019,-500
24-10-2019,-500
23-10-2019,-500
22-10-2019,-500
21-10-2019,-500
18-10-2019,-500
17-10-2019,-500
16-10-2019,-500
15-10-2019,-500
11-10-2019,-500
10-10-2019,-500
09-10-2019,-500
08-10-2019,-500
07-10-2019,-500
04-10-2019,-500
03-10-2019,-500
02-10-2019,-500
01-10-2019,-500
30-09-2019,-500
27-09-2019,-500
26-09-2019,-500
25-09-2019,-500
24-09-2019,-500
23-09-2019,-500
20-09-2019,-500
19-09-2019,-500
18-09-2019,-500
17-09-2019,-500
16-09-2019,-500
13-09-2019,-500
12-09-2019,-500
11-09-2019,-500
10-09-2019,-500
09-09-2019,-500
06-09-2019,-500
05-09-2019,-500
04-09-2019,-500
03-09-2019,-500
30-08-2019,-500
29-08-2019,-500
28-08-2019,-500
27-08-2019,-500
26-08-2019,-500
23-08-2019,-500
22-08-2019,-500
21-08-2019,-500
20-08-2019,-500
19-08-2019,-500
16-08-2019,-500
15-08-2019,-500
14-08-2019,-500
13-08-2019,-500
12-08-2019,-500
09-08-2019,-500
08-08-2019,-500
07-08-2019,-500
06-08-2019,-500
05-08-2019,-500
02-08-2019,-500
01-08-2019,-500
31-07-2019,-500
30-07-2019,-500
29-07-2019,-500
26-07-2019,-500
25-07-2019,-500
24-07-2019,-500
23-07-2019,-500
22-07-2019,-500
19-07-2019,-500
18-07-2019,-500
17-07-2019,-500
16-07-2019,-500
15-07-2019,-500
12-07-2019,-500
11-07-2019,-500
10-07-2019,-500
09-07-2019,-500
08-07-2019,-500
05-07-2019,-500
03-07-2019,-500
02-07-2019,-500
01-07-2019,-500
28-06-2019,-500
27-06-2019,-500
26-06-2019,-500
25-06-2019,-500
24-06-2019,-500
21-06-2019,-500
20-06-2019,-500
19-06-2019,-500
18-06-2019,-500
17-06-2019,-500
14-06-2019,-500
13-06-2019,-500
12-06-2019,-500
11-06-2019,-500
10-06-2019,-500
07-06-2019,-500
06-06-2019,-500
05-06-2019,-500
04-06-2019,-500
03-06-2019,-500
31-05-2019,-500
30-05-2019,-500
29-05-2019,-500
28-05-2019,-500
24-05-2019,-500
23-05-2019,-500
22-05-2019,-500
21-05-2019,-500
20-05-2019,-500
17-05-2019,-500
16-05-2019,-500
15-05-2019,-500
14-05-2019,-500
13-05-2019,-500
10-05-2019,-500
09-05-2019,-500
08-05-2019,-500
07-05-2019,-500
06-05-2019,-500
03-05-2019,-500
02-05-2019,-500
01-05-2019,-500
30-04-2019,-500
29-04-2019,-500
26-04-2019,-500
25-04-2019,-500
24-04-2019,-500
23-04-2019,-500
22-04-2019,-500
18-04-2019,-500
17-04-2019,-500
16-04-2019,-500
15-04-2019,-500
12-04-2019,-500
11-04-2019,-500
10-04-2019,-500
09-04-2019,-500
08-04-2019,-500
05-04-2019,-500
04-04-2019,-500
03-04-2019,-500
02-04-2019,-500
01-04-2019,-500
29-03-2019,-500
28-03-2019,-500
27-03-2019,-500
26-03-2019,-500
25-03-2019,-500
22-03-2019,-500
21-03-2019,-500
20-03-2019,-500
19-03-2019,-500
18-03-2019,-500
15-03-2019,-500
14-03-2019,-500
13-03-2019,-500
12-03-2019,-500
11-03-2019,-500
08-03-2019,-500
07-03-2019,-500
06-03-2019,-500
05-03-2019,-500
04-03-2019,-500
01-03-2019,-500
28-02-2019,-500
27-02-2019,-500
26-02-2019,-500
25-02-2019,-500
22-02-2019,-500
21-02-2019,-500
20-02-2019,-500
19-02-2019,-500
15-02-2019,-500
14-02-2019,-500
13-02-2019,-500
12-02-2019,-500
11-02-2019,-500
08-02-2019,-500
07-02-2019,-500
06-02-2019,-500
05-02-2019,-500
04-02-2019,-500
01-02-2019,-500
31-01-2019,-500
30-01-2019,-500
29-01-2019,-500
28-01-2019,-500
25-01-2019,-500
24-01-2019,-500
23-01-2019,-500
22-01-2019,-500
18-01-2019,-500
17-01-2019,-500
16-01-2019,-500
15-01-2019,-500
14-01-2019,-500
11-01-2019,-500
10-01-2019,-500
09-01-2019,-500
08-01-2019,-500
07-01-2019,-500
04-01-2019,-500
03-01-2019,-500
02-01-2019,-500
31-12-2018,-500
28-12-2018,-500
27-12-2018,-500
26-12-2018,-500
24-12-2018,-500
21-12-2018,-500
20-12-2018,-500
19-12-2018,-500
18-12-2018,-500
17-12-2018,-500
14-12-2018,-500
13-12-2018,-500
12-12-2018,-500
11-12-2018,-500
10-12-2018,-500
07-12-2018,-500
06-12-2018,-500
04-12-2018,-500
03-12-2018,-500
30-11-2018,-500
29-11-2018,-500
28-11-2018,-500
27-11-2018,-500
26-11-2018,-500
23-11-2018,-500
21-11-2018,-500
20-11-2018,-500
19-11-2018,-500
16-11-2018,-500
15-11-2018,-500
14-11-2018,-500
13-11-2018,-500
09-11-2018,-500
08-11-2018,-500
07-11-2018,-500
06-11-2018,-500
05-11-2018,-500
02-11-2018,-500
01-11-2018,-500
31-10-2018,-500
30-10-2018,-500
29-10-2018,-500
26-10-2018,-500
25-10-2018,-500
24-10-2018,-500
23-10-2018,-500
22-10-2018,-500
19-10-2018,-500
18-10-2018,-500
17-10-2018,-500
16-10-2018,-500
15-10-2018,-500
12-10-2018,-500
11-10-2018,-500
10-10-2018,-500
09-10-2018,-500
05-10-2018,-500
04-10-2018,-500
03-10-2018,-500
02-10-2018,-500
01-10-2018,-500
28-09-2018,-500
27-09-2018,-500
26-09-2018,-500
25-09-2018,-500
24-09-2018,-500
21-09-2018,-500
20-09-2018,-500
19-09-2018,-500
18-09-2018,-500
17-09-2018,-500
14-09-2018,-500
13-09-2018,-500
12-09-2018,-500
11-09-2018,-500
10-09-2018,-500
07-09-2018,-500
06-09-2018,-500
05-09-2018,-500
04-09-2018,-500
31-08-2018,-500
30-08-2018,-500
29-08-2018,-500
28-08-2018,-500
27-08-2018,-500
24-08-2018,-500
23-08-2018,-500
22-08-2018,-500
21-08-2018,-500
20-08-2018,-500
17-08-2018,-500
16-08-2018,-500
15-08-2018,-500
14-08-2018,-500
13-08-2018,-500
10-08-2018,-500
09-08-2018,-500
08-08-2018,-500
07-08-2018,-500
06-08-2018,-500
03-08-2018,-500
02-08-2018,-500
01-08-2018,-500
31-07-2018,-500
30-07-2018,-500
27-07-2018,-500
26-07-2018,-500
25-07-2018,-500
24-07-2018,-500
23-07-2018,-500
20-07-2018,-500
19-07-2018,-500
18-07-2018,-500
17-07-2018,-500
16-07-2018,-500
13-07-2018,-500
12-07-2018,-500
11-07-2018,-500
10-07-2018,-500
09-07-2018,-500
06-07-2018,-500
05-07-2018,-500
03-07-2018,-500
02-07-2018,-500
29-06-2018,-500
28-06-2018,-500
27-06-2018,-500
26-06-2018,-500
25-06-2018,-500
22-06-2018,-500
21-06-2018,-500
20-06-2018,-500
19-06-2018,-500
18-06-2018,-500
15-06-2018,-500
14-06-2018,-500
13-06-2018,-500
12-06-2018,-500
11-06-2018,-500
08-06-2018,-500
07-06-2018,-500
06-06-2018,-500
05-06-2018,-500
04-06-2018,-500
01-06-2018,-500
31-05-2018,-500
30-05-2018,-500
29-05-2018,-500
25-05-2018,-500
24-05-2018,-500
23-05-2018,-500
22-05-2018,-500
21-05-2018,-500
18-05-2018,-500
17-05-2018,-500
16-05-2018,-500
15-05-2018,-500
14-05-2018,-500
11-05-2018,-500
10-05-2018,-500
09-05-2018,-500
08-05-2018,-500
07-05-2018,-500
04-05-2018,-500
03-05-2018,-500
02-05-2018,-500
01-05-2018,-500
30-04-2018,-500
27-04-2018,-500
26-04-2018,-500
25-04-2018,-500
24-04-2018,-500
23-04-2018,-500
20-04-2018,-500
19-04-2018,-500
18-04-2018,-500
17-04-2018,-500
16-04-2018,-500
13-04-2018,-500
12-04-2018,-500
11-04-2018,-500
10-04-2018,-500
09-04-2018,-500
06-04-2018,-500
05-04-2018,-500
04-04-2018,-500
03-04-2018,-500
02-04-2018,-500
================================================
FILE: python/rateslib/data/historical/sonia.csv
================================================
reference_date,rate
01-08-2023,-500
31-07-2023,-500
28-07-2023,-500
27-07-2023,-500
26-07-2023,-500
25-07-2023,-500
24-07-2023,-500
21-07-2023,-500
20-07-2023,-500
19-07-2023,-500
18-07-2023,-500
17-07-2023,-500
14-07-2023,-500
13-07-2023,-500
12-07-2023,-500
11-07-2023,-500
10-07-2023,-500
07-07-2023,-500
06-07-2023,-500
05-07-2023,-500
04-07-2023,-500
03-07-2023,-500
30-06-2023,-500
29-06-2023,-500
28-06-2023,-500
27-06-2023,-500
26-06-2023,-500
23-06-2023,-500
22-06-2023,-500
21-06-2023,-500
20-06-2023,-500
19-06-2023,-500
16-06-2023,-500
15-06-2023,-500
14-06-2023,-500
13-06-2023,-500
12-06-2023,-500
09-06-2023,-500
08-06-2023,-500
07-06-2023,-500
06-06-2023,-500
05-06-2023,-500
02-06-2023,-500
01-06-2023,-500
31-05-2023,-500
30-05-2023,-500
26-05-2023,-500
25-05-2023,-500
24-05-2023,-500
23-05-2023,-500
22-05-2023,-500
19-05-2023,-500
18-05-2023,-500
17-05-2023,-500
16-05-2023,-500
15-05-2023,-500
12-05-2023,-500
11-05-2023,-500
10-05-2023,-500
09-05-2023,-500
05-05-2023,-500
04-05-2023,-500
03-05-2023,-500
02-05-2023,-500
28-04-2023,-500
27-04-2023,-500
26-04-2023,-500
25-04-2023,-500
24-04-2023,-500
21-04-2023,-500
20-04-2023,-500
19-04-2023,-500
18-04-2023,-500
17-04-2023,-500
14-04-2023,-500
13-04-2023,-500
12-04-2023,-500
11-04-2023,-500
06-04-2023,-500
05-04-2023,-500
04-04-2023,-500
03-04-2023,-500
31-03-2023,-500
30-03-2023,-500
29-03-2023,-500
28-03-2023,-500
27-03-2023,-500
24-03-2023,-500
23-03-2023,-500
22-03-2023,-500
21-03-2023,-500
20-03-2023,-500
17-03-2023,-500
16-03-2023,-500
15-03-2023,-500
14-03-2023,-500
13-03-2023,-500
10-03-2023,-500
09-03-2023,-500
08-03-2023,-500
07-03-2023,-500
06-03-2023,-500
03-03-2023,-500
02-03-2023,-500
01-03-2023,-500
28-02-2023,-500
27-02-2023,-500
24-02-2023,-500
23-02-2023,-500
22-02-2023,-500
21-02-2023,-500
20-02-2023,-500
17-02-2023,-500
16-02-2023,-500
15-02-2023,-500
14-02-2023,-500
13-02-2023,-500
10-02-2023,-500
09-02-2023,-500
08-02-2023,-500
07-02-2023,-500
06-02-2023,-500
03-02-2023,-500
02-02-2023,-500
01-02-2023,-500
31-01-2023,-500
30-01-2023,-500
27-01-2023,-500
26-01-2023,-500
25-01-2023,-500
24-01-2023,-500
23-01-2023,-500
20-01-2023,-500
19-01-2023,-500
18-01-2023,-500
17-01-2023,-500
16-01-2023,-500
13-01-2023,-500
12-01-2023,-500
11-01-2023,-500
10-01-2023,-500
09-01-2023,-500
06-01-2023,-500
05-01-2023,-500
04-01-2023,-500
03-01-2023,-500
30-12-2022,-500
29-12-2022,-500
28-12-2022,-500
23-12-2022,-500
22-12-2022,-500
21-12-2022,-500
20-12-2022,-500
19-12-2022,-500
16-12-2022,-500
15-12-2022,-500
14-12-2022,-500
13-12-2022,-500
12-12-2022,-500
09-12-2022,-500
08-12-2022,-500
07-12-2022,-500
06-12-2022,-500
05-12-2022,-500
02-12-2022,-500
01-12-2022,-500
30-11-2022,-500
29-11-2022,-500
28-11-2022,-500
25-11-2022,-500
24-11-2022,-500
23-11-2022,-500
22-11-2022,-500
21-11-2022,-500
18-11-2022,-500
17-11-2022,-500
16-11-2022,-500
15-11-2022,-500
14-11-2022,-500
11-11-2022,-500
10-11-2022,-500
09-11-2022,-500
08-11-2022,-500
07-11-2022,-500
04-11-2022,-500
03-11-2022,-500
02-11-2022,-500
01-11-2022,-500
31-10-2022,-500
28-10-2022,-500
27-10-2022,-500
26-10-2022,-500
25-10-2022,-500
24-10-2022,-500
21-10-2022,-500
20-10-2022,-500
19-10-2022,-500
18-10-2022,-500
17-10-2022,-500
14-10-2022,-500
13-10-2022,-500
12-10-2022,-500
11-10-2022,-500
10-10-2022,-500
07-10-2022,-500
06-10-2022,-500
05-10-2022,-500
04-10-2022,-500
03-10-2022,-500
30-09-2022,-500
29-09-2022,-500
28-09-2022,-500
27-09-2022,-500
26-09-2022,-500
23-09-2022,-500
22-09-2022,-500
21-09-2022,-500
20-09-2022,-500
16-09-2022,-500
15-09-2022,-500
14-09-2022,-500
13-09-2022,-500
12-09-2022,-500
09-09-2022,-500
08-09-2022,-500
07-09-2022,-500
06-09-2022,-500
05-09-2022,-500
02-09-2022,-500
01-09-2022,-500
31-08-2022,-500
30-08-2022,-500
26-08-2022,-500
25-08-2022,-500
24-08-2022,-500
23-08-2022,-500
22-08-2022,-500
19-08-2022,-500
18-08-2022,-500
17-08-2022,-500
16-08-2022,-500
15-08-2022,-500
12-08-2022,-500
11-08-2022,-500
10-08-2022,-500
09-08-2022,-500
08-08-2022,-500
05-08-2022,-500
04-08-2022,-500
03-08-2022,-500
02-08-2022,-500
01-08-2022,-500
29-07-2022,-500
28-07-2022,-500
27-07-2022,-500
26-07-2022,-500
25-07-2022,-500
22-07-2022,-500
21-07-2022,-500
20-07-2022,-500
19-07-2022,-500
18-07-2022,-500
15-07-2022,-500
14-07-2022,-500
13-07-2022,-500
12-07-2022,-500
11-07-2022,-500
08-07-2022,-500
07-07-2022,-500
06-07-2022,-500
05-07-2022,-500
04-07-2022,-500
01-07-2022,-500
30-06-2022,-500
29-06-2022,-500
28-06-2022,-500
27-06-2022,-500
24-06-2022,-500
23-06-2022,-500
22-06-2022,-500
21-06-2022,-500
20-06-2022,-500
17-06-2022,-500
16-06-2022,-500
15-06-2022,-500
14-06-2022,-500
13-06-2022,-500
10-06-2022,-500
09-06-2022,-500
08-06-2022,-500
07-06-2022,-500
06-06-2022,-500
01-06-2022,-500
31-05-2022,-500
30-05-2022,-500
27-05-2022,-500
26-05-2022,-500
25-05-2022,-500
24-05-2022,-500
23-05-2022,-500
20-05-2022,-500
19-05-2022,-500
18-05-2022,-500
17-05-2022,-500
16-05-2022,-500
13-05-2022,-500
12-05-2022,-500
11-05-2022,-500
10-05-2022,-500
09-05-2022,-500
06-05-2022,-500
05-05-2022,-500
04-05-2022,-500
03-05-2022,-500
29-04-2022,-500
28-04-2022,-500
27-04-2022,-500
26-04-2022,-500
25-04-2022,-500
22-04-2022,-500
21-04-2022,-500
20-04-2022,-500
19-04-2022,-500
14-04-2022,-500
13-04-2022,-500
12-04-2022,-500
11-04-2022,-500
08-04-2022,-500
07-04-2022,-500
06-04-2022,-500
05-04-2022,-500
04-04-2022,-500
01-04-2022,-500
31-03-2022,-500
30-03-2022,-500
29-03-2022,-500
28-03-2022,-500
25-03-2022,-500
24-03-2022,-500
23-03-2022,-500
22-03-2022,-500
21-03-2022,-500
18-03-2022,-500
17-03-2022,-500
16-03-2022,-500
15-03-2022,-500
14-03-2022,-500
11-03-2022,-500
10-03-2022,-500
09-03-2022,-500
08-03-2022,-500
07-03-2022,-500
04-03-2022,-500
03-03-2022,-500
02-03-2022,-500
01-03-2022,-500
28-02-2022,-500
25-02-2022,-500
24-02-2022,-500
23-02-2022,-500
22-02-2022,-500
21-02-2022,-500
18-02-2022,-500
17-02-2022,-500
16-02-2022,-500
15-02-2022,-500
14-02-2022,-500
11-02-2022,-500
10-02-2022,-500
09-02-2022,-500
08-02-2022,-500
07-02-2022,-500
04-02-2022,-500
03-02-2022,-500
02-02-2022,-500
01-02-2022,-500
31-01-2022,-500
28-01-2022,-500
27-01-2022,-500
26-01-2022,-500
25-01-2022,-500
24-01-2022,-500
21-01-2022,-500
20-01-2022,-500
19-01-2022,-500
18-01-2022,-500
17-01-2022,-500
14-01-2022,-500
13-01-2022,-500
12-01-2022,-500
11-01-2022,-500
10-01-2022,-500
07-01-2022,-500
06-01-2022,-500
05-01-2022,-500
04-01-2022,-500
31-12-2021,-500
30-12-2021,-500
29-12-2021,-500
24-12-2021,-500
23-12-2021,-500
22-12-2021,-500
21-12-2021,-500
20-12-2021,-500
17-12-2021,-500
16-12-2021,-500
15-12-2021,-500
14-12-2021,-500
13-12-2021,-500
10-12-2021,-500
09-12-2021,-500
08-12-2021,-500
07-12-2021,-500
06-12-2021,-500
03-12-2021,-500
02-12-2021,-500
01-12-2021,-500
30-11-2021,-500
29-11-2021,-500
26-11-2021,-500
25-11-2021,-500
24-11-2021,-500
23-11-2021,-500
22-11-2021,-500
19-11-2021,-500
18-11-2021,-500
17-11-2021,-500
16-11-2021,-500
15-11-2021,-500
12-11-2021,-500
11-11-2021,-500
10-11-2021,-500
09-11-2021,-500
08-11-2021,-500
05-11-2021,-500
04-11-2021,-500
03-11-2021,-500
02-11-2021,-500
01-11-2021,-500
29-10-2021,-500
28-10-2021,-500
27-10-2021,-500
26-10-2021,-500
25-10-2021,-500
22-10-2021,-500
21-10-2021,-500
20-10-2021,-500
19-10-2021,-500
18-10-2021,-500
15-10-2021,-500
14-10-2021,-500
13-10-2021,-500
12-10-2021,-500
11-10-2021,-500
08-10-2021,-500
07-10-2021,-500
06-10-2021,-500
05-10-2021,-500
04-10-2021,-500
01-10-2021,-500
30-09-2021,-500
29-09-2021,-500
28-09-2021,-500
27-09-2021,-500
24-09-2021,-500
23-09-2021,-500
22-09-2021,-500
21-09-2021,-500
20-09-2021,-500
17-09-2021,-500
16-09-2021,-500
15-09-2021,-500
14-09-2021,-500
13-09-2021,-500
10-09-2021,-500
09-09-2021,-500
08-09-2021,-500
07-09-2021,-500
06-09-2021,-500
03-09-2021,-500
02-09-2021,-500
01-09-2021,-500
31-08-2021,-500
27-08-2021,-500
26-08-2021,-500
25-08-2021,-500
24-08-2021,-500
23-08-2021,-500
20-08-2021,-500
19-08-2021,-500
18-08-2021,-500
17-08-2021,-500
16-08-2021,-500
13-08-2021,-500
12-08-2021,-500
11-08-2021,-500
10-08-2021,-500
09-08-2021,-500
06-08-2021,-500
05-08-2021,-500
04-08-2021,-500
03-08-2021,-500
02-08-2021,-500
30-07-2021,-500
29-07-2021,-500
28-07-2021,-500
27-07-2021,-500
26-07-2021,-500
23-07-2021,-500
22-07-2021,-500
21-07-2021,-500
20-07-2021,-500
19-07-2021,-500
16-07-2021,-500
15-07-2021,-500
14-07-2021,-500
13-07-2021,-500
12-07-2021,-500
09-07-2021,-500
08-07-2021,-500
07-07-2021,-500
06-07-2021,-500
05-07-2021,-500
02-07-2021,-500
01-07-2021,-500
30-06-2021,-500
29-06-2021,-500
28-06-2021,-500
25-06-2021,-500
24-06-2021,-500
23-06-2021,-500
22-06-2021,-500
21-06-2021,-500
18-06-2021,-500
17-06-2021,-500
16-06-2021,-500
15-06-2021,-500
14-06-2021,-500
11-06-2021,-500
10-06-2021,-500
09-06-2021,-500
08-06-2021,-500
07-06-2021,-500
04-06-2021,-500
03-06-2021,-500
02-06-2021,-500
01-06-2021,-500
28-05-2021,-500
27-05-2021,-500
26-05-2021,-500
25-05-2021,-500
24-05-2021,-500
21-05-2021,-500
20-05-2021,-500
19-05-2021,-500
18-05-2021,-500
17-05-2021,-500
14-05-2021,-500
13-05-2021,-500
12-05-2021,-500
11-05-2021,-500
10-05-2021,-500
07-05-2021,-500
06-05-2021,-500
05-05-2021,-500
04-05-2021,-500
30-04-2021,-500
29-04-2021,-500
28-04-2021,-500
27-04-2021,-500
26-04-2021,-500
23-04-2021,-500
22-04-2021,-500
21-04-2021,-500
20-04-2021,-500
19-04-2021,-500
16-04-2021,-500
15-04-2021,-500
14-04-2021,-500
13-04-2021,-500
12-04-2021,-500
09-04-2021,-500
08-04-2021,-500
07-04-2021,-500
06-04-2021,-500
01-04-2021,-500
31-03-2021,-500
30-03-2021,-500
29-03-2021,-500
26-03-2021,-500
25-03-2021,-500
24-03-2021,-500
23-03-2021,-500
22-03-2021,-500
19-03-2021,-500
18-03-2021,-500
17-03-2021,-500
16-03-2021,-500
15-03-2021,-500
12-03-2021,-500
11-03-2021,-500
10-03-2021,-500
09-03-2021,-500
08-03-2021,-500
05-03-2021,-500
04-03-2021,-500
03-03-2021,-500
02-03-2021,-500
01-03-2021,-500
26-02-2021,-500
25-02-2021,-500
24-02-2021,-500
23-02-2021,-500
22-02-2021,-500
19-02-2021,-500
18-02-2021,-500
17-02-2021,-500
16-02-2021,-500
15-02-2021,-500
12-02-2021,-500
11-02-2021,-500
10-02-2021,-500
09-02-2021,-500
08-02-2021,-500
05-02-2021,-500
04-02-2021,-500
03-02-2021,-500
02-02-2021,-500
01-02-2021,-500
29-01-2021,-500
28-01-2021,-500
27-01-2021,-500
26-01-2021,-500
25-01-2021,-500
22-01-2021,-500
21-01-2021,-500
20-01-2021,-500
19-01-2021,-500
18-01-2021,-500
15-01-2021,-500
14-01-2021,-500
13-01-2021,-500
12-01-2021,-500
11-01-2021,-500
08-01-2021,-500
07-01-2021,-500
06-01-2021,-500
05-01-2021,-500
04-01-2021,-500
31-12-2020,-500
30-12-2020,-500
29-12-2020,-500
24-12-2020,-500
23-12-2020,-500
22-12-2020,-500
21-12-2020,-500
18-12-2020,-500
17-12-2020,-500
16-12-2020,-500
15-12-2020,-500
14-12-2020,-500
11-12-2020,-500
10-12-2020,-500
09-12-2020,-500
08-12-2020,-500
07-12-2020,-500
04-12-2020,-500
03-12-2020,-500
02-12-2020,-500
01-12-2020,-500
30-11-2020,-500
27-11-2020,-500
26-11-2020,-500
25-11-2020,-500
24-11-2020,-500
23-11-2020,-500
20-11-2020,-500
19-11-2020,-500
18-11-2020,-500
17-11-2020,-500
16-11-2020,-500
13-11-2020,-500
12-11-2020,-500
11-11-2020,-500
10-11-2020,-500
09-11-2020,-500
06-11-2020,-500
05-11-2020,-500
04-11-2020,-500
03-11-2020,-500
02-11-2020,-500
30-10-2020,-500
29-10-2020,-500
28-10-2020,-500
27-10-2020,-500
26-10-2020,-500
23-10-2020,-500
22-10-2020,-500
21-10-2020,-500
20-10-2020,-500
19-10-2020,-500
16-10-2020,-500
15-10-2020,-500
14-10-2020,-500
13-10-2020,-500
12-10-2020,-500
09-10-2020,-500
08-10-2020,-500
07-10-2020,-500
06-10-2020,-500
05-10-2020,-500
02-10-2020,-500
01-10-2020,-500
30-09-2020,-500
29-09-2020,-500
28-09-2020,-500
25-09-2020,-500
24-09-2020,-500
23-09-2020,-500
22-09-2020,-500
21-09-2020,-500
18-09-2020,-500
17-09-2020,-500
16-09-2020,-500
15-09-2020,-500
14-09-2020,-500
11-09-2020,-500
10-09-2020,-500
09-09-2020,-500
08-09-2020,-500
07-09-2020,-500
04-09-2020,-500
03-09-2020,-500
02-09-2020,-500
01-09-2020,-500
28-08-2020,-500
27-08-2020,-500
26-08-2020,-500
25-08-2020,-500
24-08-2020,-500
21-08-2020,-500
20-08-2020,-500
19-08-2020,-500
18-08-2020,-500
17-08-2020,-500
14-08-2020,-500
13-08-2020,-500
12-08-2020,-500
11-08-2020,-500
10-08-2020,-500
07-08-2020,-500
06-08-2020,-500
05-08-2020,-500
04-08-2020,-500
03-08-2020,-500
31-07-2020,-500
30-07-2020,-500
29-07-2020,-500
28-07-2020,-500
27-07-2020,-500
24-07-2020,-500
23-07-2020,-500
22-07-2020,-500
21-07-2020,-500
20-07-2020,-500
17-07-2020,-500
16-07-2020,-500
15-07-2020,-500
14-07-2020,-500
13-07-2020,-500
10-07-2020,-500
09-07-2020,-500
08-07-2020,-500
07-07-2020,-500
06-07-2020,-500
03-07-2020,-500
02-07-2020,-500
01-07-2020,-500
30-06-2020,-500
29-06-2020,-500
26-06-2020,-500
25-06-2020,-500
24-06-2020,-500
23-06-2020,-500
22-06-2020,-500
19-06-2020,-500
18-06-2020,-500
17-06-2020,-500
16-06-2020,-500
15-06-2020,-500
12-06-2020,-500
11-06-2020,-500
10-06-2020,-500
09-06-2020,-500
08-06-2020,-500
05-06-2020,-500
04-06-2020,-500
03-06-2020,-500
02-06-2020,-500
01-06-2020,-500
29-05-2020,-500
28-05-2020,-500
27-05-2020,-500
26-05-2020,-500
22-05-2020,-500
21-05-2020,-500
20-05-2020,-500
19-05-2020,-500
18-05-2020,-500
15-05-2020,-500
14-05-2020,-500
13-05-2020,-500
12-05-2020,-500
11-05-2020,-500
07-05-2020,-500
06-05-2020,-500
05-05-2020,-500
04-05-2020,-500
01-05-2020,-500
30-04-2020,-500
29-04-2020,-500
28-04-2020,-500
27-04-2020,-500
24-04-2020,-500
23-04-2020,-500
22-04-2020,-500
21-04-2020,-500
20-04-2020,-500
17-04-2020,-500
16-04-2020,-500
15-04-2020,-500
14-04-2020,-500
09-04-2020,-500
08-04-2020,-500
07-04-2020,-500
06-04-2020,-500
03-04-2020,-500
02-04-2020,-500
01-04-2020,-500
31-03-2020,-500
30-03-2020,-500
27-03-2020,-500
26-03-2020,-500
25-03-2020,-500
24-03-2020,-500
23-03-2020,-500
20-03-2020,-500
19-03-2020,-500
18-03-2020,-500
17-03-2020,-500
16-03-2020,-500
13-03-2020,-500
12-03-2020,-500
11-03-2020,-500
10-03-2020,-500
09-03-2020,-500
06-03-2020,-500
05-03-2020,-500
04-03-2020,-500
03-03-2020,-500
02-03-2020,-500
28-02-2020,-500
27-02-2020,-500
26-02-2020,-500
25-02-2020,-500
24-02-2020,-500
21-02-2020,-500
20-02-2020,-500
19-02-2020,-500
18-02-2020,-500
17-02-2020,-500
14-02-2020,-500
13-02-2020,-500
12-02-2020,-500
11-02-2020,-500
10-02-2020,-500
07-02-2020,-500
06-02-2020,-500
05-02-2020,-500
04-02-2020,-500
03-02-2020,-500
31-01-2020,-500
30-01-2020,-500
29-01-2020,-500
28-01-2020,-500
27-01-2020,-500
24-01-2020,-500
23-01-2020,-500
22-01-2020,-500
21-01-2020,-500
20-01-2020,-500
17-01-2020,-500
16-01-2020,-500
15-01-2020,-500
14-01-2020,-500
13-01-2020,-500
10-01-2020,-500
09-01-2020,-500
08-01-2020,-500
07-01-2020,-500
06-01-2020,-500
03-01-2020,-500
02-01-2020,-500
31-12-2019,-500
30-12-2019,-500
27-12-2019,-500
24-12-2019,-500
23-12-2019,-500
20-12-2019,-500
19-12-2019,-500
18-12-2019,-500
17-12-2019,-500
16-12-2019,-500
13-12-2019,-500
12-12-2019,-500
11-12-2019,-500
10-12-2019,-500
09-12-2019,-500
06-12-2019,-500
05-12-2019,-500
04-12-2019,-500
03-12-2019,-500
02-12-2019,-500
29-11-2019,-500
28-11-2019,-500
27-11-2019,-500
26-11-2019,-500
25-11-2019,-500
22-11-2019,-500
21-11-2019,-500
20-11-2019,-500
19-11-2019,-500
18-11-2019,-500
15-11-2019,-500
14-11-2019,-500
13-11-2019,-500
12-11-2019,-500
11-11-2019,-500
08-11-2019,-500
07-11-2019,-500
06-11-2019,-500
05-11-2019,-500
04-11-2019,-500
01-11-2019,-500
31-10-2019,-500
30-10-2019,-500
29-10-2019,-500
28-10-2019,-500
25-10-2019,-500
24-10-2019,-500
23-10-2019,-500
22-10-2019,-500
21-10-2019,-500
18-10-2019,-500
17-10-2019,-500
16-10-2019,-500
15-10-2019,-500
14-10-2019,-500
11-10-2019,-500
10-10-2019,-500
09-10-2019,-500
08-10-2019,-500
07-10-2019,-500
04-10-2019,-500
03-10-2019,-500
02-10-2019,-500
01-10-2019,-500
30-09-2019,-500
27-09-2019,-500
26-09-2019,-500
25-09-2019,-500
24-09-2019,-500
23-09-2019,-500
20-09-2019,-500
19-09-2019,-500
18-09-2019,-500
17-09-2019,-500
16-09-2019,-500
13-09-2019,-500
12-09-2019,-500
11-09-2019,-500
10-09-2019,-500
09-09-2019,-500
06-09-2019,-500
05-09-2019,-500
04-09-2019,-500
03-09-2019,-500
02-09-2019,-500
30-08-2019,-500
29-08-2019,-500
28-08-2019,-500
27-08-2019,-500
23-08-2019,-500
22-08-2019,-500
21-08-2019,-500
20-08-2019,-500
19-08-2019,-500
16-08-2019,-500
15-08-2019,-500
14-08-2019,-500
13-08-2019,-500
12-08-2019,-500
09-08-2019,-500
08-08-2019,-500
07-08-2019,-500
06-08-2019,-500
05-08-2019,-500
02-08-2019,-500
01-08-2019,-500
31-07-2019,-500
30-07-2019,-500
29-07-2019,-500
26-07-2019,-500
25-07-2019,-500
24-07-2019,-500
23-07-2019,-500
22-07-2019,-500
19-07-2019,-500
18-07-2019,-500
17-07-2019,-500
16-07-2019,-500
15-07-2019,-500
12-07-2019,-500
11-07-2019,-500
10-07-2019,-500
09-07-2019,-500
08-07-2019,-500
05-07-2019,-500
04-07-2019,-500
03-07-2019,-500
02-07-2019,-500
01-07-2019,-500
28-06-2019,-500
27-06-2019,-500
26-06-2019,-500
25-06-2019,-500
24-06-2019,-500
21-06-2019,-500
20-06-2019,-500
19-06-2019,-500
18-06-2019,-500
17-06-2019,-500
14-06-2019,-500
13-06-2019,-500
12-06-2019,-500
11-06-2019,-500
10-06-2019,-500
07-06-2019,-500
06-06-2019,-500
05-06-2019,-500
04-06-2019,-500
03-06-2019,-500
31-05-2019,-500
30-05-2019,-500
29-05-2019,-500
28-05-2019,-500
24-05-2019,-500
23-05-2019,-500
22-05-2019,-500
21-05-2019,-500
20-05-2019,-500
17-05-2019,-500
16-05-2019,-500
15-05-2019,-500
14-05-2019,-500
13-05-2019,-500
10-05-2019,-500
09-05-2019,-500
08-05-2019,-500
07-05-2019,-500
03-05-2019,-500
02-05-2019,-500
01-05-2019,-500
30-04-2019,-500
29-04-2019,-500
26-04-2019,-500
25-04-2019,-500
24-04-2019,-500
23-04-2019,-500
18-04-2019,-500
17-04-2019,-500
16-04-2019,-500
15-04-2019,-500
12-04-2019,-500
11-04-2019,-500
10-04-2019,-500
09-04-2019,-500
08-04-2019,-500
05-04-2019,-500
04-04-2019,-500
03-04-2019,-500
02-04-2019,-500
01-04-2019,-500
29-03-2019,-500
28-03-2019,-500
27-03-2019,-500
26-03-2019,-500
25-03-2019,-500
22-03-2019,-500
21-03-2019,-500
20-03-2019,-500
19-03-2019,-500
18-03-2019,-500
15-03-2019,-500
14-03-2019,-500
13-03-2019,-500
12-03-2019,-500
11-03-2019,-500
08-03-2019,-500
07-03-2019,-500
06-03-2019,-500
05-03-2019,-500
04-03-2019,-500
01-03-2019,-500
28-02-2019,-500
27-02-2019,-500
26-02-2019,-500
25-02-2019,-500
22-02-2019,-500
21-02-2019,-500
20-02-2019,-500
19-02-2019,-500
18-02-2019,-500
15-02-2019,-500
14-02-2019,-500
13-02-2019,-500
12-02-2019,-500
11-02-2019,-500
08-02-2019,-500
07-02-2019,-500
06-02-2019,-500
05-02-2019,-500
04-02-2019,-500
01-02-2019,-500
31-01-2019,-500
30-01-2019,-500
29-01-2019,-500
28-01-2019,-500
25-01-2019,-500
24-01-2019,-500
23-01-2019,-500
22-01-2019,-500
21-01-2019,-500
18-01-2019,-500
17-01-2019,-500
16-01-2019,-500
15-01-2019,-500
14-01-2019,-500
11-01-2019,-500
10-01-2019,-500
09-01-2019,-500
08-01-2019,-500
07-01-2019,-500
04-01-2019,-500
03-01-2019,-500
02-01-2019,-500
31-12-2018,-500
28-12-2018,-500
27-12-2018,-500
24-12-2018,-500
21-12-2018,-500
20-12-2018,-500
19-12-2018,-500
18-12-2018,-500
17-12-2018,-500
14-12-2018,-500
13-12-2018,-500
12-12-2018,-500
11-12-2018,-500
10-12-2018,-500
07-12-2018,-500
06-12-2018,-500
05-12-2018,-500
04-12-2018,-500
03-12-2018,-500
30-11-2018,-500
29-11-2018,-500
28-11-2018,-500
27-11-2018,-500
26-11-2018,-500
23-11-2018,-500
22-11-2018,-500
21-11-2018,-500
20-11-2018,-500
19-11-2018,-500
16-11-2018,-500
15-11-2018,-500
14-11-2018,-500
13-11-2018,-500
12-11-2018,-500
09-11-2018,-500
08-11-2018,-500
07-11-2018,-500
06-11-2018,-500
05-11-2018,-500
02-11-2018,-500
01-11-2018,-500
31-10-2018,-500
30-10-2018,-500
29-10-2018,-500
26-10-2018,-500
25-10-2018,-500
24-10-2018,-500
23-10-2018,-500
22-10-2018,-500
19-10-2018,-500
18-10-2018,-500
17-10-2018,-500
16-10-2018,-500
15-10-2018,-500
12-10-2018,-500
11-10-2018,-500
10-10-2018,-500
09-10-2018,-500
08-10-2018,-500
05-10-2018,-500
04-10-2018,-500
03-10-2018,-500
02-10-2018,-500
01-10-2018,-500
28-09-2018,-500
27-09-2018,-500
26-09-2018,-500
25-09-2018,-500
24-09-2018,-500
21-09-2018,-500
20-09-2018,-500
19-09-2018,-500
18-09-2018,-500
17-09-2018,-500
14-09-2018,-500
13-09-2018,-500
12-09-2018,-500
11-09-2018,-500
10-09-2018,-500
07-09-2018,-500
06-09-2018,-500
05-09-2018,-500
04-09-2018,-500
03-09-2018,-500
31-08-2018,-500
30-08-2018,-500
29-08-2018,-500
28-08-2018,-500
24-08-2018,-500
23-08-2018,-500
22-08-2018,-500
21-08-2018,-500
20-08-2018,-500
17-08-2018,-500
16-08-2018,-500
15-08-2018,-500
14-08-2018,-500
13-08-2018,-500
10-08-2018,-500
09-08-2018,-500
08-08-2018,-500
07-08-2018,-500
06-08-2018,-500
03-08-2018,-500
02-08-2018,-500
01-08-2018,-500
31-07-2018,-500
30-07-2018,-500
27-07-2018,-500
26-07-2018,-500
25-07-2018,-500
24-07-2018,-500
23-07-2018,-500
20-07-2018,-500
19-07-2018,-500
18-07-2018,-500
17-07-2018,-500
16-07-2018,-500
13-07-2018,-500
12-07-2018,-500
11-07-2018,-500
10-07-2018,-500
09-07-2018,-500
06-07-2018,-500
05-07-2018,-500
04-07-2018,-500
03-07-2018,-500
02-07-2018,-500
29-06-2018,-500
28-06-2018,-500
27-06-2018,-500
26-06-2018,-500
25-06-2018,-500
22-06-2018,-500
21-06-2018,-500
20-06-2018,-500
19-06-2018,-500
18-06-2018,-500
15-06-2018,-500
14-06-2018,-500
13-06-2018,-500
12-06-2018,-500
11-06-2018,-500
08-06-2018,-500
07-06-2018,-500
06-06-2018,-500
05-06-2018,-500
04-06-2018,-500
01-06-2018,-500
31-05-2018,-500
30-05-2018,-500
29-05-2018,-500
25-05-2018,-500
24-05-2018,-500
23-05-2018,-500
22-05-2018,-500
21-05-2018,-500
18-05-2018,-500
17-05-2018,-500
16-05-2018,-500
15-05-2018,-500
14-05-2018,-500
11-05-2018,-500
10-05-2018,-500
09-05-2018,-500
08-05-2018,-500
04-05-2018,-500
03-05-2018,-500
02-05-2018,-500
01-05-2018,-500
30-04-2018,-500
27-04-2018,-500
26-04-2018,-500
25-04-2018,-500
24-04-2018,-500
23-04-2018,-500
20-04-2018,-500
19-04-2018,-500
18-04-2018,-500
17-04-2018,-500
16-04-2018,-500
13-04-2018,-500
12-04-2018,-500
11-04-2018,-500
10-04-2018,-500
09-04-2018,-500
06-04-2018,-500
05-04-2018,-500
04-04-2018,-500
03-04-2018,-500
29-03-2018,-500
28-03-2018,-500
27-03-2018,-500
26-03-2018,-500
23-03-2018,-500
22-03-2018,-500
21-03-2018,-500
20-03-2018,-500
19-03-2018,-500
16-03-2018,-500
15-03-2018,-500
14-03-2018,-500
13-03-2018,-500
12-03-2018,-500
09-03-2018,-500
08-03-2018,-500
07-03-2018,-500
06-03-2018,-500
05-03-2018,-500
02-03-2018,-500
01-03-2018,-500
28-02-2018,-500
27-02-2018,-500
26-02-2018,-500
23-02-2018,-500
22-02-2018,-500
21-02-2018,-500
20-02-2018,-500
19-02-2018,-500
16-02-2018,-500
15-02-2018,-500
14-02-2018,-500
13-02-2018,-500
12-02-2018,-500
09-02-2018,-500
08-02-2018,-500
07-02-2018,-500
06-02-2018,-500
05-02-2018,-500
02-02-2018,-500
01-02-2018,-500
31-01-2018,-500
30-01-2018,-500
29-01-2018,-500
26-01-2018,-500
25-01-2018,-500
24-01-2018,-500
23-01-2018,-500
22-01-2018,-500
19-01-2018,-500
18-01-2018,-500
17-01-2018,-500
16-01-2018,-500
15-01-2018,-500
12-01-2018,-500
11-01-2018,-500
10-01-2018,-500
09-01-2018,-500
08-01-2018,-500
05-01-2018,-500
04-01-2018,-500
03-01-2018,-500
02-01-2018,-500
29-12-2017,-500
28-12-2017,-500
27-12-2017,-500
22-12-2017,-500
21-12-2017,-500
20-12-2017,-500
19-12-2017,-500
18-12-2017,-500
15-12-2017,-500
14-12-2017,-500
13-12-2017,-500
12-12-2017,-500
11-12-2017,-500
08-12-2017,-500
07-12-2017,-500
06-12-2017,-500
05-12-2017,-500
04-12-2017,-500
01-12-2017,-500
30-11-2017,-500
29-11-2017,-500
28-11-2017,-500
27-11-2017,-500
24-11-2017,-500
23-11-2017,-500
22-11-2017,-500
21-11-2017,-500
20-11-2017,-500
17-11-2017,-500
16-11-2017,-500
15-11-2017,-500
14-11-2017,-500
13-11-2017,-500
10-11-2017,-500
09-11-2017,-500
08-11-2017,-500
07-11-2017,-500
06-11-2017,-500
03-11-2017,-500
02-11-2017,-500
01-11-2017,-500
31-10-2017,-500
30-10-2017,-500
27-10-2017,-500
26-10-2017,-500
25-10-2017,-500
24-10-2017,-500
23-10-2017,-500
20-10-2017,-500
19-10-2017,-500
18-10-2017,-500
17-10-2017,-500
16-10-2017,-500
13-10-2017,-500
12-10-2017,-500
11-10-2017,-500
10-10-2017,-500
09-10-2017,-500
06-10-2017,-500
05-10-2017,-500
04-10-2017,-500
03-10-2017,-500
02-10-2017,-500
29-09-2017,-500
28-09-2017,-500
27-09-2017,-500
26-09-2017,-500
25-09-2017,-500
22-09-2017,-500
21-09-2017,-500
20-09-2017,-500
19-09-2017,-500
18-09-2017,-500
15-09-2017,-500
14-09-2017,-500
13-09-2017,-500
12-09-2017,-500
11-09-2017,-500
08-09-2017,-500
07-09-2017,-500
06-09-2017,-500
05-09-2017,-500
04-09-2017,-500
01-09-2017,-500
31-08-2017,-500
30-08-2017,-500
29-08-2017,-500
25-08-2017,-500
24-08-2017,-500
23-08-2017,-500
22-08-2017,-500
21-08-2017,-500
18-08-2017,-500
17-08-2017,-500
16-08-2017,-500
15-08-2017,-500
14-08-2017,-500
11-08-2017,-500
10-08-2017,-500
09-08-2017,-500
08-08-2017,-500
07-08-2017,-500
04-08-2017,-500
03-08-2017,-500
02-08-2017,-500
01-08-2017,-500
31-07-2017,-500
28-07-2017,-500
27-07-2017,-500
26-07-2017,-500
25-07-2017,-500
24-07-2017,-500
21-07-2017,-500
20-07-2017,-500
19-07-2017,-500
18-07-2017,-500
17-07-2017,-500
14-07-2017,-500
13-07-2017,-500
12-07-2017,-500
11-07-2017,-500
10-07-2017,-500
07-07-2017,-500
06-07-2017,-500
05-07-2017,-500
04-07-2017,-500
03-07-2017,-500
30-06-2017,-500
29-06-2017,-500
28-06-2017,-500
27-06-2017,-500
26-06-2017,-500
23-06-2017,-500
22-06-2017,-500
21-06-2017,-500
20-06-2017,-500
19-06-2017,-500
16-06-2017,-500
15-06-2017,-500
14-06-2017,-500
13-06-2017,-500
12-06-2017,-500
09-06-2017,-500
08-06-2017,-500
07-06-2017,-500
06-06-2017,-500
05-06-2017,-500
02-06-2017,-500
01-06-2017,-500
31-05-2017,-500
30-05-2017,-500
26-05-2017,-500
25-05-2017,-500
24-05-2017,-500
23-05-2017,-500
22-05-2017,-500
19-05-2017,-500
18-05-2017,-500
17-05-2017,-500
16-05-2017,-500
15-05-2017,-500
12-05-2017,-500
11-05-2017,-500
10-05-2017,-500
09-05-2017,-500
08-05-2017,-500
05-05-2017,-500
04-05-2017,-500
03-05-2017,-500
02-05-2017,-500
28-04-2017,-500
27-04-2017,-500
26-04-2017,-500
25-04-2017,-500
24-04-2017,-500
21-04-2017,-500
20-04-2017,-500
19-04-2017,-500
18-04-2017,-500
13-04-2017,-500
12-04-2017,-500
11-04-2017,-500
10-04-2017,-500
07-04-2017,-500
06-04-2017,-500
05-04-2017,-500
04-04-2017,-500
03-04-2017,-500
31-03-2017,-500
30-03-2017,-500
29-03-2017,-500
28-03-2017,-500
27-03-2017,-500
24-03-2017,-500
23-03-2017,-500
22-03-2017,-500
21-03-2017,-500
20-03-2017,-500
17-03-2017,-500
16-03-2017,-500
15-03-2017,-500
14-03-2017,-500
13-03-2017,-500
10-03-2017,-500
09-03-2017,-500
08-03-2017,-500
07-03-2017,-500
06-03-2017,-500
03-03-2017,-500
02-03-2017,-500
01-03-2017,-500
28-02-2017,-500
27-02-2017,-500
24-02-2017,-500
23-02-2017,-500
22-02-2017,-500
21-02-2017,-500
20-02-2017,-500
17-02-2017,-500
16-02-2017,-500
15-02-2017,-500
14-02-2017,-500
13-02-2017,-500
10-02-2017,-500
09-02-2017,-500
08-02-2017,-500
07-02-2017,-500
06-02-2017,-500
03-02-2017,-500
02-02-2017,-500
01-02-2017,-500
31-01-2017,-500
30-01-2017,-500
27-01-2017,-500
26-01-2017,-500
25-01-2017,-500
24-01-2017,-500
23-01-2017,-500
20-01-2017,-500
19-01-2017,-500
18-01-2017,-500
17-01-2017,-500
16-01-2017,-500
13-01-2017,-500
12-01-2017,-500
11-01-2017,-500
10-01-2017,-500
09-01-2017,-500
06-01-2017,-500
05-01-2017,-500
04-01-2017,-500
03-01-2017,-500
30-12-2016,-500
29-12-2016,-500
28-12-2016,-500
23-12-2016,-500
22-12-2016,-500
21-12-2016,-500
20-12-2016,-500
19-12-2016,-500
16-12-2016,-500
15-12-2016,-500
14-12-2016,-500
13-12-2016,-500
12-12-2016,-500
09-12-2016,-500
08-12-2016,-500
07-12-2016,-500
06-12-2016,-500
05-12-2016,-500
02-12-2016,-500
01-12-2016,-500
30-11-2016,-500
29-11-2016,-500
28-11-2016,-500
25-11-2016,-500
24-11-2016,-500
23-11-2016,-500
22-11-2016,-500
21-11-2016,-500
18-11-2016,-500
17-11-2016,-500
16-11-2016,-500
15-11-2016,-500
14-11-2016,-500
11-11-2016,-500
10-11-2016,-500
09-11-2016,-500
08-11-2016,-500
07-11-2016,-500
04-11-2016,-500
03-11-2016,-500
02-11-2016,-500
01-11-2016,-500
31-10-2016,-500
28-10-2016,-500
27-10-2016,-500
26-10-2016,-500
25-10-2016,-500
24-10-2016,-500
21-10-2016,-500
20-10-2016,-500
19-10-2016,-500
18-10-2016,-500
17-10-2016,-500
14-10-2016,-500
13-10-2016,-500
12-10-2016,-500
11-10-2016,-500
10-10-2016,-500
07-10-2016,-500
06-10-2016,-500
05-10-2016,-500
04-10-2016,-500
03-10-2016,-500
30-09-2016,-500
29-09-2016,-500
28-09-2016,-500
27-09-2016,-500
26-09-2016,-500
23-09-2016,-500
22-09-2016,-500
21-09-2016,-500
20-09-2016,-500
19-09-2016,-500
16-09-2016,-500
15-09-2016,-500
14-09-2016,-500
13-09-2016,-500
12-09-2016,-500
09-09-2016,-500
08-09-2016,-500
07-09-2016,-500
06-09-2016,-500
05-09-2016,-500
02-09-2016,-500
01-09-2016,-500
31-08-2016,-500
30-08-2016,-500
26-08-2016,-500
25-08-2016,-500
24-08-2016,-500
23-08-2016,-500
22-08-2016,-500
19-08-2016,-500
18-08-2016,-500
17-08-2016,-500
16-08-2016,-500
15-08-2016,-500
12-08-2016,-500
11-08-2016,-500
10-08-2016,-500
09-08-2016,-500
08-08-2016,-500
05-08-2016,-500
04-08-2016,-500
03-08-2016,-500
02-08-2016,-500
01-08-2016,-500
29-07-2016,-500
28-07-2016,-500
27-07-2016,-500
26-07-2016,-500
25-07-2016,-500
22-07-2016,-500
21-07-2016,-500
20-07-2016,-500
19-07-2016,-500
18-07-2016,-500
15-07-2016,-500
14-07-2016,-500
13-07-2016,-500
12-07-2016,-500
11-07-2016,-500
08-07-2016,-500
07-07-2016,-500
06-07-2016,-500
05-07-2016,-500
04-07-2016,-500
01-07-2016,-500
30-06-2016,-500
29-06-2016,-500
28-06-2016,-500
27-06-2016,-500
24-06-2016,-500
23-06-2016,-500
22-06-2016,-500
21-06-2016,-500
20-06-2016,-500
17-06-2016,-500
16-06-2016,-500
15-06-2016,-500
14-06-2016,-500
13-06-2016,-500
10-06-2016,-500
09-06-2016,-500
08-06-2016,-500
07-06-2016,-500
06-06-2016,-500
03-06-2016,-500
02-06-2016,-500
01-06-2016,-500
31-05-2016,-500
27-05-2016,-500
26-05-2016,-500
25-05-2016,-500
24-05-2016,-500
23-05-2016,-500
20-05-2016,-500
19-05-2016,-500
18-05-2016,-500
17-05-2016,-500
16-05-2016,-500
13-05-2016,-500
12-05-2016,-500
11-05-2016,-500
10-05-2016,-500
09-05-2016,-500
06-05-2016,-500
05-05-2016,-500
04-05-2016,-500
03-05-2016,-500
29-04-2016,-500
28-04-2016,-500
27-04-2016,-500
26-04-2016,-500
25-04-2016,-500
22-04-2016,-500
21-04-2016,-500
20-04-2016,-500
19-04-2016,-500
18-04-2016,-500
15-04-2016,-500
14-04-2016,-500
13-04-2016,-500
12-04-2016,-500
11-04-2016,-500
08-04-2016,-500
07-04-2016,-500
06-04-2016,-500
05-04-2016,-500
04-04-2016,-500
01-04-2016,-500
31-03-2016,-500
30-03-2016,-500
29-03-2016,-500
24-03-2016,-500
23-03-2016,-500
22-03-2016,-500
21-03-2016,-500
18-03-2016,-500
17-03-2016,-500
16-03-2016,-500
15-03-2016,-500
14-03-2016,-500
11-03-2016,-500
10-03-2016,-500
09-03-2016,-500
08-03-2016,-500
07-03-2016,-500
04-03-2016,-500
03-03-2016,-500
02-03-2016,-500
01-03-2016,-500
29-02-2016,-500
26-02-2016,-500
25-02-2016,-500
24-02-2016,-500
23-02-2016,-500
22-02-2016,-500
19-02-2016,-500
18-02-2016,-500
17-02-2016,-500
16-02-2016,-500
15-02-2016,-500
12-02-2016,-500
11-02-2016,-500
10-02-2016,-500
09-02-2016,-500
08-02-2016,-500
05-02-2016,-500
04-02-2016,-500
03-02-2016,-500
02-02-2016,-500
01-02-2016,-500
29-01-2016,-500
28-01-2016,-500
27-01-2016,-500
26-01-2016,-500
25-01-2016,-500
22-01-2016,-500
21-01-2016,-500
20-01-2016,-500
19-01-2016,-500
18-01-2016,-500
15-01-2016,-500
14-01-2016,-500
13-01-2016,-500
12-01-2016,-500
11-01-2016,-500
08-01-2016,-500
07-01-2016,-500
06-01-2016,-500
05-01-2016,-500
04-01-2016,-500
31-12-2015,-500
30-12-2015,-500
29-12-2015,-500
24-12-2015,-500
23-12-2015,-500
22-12-2015,-500
21-12-2015,-500
18-12-2015,-500
17-12-2015,-500
16-12-2015,-500
15-12-2015,-500
14-12-2015,-500
11-12-2015,-500
10-12-2015,-500
09-12-2015,-500
08-12-2015,-500
07-12-2015,-500
04-12-2015,-500
03-12-2015,-500
02-12-2015,-500
01-12-2015,-500
30-11-2015,-500
27-11-2015,-500
26-11-2015,-500
25-11-2015,-500
24-11-2015,-500
23-11-2015,-500
20-11-2015,-500
19-11-2015,-500
18-11-2015,-500
17-11-2015,-500
16-11-2015,-500
13-11-2015,-500
12-11-2015,-500
11-11-2015,-500
10-11-2015,-500
09-11-2015,-500
06-11-2015,-500
05-11-2015,-500
04-11-2015,-500
03-11-2015,-500
02-11-2015,-500
30-10-2015,-500
29-10-2015,-500
28-10-2015,-500
27-10-2015,-500
26-10-2015,-500
23-10-2015,-500
22-10-2015,-500
21-10-2015,-500
20-10-2015,-500
19-10-2015,-500
16-10-2015,-500
15-10-2015,-500
14-10-2015,-500
13-10-2015,-500
12-10-2015,-500
09-10-2015,-500
08-10-2015,-500
07-10-2015,-500
06-10-2015,-500
05-10-2015,-500
02-10-2015,-500
01-10-2015,-500
30-09-2015,-500
29-09-2015,-500
28-09-2015,-500
25-09-2015,-500
24-09-2015,-500
23-09-2015,-500
22-09-2015,-500
21-09-2015,-500
18-09-2015,-500
17-09-2015,-500
16-09-2015,-500
15-09-2015,-500
14-09-2015,-500
11-09-2015,-500
10-09-2015,-500
09-09-2015,-500
08-09-2015,-500
07-09-2015,-500
04-09-2015,-500
03-09-2015,-500
02-09-2015,-500
01-09-2015,-500
28-08-2015,-500
27-08-2015,-500
26-08-2015,-500
25-08-2015,-500
24-08-2015,-500
21-08-2015,-500
20-08-2015,-500
19-08-2015,-500
18-08-2015,-500
17-08-2015,-500
14-08-2015,-500
13-08-2015,-500
12-08-2015,-500
11-08-2015,-500
10-08-2015,-500
07-08-2015,-500
06-08-2015,-500
05-08-2015,-500
04-08-2015,-500
03-08-2015,-500
31-07-2015,-500
30-07-2015,-500
29-07-2015,-500
28-07-2015,-500
27-07-2015,-500
24-07-2015,-500
23-07-2015,-500
22-07-2015,-500
21-07-2015,-500
20-07-2015,-500
17-07-2015,-500
16-07-2015,-500
15-07-2015,-500
14-07-2015,-500
13-07-2015,-500
10-07-2015,-500
09-07-2015,-500
08-07-2015,-500
07-07-2015,-500
06-07-2015,-500
03-07-2015,-500
02-07-2015,-500
01-07-2015,-500
30-06-2015,-500
29-06-2015,-500
26-06-2015,-500
25-06-2015,-500
24-06-2015,-500
23-06-2015,-500
22-06-2015,-500
19-06-2015,-500
18-06-2015,-500
17-06-2015,-500
16-06-2015,-500
15-06-2015,-500
12-06-2015,-500
11-06-2015,-500
10-06-2015,-500
09-06-2015,-500
08-06-2015,-500
05-06-2015,-500
04-06-2015,-500
03-06-2015,-500
02-06-2015,-500
01-06-2015,-500
29-05-2015,-500
28-05-2015,-500
27-05-2015,-500
26-05-2015,-500
22-05-2015,-500
21-05-2015,-500
20-05-2015,-500
19-05-2015,-500
18-05-2015,-500
15-05-2015,-500
14-05-2015,-500
13-05-2015,-500
12-05-2015,-500
11-05-2015,-500
08-05-2015,-500
07-05-2015,-500
06-05-2015,-500
05-05-2015,-500
01-05-2015,-500
30-04-2015,-500
29-04-2015,-500
28-04-2015,-500
27-04-2015,-500
24-04-2015,-500
23-04-2015,-500
22-04-2015,-500
21-04-2015,-500
20-04-2015,-500
17-04-2015,-500
16-04-2015,-500
15-04-2015,-500
14-04-2015,-500
13-04-2015,-500
10-04-2015,-500
09-04-2015,-500
08-04-2015,-500
07-04-2015,-500
02-04-2015,-500
01-04-2015,-500
31-03-2015,-500
30-03-2015,-500
27-03-2015,-500
26-03-2015,-500
25-03-2015,-500
24-03-2015,-500
23-03-2015,-500
20-03-2015,-500
19-03-2015,-500
18-03-2015,-500
17-03-2015,-500
16-03-2015,-500
13-03-2015,-500
12-03-2015,-500
11-03-2015,-500
10-03-2015,-500
09-03-2015,-500
06-03-2015,-500
05-03-2015,-500
04-03-2015,-500
03-03-2015,-500
02-03-2015,-500
27-02-2015,-500
26-02-2015,-500
25-02-2015,-500
24-02-2015,-500
23-02-2015,-500
20-02-2015,-500
19-02-2015,-500
18-02-2015,-500
17-02-2015,-500
16-02-2015,-500
13-02-2015,-500
12-02-2015,-500
11-02-2015,-500
10-02-2015,-500
09-02-2015,-500
06-02-2015,-500
05-02-2015,-500
04-02-2015,-500
03-02-2015,-500
02-02-2015,-500
30-01-2015,-500
29-01-2015,-500
28-01-2015,-500
27-01-2015,-500
26-01-2015,-500
23-01-2015,-500
22-01-2015,-500
21-01-2015,-500
20-01-2015,-500
19-01-2015,-500
16-01-2015,-500
15-01-2015,-500
14-01-2015,-500
13-01-2015,-500
12-01-2015,-500
09-01-2015,-500
08-01-2015,-500
07-01-2015,-500
06-01-2015,-500
05-01-2015,-500
02-01-2015,-500
31-12-2014,-500
30-12-2014,-500
29-12-2014,-500
24-12-2014,-500
23-12-2014,-500
22-12-2014,-500
19-12-2014,-500
18-12-2014,-500
17-12-2014,-500
16-12-2014,-500
15-12-2014,-500
12-12-2014,-500
11-12-2014,-500
10-12-2014,-500
09-12-2014,-500
08-12-2014,-500
05-12-2014,-500
04-12-2014,-500
03-12-2014,-500
02-12-2014,-500
01-12-2014,-500
28-11-2014,-500
27-11-2014,-500
26-11-2014,-500
25-11-2014,-500
24-11-2014,-500
21-11-2014,-500
20-11-2014,-500
19-11-2014,-500
18-11-2014,-500
17-11-2014,-500
14-11-2014,-500
13-11-2014,-500
12-11-2014,-500
11-11-2014,-500
10-11-2014,-500
07-11-2014,-500
06-11-2014,-500
05-11-2014,-500
04-11-2014,-500
03-11-2014,-500
31-10-2014,-500
30-10-2014,-500
29-10-2014,-500
28-10-2014,-500
27-10-2014,-500
24-10-2014,-500
23-10-2014,-500
22-10-2014,-500
21-10-2014,-500
20-10-2014,-500
17-10-2014,-500
16-10-2014,-500
15-10-2014,-500
14-10-2014,-500
13-10-2014,-500
10-10-2014,-500
09-10-2014,-500
08-10-2014,-500
07-10-2014,-500
06-10-2014,-500
03-10-2014,-500
02-10-2014,-500
01-10-2014,-500
30-09-2014,-500
29-09-2014,-500
26-09-2014,-500
25-09-2014,-500
24-09-2014,-500
23-09-2014,-500
22-09-2014,-500
19-09-2014,-500
18-09-2014,-500
17-09-2014,-500
16-09-2014,-500
15-09-2014,-500
12-09-2014,-500
11-09-2014,-500
10-09-2014,-500
09-09-2014,-500
08-09-2014,-500
05-09-2014,-500
04-09-2014,-500
03-09-2014,-500
02-09-2014,-500
01-09-2014,-500
29-08-2014,-500
28-08-2014,-500
27-08-2014,-500
26-08-2014,-500
22-08-2014,-500
21-08-2014,-500
20-08-2014,-500
19-08-2014,-500
18-08-2014,-500
15-08-2014,-500
14-08-2014,-500
13-08-2014,-500
12-08-2014,-500
11-08-2014,-500
08-08-2014,-500
07-08-2014,-500
06-08-2014,-500
05-08-2014,-500
04-08-2014,-500
01-08-2014,-500
31-07-2014,-500
30-07-2014,-500
29-07-2014,-500
28-07-2014,-500
25-07-2014,-500
24-07-2014,-500
23-07-2014,-500
22-07-2014,-500
21-07-2014,-500
18-07-2014,-500
17-07-2014,-500
16-07-2014,-500
15-07-2014,-500
14-07-2014,-500
11-07-2014,-500
10-07-2014,-500
09-07-2014,-500
08-07-2014,-500
07-07-2014,-500
04-07-2014,-500
03-07-2014,-500
02-07-2014,-500
01-07-2014,-500
30-06-2014,-500
27-06-2014,-500
26-06-2014,-500
25-06-2014,-500
24-06-2014,-500
23-06-2014,-500
20-06-2014,-500
19-06-2014,-500
18-06-2014,-500
17-06-2014,-500
16-06-2014,-500
13-06-2014,-500
12-06-2014,-500
11-06-2014,-500
10-06-2014,-500
09-06-2014,-500
06-06-2014,-500
05-06-2014,-500
04-06-2014,-500
03-06-2014,-500
02-06-2014,-500
30-05-2014,-500
29-05-2014,-500
28-05-2014,-500
27-05-2014,-500
23-05-2014,-500
22-05-2014,-500
21-05-2014,-500
20-05-2014,-500
19-05-2014,-500
16-05-2014,-500
15-05-2014,-500
14-05-2014,-500
13-05-2014,-500
12-05-2014,-500
09-05-2014,-500
08-05-2014,-500
07-05-2014,-500
06-05-2014,-500
02-05-2014,-500
01-05-2014,-500
30-04-2014,-500
29-04-2014,-500
28-04-2014,-500
25-04-2014,-500
24-04-2014,-500
23-04-2014,-500
22-04-2014,-500
17-04-2014,-500
16-04-2014,-500
15-04-2014,-500
14-04-2014,-500
11-04-2014,-500
10-04-2014,-500
09-04-2014,-500
08-04-2014,-500
07-04-2014,-500
04-04-2014,-500
03-04-2014,-500
02-04-2014,-500
01-04-2014,-500
31-03-2014,-500
28-03-2014,-500
27-03-2014,-500
26-03-2014,-500
25-03-2014,-500
24-03-2014,-500
21-03-2014,-500
20-03-2014,-500
19-03-2014,-500
18-03-2014,-500
17-03-2014,-500
14-03-2014,-500
13-03-2014,-500
12-03-2014,-500
11-03-2014,-500
10-03-2014,-500
07-03-2014,-500
06-03-2014,-500
05-03-2014,-500
04-03-2014,-500
03-03-2014,-500
28-02-2014,-500
27-02-2014,-500
26-02-2014,-500
25-02-2014,-500
24-02-2014,-500
21-02-2014,-500
20-02-2014,-500
19-02-2014,-500
18-02-2014,-500
17-02-2014,-500
14-02-2014,-500
13-02-2014,-500
12-02-2014,-500
11-02-2014,-500
10-02-2014,-500
07-02-2014,-500
06-02-2014,-500
05-02-2014,-500
04-02-2014,-500
03-02-2014,-500
31-01-2014,-500
30-01-2014,-500
29-01-2014,-500
28-01-2014,-500
27-01-2014,-500
24-01-2014,-500
23-01-2014,-500
22-01-2014,-500
21-01-2014,-500
20-01-2014,-500
17-01-2014,-500
16-01-2014,-500
15-01-2014,-500
14-01-2014,-500
13-01-2014,-500
10-01-2014,-500
09-01-2014,-500
08-01-2014,-500
07-01-2014,-500
06-01-2014,-500
03-01-2014,-500
02-01-2014,-500
31-12-2013,-500
30-12-2013,-500
27-12-2013,-500
24-12-2013,-500
23-12-2013,-500
20-12-2013,-500
19-12-2013,-500
18-12-2013,-500
17-12-2013,-500
16-12-2013,-500
13-12-2013,-500
12-12-2013,-500
11-12-2013,-500
10-12-2013,-500
09-12-2013,-500
06-12-2013,-500
05-12-2013,-500
04-12-2013,-500
03-12-2013,-500
02-12-2013,-500
29-11-2013,-500
28-11-2013,-500
27-11-2013,-500
26-11-2013,-500
25-11-2013,-500
22-11-2013,-500
21-11-2013,-500
20-11-2013,-500
19-11-2013,-500
18-11-2013,-500
15-11-2013,-500
14-11-2013,-500
13-11-2013,-500
12-11-2013,-500
11-11-2013,-500
08-11-2013,-500
07-11-2013,-500
06-11-2013,-500
05-11-2013,-500
04-11-2013,-500
01-11-2013,-500
31-10-2013,-500
30-10-2013,-500
29-10-2013,-500
28-10-2013,-500
25-10-2013,-500
24-10-2013,-500
23-10-2013,-500
22-10-2013,-500
21-10-2013,-500
18-10-2013,-500
17-10-2013,-500
16-10-2013,-500
15-10-2013,-500
14-10-2013,-500
11-10-2013,-500
10-10-2013,-500
09-10-2013,-500
08-10-2013,-500
07-10-2013,-500
04-10-2013,-500
03-10-2013,-500
02-10-2013,-500
01-10-2013,-500
30-09-2013,-500
27-09-2013,-500
26-09-2013,-500
25-09-2013,-500
24-09-2013,-500
23-09-2013,-500
20-09-2013,-500
19-09-2013,-500
18-09-2013,-500
17-09-2013,-500
16-09-2013,-500
13-09-2013,-500
12-09-2013,-500
11-09-2013,-500
10-09-2013,-500
09-09-2013,-500
06-09-2013,-500
05-09-2013,-500
04-09-2013,-500
03-09-2013,-500
02-09-2013,-500
30-08-2013,-500
29-08-2013,-500
28-08-2013,-500
27-08-2013,-500
23-08-2013,-500
22-08-2013,-500
21-08-2013,-500
20-08-2013,-500
19-08-2013,-500
16-08-2013,-500
15-08-2013,-500
14-08-2013,-500
13-08-2013,-500
12-08-2013,-500
09-08-2013,-500
08-08-2013,-500
07-08-2013,-500
06-08-2013,-500
05-08-2013,-500
02-08-2013,-500
01-08-2013,-500
31-07-2013,-500
30-07-2013,-500
29-07-2013,-500
26-07-2013,-500
25-07-2013,-500
24-07-2013,-500
23-07-2013,-500
22-07-2013,-500
19-07-2013,-500
18-07-2013,-500
17-07-2013,-500
16-07-2013,-500
15-07-2013,-500
12-07-2013,-500
11-07-2013,-500
10-07-2013,-500
09-07-2013,-500
08-07-2013,-500
05-07-2013,-500
04-07-2013,-500
03-07-2013,-500
02-07-2013,-500
01-07-2013,-500
28-06-2013,-500
27-06-2013,-500
26-06-2013,-500
25-06-2013,-500
24-06-2013,-500
21-06-2013,-500
20-06-2013,-500
19-06-2013,-500
18-06-2013,-500
17-06-2013,-500
14-06-2013,-500
13-06-2013,-500
12-06-2013,-500
11-06-2013,-500
10-06-2013,-500
07-06-2013,-500
06-06-2013,-500
05-06-2013,-500
04-06-2013,-500
03-06-2013,-500
31-05-2013,-500
30-05-2013,-500
29-05-2013,-500
28-05-2013,-500
24-05-2013,-500
23-05-2013,-500
22-05-2013,-500
21-05-2013,-500
20-05-2013,-500
17-05-2013,-500
16-05-2013,-500
15-05-2013,-500
14-05-2013,-500
13-05-2013,-500
10-05-2013,-500
09-05-2013,-500
08-05-2013,-500
07-05-2013,-500
03-05-2013,-500
02-05-2013,-500
01-05-2013,-500
30-04-2013,-500
29-04-2013,-500
26-04-2013,-500
25-04-2013,-500
24-04-2013,-500
23-04-2013,-500
22-04-2013,-500
19-04-2013,-500
18-04-2013,-500
17-04-2013,-500
16-04-2013,-500
15-04-2013,-500
12-04-2013,-500
11-04-2013,-500
10-04-2013,-500
09-04-2013,-500
08-04-2013,-500
05-04-2013,-500
04-04-2013,-500
03-04-2013,-500
02-04-2013,-500
28-03-2013,-500
27-03-2013,-500
26-03-2013,-500
25-03-2013,-500
22-03-2013,-500
21-03-2013,-500
20-03-2013,-500
19-03-2013,-500
18-03-2013,-500
15-03-2013,-500
14-03-2013,-500
13-03-2013,-500
12-03-2013,-500
11-03-2013,-500
08-03-2013,-500
07-03-2013,-500
06-03-2013,-500
05-03-2013,-500
04-03-2013,-500
01-03-2013,-500
28-02-2013,-500
27-02-2013,-500
26-02-2013,-500
25-02-2013,-500
22-02-2013,-500
21-02-2013,-500
20-02-2013,-500
19-02-2013,-500
18-02-2013,-500
15-02-2013,-500
14-02-2013,-500
13-02-2013,-500
12-02-2013,-500
11-02-2013,-500
08-02-2013,-500
07-02-2013,-500
06-02-2013,-500
05-02-2013,-500
04-02-2013,-500
01-02-2013,-500
31-01-2013,-500
30-01-2013,-500
29-01-2013,-500
28-01-2013,-500
25-01-2013,-500
24-01-2013,-500
23-01-2013,-500
22-01-2013,-500
21-01-2013,-500
18-01-2013,-500
17-01-2013,-500
16-01-2013,-500
15-01-2013,-500
14-01-2013,-500
11-01-2013,-500
10-01-2013,-500
09-01-2013,-500
08-01-2013,-500
07-01-2013,-500
04-01-2013,-500
03-01-2013,-500
02-01-2013,-500
================================================
FILE: python/rateslib/data/historical/swestr.csv
================================================
reference_date,rate
01-09-2021,-500
02-09-2021,-500
03-09-2021,-500
06-09-2021,-500
07-09-2021,-500
08-09-2021,-500
09-09-2021,-500
10-09-2021,-500
13-09-2021,-500
14-09-2021,-500
15-09-2021,-500
16-09-2021,-500
17-09-2021,-500
20-09-2021,-500
21-09-2021,-500
22-09-2021,-500
23-09-2021,-500
24-09-2021,-500
27-09-2021,-500
28-09-2021,-500
29-09-2021,-500
30-09-2021,-500
01-10-2021,-500
04-10-2021,-500
05-10-2021,-500
06-10-2021,-500
07-10-2021,-500
08-10-2021,-500
11-10-2021,-500
12-10-2021,-500
13-10-2021,-500
14-10-2021,-500
15-10-2021,-500
18-10-2021,-500
19-10-2021,-500
20-10-2021,-500
21-10-2021,-500
22-10-2021,-500
25-10-2021,-500
26-10-2021,-500
27-10-2021,-500
28-10-2021,-500
29-10-2021,-500
01-11-2021,-500
02-11-2021,-500
03-11-2021,-500
04-11-2021,-500
05-11-2021,-500
08-11-2021,-500
09-11-2021,-500
10-11-2021,-500
11-11-2021,-500
12-11-2021,-500
15-11-2021,-500
16-11-2021,-500
17-11-2021,-500
18-11-2021,-500
19-11-2021,-500
22-11-2021,-500
23-11-2021,-500
24-11-2021,-500
25-11-2021,-500
26-11-2021,-500
29-11-2021,-500
30-11-2021,-500
01-12-2021,-500
02-12-2021,-500
03-12-2021,-500
06-12-2021,-500
07-12-2021,-500
08-12-2021,-500
09-12-2021,-500
10-12-2021,-500
13-12-2021,-500
14-12-2021,-500
15-12-2021,-500
16-12-2021,-500
17-12-2021,-500
20-12-2021,-500
21-12-2021,-500
22-12-2021,-500
23-12-2021,-500
27-12-2021,-500
28-12-2021,-500
29-12-2021,-500
30-12-2021,-500
03-01-2022,-500
04-01-2022,-500
05-01-2022,-500
07-01-2022,-500
10-01-2022,-500
11-01-2022,-500
12-01-2022,-500
13-01-2022,-500
14-01-2022,-500
17-01-2022,-500
18-01-2022,-500
19-01-2022,-500
20-01-2022,-500
21-01-2022,-500
24-01-2022,-500
25-01-2022,-500
26-01-2022,-500
27-01-2022,-500
28-01-2022,-500
31-01-2022,-500
01-02-2022,-500
02-02-2022,-500
03-02-2022,-500
04-02-2022,-500
07-02-2022,-500
08-02-2022,-500
09-02-2022,-500
10-02-2022,-500
11-02-2022,-500
14-02-2022,-500
15-02-2022,-500
16-02-2022,-500
17-02-2022,-500
18-02-2022,-500
21-02-2022,-500
22-02-2022,-500
23-02-2022,-500
24-02-2022,-500
25-02-2022,-500
28-02-2022,-500
01-03-2022,-500
02-03-2022,-500
03-03-2022,-500
04-03-2022,-500
07-03-2022,-500
08-03-2022,-500
09-03-2022,-500
10-03-2022,-500
11-03-2022,-500
14-03-2022,-500
15-03-2022,-500
16-03-2022,-500
17-03-2022,-500
18-03-2022,-500
21-03-2022,-500
22-03-2022,-500
23-03-2022,-500
24-03-2022,-500
25-03-2022,-500
28-03-2022,-500
29-03-2022,-500
30-03-2022,-500
31-03-2022,-500
01-04-2022,-500
04-04-2022,-500
05-04-2022,-500
06-04-2022,-500
07-04-2022,-500
08-04-2022,-500
11-04-2022,-500
12-04-2022,-500
13-04-2022,-500
14-04-2022,-500
19-04-2022,-500
20-04-2022,-500
21-04-2022,-500
22-04-2022,-500
25-04-2022,-500
26-04-2022,-500
27-04-2022,-500
28-04-2022,-500
29-04-2022,-500
02-05-2022,-500
03-05-2022,-500
04-05-2022,-500
05-05-2022,-500
06-05-2022,-500
09-05-2022,-500
10-05-2022,-500
11-05-2022,-500
12-05-2022,-500
13-05-2022,-500
16-05-2022,-500
17-05-2022,-500
18-05-2022,-500
19-05-2022,-500
20-05-2022,-500
23-05-2022,-500
24-05-2022,-500
25-05-2022,-500
27-05-2022,-500
30-05-2022,-500
31-05-2022,-500
01-06-2022,-500
02-06-2022,-500
03-06-2022,-500
07-06-2022,-500
08-06-2022,-500
09-06-2022,-500
10-06-2022,-500
13-06-2022,-500
14-06-2022,-500
15-06-2022,-500
16-06-2022,-500
17-06-2022,-500
20-06-2022,-500
21-06-2022,-500
22-06-2022,-500
23-06-2022,-500
27-06-2022,-500
28-06-2022,-500
29-06-2022,-500
30-06-2022,-500
01-07-2022,-500
04-07-2022,-500
05-07-2022,-500
06-07-2022,-500
07-07-2022,-500
08-07-2022,-500
11-07-2022,-500
12-07-2022,-500
13-07-2022,-500
14-07-2022,-500
15-07-2022,-500
18-07-2022,-500
19-07-2022,-500
20-07-2022,-500
21-07-2022,-500
22-07-2022,-500
25-07-2022,-500
26-07-2022,-500
27-07-2022,-500
28-07-2022,-500
29-07-2022,-500
01-08-2022,-500
02-08-2022,-500
03-08-2022,-500
04-08-2022,-500
05-08-2022,-500
08-08-2022,-500
09-08-2022,-500
10-08-2022,-500
11-08-2022,-500
12-08-2022,-500
15-08-2022,-500
16-08-2022,-500
17-08-2022,-500
18-08-2022,-500
19-08-2022,-500
22-08-2022,-500
23-08-2022,-500
24-08-2022,-500
25-08-2022,-500
26-08-2022,-500
29-08-2022,-500
30-08-2022,-500
31-08-2022,-500
01-09-2022,-500
02-09-2022,-500
05-09-2022,-500
06-09-2022,-500
07-09-2022,-500
08-09-2022,-500
09-09-2022,-500
12-09-2022,-500
13-09-2022,-500
14-09-2022,-500
15-09-2022,-500
16-09-2022,-500
19-09-2022,-500
20-09-2022,-500
21-09-2022,-500
22-09-2022,-500
23-09-2022,-500
26-09-2022,-500
27-09-2022,-500
28-09-2022,-500
29-09-2022,-500
30-09-2022,-500
03-10-2022,-500
04-10-2022,-500
05-10-2022,-500
06-10-2022,-500
07-10-2022,-500
10-10-2022,-500
11-10-2022,-500
12-10-2022,-500
13-10-2022,-500
14-10-2022,-500
17-10-2022,-500
18-10-2022,-500
19-10-2022,-500
20-10-2022,-500
21-10-2022,-500
24-10-2022,-500
25-10-2022,-500
26-10-2022,-500
27-10-2022,-500
28-10-2022,-500
31-10-2022,-500
01-11-2022,-500
02-11-2022,-500
03-11-2022,-500
04-11-2022,-500
07-11-2022,-500
08-11-2022,-500
09-11-2022,-500
10-11-2022,-500
11-11-2022,-500
14-11-2022,-500
15-11-2022,-500
16-11-2022,-500
17-11-2022,-500
18-11-2022,-500
21-11-2022,-500
22-11-2022,-500
23-11-2022,-500
24-11-2022,-500
25-11-2022,-500
28-11-2022,-500
29-11-2022,-500
30-11-2022,-500
01-12-2022,-500
02-12-2022,-500
05-12-2022,-500
06-12-2022,-500
07-12-2022,-500
08-12-2022,-500
09-12-2022,-500
12-12-2022,-500
13-12-2022,-500
14-12-2022,-500
15-12-2022,-500
16-12-2022,-500
19-12-2022,-500
20-12-2022,-500
21-12-2022,-500
22-12-2022,-500
23-12-2022,-500
27-12-2022,-500
28-12-2022,-500
29-12-2022,-500
30-12-2022,-500
02-01-2023,-500
03-01-2023,-500
04-01-2023,-500
05-01-2023,-500
09-01-2023,-500
10-01-2023,-500
11-01-2023,-500
12-01-2023,-500
13-01-2023,-500
16-01-2023,-500
17-01-2023,-500
18-01-2023,-500
19-01-2023,-500
20-01-2023,-500
23-01-2023,-500
24-01-2023,-500
25-01-2023,-500
26-01-2023,-500
27-01-2023,-500
30-01-2023,-500
31-01-2023,-500
01-02-2023,-500
02-02-2023,-500
03-02-2023,-500
06-02-2023,-500
07-02-2023,-500
08-02-2023,-500
09-02-2023,-500
10-02-2023,-500
13-02-2023,-500
14-02-2023,-500
15-02-2023,-500
16-02-2023,-500
17-02-2023,-500
20-02-2023,-500
21-02-2023,-500
22-02-2023,-500
23-02-2023,-500
24-02-2023,-500
27-02-2023,-500
28-02-2023,-500
01-03-2023,-500
02-03-2023,-500
03-03-2023,-500
06-03-2023,-500
07-03-2023,-500
08-03-2023,-500
09-03-2023,-500
10-03-2023,-500
13-03-2023,-500
14-03-2023,-500
15-03-2023,-500
16-03-2023,-500
17-03-2023,-500
20-03-2023,-500
21-03-2023,-500
22-03-2023,-500
23-03-2023,-500
24-03-2023,-500
27-03-2023,-500
28-03-2023,-500
29-03-2023,-500
30-03-2023,-500
31-03-2023,-500
03-04-2023,-500
04-04-2023,-500
05-04-2023,-500
06-04-2023,-500
11-04-2023,-500
12-04-2023,-500
13-04-2023,-500
14-04-2023,-500
17-04-2023,-500
18-04-2023,-500
19-04-2023,-500
20-04-2023,-500
21-04-2023,-500
24-04-2023,-500
25-04-2023,-500
26-04-2023,-500
27-04-2023,-500
28-04-2023,-500
02-05-2023,-500
03-05-2023,-500
04-05-2023,-500
05-05-2023,-500
08-05-2023,-500
09-05-2023,-500
10-05-2023,-500
11-05-2023,-500
12-05-2023,-500
15-05-2023,-500
16-05-2023,-500
17-05-2023,-500
19-05-2023,-500
22-05-2023,-500
23-05-2023,-500
24-05-2023,-500
25-05-2023,-500
26-05-2023,-500
29-05-2023,-500
30-05-2023,-500
31-05-2023,-500
01-06-2023,-500
02-06-2023,-500
05-06-2023,-500
07-06-2023,-500
08-06-2023,-500
09-06-2023,-500
12-06-2023,-500
13-06-2023,-500
14-06-2023,-500
15-06-2023,-500
16-06-2023,-500
19-06-2023,-500
20-06-2023,-500
21-06-2023,-500
22-06-2023,-500
26-06-2023,-500
27-06-2023,-500
28-06-2023,-500
29-06-2023,-500
30-06-2023,-500
03-07-2023,-500
04-07-2023,-500
05-07-2023,-500
06-07-2023,-500
07-07-2023,-500
10-07-2023,-500
11-07-2023,-500
12-07-2023,-500
13-07-2023,-500
14-07-2023,-500
17-07-2023,-500
18-07-2023,-500
19-07-2023,-500
20-07-2023,-500
21-07-2023,-500
24-07-2023,-500
25-07-2023,-500
26-07-2023,-500
27-07-2023,-500
28-07-2023,-500
31-07-2023,-500
01-08-2023,-500
02-08-2023,-500
================================================
FILE: python/rateslib/data/historical/usd_rfr.csv
================================================
reference_date,rate
01-08-2019,2.19
31-07-2019,2.55
30-07-2019,2.39
29-07-2019,2.4
26-07-2019,2.41
25-07-2019,2.42
24-07-2019,2.41
23-07-2019,2.4
22-07-2019,2.4
================================================
FILE: python/rateslib/data/loader.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import os
from abc import ABCMeta, abstractmethod
from datetime import datetime
from typing import TYPE_CHECKING
import rateslib.errors as err
from packaging import version
from pandas import Series, read_csv
from pandas import __version__ as pandas_version
from rateslib.enums.generics import Err, NoInput, Ok
if TYPE_CHECKING:
from rateslib.local_types import (
Adjuster,
CalTypes,
DualTypes,
FloatRateSeries,
Result,
datetime_,
int_,
)
class _BaseFixingsLoader(metaclass=ABCMeta):
"""
Abstract base class to allow custom implementations of a fixings data loader.
Notes
-----
This class requires an implementation of ``__getitem__``, which should accept an
``identifier`` and return a 3-tuple. The 3-tuple should include;
- an integer representing the state id of the loaded data, i.e. its hash or pseudo-hash.
- the data itself as a Series indexed by daily datetimes.
- a 2-tuple of datetimes indicating the min and max of the timeseries index.
If a valid Series object cannot be loaded for the ``identifier`` then this method
is required to raise a `ValeuError`.
"""
@abstractmethod
def __getitem__(self, name: str) -> tuple[int, Series[DualTypes], tuple[datetime, datetime]]: # type: ignore[type-var]
"""
Get item method to load a fixing series and ist state id from a custom container object.
Parameters
----------
name: str
The name of the fixing series to load.
Returns
-------
tuple of int, pandas Series, and tuple of datetime
Notes
-----
The first tuple element is a hash integer which represents the state of the Series object.
This is used to determine if the Series object has changed since it was last loaded,
and makes for more efficient fixings lookup calculations in *Periods*.
The second element is the timeseries object itself.
The third tuple element is a cached record of the first and last dates in the Series index.
If a valid Series object cannot be loaded this method **must** raise an `Exception`,
preferably a `ValueError`.
"""
pass
@abstractmethod
def add(self, name: str, series: Series[DualTypes], state: int_ = NoInput(0)) -> None: # type: ignore[type-var]
"""
Add a timeseries to the data loader directly from Python.
Parameters
----------
name: str
The string identifier for the timeseries.
series: Series[DualTypes]
The timeseries to add to static data.
Returns
-------
None
Examples
--------
.. ipython:: python
:suppress:
from rateslib import fixings, dt
from pandas import Series
.. ipython:: python
ts = Series(index=[dt(2000, 1, 1)], data=[666.0])
fixings.add("my_timeseries", ts)
fixings["my_timeseries"]
fixings.pop("my_timeseries")
"""
pass
@abstractmethod
def pop(self, name: str) -> Series[DualTypes] | None: # type: ignore[type-var]
"""
Remove a timeseries from the data loader.
Parameters
----------
name: str
The string identifier for the timeseries.
Returns
-------
Series[DualTypes] or None
Notes
-----
If the ``name`` does not exist None will be returned.
"""
pass
def __try_getitem__(
self, name: str
) -> Result[tuple[int, Series[DualTypes], tuple[datetime, datetime]]]: # type: ignore[type-var]
try:
tuple_value = self.__getitem__(name)
except Exception as e:
return Err(e)
else:
return Ok(tuple_value)
def __base_lookup__(
self,
fixing_series: Series[DualTypes], # type: ignore[type-var]
lookup_date: datetime_,
bounds: tuple[datetime, datetime] | None = None,
) -> Result[DualTypes]:
if bounds is not None:
left, right = bounds
else:
# default to slower mechanism of lookup
left, right = fixing_series.index[0], fixing_series.index[-1]
if isinstance(lookup_date, NoInput):
# program break, raise directly
raise ValueError("A `lookup_date` must be provided for fetching fixings from Series.")
if lookup_date < left or lookup_date > right:
return Err(FixingRangeError(lookup_date, (left, right)))
if lookup_date not in fixing_series.index:
return Err(FixingMissingDataError(lookup_date, (left, right)))
else:
return Ok(fixing_series.loc[lookup_date])
def get_stub_ibor_fixings(
self,
value_start_date: datetime,
value_end_date: datetime,
fixing_date: datetime,
fixing_calendar: CalTypes,
fixing_modifier: Adjuster,
fixing_identifier: str,
) -> tuple[list[str], list[datetime], list[DualTypes | None]]:
"""
Return the tenors available in the :class:`~rateslib.defaults.Fixings` object for
determining an IBOR type stub period.
Parameters
----------
value_start_date: datetime
The value start date of the IBOR period.
value_end_date: datetime
The value end date of the current stub period.
fixing_date: datetime
The index date to examine from the fixing series.
fixing_calendar: Cal, UnionCal, NamedCal,
The calendar to derive IBOR value end dates.
fixing_modifier: Adjuster
The date adjuster to derive IBOR value end dates.
fixing_identifier: str
The fixing name, prior to the addition of tenor, e.g. "EUR_EURIBOR"
Returns
-------
tuple of list[string tenors] and list[evaluated end dates]
"""
def _is_available(tenor: str) -> bool:
try:
self.__getitem__(f"{fixing_identifier.upper()}_{tenor}")
except Exception: # noqa: S112
return False
else:
return True
tenors = ["1D", "1B", "2B", "1W", "2W", "3W", "4W"] + [
"1M",
"2M",
"3M",
"4M",
"5M",
"6M",
"7M",
"8M",
"9M",
"10M",
"11M",
"12M",
"1Y",
]
available_tenors = [tenor for tenor in tenors if _is_available(tenor)]
from rateslib.data.fixings import FloatRateSeries
neighbouring_tenors = _find_neighbouring_tenors(
end=value_end_date,
start=value_start_date,
tenors=available_tenors,
rate_series=FloatRateSeries(
lag=0, calendar=fixing_calendar, convention="1", modifier=fixing_modifier, eom=False
),
)
values: list[DualTypes | None] = []
for tenor in neighbouring_tenors[0]:
try:
val: DualTypes = self.__getitem__(f"{fixing_identifier.upper()}_{tenor}")[1][
fixing_date
]
except KeyError:
values.append(None)
else:
values.append(val)
return neighbouring_tenors + (values,)
class DefaultFixingsLoader(_BaseFixingsLoader):
"""
The :class:`~rateslib.data.loader._BaseFixingsLoader` implemented by default.
This loader searches a particular local directory for CSV files.
"""
def __init__(self) -> None:
self._directory = os.path.dirname(os.path.abspath(__file__)) + "/historical"
self._loaded: dict[str, tuple[int, Series[DualTypes], tuple[datetime, datetime]]] = {} # type: ignore[type-var]
@property
def directory(self) -> str:
"""The local directory in which data CSV files may be located."""
return self._directory
@directory.setter
def directory(self, val: str) -> None:
self._directory = val
@property
def loaded(self) -> dict[str, tuple[int, Series[DualTypes], tuple[datetime, datetime]]]: # type: ignore[type-var]
"""A dictionary of the (state id, timeseries, data range) keyed by identifiers."""
return self._loaded
@staticmethod
def _load_csv(directory: str, path: str) -> Series[DualTypes]: # type: ignore[type-var]
target = os.path.join(directory, path)
if version.parse(pandas_version) < version.parse("2.0"): # pragma: no cover
# this is tested by the minimum version gitflow actions.
# TODO (low:dependencies) remove when pandas min version is bumped to 2.0
df = read_csv(target)
df["reference_date"] = df["reference_date"].map(
lambda x: datetime.strptime(x, "%d-%m-%Y"),
)
df = df.set_index("reference_date")
else:
df = read_csv(target, index_col=0, parse_dates=[0], date_format="%d-%m-%Y")
return df["rate"].sort_index(ascending=True)
def __getitem__(self, name: str) -> tuple[int, Series[DualTypes], tuple[datetime, datetime]]: # type: ignore[type-var]
name_ = name.upper()
if name_ in self.loaded:
return self.loaded[name_]
try:
s: Series[DualTypes] = self._load_csv(self.directory, f"{name}.csv") # type: ignore[type-var]
except FileNotFoundError:
raise ValueError(
f"Fixing data for the index '{name}' has been attempted, but there is no file:\n"
f"'{name}.csv' located in the search directory.\n"
"For further info see the documentation section regarding `Fixings`.",
)
data = (hash(os.urandom(8)), s, (s.index[0], s.index[-1]))
self.loaded[name_] = data
return data
def add(self, name: str, series: Series[DualTypes], state: int_ = NoInput(0)) -> None: # type: ignore[type-var]
if name in self.loaded:
raise ValueError(f"Fixing data for the index '{name}' has already been loaded.")
s = series.sort_index(ascending=True)
s.index.name = "reference_date"
s.name = "rate"
name_ = name.upper()
if isinstance(state, NoInput):
state_: int = hash(os.urandom(64))
else:
state_ = state
self.loaded[name_] = (state_, s, (s.index[0], s.index[-1]))
def pop(self, name: str) -> Series[DualTypes] | None: # type: ignore[type-var]
name_ = name.upper()
popped = self.loaded.pop(name_, None)
if popped is not None:
return popped[1] # return the Series object only
else:
return None
class Fixings(_BaseFixingsLoader):
"""
Object to store and load fixing data to populate *Leg* and *Period* calculations.
.. warning::
You must maintain and populate your own fixing data.
*Rateslib* does not come pre-packaged with accurate, nor upto date fixing data.
1) It does not have data licensing to distribute such data.
2) It is a statically uploaded code package will become immediately out of date.
.. attention::
This object is loaded **once** by *rateslib* and in its global module,
under the attribute `fixings`.
Only this object is referenced internally and other instantiations of this class
will be ignored.
Notes
-----
The ``loader`` is initialised as the :class:`DefaultFixingsLoader`. This can be set as
a user implemented :class:`_BaseFixingsLoader`.
This class maintains a dictionary of financial fixing Series indexed by string identifiers.
**Fixing Population**
This dictionary can be populated in one of two ways:
- Either by maintaining a set of CSV files in the source lookup directory (whose path is
visible/settable by calling `fixings.directory`)
- Or creating a pandas *Series* and using the :meth:`~rateslib.default.Fixings.add` to
add this object to the dictionary.
**Fixing Lookup**
Lookup of a fixing *Series* is performed, for example using the get item pattern. If an
object does not already exist in the dictionary it will be attempted to load from source CSV
file. If neither exists it will raise a `ValueError`.
.. ipython:: python
:suppress:
from pandas import Series
from datetime import datetime as dt
from rateslib import fixings
.. ipython:: python
cpi = Series(
index=[dt(2000, 1, 1), dt(2000, 2, 1), dt(2000, 3, 1)],
data=[100.0, 101.2, 102.2]
)
fixings.add("MY_CPI", cpi)
fixings["MY_CPI"]
.. ipython:: python
try:
fixings["NON_EXISTENT_SERIES"]
except ValueError as e:
print(e)
"""
_instance = None
def __new__(cls) -> Fixings:
if cls._instance is None:
# Singleton pattern creates only one instance: TODO (low) might not be thread safe
cls._instance = super(_BaseFixingsLoader, cls).__new__(cls) # noqa: UP008
cls._loader: _BaseFixingsLoader = DefaultFixingsLoader()
return cls._instance
def __getitem__(self, name: str) -> tuple[int, Series[DualTypes], tuple[datetime, datetime]]: # type: ignore[type-var]
return self.loader.__getitem__(name)
@property
def loader(self) -> _BaseFixingsLoader:
"""
Object responsible for fetching data from external sources.
"""
return self._loader
@loader.setter
def loader(self, loader: _BaseFixingsLoader) -> None:
self._loader = loader
def add(self, name: str, series: Series[DualTypes], state: int_ = NoInput(0)) -> None: # type: ignore[type-var]
"""
Add a Series to the Fixings object directly from Python
.. role:: red
.. role:: green
Parameters
----------
name: str, :red:`required`
The string identifier key for the timeseries.
series: Series, :red:`required`
The timeseries indexed by datetime.
state: int, :green:`optional`
The state id to be used upon insertion of the Series.
Returns
-------
None
"""
return self.loader.add(name, series, state)
def pop(self, name: str) -> Series[DualTypes] | None: # type: ignore[type-var]
"""
Remove a Series from the Fixings object.
.. role:: red
Parameters
----------
name: str, :red:`required`
The string identifier key for the timeseries.
Returns
-------
Series, or None (if name not found)
"""
return self.loader.pop(name)
class FixingRangeError(Exception):
def __init__(self, date: datetime, boundary: tuple[datetime, datetime]) -> None:
super().__init__(
f"Fixing lookup for date '{date}' failed.\n"
f"The fixings series has range [{boundary[0]}, {boundary[1]}]"
)
self.date = date
self.boundary = boundary
class FixingMissingDataError(Exception):
def __init__(self, date: datetime, boundary: tuple[datetime, datetime]) -> None:
super().__init__(
f"Fixing lookup for date '{date}' failed.\n"
f"The requested date falls within the fixings series range "
f"[{boundary[0]}, {boundary[1]}] but was not found."
)
self.date = date
self.boundary = boundary
class FixingMissingForecasterError(Exception):
def __init__(self) -> None:
super().__init__(err.VE_NEEDS_RATE_TO_FORECAST_RFR)
def _find_neighbouring_tenors(
end: datetime,
start: datetime,
tenors: list[str],
rate_series: FloatRateSeries,
) -> tuple[list[str], list[datetime]]:
"""
Given a list of string tenors find the two, measured from `start`, that encompass `end`
on neighbouring sides. If outside, find the closest single tenor.
"""
from rateslib.scheduling import add_tenor
left: tuple[str | None, datetime] = (None, datetime(1, 1, 1))
right: tuple[str | None, datetime] = (None, datetime(9999, 1, 1))
for tenor in tenors:
sample_end = add_tenor(
start=start,
tenor=tenor,
modifier=rate_series.modifier,
calendar=rate_series.calendar,
)
if sample_end <= end and sample_end > left[1]:
left = (tenor, sample_end)
if sample_end >= end and sample_end < right[1]:
right = (tenor, sample_end)
break
ret: tuple[list[str], list[datetime]] = ([], [])
if left[0] is not None:
ret[0].append(left[0].upper())
ret[1].append(left[1])
if right[0] is not None:
ret[0].append(right[0].upper())
ret[1].append(right[1])
return ret
__all__ = ["Fixings", "DefaultFixingsLoader", "_BaseFixingsLoader"]
================================================
FILE: python/rateslib/default.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from copy import deepcopy
from datetime import datetime
from typing import TYPE_CHECKING
import matplotlib.dates as mdates
import matplotlib.pyplot as plt
import numpy as np
from rateslib._spec_loader import INSTRUMENT_SPECS
from rateslib.enums.generics import NoInput, _drb
from rateslib.rs import Adjuster, Convention, Frequency, NamedCal
PlotOutput = tuple[plt.Figure, plt.Axes, list[plt.Line2D]] # type: ignore[name-defined]
# Licence: Creative Commons - Attribution-NonCommercial-NoDerivatives 4.0 International
# Commercial use of this code, and/or copying and redistribution is prohibited.
# Contact rateslib at gmail.com if this code is observed outside its intended sphere.
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
CalTypes,
)
DEFAULTS = dict(
stub="SHORTFRONT",
stub_length="SHORT",
eval_mode="swaps_align",
modifier="MF",
eom=False,
eom_fx=True,
# Instrument parameterisation
metric={
"SBS": "leg1",
},
convention="ACT360",
notional=1.0e6,
index_lag=3,
index_lag_curve=0,
index_method="daily",
payment_lag=2,
payment_lag_exchange=0,
payment_lag_specific={
"Fee": 0,
"Loan": 0,
"IRS": 2,
"STIRFuture": 0,
"IIRS": 2,
"YoYIS": 2,
"ZCS": 2,
"ZCIS": 0,
"FXSwap": 0,
"SBS": 2,
"Swap": 2,
"XCS": 2,
"NDXCS": 2,
"FixedRateBond": 0,
"IndexFixedRateBond": 0,
"FloatRateNote": 0,
"Bill": 0,
"FRA": 0,
"CDS": 0,
"NDF": 2,
},
fixing_method="rfr_payment_delay",
spread_compound_method="none_simple",
index_base_type="initial",
base_currency="usd",
fx_delivery_lag=2,
fx_delta_type="spot",
fx_option_metric="pips",
ir_option_metric="black_vol_shift_0",
ir_option_settlement="physical",
cds_premium_accrued=True,
cds_recovery_rate=0.40,
cds_protection_discretization=23,
# Curves
interpolation={
"dfs": "log_linear",
"values": "linear",
},
endpoints="natural",
multi_csa_steps=[
2,
5,
10,
20,
30,
50,
77,
81,
86,
91,
96,
103,
110,
119,
128,
140,
153,
169,
188,
212,
242,
281,
332,
401,
498,
636,
835,
1104,
1407,
1646,
1766,
1808,
1821,
1824,
1825,
],
multi_csa_min_step=1,
multi_csa_max_step=1825,
curve_caching=True,
curve_caching_max=1000,
# Solver
tag="v",
algorithm="levenberg_marquardt",
curve_not_in_solver="ignore", # or "warn" or "raise"
ini_lambda=(1000.0, 0.25, 2.0),
# bonds
calc_mode={
"FixedRateBond": "uk_gb",
"FloatRateNote": "uk_gb",
"Bill": "us_gbb",
"IndexFixedRateBond": "uk_gb",
},
settle=1,
ex_div=1,
calc_mode_futures="ytm",
# misc
pool=1,
no_fx_fixings_for_xcs="warn", # or "raise" or "ignore"
headers={
"type": "Type",
"stub_type": "Period",
"u_acc_start": "Unadj Acc Start",
"u_acc_end": "Unadj Acc End",
"a_acc_start": "Acc Start",
"a_acc_end": "Acc End",
"payment": "Payment",
"convention": "Convention",
"dcf": "DCF",
"df": "DF",
"notional": "Notional",
"reference_currency": "Reference Ccy",
"currency": "Ccy",
"fx_fixing": "FX Fixing",
"fx_fixing_date": "FX Fix Date",
"rate": "Rate",
"spread": "Spread",
"npv": "NPV",
"cashflow": "Cashflow",
"fx": "FX Rate",
"npv_fx": "NPV Ccy",
"base": "Base Ccy",
"unindexed_cashflow": "Unindexed Cashflow",
"index_fix_date": "Index Fix Date",
"index_value": "Index Val",
"index_ratio": "Index Ratio",
"index_base": "Index Base",
"collateral": "Collateral",
# Options headers
"pair": "Pair",
"expiry": "Expiry",
"t_e": "Time to Expiry",
"delivery": "Delivery",
"model": "Model",
"vol": "Vol",
"strike": "Strike",
# CDS headers
"survival": "Survival",
"recovery": "Recovery",
},
_global_ad_order=1,
oaspread_func_tol=1e-6,
oaspread_conv_tol=1e-8,
# Licence: Creative Commons - Attribution-NonCommercial-NoDerivatives 4.0 International
# Commercial use of this code, and/or copying and redistribution is prohibited.
# Contact rateslib at gmail.com if this code is observed outside its intended sphere.
spec=INSTRUMENT_SPECS,
fx_index={
# ISDA values determined from the ISDA MTM Matrix documentation
"eurusd": dict(
pair="eurusd",
calendar=NamedCal("tgt|fed"),
settle=Adjuster.BusDaysLagSettle(2),
isda_mtm_calendar=NamedCal("ldn"),
isda_mtm_settle=Adjuster.BusDaysLagSettle(-2),
allow_cross=False,
),
"eurgbp": dict(
pair="eurgbp",
calendar=NamedCal("ldn,tgt|fed"),
settle=Adjuster.BusDaysLagSettle(2),
isda_mtm_calendar=NamedCal("ldn"),
isda_mtm_settle=Adjuster.BusDaysLagSettle(-2),
allow_cross=True,
),
"eursek": dict(
pair="eursek",
calendar=NamedCal("tgt,stk|fed"),
settle=Adjuster.BusDaysLagSettle(2),
isda_mtm_calendar=NamedCal("tgt,stk"),
isda_mtm_settle=Adjuster.BusDaysLagSettle(-2),
allow_cross=False,
),
"gbpusd": dict(
pair="gbpusd",
calendar=NamedCal("ldn|fed"),
settle=Adjuster.BusDaysLagSettle(2),
isda_mtm_calendar=NamedCal("ldn"),
isda_mtm_settle=Adjuster.BusDaysLagSettle(-2),
allow_cross=False,
),
"usdcad": dict(
pair="usdcad",
calendar=NamedCal("tro|fed"),
settle=Adjuster.BusDaysLagSettle(1),
isda_mtm_calendar=NamedCal("tro,nyc,ldn"),
isda_mtm_settle=Adjuster.BusDaysLagSettle(-2),
allow_cross=False,
),
"gbpcad": dict(
pair="gbpcad",
calendar=NamedCal("tro,ldn|fed"),
settle=Adjuster.BusDaysLagSettle(2),
isda_mtm_calendar=NamedCal("tro,nyc,ldn"),
isda_mtm_settle=Adjuster.BusDaysLagSettle(-2),
allow_cross=True,
),
"usdnok": dict(
pair="usdnok",
calendar=NamedCal("osl|fed"),
settle=Adjuster.BusDaysLagSettle(2),
isda_mtm_calendar=NamedCal("osl"),
isda_mtm_settle=Adjuster.BusDaysLagSettle(-2),
allow_cross=True,
),
"usdsek": dict(
pair="usdsek",
calendar=NamedCal("stk|fed"),
settle=Adjuster.BusDaysLagSettle(2),
isda_mtm_calendar=NamedCal("stk"),
isda_mtm_settle=Adjuster.BusDaysLagSettle(-2),
allow_cross=True,
),
"chfsek": dict(
pair="chfsek",
calendar=NamedCal("stk,zur|fed"),
settle=Adjuster.BusDaysLagSettle(2),
isda_mtm_calendar=NamedCal("stk,zur,ldn"),
isda_mtm_settle=Adjuster.BusDaysLagSettle(-2),
allow_cross=True,
),
"usdchf": dict(
pair="usdchf",
calendar=NamedCal("zur|fed"),
settle=Adjuster.BusDaysLagSettle(2),
isda_mtm_calendar=NamedCal("ldn"),
isda_mtm_settle=Adjuster.BusDaysLagSettle(-2),
allow_cross=True,
),
"seknok": dict(
pair="seknok",
calendar=NamedCal("stk,osl|fed"),
settle=Adjuster.BusDaysLagSettle(2),
isda_mtm_calendar=NamedCal("stk,osl"),
isda_mtm_settle=Adjuster.BusDaysLagSettle(-2),
allow_cross=True,
),
"audusd": dict(
pair="audusd",
calendar=NamedCal("syd|fed"),
settle=Adjuster.BusDaysLagSettle(2),
isda_mtm_calendar=NamedCal("syd,nyc,ldn"),
isda_mtm_settle=Adjuster.BusDaysLagSettle(-2),
allow_cross=False,
),
"usdjpy": dict(
pair="usdjpy",
calendar=NamedCal("tyo|fed"),
settle=Adjuster.BusDaysLagSettle(2),
isda_mtm_calendar=NamedCal("tyo,nyc,ldn"),
isda_mtm_settle=Adjuster.BusDaysLagSettle(-2),
allow_cross=False,
),
"nzdusd": dict(
pair="nzdusd",
calendar=NamedCal("wlg|fed"),
settle=Adjuster.BusDaysLagSettle(2),
isda_mtm_calendar=NamedCal("wlg,nyc,ldn"),
isda_mtm_settle=Adjuster.BusDaysLagSettle(-2),
allow_cross=True,
),
# The following are not defined in the ISDA MTM Matrix
"audnzd": dict(
pair="audnzd",
calendar=NamedCal("wlg,syd|fed"),
settle=Adjuster.BusDaysLagSettle(2),
isda_mtm_calendar=NamedCal("wlg,syd,ldn,nyc"),
isda_mtm_settle=Adjuster.BusDaysLagSettle(-2),
allow_cross=True,
),
"usdinr": dict(
pair="usdinr",
calendar=NamedCal("mum|fed"),
settle=Adjuster.BusDaysLagSettle(2),
isda_mtm_calendar=NamedCal("mum"),
isda_mtm_settle=Adjuster.BusDaysLagSettle(-2),
allow_cross=False,
),
},
irs_series={
"eur_irs6": dict(
currency="eur",
settle=Adjuster.BusDaysLagSettle(2),
calendar="tgt",
modifier=Adjuster.ModifiedFollowing(),
convention=Convention.ThirtyE360,
leg2_convention=Convention.Act360,
frequency=Frequency.Months(12, None),
leg2_frequency=Frequency.Months(6, None),
leg2_fixing_method="ibor(2)",
eom=False,
payment_lag=Adjuster.BusDaysLagSettle(0),
),
"eur_irs3": dict(
currency="eur",
settle=Adjuster.BusDaysLagSettle(2),
calendar="tgt",
modifier=Adjuster.ModifiedFollowing(),
convention=Convention.ThirtyE360,
leg2_convention=Convention.Act360,
frequency=Frequency.Months(12, None),
leg2_frequency=Frequency.Months(3, None),
leg2_fixing_method="ibor(2)",
eom=False,
payment_lag=Adjuster.BusDaysLagSettle(0),
),
"usd_irs": dict(
currency="usd",
settle=Adjuster.BusDaysLagSettle(2),
calendar="nyc",
modifier=Adjuster.ModifiedFollowing(),
convention=Convention.Act360,
frequency=Frequency.Months(12, None),
leg2_fixing_method="rfr_payment_delay",
eom=False,
payment_lag=Adjuster.BusDaysLagSettle(2),
),
},
float_series={
"usd_ibor": dict(
lag=2,
calendar=NamedCal("nyc"),
modifier=Adjuster.ModifiedFollowing(),
convention=Convention.Act360,
eom=False,
tenors=["1B", "1W", "1M", "2M", "3M", "6M", "12M"],
),
"usd_rfr": dict(
lag=0,
calendar=NamedCal("nyc"),
modifier=Adjuster.Following(),
convention=Convention.Act360,
eom=False,
tenors=["1b"],
),
"gbp_ibor": dict(
lag=0,
calendar=NamedCal("ldn"),
modifier=Adjuster.ModifiedFollowing(),
convention=Convention.Act365F,
eom=True,
tenors=["1B", "1W", "1M", "2M", "3M", "6M", "12M"],
),
"gbp_rfr": dict(
lag=0,
calendar=NamedCal("ldn"),
modifier=Adjuster.Following(),
convention=Convention.Act365F,
eom=False,
tenors=["1b"],
),
"sek_ibor": dict(
lag=2,
calendar=NamedCal("ldn"),
modifier=Adjuster.ModifiedFollowing(),
convention=Convention.Act360,
eom=True,
tenors=["2B", "1W", "1M", "2M", "3M", "6M"],
),
"sek_rfr": dict(
lag=0,
calendar=NamedCal("ldn"),
modifier=Adjuster.Following(),
convention=Convention.Act360,
eom=False,
tenors=["1b"],
),
"eur_ibor": dict(
lag=2,
calendar=NamedCal("tgt"),
modifier=Adjuster.ModifiedFollowing(),
convention=Convention.Act360,
eom=False,
tenors=["1W", "1M", "3M", "6M", "12M"],
),
"eur_rfr": dict(
lag=0,
calendar=NamedCal("tgt"),
modifier=Adjuster.Following(),
convention=Convention.Act360,
eom=False,
tenors=["1b"],
),
"nok_ibor": dict(
lag=2,
calendar=NamedCal("osl"),
modifier=Adjuster.ModifiedFollowing(),
convention=Convention.Act360,
eom=False,
tenors=["1W", "1M", "2M", "3M", "6M"],
),
"nok_rfr": dict(
lag=0,
calendar=NamedCal("osl"),
modifier=Adjuster.Following(),
convention=Convention.Act365F,
eom=False,
tenors=["1b"],
),
"chf_ibor": dict(
lag=2,
calendar=NamedCal("zur"),
modifier=Adjuster.ModifiedFollowing(),
convention=Convention.Act360,
eom=False,
tenors=["1B", "1W", "1M", "2M", "3M", "6M", "12M"],
),
"chf_rfr": dict(
lag=0,
calendar=NamedCal("zur"),
modifier=Adjuster.Following(),
convention=Convention.Act360,
eom=False,
tenors=["1b"],
),
"cad_ibor": dict(
lag=2,
calendar=NamedCal("tro"),
modifier=Adjuster.ModifiedFollowing(),
convention=Convention.Act365F,
eom=False,
tenors=["1M", "2M", "3M", "6M", "12M"],
),
"cad_rfr": dict(
lag=0,
calendar=NamedCal("tro"),
modifier=Adjuster.Following(),
convention=Convention.Act365F,
eom=False,
tenors=["1b"],
),
"jpy_ibor": dict(
lag=2,
calendar=NamedCal("tyo"),
modifier=Adjuster.ModifiedFollowing(),
convention=Convention.Act365F,
eom=False,
tenors=["1M", "3M", "6M"],
),
"jpy_rfr": dict(
lag=0,
calendar=NamedCal("tyo"),
modifier=Adjuster.Following(),
convention=Convention.Act365F,
eom=False,
tenors=["1b"],
),
"aud_ibor": dict(
lag=0,
calendar=NamedCal("syd"),
modifier=Adjuster.ModifiedFollowing(),
convention=Convention.Act365F,
eom=True,
tenors=["1M", "2M", "3M", "4M", "5M", "6M"],
),
"aud_rfr": dict(
lag=0,
calendar=NamedCal("syd"),
modifier=Adjuster.Following(),
convention=Convention.Act365F,
eom=False,
tenors=["1b"],
),
"nzd_ibor": dict(
lag=0,
calendar=NamedCal("wlg"),
modifier=Adjuster.ModifiedFollowing(),
convention=Convention.Act365F,
eom=True,
tenors=["1M", "3M", "6M"],
),
"nzd_rfr": dict(
lag=0,
calendar=NamedCal("wlg"),
modifier=Adjuster.Following(),
convention=Convention.Act365F,
eom=False,
tenors=["1b"],
),
},
)
class Defaults:
"""
The *defaults* object used by initialising objects. Values are printed below:
.. ipython:: python
from rateslib import defaults
print(defaults.print())
"""
_instance = None
stub: str
stub_length: str
eval_mode: str
modifier: str
calendars: dict[str, CalTypes]
eom: bool
eom_fx: bool
metric: dict[str, str]
convention: str
notional: float
index_lag: int
index_lag_curve: int
index_method: str
payment_lag: int
payment_lag_exchange: int
payment_lag_specific: dict[str, int]
fixing_method: str
spread_compound_method: str
index_base_type: str
base_currency: str
fx_delivery_lag: int
fx_delta_type: str
fx_option_metric: str
ir_option_metric: str
ir_option_settlement: str
cds_premium_accrued: bool
cds_recovery_rate: float
cds_protection_discretization: int
interpolation: dict[str, str]
endpoints: str
multi_csa_steps: list[int]
multi_csa_min_step: int
multi_csa_max_step: int
curve_caching: bool
curve_caching_max: int
tag: str
algorithm: str
curve_not_in_solver: str
ini_lambda: tuple[int, float, float]
calc_mode: dict[str, str]
settle: int
ex_div: int
calc_mode_futures: str
pool: int
no_fx_fixings_for_xcs: str
headers: dict[str, str]
_global_ad_order: int
oaspread_func_tol: float
oaspread_conv_tol: float
# Licence: Creative Commons - Attribution-NonCommercial-NoDerivatives 4.0 International
# Commercial use of this code, and/or copying and redistribution is prohibited.
# Contact rateslib at gmail.com if this code is observed outside its intended sphere.
spec: dict[str, dict[str, Any]]
fx_index: dict[str, Any]
irs_series: dict[str, Any]
float_series: dict[str, Any]
def __new__(cls) -> Defaults:
if cls._instance is None:
# Singleton pattern creates only one instance: TODO (low) might not be thread safe
cls._instance = super(Defaults, cls).__new__(cls) # noqa: UP008
for k, v in DEFAULTS.items():
setattr(cls._instance, k, deepcopy(v))
return cls._instance
def reset_defaults(self) -> None:
"""
Revert defaults back to their initialisation status.
Examples
--------
.. ipython:: python
from rateslib import defaults
defaults.reset_defaults()
"""
attrs = [
v
for v in dir(self)
if "__" not in v and not callable(getattr(self, v)) and v != "_instance"
]
for attr in attrs:
delattr(self, attr)
for k, v in DEFAULTS.items():
setattr(self, k, deepcopy(v))
def print(self) -> str:
"""
Return a string representation of the current values in the defaults object.
"""
def _t_n(v: str) -> str: # teb-newline
return f"\t{v}\n"
_: str = f"""\
Scheduling:\n
{
"".join(
[
_t_n(f"{attribute}: {getattr(self, attribute)}")
for attribute in [
"stub",
"stub_length",
"modifier",
"eom",
"eom_fx",
"eval_mode",
]
]
)
}
Instruments:\n
{
"".join(
[
_t_n(f"{attribute}: {getattr(self, attribute)}")
for attribute in [
"convention",
"payment_lag",
"payment_lag_exchange",
"payment_lag_specific",
"notional",
"fixing_method",
"spread_compound_method",
"base_currency",
"fx_delivery_lag",
"fx_delta_type",
"fx_option_metric",
"cds_premium_accrued",
"cds_recovery_rate",
"cds_protection_discretization",
]
]
)
}
Curves:\n
{
"".join(
[
_t_n(f"{attribute}: {getattr(self, attribute)}")
for attribute in [
"interpolation",
"endpoints",
"multi_csa_steps",
"curve_caching",
]
]
)
}
Solver:\n
{
"".join(
[
_t_n(f"{attribute}: {getattr(self, attribute)}")
for attribute in [
"algorithm",
"tag",
"curve_not_in_solver",
]
]
)
}
Miscellaneous:\n
{
"".join(
[
_t_n(f"{attribute}: {getattr(self, attribute)}")
for attribute in [
"headers",
"no_fx_fixings_for_xcs",
"pool",
]
]
)
}
""" # noqa: W291
return _
def plot(
x: list[list[Any]], y: list[list[Any]], labels: list[str] | NoInput = NoInput(0)
) -> PlotOutput:
labels = _drb([], labels)
fig, ax = plt.subplots(1, 1)
lines = []
for _x, _y in zip(x, y, strict=True):
(line,) = ax.plot(_x, _y)
lines.append(line)
if not isinstance(labels, NoInput) and len(labels) == len(lines):
ax.legend(lines, labels)
ax.grid(True)
if isinstance(x[0][0], datetime):
years = mdates.YearLocator() # type: ignore[no-untyped-call]
months = mdates.MonthLocator() # type: ignore[no-untyped-call]
yearsFmt = mdates.DateFormatter("%Y") # type: ignore[no-untyped-call]
ax.xaxis.set_major_locator(years)
ax.xaxis.set_major_formatter(yearsFmt)
ax.xaxis.set_minor_locator(months)
fig.autofmt_xdate()
return fig, ax, lines
def plot3d(
x: list[Any], y: list[Any], z: np.ndarray[tuple[int, int], np.dtype[np.float64]]
) -> tuple[plt.Figure, plt.Axes, None]: # type: ignore[name-defined]
import matplotlib.pyplot as plt
from matplotlib import cm
# import matplotlib.dates as mdates # type: ignore[import]
fig, ax = plt.subplots(subplot_kw={"projection": "3d"})
X, Y = np.meshgrid(x, y)
# Plot the surface.
ax.plot_surface(X, Y, z, cmap=cm.coolwarm, linewidth=0, antialiased=False) # type: ignore[attr-defined]
return fig, ax, None
def _make_py_json(json: str, class_name: str) -> str:
"""Modifies the output JSON output for Rust structs wrapped by Python classes."""
return '{"PyWrapped":' + json + "}"
__all__ = ["Defaults"]
================================================
FILE: python/rateslib/dual/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from rateslib.dual.ift import ift_1dim
from rateslib.dual.newton import newton_1dim, newton_ndim
from rateslib.dual.quadratic import quadratic_eqn
from rateslib.dual.utils import (
dual_exp,
dual_inv_norm_cdf,
dual_log,
dual_norm_cdf,
dual_norm_pdf,
dual_solve,
gradient,
set_order,
set_order_convert,
)
from rateslib.dual.variable import Variable
from rateslib.rs import ADOrder, Dual, Dual2
Dual.__doc__ = """
Dual number data type to perform first derivative automatic differentiation.
Parameters
----------
real : float
The real coefficient of the dual number: its value.
vars : tuple/list of str
The labels of the variables for which to record derivatives. If empty,
the dual number represents a constant, equivalent to a float.
dual : list of float
First derivative information contained as coefficient of linear manifold.
Defaults to an array of ones the length of ``vars`` if empty.
See Also
---------
.. seealso::
:class:`~rateslib.dual.Dual2`: Dual number data type to perform second derivative automatic differentiation.
Examples
---------
.. ipython:: python
:suppress:
from rateslib.dual import Dual, gradient
.. ipython:: python
def func(x, y):
return 5 * x**2 + 10 * y**3
x = Dual(1.0, ["x"], [])
y = Dual(1.0, ["y"], [])
gradient(func(x,y), ["x", "y"])
""" # noqa: E501
Dual2.__doc__ = """
Dual number data type to perform second derivative automatic differentiation.
Parameters
-----------
real : float
The real coefficient of the dual number: its value.
vars : tuple/list of str
The labels of the variables for which to record derivatives. If empty,
the dual number represents a constant, equivalent to a float.
dual : list of float
First derivative information contained as coefficient of linear manifold.
Defaults to an array of ones the length of ``vars`` if empty.
dual2 : list of float
Second derivative information contained as coefficients of quadratic manifold.
Defaults to a 2d array of zeros the size of ``vars`` if empty.
These values represent a 2d array but must be given as a 1d list of values in row-major order,
which is reshaped.
See Also
--------
.. seealso::
:class:`~rateslib.dual.Dual`: Dual number data type to perform first derivative automatic differentiation.
Examples
---------
.. ipython:: python
from rateslib.dual import Dual2, gradient
def func(x, y):
return 5 * x**2 + 10 * y**3
x = Dual2(1.0, ["x"], [], [])
y = Dual2(1.0, ["y"], [], [])
gradient(func(x,y), ["x", "y"], order=2)
""" # noqa: E501
# Licence: Creative Commons - Attribution-NonCommercial-NoDerivatives 4.0 International
# Commercial use of this code, and/or copying and redistribution is prohibited.
# Contact rateslib at gmail.com if this code is observed outside its intended sphere.
__all__ = [
"ADOrder",
"Dual",
"Dual2",
"Variable",
"dual_log",
"dual_exp",
"dual_solve",
"dual_norm_pdf",
"dual_norm_cdf",
"dual_inv_norm_cdf",
"gradient",
"set_order_convert",
"set_order",
"newton_ndim",
"newton_1dim",
"ift_1dim",
"quadratic_eqn",
]
================================================
FILE: python/rateslib/dual/ift.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from collections.abc import Callable
from time import time
from typing import TYPE_CHECKING, Any, ParamSpec
import numpy as np
from rateslib.dual.newton import _dual_float_or_unchanged, _solver_result
from rateslib.dual.utils import _dual_float, _get_order_of, gradient, set_order
from rateslib.rs import Dual, Dual2
if TYPE_CHECKING:
from rateslib.local_types import DualTypes, Number
P = ParamSpec("P")
def ift_1dim(
s: Callable[[DualTypes], DualTypes],
s_tgt: DualTypes,
h: Callable[P, tuple[float, float, int, tuple[Any, ...]]] | str,
ini_h_args: tuple[Any, ...] = (),
max_iter: int = 50,
func_tol: float = 1e-14,
conv_tol: float = 1e-9,
raise_on_fail: bool = True,
) -> dict[str, Any]:
r"""
A **one** dimensional root solver using the inverse function theorem to capture
AD sensitivities.
This method can be used to find the value of :math:`g(s)` for a given :math:`s_{tgt}`, where:
- :math:`g(s)` is **not** analytical and hence requires iterations to determine.
- :math:`s(g)` is a known analytical inverse of :math:`g`.
This problem is framed by finding the root of :math:`f(g) = s(g) - s_{tgt} = 0`.
Parameters
----------
s: Callable[DualTypes, DualTypes]
The known inverse function of *g* such that *g(s(x))=x*. Of the signature: `s(x)`.
s_tgt: DualTypes
The value of *s* for which *g* is to be found.
h: Callable, string
The iterative function to use to determine the solution g. See notes.
ini_h_args:
Initial arguments passed to the iterative function, ``h``.
max_iter: int > 1
Number of maximum iterations to perform.
func_tol: float, optional
The absolute function tolerance to reach before exiting.
conv_tol: float, optional
The convergence tolerance for subsequent iterations of *g*, passed to ``h`` to implement.
raise_on_fail: bool, optional
If *False* will return a solver result dict with state and message indicating failure.
Notes
------
**Available iterative methods**
The iteration algorithm to find the root can be given directly as a callable ``h`` or
can be specified from one of the below pre-implemented algorithms:
- **'bisection'**: repeatedly halves an interval and selects the interval in which the root
falls, until convergence. Requires ``ini_h_args`` to be a tuple of two floats defining
the interval whose *f* values have opposite signs.
- **'modified_dekker'**: enhances the *'bisection'* method to include a *'secant'* step when
it produces a better iterate. Requires ``ini_h_args`` to be a tuple of two floats defining
the interval whose *f* values have opposite signs. For info see
:download:`Halving Interval for Dekker<_static/modified-dekker.pdf>`.
- **'modified_brent'**: enhances the *'modified_dekker'* method to also permit
inverse quadratic interpolation within an iteration.
Requires ``ini_h_args`` to be a tuple of two floats defining
the interval whose *f* values have opposite signs. For info see
:download:`Halving Interval for Dekker<_static/modified-dekker.pdf>`.
- **'ytm_quadratic'**: Requires ``ini_h_args`` to be a tuple of three floats defining the
interval and interior point. This algorithm utilises successive parabolic approximations
for *g(f)* and is specifically tuned for solving bond yield-to-maturity efficiently.
**What is ``h``**
*h()* is a function that is used to perform iterations to determine *g* from *s*. If
a custom function is provided, it must conform to the following signature:
`h(s, s_target, conv_tol, *h_args) -> (g_i, f_i, state, *h_args_i)`
The input parameters provide:
- *s*: The inverse function of *g* such that *g(s(x))=x*.
- *s_target*: The target value of *s* for which *g* is to be found.
- *conv_tol*: The convergence tolerance which is measured internally by *h*.
- *h_args*: Additional arguments passed to *h* which facilitate its internal operation.
The output parameters provide:
- *g_i*: The value of *g* at the current iteration, representative of :math:`g(s_i)`.
- *f_i*: A measure of error in the iteration
- *state*: A state flag return from the iteration as indicator to the controlling process.
- *h_args_i*: Arguments passed to the next iteration of *h*.
``state`` flag returns are:
- -2: The algorithm failed for an internal reason.
- 1: `conv_tol` has been satisfied and the solution is considered to have converged.
- None: The algorithm has not yet converged and will continue.
**AD Implementation**
The AD order of the solution is determined by the AD order of the ``s_tgt`` input.
Examples
--------
The most prevalent use of this technique in *rateslib* is to solve bond yield-to-maturity from
a given price. Suppose we develop a formula, *s(g)* which determines the price (*s*) of a
2y bond with 3% annual coupon given its ytm (*g*):
.. math::
s(g) = \frac{3}{1+g/100} + \frac{103}{(1+g/100)^2}
Then we use the *bisection* method to discover the ytm given a price of 101:
.. ipython:: python
from rateslib.dual import ift_1dim, Dual
def s(g):
return 3 / (1 + g / 100) + 103 / (1 + g / 100) ** 2
# solve for a bond price of 101 with lower and upper ytm bounds of 2.0 and 3.0.
result = ift_1dim(s, Dual(101.0, ["price"], []), "bisection", (2.0, 3.0))
print(result)
For **traditional root solving** the function :math:`s(g)` is given with the :math:`s_{tgt}`
set to zero, therefore the returned *g* will be the root of *s(g)*.
.. ipython:: python
def s(g):
return g ** 2 - 2
result = ift_1dim(s, 0.0, "modified_brent", (-2.0, 0.0))
print(result)
"""
if isinstance(h, str):
h_: Callable[P, tuple[float, float, int, tuple[Any, ...]]] = ift_map[h]
else:
h_ = h
t0 = time()
i = 1
float_ini_hargs = tuple(_dual_float_or_unchanged(_) for _ in ini_h_args)
s0_: float = _dual_float(s_tgt)
g0, f0, state, *hargs = h_(s, s0_, conv_tol, *float_ini_hargs) # type: ignore[call-arg, arg-type]
while i < max_iter:
if state == 1:
g1 = g0
break
elif state == -2:
if raise_on_fail:
raise ValueError(
"The internal iterative function `h` has reported a iteration failure."
)
else:
return _solver_result(-2, i, g0, time() - t0, log=True, algo="ift_1dim")
if abs(f0) < func_tol:
state = 2
g1 = g0
break
g1, f1, state, *hargs = h_(s, s0_, conv_tol, *hargs) # type: ignore[call-arg, arg-type]
i += 1
g0 = g1
f0 = f1
if i == max_iter:
if raise_on_fail:
raise ValueError(
f"`max_iter`: {max_iter} exceeded in 'ift_1dim' algorithm'.\n"
f"Last iteration values:\nf0: {f0}\nf1: {f1}\ng0: {g0}"
)
else:
return _solver_result(-1, i, g1, time() - t0, log=True, algo="ift_1dim")
# # IFT to preserve AD # TODO: this uses `set_order` to handle Variable, maybe `_to_number`?
ad_order = _get_order_of(s_tgt)
if ad_order == 0:
# return g1 as is.
ret: Number = g1
elif ad_order == 1:
s_: Dual | Dual2 = s(Dual(g1, ["x"], [])) # type: ignore[assignment]
ds_dx = gradient(s_, vars=["x"])[0]
ret = Dual.vars_from(set_order(s_tgt, 1), g1, s_tgt.vars, 1.0 / ds_dx * s_tgt.dual) # type: ignore[union-attr, arg-type]
else: # ad_order == 2
s_ = s(Dual2(g1, ["x"], [], [])) # type: ignore[assignment]
ds_dx = gradient(s_, vars=["x"])[0]
d2s_dx2 = gradient(s_, vars=["x"], order=2)[0][0]
ret = Dual2.vars_from(
set_order(s_tgt, 2), # type: ignore[arg-type]
g1,
s_tgt.vars, # type: ignore[union-attr, arg-type]
1.0 / ds_dx * s_tgt.dual, # type: ignore[union-attr]
np.ravel(
1.0 / ds_dx * s_tgt.dual2 # type: ignore[union-attr]
- 0.5 * d2s_dx2 * ds_dx**-3 * np.outer(s_tgt.dual, s_tgt.dual) # type: ignore[union-attr]
),
)
return _solver_result(state, i, ret, time() - t0, log=False, algo="ift_1dim")
def _bisection(
s: Callable[[DualTypes], DualTypes],
s_tgt: float,
conv_tol: float,
g_lower: float,
g_upper: float,
s_lower: float | None = None,
s_upper: float | None = None,
) -> tuple[float, float, int | None, float, float, float]:
"""
Perform an iteration by bisection.
The bounds `g` must yield values of `s` that are either side of the target value.
The interval will be bisected and the side kept that envelopes the target value.
All calculated values are returned to prevent re-calculation in the next iteration.
The `ini_hargs` needed for this method are only (g_lower, g_upper).
Returns
-------
g_i, f_i, state, *h_args_i
"""
if s_lower is None:
s_lower = _dual_float(s(g_lower))
if s_upper is None:
s_upper = _dual_float(s(g_upper))
f_lower = s_lower - s_tgt
f_upper = s_upper - s_tgt
if _dual_float(f_lower * f_upper) > 0:
# return a failed state because boundaries must be opposite sign to imply root.
return 0, 0, -2, 0, 0, 0
g_mid = (g_lower + g_upper) / 2.0
s_mid = _dual_float(s(g_mid))
f_mid = s_mid - s_tgt
if abs(g_mid - g_lower) < conv_tol:
state: int | None = 1
else:
state = None
if _dual_float(f_lower * f_mid) > 0:
# then lower and mid have same sign so must return upper interval
if abs(f_mid) < abs(f_upper):
return g_mid, f_mid, state, g_mid, g_upper, s_mid, s_upper # type: ignore[return-value]
else:
return g_upper, f_upper, state, g_mid, g_upper, s_mid, s_upper # type: ignore[return-value]
else:
# then lower and mid have opposite sign so return the lower interval
if abs(f_mid) < abs(f_lower):
# g_mid is closest to the target value with g_lower being the better side
return g_mid, f_mid, state, g_lower, g_mid, s_lower, s_mid # type: ignore[return-value]
else:
return g_lower, f_lower, state, g_lower, g_mid, s_lower, s_mid # type: ignore[return-value]
def _root_f(x: float, s: Callable[[DualTypes], DualTypes], s_tgt: float) -> float:
"""Root reformulation for Dekker's algorithm"""
return s(x) - s_tgt # type: ignore[return-value]
def _dekker(
s: Callable[[DualTypes], DualTypes],
s_tgt: float,
conv_tol: float,
a_k: float,
b_k: float,
b_k_m1: float | None = None,
cached_f_a_k: float | None = None,
cached_f_b_k: float | None = None,
cached_f_b_k_m1: float | None = None,
) -> tuple[float, float, int | None, float, float, float, float, float, float]:
"""
Alternative root solver.
See docs/source/_static/modified-dekker.pdf for details.
Cached values allow value transmission from one function to the next with many efficiencies.
"""
if b_k_m1 is None: # (which is read b k minus 1)
# b_k_m1 is None only once. This indicates the first iteration so no caches are present.
f_a_k = _dual_float(_root_f(a_k, s, s_tgt))
f_b_k = _dual_float(_root_f(b_k, s, s_tgt))
if abs(f_a_k) < abs(f_b_k):
# switch to make b_k the 'closest' solution
f_a_k, f_b_k = f_b_k, f_a_k
a_k, b_k = b_k, a_k
# in the first iteration set b_k_m1 = a_k
b_k_m1 = a_k
f_b_k_m1 = f_a_k
else:
# subsequent iterations will contain all cached values
f_a_k = cached_f_a_k # type: ignore[assignment]
f_b_k = cached_f_b_k # type: ignore[assignment]
f_b_k_m1 = cached_f_b_k_m1 # type: ignore[assignment]
if abs(a_k - b_k) < conv_tol:
# the interval is within tolerance so report converged, b_k should be the 'best' solution.
return b_k, f_b_k, 1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0
m = (a_k + b_k) / 2.0 # midpoint
# secant
if abs(f_b_k - f_b_k_m1) < 1e-16:
# secant is divide by zero error
q = max(b_k, m) + 1.0
else:
q = b_k - f_b_k * (b_k - b_k_m1) / (f_b_k - f_b_k_m1)
if q >= min(b_k, m) and q <= max(b_k, m):
b_k_p1 = q # accept the secant as an estimated better iterate
else:
b_k_p1 = m # discard the secant as it is outside of the window
f_b_k_p1 = _dual_float(_root_f(b_k_p1, s, s_tgt))
# determine a_k_p1
a_k_p1 = a_k
f_a_k_p1 = f_a_k
if f_a_k * f_b_k_p1 > 0:
a_k_p1 = b_k
f_a_k_p1 = f_b_k
elif q >= min(b_k, m) and q <= max(b_k, m):
f_m = _dual_float(_root_f(m, s, s_tgt))
if f_m * f_b_k_p1 < 0:
a_k_p1 = m
f_a_k_p1 = f_m
if abs(f_a_k_p1) < abs(f_b_k_p1):
# switch to make b_k the 'best' solution
f_a_k_p1, f_b_k_p1 = f_b_k_p1, f_a_k_p1
a_k_p1, b_k_p1 = b_k_p1, a_k_p1
if abs(f_b_k_p1 - f_b_k) < 1e-15:
# also switch the existing values to avoid secant divide by zero errros
f_b_k, b_k = f_a_k, a_k
# f_a_k, f_b_k = f_b_k, f_a_k
# a_k, b_k = b_k, a_k
return b_k_p1, f_b_k_p1, None, a_k_p1, b_k_p1, b_k, f_a_k_p1, f_b_k_p1, f_b_k
def _brent(
s: Callable[[DualTypes], DualTypes],
s_tgt: float,
conv_tol: float,
a_k: float,
b_k: float,
b_k_m1: float | None = None,
cached_f_a_k: float | None = None,
cached_f_b_k: float | None = None,
cached_f_b_k_m1: float | None = None,
) -> tuple[float, float, int | None, float, float, float, float, float, float]:
"""
Alternative root solver.
See docs/source/_static/modified-dekker.pdf for details.
Cached values allow value transmission from one function to the next with many efficiencies.
"""
if b_k_m1 is None: # (which is read b k minus 1)
# b_k_m1 is None only once. This indicates the first iteration so no caches are present.
f_a_k = _dual_float(_root_f(a_k, s, s_tgt))
f_b_k = _dual_float(_root_f(b_k, s, s_tgt))
if abs(f_a_k) < abs(f_b_k):
# switch to make b_k the 'closest' solution
f_a_k, f_b_k = f_b_k, f_a_k
a_k, b_k = b_k, a_k
# in the first iteration set b_k_m1 = a_k
b_k_m1 = a_k
f_b_k_m1 = f_a_k
else:
# subsequent iterations will contain all cached values
f_a_k = cached_f_a_k # type: ignore[assignment]
f_b_k = cached_f_b_k # type: ignore[assignment]
f_b_k_m1 = cached_f_b_k_m1 # type: ignore[assignment]
if abs(a_k - b_k) < conv_tol:
return b_k, f_b_k, 1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0
m = (a_k + b_k) / 2.0
# provisional values for the next iteration
if f_a_k != f_b_k and f_a_k != f_b_k_m1 and f_b_k != f_b_k_m1:
# then all three function values are distinct: use inverse quadratic interpolation
fba = f_b_k / f_a_k
fbbm1 = f_b_k / f_b_k_m1
fabm1 = f_a_k / f_b_k_m1
numerator = fba * ((1.0 - fbbm1) * (a_k - b_k) + fabm1 * (fbbm1 - fabm1) * (b_k_m1 - b_k))
denominator = (fbbm1 - 1.0) * (fba - 1.0) * (fabm1 - 1.0)
q = b_k + numerator / denominator
else:
# use secant
if abs(f_b_k - f_b_k_m1) < 1e-16:
# secant is div by zero error this ensures bisection is chosen
q = min(b_k, (3.0 * a_k + b_k) / 4.0) - 1.0
else:
q = b_k - f_b_k * (b_k - b_k_m1) / (f_b_k - f_b_k_m1)
w = (min(b_k, (3.0 * a_k + b_k) / 4.0), max(b_k, (3.0 * a_k + b_k) / 4.0))
if q <= w[0] or q >= w[1]:
q = m
b_k_p1 = q
f_b_k_p1 = _dual_float(_root_f(b_k_p1, s, s_tgt))
a_k_p1 = a_k
f_a_k_p1 = f_a_k
if float(f_a_k * f_b_k_p1) > 0:
a_k_p1 = b_k
f_a_k_p1 = f_b_k
else:
f_m = _dual_float(_root_f(m, s, s_tgt))
if float(f_m * f_b_k_p1) < 0:
a_k_p1 = m
f_a_k_p1 = f_m
if abs(f_a_k_p1) < abs(f_b_k_p1):
# switch to make b_k the 'best' solution
f_a_k_p1, f_b_k_p1 = f_b_k_p1, f_a_k_p1
a_k_p1, b_k_p1 = b_k_p1, a_k_p1
# # also switch the existing values
# f_a_k, f_b_k = f_b_k, f_a_k
# a_k, b_k = b_k, a_k
return b_k_p1, f_b_k_p1, None, a_k_p1, b_k_p1, b_k, f_a_k_p1, f_b_k_p1, f_b_k
def _ytm_quadratic(
s: Callable[[DualTypes], DualTypes],
s_tgt: float,
conv_tol: float,
g0: float,
g1: float,
g2: float,
cached_f0: float | None = None,
cached_f1: float | None = None,
cached_f2: float | None = None,
) -> tuple[float, float, int | None, float, float, float, float | None, float | None, float | None]:
"""
Alternative root solver.
See docs/source/_static/modified-dekker.pdf for details.
Cached values allow value transmission from one function to the next with many efficiencies.
Returns
-------
g_i, f_i=s_i-s_tgt, state, *h_args_i = (g0, g1, g2, f0, f1, f2)
"""
# Load cached values
f0: float = cached_f0 if cached_f0 is not None else _root_f(g0, s, s_tgt)
f1: float = cached_f1 if cached_f1 is not None else _root_f(g1, s, s_tgt)
f2: float = cached_f2 if cached_f2 is not None else _root_f(g2, s, s_tgt)
# Test interval: if all values are same sign translate the interval.
if f0 < 0 and f1 < 0 and f2 < 0:
# then g(s*) must be
g0_ = g0 - (g2 - g0)
g1_ = g1 - (g2 - g1)
g2_ = g0
return g1_, 1e9, None, g0_, g1_, g2_, None, None, None
elif f0 > 0 and f1 > 0 and f2 > 0:
g0_ = g2
g1_ = g1 + (g2 - g0)
g2_ = g2 + 2 * (g2 - g0)
return g1_, 1e9, None, g0_, g1_, g2_, None, None, None
# Solve g_new via quadratic approximation
# # Linear algebra solution
# _b = np.array([g0, g1, g2])[:, None]
# _A = np.array([[f0**2, f0, 1], [f1**2, f1, 1], [f2**2, f2, 1]])
# x = np.linalg.solve(_A, _b)
# g_new = x[2, 0]
# Analytical solution (via Gaussian elimination)
f012, f022, f01, f02, g01, g02 = (
f0**2 - f1**2,
f0**2 - f2**2,
f0 - f1,
f0 - f2,
g0 - g1,
g0 - g2,
)
x0 = (g01 * f02 - g02 * f01) / (f012 * f02 - f022 * f01)
x1 = (g01 - x0 * f012) / f01
x2 = g0 - x1 * f0 - x0 * f0**2
g_new = x2
# # Lagrange interpolation formula is a valid alternative solution
# g_new_compare = g0 * f1 * f2 / ((f0 - f1) * (f0 - f2))
# g_new_compare += g1 * f0 * f2 / ((f1 - f0) * (f1 - f2))
# g_new_compare += g2 * f0 * f1 / ((f2 - f0) * (f2 - f1))
# assert abs(g_new_compare - g_new) < 1e-8
if g_new < g0 or g_new > g2:
# if the quadratic approximation is outside the interval then use a bisection method
if f0 * f1 < 0:
# bisect in the left hand side
g_new = g0 + (g1 - g0) * f0 / (f0 - f1)
else:
# bisect in the right hand side
g_new = g1 - (g2 - g1) * f1 / (f2 - f1)
f_new = _root_f(g_new, s, s_tgt)
for g_ in [g0, g1, g2]:
if abs(g_ - g_new) < conv_tol:
return g_new, f_new, 1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0
if g0 < g_new and g_new < g1:
return g_new, f_new, None, g0, g_new, g1, f0, f_new, f1
else: # g1 < g_new and g_new < g2:
return g_new, f_new, None, g1, g_new, g2, f1, f_new, f2
# else:
# raise RuntimeError("Unexpected interval: this line should never be reached.")
def _quadratic_approx(
s: Callable[[DualTypes], DualTypes],
s_tgt: float,
conv_tol: float,
g0: float,
g1: float,
g2: float,
cached_f0: float | None = None,
cached_f1: float | None = None,
cached_f2: float | None = None,
) -> tuple[float, float, int | None, float, float, float, float | None, float | None, float | None]:
"""
Appro
Cached values allow value transmission from one function to the next with many efficiencies.
Returns
-------
g_i, f_i=s_i-s_tgt, state, *h_args_i = (g0, g1, g2, f0, f1, f2)
"""
# Load cached values
f0: float = cached_f0 if cached_f0 is not None else _root_f(g0, s, s_tgt)
f1: float = cached_f1 if cached_f1 is not None else _root_f(g1, s, s_tgt)
f2: float = cached_f2 if cached_f2 is not None else _root_f(g2, s, s_tgt)
# Test interval: if all values are same sign translate the guess interval.
if (f0 < 0 and f1 < 0 and f2 < 0) or (f0 > 0 and f1 > 0 and f2 > 0):
# Then all f = s-s_tgt are above or below zero and there is no crossing point.
# Shift the entire initial guesses lower or higher based the linear gradient.
if (f0 < 0 and f2 > f0) or (f0 > 0 and f2 < f0):
# Shift g to the right
g0_ = g2
g1_ = g1 + (g2 - g0)
g2_ = g2 + 2 * (g2 - g0)
return g1_, conv_tol, None, g0_, g1_, g2_, f2, None, None
else:
# Shift g to the left
g0_ = g0 - (g2 - g0)
g1_ = g1 - (g2 - g1)
g2_ = g0
return g1_, conv_tol, None, g0_, g1_, g2_, None, None, f0
# Solve g_new via quadratic approximation
# # Linear algebra solution
# _b = np.array([g0, g1, g2])[:, None]
# _A = np.array([[f0**2, f0, 1], [f1**2, f1, 1], [f2**2, f2, 1]])
# x = np.linalg.solve(_A, _b)
# g_new = x[2, 0]
# Analytical solution
f012, f022, f01, f02, g01, g02 = (
f0**2 - f1**2,
f0**2 - f2**2,
f0 - f1,
f0 - f2,
g0 - g1,
g0 - g2,
)
x0 = (g01 * f02 - g02 * f01) / (f012 * f02 - f022 * f01)
x1 = (g01 - x0 * f012) / f01
x2 = g0 - x1 * f0 - x0 * f0**2
g_new = x2
if g_new < g0 or g_new > g2:
# if the quadratic approximation is outside the interval then use a bisection method
if f0 * f1 < 0:
# bisect in the left hand side
g_new = g0 + (g1 - g0) * f0 / (f0 - f1)
else:
# bisect in the right hand side
g_new = g1 - (g2 - g1) * f1 / (f2 - f1)
f_new = _root_f(g_new, s, s_tgt)
for g_ in [g0, g1, g2]:
if abs(g_ - g_new) < conv_tol:
return g_new, f_new, 1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0
if g0 < g_new and g_new < g1:
return g_new, f_new, None, g0, g_new, g1, f0, f_new, f1
else: # g1 < g_new and g_new < g2:
return g_new, f_new, None, g1, g_new, g2, f1, f_new, f2
# else:
# raise RuntimeError("Unexpected interval: this line should never be reached.")
ift_map: dict[str, Callable[P, tuple[float, float, int, tuple[Any, ...]]]] = {
"bisection": _bisection, # type: ignore[dict-item]
"modified_dekker": _dekker, # type: ignore[dict-item]
"modified_brent": _brent, # type: ignore[dict-item]
"ytm_quadratic": _ytm_quadratic, # type: ignore[dict-item]
"quadratic_approx": _quadratic_approx, # type: ignore[dict-item]
}
================================================
FILE: python/rateslib/dual/newton.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from collections.abc import Callable, Sequence
from time import time
from typing import TYPE_CHECKING, Any, ParamSpec, TypeVar
import numpy as np
from rateslib.dual.utils import _dual_float, dual_solve
from rateslib.dual.variable import Variable
from rateslib.rs import Dual, Dual2
if TYPE_CHECKING:
from rateslib.local_types import DualTypes
P = ParamSpec("P")
# Licence: Creative Commons - Attribution-NonCommercial-NoDerivatives 4.0 International
# Commercial use of this code, and/or copying and redistribution is prohibited.
# Contact rateslib at gmail.com if this code is observed outside its intended sphere.
STATE_MAP = {
1: ["SUCCESS", "`conv_tol` reached"],
2: ["SUCCESS", "`func_tol` reached"],
3: ["SUCCESS", "closed form valid"],
4: ["SUCCESS", "`step_tol` reached"],
5: ["SUCCESS", "`grad_tol` reached"],
-1: ["FAILURE", "`max_iter` breached"],
-2: ["FAILURE", "internal iteration function failure"],
}
def _solver_result(
state: int, i: int, func_val: DualTypes, time: float, log: bool, algo: str
) -> dict[str, Any]:
if log:
print(
f"{STATE_MAP[state][0]}: {STATE_MAP[state][1]} after {i} iterations "
f"({algo}), `f_val`: {func_val}, "
f"`time`: {time:.4f}s",
)
return {
"status": STATE_MAP[state][0],
"state": state,
"g": func_val,
"iterations": i,
"time": time,
}
T = TypeVar("T")
def _dual_float_or_unchanged(x: T | DualTypes) -> T | float:
"""If x is a DualType convert it to float otherwise leave it as is"""
if isinstance(x, float | Dual | Dual2 | Variable):
return _dual_float(x)
return x
def newton_1dim(
f: Callable[P, tuple[DualTypes, DualTypes]],
g0: DualTypes,
max_iter: int = 50,
func_tol: float = 1e-14,
conv_tol: float = 1e-9,
args: tuple[Any, ...] = (),
pre_args: tuple[Any, ...] = (),
final_args: tuple[Any, ...] = (),
raise_on_fail: bool = True,
) -> dict[str, Any]:
"""
Use the Newton-Raphson algorithm to determine the root of a function searching **one** variable.
Parameters
----------
f: callable
The function, *f*, to find the root of. Of the signature: `f(g, *args)`.
Must return a tuple where the second value is the derivative of *f* with respect to *g*.
g0: DualTypes
Initial guess of the root. Should be reasonable to avoid failure.
max_iter: int
The maximum number of iterations to try before exiting.
func_tol: float, optional
The absolute function tolerance to reach before exiting.
conv_tol: float, optional
The convergence tolerance for subsequent iterations of *g*.
args: tuple of float, Dual, Dual2 or str
Additional arguments passed to ``f``.
pre_args: tuple of float, Dual, Dual2 or str
Additional arguments passed to ``f`` used only in the float solve section of
the algorithm.
Functions are called with the signature `f(g, *(*args[as float], *pre_args))`.
final_args: tuple of float, Dual, Dual2 or str
Additional arguments passed to ``f`` in the final iteration of the algorithm
to capture AD sensitivities.
Functions are called with the signature `f(g, *(*args, *final_args))`.
raise_on_fail: bool, optional
If *False* will return a solver result dict with state and message indicating failure.
Returns
-------
dict
Notes
------
Solves the root equation :math:`f(g; s_i)=0` for *g*. This method is AD-safe, meaning the
iteratively determined solution will preserve AD sensitivities, if the functions are suitable.
Functions which are not AD suitable, such as discontinuous functions or functions with
no derivative at given points, may yield spurious derivative results.
This method works by first solving in the domain of floats (which is typically faster
for most complex functions), and then performing final iterations in higher AD modes to
capture derivative sensitivities.
For special cases arguments can be passed separately to each of these modes using the
``pre_args`` and ``final_args`` arguments, rather than generically supplying it to ``args``.
Examples
--------
Iteratively solve the equation: :math:`f(g, s) = g^2 - s = 0`. This has solution
:math:`g=\\pm \\sqrt{s}` and :math:`\\frac{dg}{ds} = \\frac{1}{2 \\sqrt{s}}`.
Thus for :math:`s=2` we expect the solution :code:`g=Dual(1.41.., ["s"], [0.35..])`.
.. ipython:: python
from rateslib.dual import newton_1dim
def f(g, s):
f0 = g**2 - s # Function value
f1 = 2*g # Analytical derivative is required
return f0, f1
s = Dual(2.0, ["s"], [])
newton_1dim(f, g0=1.0, args=(s,))
"""
t0 = time()
i = 0
# First attempt solution using faster float calculations
float_args = tuple(_dual_float_or_unchanged(_) for _ in args)
g0 = _dual_float(g0)
state = -1
while i < max_iter:
f0, f1 = f(*(g0, *float_args, *pre_args)) # type: ignore[call-arg]
i += 1
g1 = g0 - f0 / f1
if abs(f0) < func_tol:
state = 2
break
elif abs(g1 - g0) < conv_tol:
state = 1
break
g0 = g1
if i == max_iter:
if raise_on_fail:
raise ValueError(
f"`max_iter`: {max_iter} exceeded in 'newton_1dim' algorithm'.\n"
f"Last iteration values:\nf0: {f0}\nf1: {f1}\ng0: {g0}"
)
else:
return _solver_result(-1, i, g1, time() - t0, log=True, algo="newton_1dim")
# # Final iteration method to preserve AD
f0, f1 = f(*(g1, *args, *final_args)) # type: ignore[call-arg]
if isinstance(f0, Dual | Dual2) or isinstance(f1, Dual | Dual2):
i += 1
g1 = g1 - f0 / f1
if isinstance(f0, Dual2) or isinstance(f1, Dual2):
f0, f1 = f(*(g1, *args, *final_args)) # type: ignore[call-arg]
i += 1
g1 = g1 - f0 / f1
# # Analytical approach to capture AD sensitivities
# f0, f1 = f(g1, *(*args, *final_args))
# if isinstance(f0, Dual):
# g1 = Dual.vars_from(f0, float(g1), f0.vars, float(f1) ** -1 * -gradient(f0))
# if isinstance(f0, Dual2):
# g1 = Dual2.vars_from(f0, float(g1), f0.vars, float(f1) ** -1 * -gradient(f0), [])
# f02, f1 = f(g1, *(*args, *final_args))
#
# #f0_beta = gradient(f0, order=1, vars=f0.vars, keep_manifold=True)
#
# f0_gamma = gradient(f02, order=2)
# f0_beta = gradient(f0, order=1)
# # f1 = set_order_convert(g1, tag=[], order=2)
# f1_gamma = gradient(f1, f0.vars, order=2)
# f1_beta = gradient(f1, f0.vars, order=1)
#
# g1_beta = -float(f1) ** -1 * f0_beta
# g1_gamma = (
# -float(f1)**-1 * f0_gamma +
# float(f1)**-2 * (
# np.matmul(f0_beta[:, None], f1_beta[None, :]) +
# np.matmul(f1_beta[:, None], f0_beta[None, :]) +
# float(f0) * f1_gamma
# ) -
# 2 * float(f1)**-3 * float(f0) * np.matmul(f1_beta[:, None], f1_beta[None, :])
# )
# g1 = Dual2.vars_from(f0, float(g1), f0.vars, g1_beta, g1_gamma.flatten())
return _solver_result(state, i, g1, time() - t0, log=False, algo="newton_1dim")
def newton_ndim(
f: Callable[P, tuple[Any, Any]],
g0: Sequence[DualTypes],
max_iter: int = 50,
func_tol: float = 1e-14,
conv_tol: float = 1e-9,
args: tuple[Any, ...] = (),
pre_args: tuple[Any, ...] = (),
final_args: tuple[Any, ...] = (),
raise_on_fail: bool = True,
) -> dict[str, Any]:
r"""
Use the Newton-Raphson algorithm to determine a function root searching **many** variables.
Solves the *n* root equations :math:`f_i(g_1, \hdots, g_n; s_k)=0` for each :math:`g_j`.
Parameters
----------
f: callable
The function, *f*, to find the root of. Of the signature: `f([g_1, .., g_n], *args)`.
Must return a tuple where the second value is the Jacobian of *f* with respect to *g*.
g0: Sequence of DualTypes
Initial guess of the root values. Should be reasonable to avoid failure.
max_iter: int
The maximum number of iterations to try before exiting.
func_tol: float, optional
The absolute function tolerance to reach before exiting.
conv_tol: float, optional
The convergence tolerance for subsequent iterations of *g*.
args: tuple of float, Dual or Dual2
Additional arguments passed to ``f``.
pre_args: tuple
Additional arguments passed to ``f`` only in the float solve section
of the algorithm.
Functions are called with the signature `f(g, *(*args[as float], *pre_args))`.
final_args: tuple of float, Dual, Dual2
Additional arguments passed to ``f`` in the final iteration of the algorithm
to capture AD sensitivities.
Functions are called with the signature `f(g, *(*args, *final_args))`.
raise_on_fail: bool, optional
If *False* will return a solver result dict with state and message indicating failure.
Returns
-------
dict
Examples
--------
Iteratively solve the equation system:
- :math:`f_0(\mathbf{g}, s) = g_1^2 + g_2^2 + s = 0`.
- :math:`f_1(\mathbf{g}, s) = g_1^2 - 2g_2^2 + s = 0`.
.. ipython:: python
from rateslib.dual import newton_ndim
def f(g, s):
# Function value
f0 = g[0] ** 2 + g[1] ** 2 + s
f1 = g[0] ** 2 - 2 * g[1]**2 - s
# Analytical derivative as Jacobian matrix is required
f00 = 2 * g[0]
f01 = 2 * g[1]
f10 = 2 * g[0]
f11 = -4 * g[1]
return [f0, f1], [[f00, f01], [f10, f11]]
s = Dual(-2.0, ["s"], [])
newton_ndim(f, g0=[1.0, 1.0], args=(s,))
"""
t0 = time()
i = 0
n = len(g0)
# First attempt solution using faster float calculations
float_args = tuple(_dual_float_or_unchanged(_) for _ in args)
g0_ = np.array([_dual_float(_) for _ in g0])
state = -1
while i < max_iter:
f0, f1 = f(*(g0_, *float_args, *pre_args)) # type: ignore[call-arg]
f0 = np.array(f0)[:, np.newaxis]
f1 = np.array(f1)
i += 1
g1 = g0_ - np.matmul(np.linalg.inv(f1), f0)[:, 0]
if all(abs(_) < func_tol for _ in f0[:, 0]):
state = 2
break
elif all(abs(g1[_] - g0_[_]) < conv_tol for _ in range(n)):
state = 1
break
g0_ = g1
if i == max_iter:
if raise_on_fail:
raise ValueError(f"`max_iter`: {max_iter} exceeded in 'newton_ndim' algorithm'.")
else:
return _solver_result(-1, i, g1, time() - t0, log=True, algo="newton_ndim")
# Final iteration method to preserve AD
f0, f1 = f(*(g1, *args, *final_args)) # type: ignore[call-arg]
f1, f0 = np.array(f1), np.array(f0)
# get AD type
ad: int = 0
if _is_any_dual(f0) or _is_any_dual(f1):
ad = 1
DualType: type[Dual] | type[Dual2] = Dual
elif _is_any_dual2(f0) or _is_any_dual2(f1):
ad = 2
DualType = Dual2
if ad > 0:
i += 1
g1 = g0_ - dual_solve(f1, f0[:, None], allow_lsq=False, types=(DualType, DualType))[:, 0]
if ad == 2:
f0, f1 = f(*(g1, *args, *final_args)) # type: ignore[call-arg]
f1, f0 = np.array(f1), np.array(f0)
i += 1
g1 = g1 - dual_solve(f1, f0[:, None], allow_lsq=False, types=(DualType, DualType))[:, 0]
return _solver_result(state, i, g1, time() - t0, log=False, algo="newton_ndim")
def _is_any_dual(arr: np.ndarray[tuple[int, ...], np.dtype[np.object_]]) -> bool:
return any(isinstance(_, Dual) for _ in arr.flatten())
def _is_any_dual2(arr: np.ndarray[tuple[int, ...], np.dtype[np.object_]]) -> bool:
return any(isinstance(_, Dual2) for _ in arr.flatten())
================================================
FILE: python/rateslib/dual/quadratic.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Any
from rateslib.dual.newton import _solver_result
if TYPE_CHECKING:
from rateslib.local_types import DualTypes
# Licence: Creative Commons - Attribution-NonCommercial-NoDerivatives 4.0 International
# Commercial use of this code, and/or copying and redistribution is prohibited.
# Contact rateslib at gmail.com if this code is observed outside its intended sphere.
def quadratic_eqn(
a: DualTypes, b: DualTypes, c: DualTypes, x0: DualTypes, raise_on_fail: bool = True
) -> dict[str, Any]:
"""
Solve the quadratic equation, :math:`ax^2 + bx +c = 0`, with error reporting.
Parameters
----------
a: float, Dual Dual2
The *a* coefficient value.
b: float, Dual Dual2
The *b* coefficient value.
c: float, Dual Dual2
The *c* coefficient value.
x0: float, Dual, Dual2
The expected solution to discriminate between two possible solutions.
raise_on_fail: bool, optional
Whether to raise if unsolved or return a solver result in failed state.
Returns
-------
dict
Notes
-----
If ``a`` is evaluated to be less that 1e-15 in absolute terms then it is treated as zero and the
equation is solved as a linear equation in ``b`` and ``c`` only.
Examples
--------
.. ipython:: python
from rateslib.dual import quadratic_eqn
quadratic_eqn(a=1.0, b=1.0, c=Dual(-6.0, ["c"], []), x0=-2.9)
"""
discriminant = b**2 - 4 * a * c
if discriminant < 0.0:
if raise_on_fail:
raise ValueError("`quadratic_eqn` has failed to solve: discriminant is less than zero.")
else:
return _solver_result(
state=-1,
i=0,
func_val=1e308,
time=0.0,
log=True,
algo="quadratic_eqn",
)
if abs(a) > 1e-15: # machine tolerance on normal float64 is 2.22e-16
sqrt_d = discriminant**0.5
_1 = (-b + sqrt_d) / (2 * a)
_2 = (-b - sqrt_d) / (2 * a)
if abs(x0 - _1) < abs(x0 - _2):
return _solver_result(
state=3,
i=1,
func_val=_1,
time=0.0,
log=False,
algo="quadratic_eqn",
)
else:
return _solver_result(
state=3,
i=1,
func_val=_2,
time=0.0,
log=False,
algo="quadratic_eqn",
)
else:
# 'a' is considered too close to zero for the quadratic eqn, solve the linear eqn
# to avoid division by zero errors
return _solver_result(
state=3,
i=1,
func_val=-c / b,
time=0.0,
log=False,
algo="quadratic_eqn->linear_eqn",
)
================================================
FILE: python/rateslib/dual/utils.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import math
from functools import partial
from statistics import NormalDist
from typing import TYPE_CHECKING
import numpy as np
from rateslib import defaults
from rateslib.dual.variable import FLOATS, INTS, Variable
from rateslib.enums.generics import Err, NoInput, Ok
from rateslib.rs import ADOrder, Dual, Dual2, _dsolve1, _dsolve2, _fdsolve1, _fdsolve2
if TYPE_CHECKING:
from rateslib.local_types import (
Any,
Arr1dF64,
Arr1dObj,
Arr2dF64,
Arr2dObj,
DualTypes,
Number,
Result,
Sequence,
)
Dual.__doc__ = "Dual number data type to perform first derivative automatic differentiation."
Dual2.__doc__ = "Dual number data type to perform second derivative automatic differentiation."
# Licence: Creative Commons - Attribution-NonCommercial-NoDerivatives 4.0 International
# Commercial use of this code, and/or copying and redistribution is prohibited.
# Contact rateslib at gmail.com if this code is observed outside its intended sphere.
def _dual_float(val: DualTypes) -> float:
"""Overload for the float() builtin to handle Pyo3 issues with Variable"""
try:
return float(val) # type: ignore[arg-type]
except TypeError as e: # val is not Number but a Variable
if isinstance(val, Variable):
# This does not work well with rust.
# See: https://github.com/PyO3/pyo3/issues/3672
# and https://github.com/PyO3/pyo3/discussions/3911
return val.real
raise e
def _dual_round(val: DualTypes, ndigits: int) -> DualTypes:
"""Overload for the round() builtin to handle Duals: ONLY impacting the real quantity"""
try:
return round(val, ndigits) # type: ignore[arg-type]
except TypeError as e: # val is not Number but a Variable
if isinstance(val, Dual):
return Dual.vars_from(val, round(val.real, ndigits), val.vars, val.dual)
elif isinstance(val, Dual2):
return Dual2.vars_from(
val, round(val.real, ndigits), val.vars, val.dual, val.dual2.ravel()
)
elif isinstance(val, Variable):
return Variable(round(val.real, ndigits), vars=val.vars, dual=val.dual)
raise e
def _float_or_none(val: DualTypes | None | NoInput | Result[DualTypes]) -> float | None:
if val is None or isinstance(val, NoInput | Err):
return None
elif isinstance(val, Ok):
return _float_or_none(val.unwrap())
else:
return _dual_float(val)
def _abs_float(val: DualTypes) -> float:
"""Overload the abs() builtin to return the abs of the real component only"""
if isinstance(val, Dual | Dual2 | Variable):
return abs(val.real)
else:
return abs(val)
def _get_order_of(val: DualTypes) -> int:
"""Get the AD order of a DualType including checking the globals for the current order."""
if isinstance(val, Dual):
ad_order: int = 1
elif isinstance(val, Dual2):
ad_order = 2
elif isinstance(val, Variable):
ad_order = defaults._global_ad_order
else:
ad_order = 0
return ad_order
def _to_number(val: DualTypes) -> Number:
"""Convert a DualType to a Number Type by casting a Variable to the required global AD order."""
if isinstance(val, Variable):
return set_order(val, defaults._global_ad_order)
return val
def set_order(val: DualTypes, order: int) -> Number:
"""
Changes the order of a :class:`Dual` or :class:`Dual2` and a sets a :class:`Variable`.
Parameters
----------
val : float, Dual, Dual2, Variable
The value to convert the order of.
order : int in [0, 1, 2]
The AD order to convert to. If ``val`` is float or int 0 will be used.
Returns
-------
float, Dual or Dual2
Notes
------
**floats** are not affected by this function. There is no benefit to converting
one of these types to a dual number type with no tagged variable sensitivity.
If ``order`` is **zero**, all objects are converted to float.
If ``order`` is **one**, *Dual2* are converted to *Dual* by dropping second order gradients.
If ``order`` is **two**, *Dual* are converted to *DUal2* by setting second order gradients to
default zero values.
"""
if order == 0:
return _dual_float(val)
elif order == 1:
if isinstance(val, Dual):
return val
elif isinstance(val, Dual2 | Variable):
return val.to_dual()
return val # as float
else: # order == 2
if isinstance(val, Dual2):
return val
elif isinstance(val, Dual | Variable):
return val.to_dual2()
return val # as float
def set_order_convert(
val: DualTypes, order: int, tag: list[str] | None, vars_from: Dual | Dual2 | None = None
) -> Number:
"""
Convert a float, :class:`Dual` or :class:`Dual2` type to a specified alternate type with
tagged variables.
Parameters
----------
val : float, Dual, Dual2, Variable
The value to convert.
order : int
The AD order to convert the value to if necessary.
tag : list of str, optional
The variable name(s) if upcasting a float to a Dual or Dual2
vars_from : optional, Dual or Dual2
A pre-existing Dual of correct order from which the Vars are extracted. Improves efficiency
when given.
Returns
-------
float, Dual, Dual2
Notes
-----
This function is used for AD variable management.
``tag`` and ``vars_from`` are only used when floats are upcast and the variables need to be
specifically define.
"""
if isinstance(val, FLOATS | INTS):
_ = [] if tag is None else tag
if order == 0:
return float(val)
elif order == 1:
if vars_from is None:
return Dual(val, _, [])
elif isinstance(vars_from, Dual):
return Dual.vars_from(vars_from, val, _, [])
else:
raise TypeError("`vars_from` must be a Dual when converting to ADOrder:1.")
elif order == 2:
if vars_from is None:
return Dual2(val, _, [], [])
elif isinstance(vars_from, Dual2):
return Dual2.vars_from(vars_from, val, _, [], [])
else:
raise TypeError("`vars_from` must be a Dual2 when converting to ADOrder:2.")
# else val is Dual or Dual2 so convert directly
return set_order(val, order)
def gradient(
dual: DualTypes,
vars: Sequence[str] | None = None, # noqa: A002
order: int = 1,
keep_manifold: bool = False,
) -> Arr1dF64 | Arr2dF64:
"""
Return derivatives of a dual number.
Parameters
----------
dual : Dual, Dual2, Variable, float
The dual variable from which to derive derivatives.
vars : str, tuple, list optional
Name of the variables which to return gradients for. If not given
defaults to all vars attributed to the instance.
order : {1, 2}
Whether to return the first or second derivative of the dual number.
Second order will raise if applied to a ``Dual`` and not ``Dual2`` instance.
keep_manifold : bool
If ``order`` is 1 and the type is ``Dual2`` one can return a ``Dual2``
where the ``dual2`` values are converted to ``dual`` values to represent
a first order manifold of the first derivative (and the ``dual2`` values
set to zero). Useful for propagation in iterations.
Returns
-------
float, ndarray, Dual2
"""
_validate_keep_manifold(keep_manifold, order, dual)
if order == 1:
if not isinstance(dual, Dual | Dual2 | Variable):
if isinstance(dual, float | int):
return np.zeros(shape=(len(vars) if vars is not None else 0,))
else:
raise TypeError("Can call `gradient` only on dual-type variables.")
if isinstance(dual, Variable):
dual = Dual(dual.real, vars=dual.vars, dual=dual.dual)
if vars is None and not keep_manifold:
return dual.dual
elif vars is not None and not keep_manifold:
return dual.grad1(vars)
_ = dual.grad1_manifold(dual.vars if vars is None else vars) # type: ignore[union-attr]
return np.asarray(_)
elif order == 2:
if not isinstance(dual, Dual | Dual2 | Variable):
if isinstance(dual, float | int):
n = len(vars) if vars is not None else 0
return np.zeros(shape=(n, n))
else:
raise TypeError("Can call `gradient` only on dual-type variables.")
if isinstance(dual, Variable):
dual = Dual2(dual.real, vars=dual.vars, dual=dual.dual, dual2=[])
elif isinstance(dual, Dual):
raise TypeError("Dual type cannot derive second order automatic derivatives.")
if vars is None:
return 2.0 * dual.dual2
else:
return dual.grad2(vars)
else:
raise ValueError("`order` must be in {1, 2} for gradient calculation.")
def _validate_keep_manifold(keep_manifold: bool, order: int, dual: DualTypes) -> None:
"""Validate the keep_manifold argument for gradient."""
if keep_manifold and not isinstance(dual, Dual2):
if isinstance(dual, Dual):
raise TypeError("Dual type cannot perform `keep_manifold`.")
elif isinstance(dual, Variable):
raise TypeError("Variable type cannot perform `keep_manifold`.")
else:
raise TypeError("Float type cannot perform `keep_manifold`.")
def dual_exp(x: DualTypes) -> Number:
"""
Calculate the exponential value of a regular int or float or a dual number.
Parameters
----------
x : int, float, Dual, Dual2, Variable
Value to calculate exponent of.
Returns
-------
float, Dual, Dual2
"""
if isinstance(x, Dual | Dual2 | Variable):
return x.__exp__()
return math.exp(x)
def dual_log(x: DualTypes, base: int | None = None) -> Number:
"""
Calculate the logarithm of a regular int or float or a dual number.
Parameters
----------
x : int, float, Dual, Dual2, Variable
Value to calculate exponent of.
base : int, float, optional
Base of the logarithm. Defaults to e to compute natural logarithm
Returns
-------
float, Dual, Dual2
"""
if isinstance(x, Dual | Dual2 | Variable):
val = x.__log__()
if base is None:
return val
else:
return val * (1 / math.log(base))
elif base is None:
return math.log(x)
else:
return math.log(x, base)
def dual_norm_pdf(x: DualTypes) -> Number:
"""
Return the standard normal probability density function.
Parameters
----------
x : float, Dual, Dual2, Variable
Returns
-------
float, Dual, Dual2
"""
return dual_exp(-0.5 * x**2) / math.sqrt(2.0 * math.pi)
def dual_norm_cdf(x: DualTypes) -> Number:
"""
Return the cumulative standard normal distribution for given value.
Parameters
----------
x : float, Dual, Dual2, Variable
Returns
-------
float, Dual, Dual2
"""
if isinstance(x, Dual | Dual2 | Variable):
return x.__norm_cdf__()
else:
return NormalDist().cdf(x)
def dual_inv_norm_cdf(x: DualTypes) -> Number:
"""
Return the inverse cumulative standard normal distribution for given value.
Parameters
----------
x : float, Dual, Dual2, Variable
Returns
-------
float, Dual, Dual2
"""
if isinstance(x, Dual | Dual2 | Variable):
return x.__norm_inv_cdf__()
else:
return NormalDist().inv_cdf(x)
def dual_solve(
A: Arr2dObj | Arr2dF64,
b: Arr1dObj | Arr1dF64,
allow_lsq: bool = False,
types: tuple[type[float] | type[Dual] | type[Dual2], type[float] | type[Dual] | type[Dual2]] = (
Dual,
Dual,
),
) -> Arr1dObj | Arr1dF64:
"""
Solve a linear system of equations involving dual number data types.
The `x` value is found for the equation :math:`Ax=b`.
.. warning::
This method has not yet implemented :class:`~rateslib.dual.Variable` types.
Parameters
----------
A: 2-d array
Left side matrix of values.
b: 1-d array
Right side vector of values.
allow_lsq: bool
Whether to allow solutions for non-square `A`, i.e. when `len(b) > len(x)`.
types: tuple
Defining the input data type elements of `A` and `b`, e.g. (float, float) or (Dual, Dual).
Returns
-------
1-d array
"""
if types == (float, float):
# Use basic Numpy LinAlg
if allow_lsq:
return np.linalg.lstsq(A, b, rcond=None)[0] # type: ignore[arg-type]
else:
return np.linalg.solve(A, b) # type: ignore[arg-type]
# Move to Rust implementation
if types in [(Dual, float), (Dual2, float)]:
raise TypeError(
"Not implemented for type crossing. Use (Dual, Dual) or (Dual2, Dual2). It is no less"
"efficient to preconvert `b` to dual types and then solve.",
)
map_ = {float: 0, Dual: 1, Dual2: 2}
A_ = np.vectorize(partial(set_order_convert, tag=[], order=map_[types[0]], vars_from=None))(A)
b_ = np.vectorize(partial(set_order_convert, tag=[], order=map_[types[1]], vars_from=None))(b)
a_ = [item for sublist in A_.tolist() for item in sublist] # 1D array of A_
b_ = b_[:, 0].tolist()
if types == (Dual, Dual):
return np.array(_dsolve1(a_, b_, allow_lsq))[:, None]
elif types == (Dual2, Dual2):
return np.array(_dsolve2(a_, b_, allow_lsq))[:, None]
elif types == (float, Dual):
return np.array(_fdsolve1(A_, b_, allow_lsq))[:, None]
elif types == (float, Dual2):
return np.array(_fdsolve2(A_, b_, allow_lsq))[:, None]
else:
raise TypeError(
"Provided `types` argument are not permitted. Must be a 2-tuple with "
"elements from {float, Dual, Dual2}"
)
def _get_adorder(order: int) -> ADOrder:
"""Convert int AD order to an ADOrder enum type."""
if order == 1:
return ADOrder.One
elif order == 0:
return ADOrder.Zero
elif order == 2:
return ADOrder.Two
else:
raise ValueError("Order for AD can only be in {0,1,2}")
def _set_ad_order_objects(order: list[int] | dict[int, int], objs: list[Any]) -> dict[int, int]:
"""
Set the order on multiple Objects, returning their previous order indexed my memory id.
Parameters
----------
order: list[int] or dict[int,int]
A list of orders to set the objects to. If a dict indexed my memory id.
objs: list[Any]
A list of objects to convert the AD orders of.
Returns
-------
dict[int]
Notes
-----
If an Object does not have a `_set_ad_order` method then
it will simply be passed and return 0 for its associated
previous AD order.
"""
# this function catches duplicate objects that are identical by memory id
if isinstance(order, list) and len(order) != len(objs):
raise ValueError("`order` and `objs` must have the same length")
original_order: dict[int, int] = {}
for i, obj in enumerate(objs):
if id(obj) in original_order:
continue # object has already been parsed
_ad = getattr(obj, "_ad", None)
if _ad is None:
# object cannot be set_ad_order
continue
if isinstance(order, dict):
obj._set_ad_order(order[id(obj)])
original_order[id(obj)] = _ad
else: # isinstance(order, list)
obj._set_ad_order(order[i])
original_order[id(obj)] = _ad
return original_order
================================================
FILE: python/rateslib/dual/variable.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import json
import math
from collections.abc import Sequence
from typing import TYPE_CHECKING, Any
import numpy as np
from rateslib import defaults
from rateslib.enums.generics import NoInput
from rateslib.rs import Dual, Dual2
if TYPE_CHECKING:
from rateslib.local_types import Arr1dF64
PRECISION = 1e-14
FLOATS = float | np.float16 | np.float32 | np.float64 | np.longdouble
INTS = int | np.int8 | np.int16 | np.int32 | np.int32 | np.int64
class Variable:
"""
A user defined, exogenous variable that automatically converts to a
:class:`~rateslib.dual.Dual` or
:class:`~rateslib.dual.Dual2` type dependent upon the overall AD calculation order.
See :ref:`what is an exogenous variable? `
Parameters
----------
real : float
The real coefficient of the underlying dual number.
vars : tuple of str, optional
The labels of the variables for which to record derivatives. If not given
the *Variable* represents a constant - it would be better to define just a float.
dual : 1d ndarray, optional
First derivative information contained as coefficient of linear manifold.
Defaults to an array of ones the length of ``vars`` if not given.
Attributes
----------
real : float
vars : str, tuple of str
dual : 1d ndarray
"""
def __init__(
self,
real: float,
vars: Sequence[str] = (), # noqa: A002
dual: list[float] | Arr1dF64 | NoInput = NoInput(0),
):
self.real: float = float(real)
self.vars: tuple[str, ...] = tuple(vars)
n = len(self.vars)
if isinstance(dual, NoInput) or len(dual) == 0:
self.dual: Arr1dF64 = np.ones(n, dtype=np.float64)
else:
self.dual = np.asarray(dual.copy())
def _to_dual_type(self, order: int) -> Dual | Dual2:
if order == 1:
_: Dual | Dual2 = self.to_dual()
return _
elif order == 2:
_ = self.to_dual2()
return _
else:
raise TypeError(
f"`Variable` can only be converted with `order` in [1, 2], got order: {order}."
)
def to_json(self) -> str:
"""
Serialize this object to JSON format.
The object can be deserialized using the :meth:`~rateslib.serialization.from_json` method.
Returns
-------
str
"""
obj = dict(
PyNative=dict(
Variable=dict(
real=self.real,
vars=self.vars,
dual=list(self.dual),
)
)
)
return json.dumps(obj)
@classmethod
def _from_json(cls, loaded_json: dict[str, Any]) -> Variable:
return Variable(
real=loaded_json["real"],
vars=loaded_json["vars"],
dual=loaded_json["dual"],
)
def to_dual(self) -> Dual:
return Dual(self.real, vars=self.vars, dual=self.dual)
def to_dual2(self) -> Dual2:
return Dual2(self.real, vars=self.vars, dual=self.dual, dual2=[])
def __eq__(self, argument: Any) -> bool:
"""
Compare an argument with a Variable for equality.
This does not account for variable ordering.
"""
if not isinstance(argument, type(self)):
return False
if self.vars == argument.vars:
return self.__eq_coeffs__(argument, PRECISION)
return False
def __lt__(self, other: Any) -> bool:
return self.real.__lt__(other)
def __le__(self, other: Any) -> bool:
return self.real.__le__(other)
def __gt__(self, other: Any) -> bool:
return self.real.__gt__(other)
def __ge__(self, other: Any) -> bool:
return self.real.__ge__(other)
def __eq_coeffs__(self, argument: Dual | Dual2 | Variable, precision: float) -> bool:
"""Compare the coefficients of two dual array numbers for equality."""
return not (
not math.isclose(self.real, argument.real, abs_tol=precision)
or not np.all(np.isclose(self.dual, argument.dual, atol=precision))
)
# def __float__(self):
# This does not work well with rust.
# See: https://github.com/PyO3/pyo3/issues/3672
# and https://github.com/PyO3/pyo3/discussions/3911
# return self.real
def __abs__(self) -> float:
return abs(self.real)
def __neg__(self) -> Variable:
return Variable(-self.real, vars=self.vars, dual=-self.dual)
def __add__(self, other: Dual | Dual2 | float | Variable) -> Dual | Dual2 | Variable:
if isinstance(other, Variable):
_1 = self._to_dual_type(defaults._global_ad_order)
_2 = other._to_dual_type(defaults._global_ad_order)
return _1.__add__(_2)
elif isinstance(other, FLOATS | INTS):
return Variable(self.real + float(other), vars=self.vars, dual=self.dual)
elif isinstance(other, Dual):
return Dual(self.real, vars=self.vars, dual=self.dual).__add__(other)
elif isinstance(other, Dual2):
return Dual2(self.real, vars=self.vars, dual=self.dual, dual2=[]).__add__(other)
else:
raise TypeError(f"No operation defined between `Variable` and type: `{type(other)}`")
def __radd__(self, other: Dual | Dual2 | float | Variable) -> Dual | Dual2 | Variable:
return self.__add__(other)
def __rsub__(self, other: Dual | Dual2 | float | Variable) -> Dual | Dual2 | Variable:
return (self.__neg__()).__add__(other)
def __sub__(self, other: Dual | Dual2 | float | Variable) -> Dual | Dual2 | Variable:
return self.__add__(other.__neg__())
def __mul__(self, other: Dual | Dual2 | float | Variable) -> Dual | Dual2 | Variable:
if isinstance(other, Variable):
_1 = self._to_dual_type(defaults._global_ad_order)
_2 = other._to_dual_type(defaults._global_ad_order)
return _1.__mul__(_2)
elif isinstance(other, FLOATS | INTS):
return Variable(self.real * float(other), vars=self.vars, dual=self.dual * float(other))
elif isinstance(other, Dual):
return Dual(self.real, vars=self.vars, dual=self.dual).__mul__(other)
elif isinstance(other, Dual2):
return Dual2(self.real, vars=self.vars, dual=self.dual, dual2=[]).__mul__(other)
else:
raise TypeError(f"No operation defined between `Variable` and type: `{type(other)}`")
def __rmul__(self, other: Dual | Dual2 | float | Variable) -> Dual | Dual2 | Variable:
return self.__mul__(other)
def __truediv__(self, other: Dual | Dual2 | float | Variable) -> Dual | Dual2 | Variable:
if isinstance(other, Variable):
_1 = self._to_dual_type(defaults._global_ad_order)
_2 = other._to_dual_type(defaults._global_ad_order)
return _1.__truediv__(_2)
elif isinstance(other, FLOATS | INTS):
return Variable(self.real / float(other), vars=self.vars, dual=self.dual / float(other))
elif isinstance(other, Dual):
return Dual(self.real, vars=self.vars, dual=self.dual).__truediv__(other)
elif isinstance(other, Dual2):
return Dual2(self.real, vars=self.vars, dual=self.dual, dual2=[]).__truediv__(other)
else:
raise TypeError(f"No operation defined between `Variable` and type: `{type(other)}`")
def __rtruediv__(self, other: Dual | Dual2 | float | Variable) -> Dual | Dual2 | Variable:
if isinstance(other, Variable):
# cannot reach this line
raise TypeError("Impossible line execution - please report issue.") # pragma: no cover
elif isinstance(other, FLOATS | INTS):
_1 = Variable(other, ())
return _1 / self
elif isinstance(other, Dual):
_ = Dual(self.real, vars=self.vars, dual=self.dual)
return other.__truediv__(_)
elif isinstance(other, Dual2):
_ = Dual2(self.real, vars=self.vars, dual=self.dual, dual2=[])
return other.__truediv__(_)
else:
raise TypeError(f"No operation defined between `Variable` and type: `{type(other)}`")
def __exp__(self) -> Dual | Dual2:
_1 = self._to_dual_type(defaults._global_ad_order)
return _1.__exp__()
def __log__(self) -> Dual | Dual2:
_1 = self._to_dual_type(defaults._global_ad_order)
return _1.__log__()
def __norm_cdf__(self) -> Dual | Dual2:
_1 = self._to_dual_type(defaults._global_ad_order)
return _1.__norm_cdf__()
def __norm_inv_cdf__(self) -> Dual | Dual2:
_1 = self._to_dual_type(defaults._global_ad_order)
return _1.__norm_inv_cdf__()
def __pow__(self, exponent: float | Dual | Dual2, modulo: int | None = None) -> Dual | Dual2:
_1 = self._to_dual_type(defaults._global_ad_order)
return _1.__pow__(exponent, modulo)
def __repr__(self) -> str:
a = ", ".join(self.vars[:3])
b = ", ".join([str(_) for _ in self.dual[:3]])
if len(self.vars) > 3:
a += ", ..."
b += ", ..."
return f""
================================================
FILE: python/rateslib/enums/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from rateslib.enums.generics import Err, NoInput, Ok, Result
from rateslib.enums.parameters import (
FloatFixingMethod,
FXDeltaMethod,
FXOptionMetric,
IndexMethod,
IROptionMetric,
LegIndexBase,
LegMtm,
OptionPricingModel,
OptionType,
SpreadCompoundMethod,
SwaptionSettlementMethod,
)
__all__ = [
"FloatFixingMethod",
"SpreadCompoundMethod",
"IndexMethod",
"FXDeltaMethod",
"SwaptionSettlementMethod",
"FXOptionMetric",
"IROptionMetric",
"OptionPricingModel",
"OptionType",
"LegMtm",
"LegIndexBase",
"NoInput",
"Result",
"Ok",
"Err",
]
================================================
FILE: python/rateslib/enums/generics.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from enum import Enum
from typing import Any, Generic, NoReturn, TypeAlias, TypeVar
T = TypeVar("T")
class Err:
"""
Standard result class indicating **failure** and containing some *Exception* type.
"""
_exception: Exception
def __init__(self, exception: Exception) -> None:
self._exception = exception
def __repr__(self) -> str:
return f""
@property
def is_err(self) -> bool:
return True
@property
def is_ok(self) -> bool:
return False
def unwrap(self) -> NoReturn:
raise self._exception
class Ok(Generic[T]):
"""Standard result class indicating **success** and containing some value."""
_value: T
def __init__(self, value: T) -> None:
self._value = value
def __repr__(self) -> str:
return f""
@property
def is_err(self) -> bool:
return False
@property
def is_ok(self) -> bool:
return True
def unwrap(self) -> T:
return self._value
Result: TypeAlias = Ok[T] | Err
class NoInput(Enum):
"""
Enumerable type to handle setting default values.
See :ref:`default values `.
"""
blank = 0
inherit = 1
negate = -1
def _validate_obj_not_no_input(obj: T | NoInput, expected: str) -> T:
if isinstance(obj, NoInput):
raise ValueError(f"Object of type `{expected}` must be supplied. Got NoInput.")
return obj
def _try_validate_obj_not_no_input(obj: T | NoInput, expected: str) -> Result[T]:
if isinstance(obj, NoInput):
return Err(ValueError(f"Object of type `{expected}` must be supplied. Got NoInput."))
else:
return Ok(obj)
def _drb(default: Any, possible_ni: Any | NoInput) -> Any:
"""(D)efault (r)eplaces (b)lank"""
return default if isinstance(possible_ni, NoInput) else possible_ni
================================================
FILE: python/rateslib/enums/parameters.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from enum import Enum
from typing import TYPE_CHECKING
from rateslib.rs import FloatFixingMethod, IROptionMetric, LegIndexBase
if TYPE_CHECKING:
from typing import NoReturn # TODO: convert to Never on Python >= 3.11
# LegIndexBase.__doc__ = """
# Enumerable type for placement of ``index_base_date`` on each *Period* within a *Leg*.
#
# .. rubric:: Variants
#
# .. ipython:: python
# :suppress:
#
# from rateslib.rs import LegIndexBase
# variants = [item for item in LegIndexBase.__dict__ if \\
# "__" != item[:2] and \\
# item not in ['to_json'] \
# ]
#
# .. ipython:: python
#
# variants
#
# This is a **simple** enum and does not require initialization with other parameters. For example
#
# .. ipython:: python
#
# _ = LegIndexBase.Initial
#
# """
class OptionType(float, Enum):
"""
Enumerable type to define option directions.
"""
Put = -1.0
Call = 1.0
class FXOptionMetric(Enum):
"""
Enumerable type for FXOption metrics.
"""
Pips = 0
Percent = 1
class OptionPricingModel(Enum):
"""
Enumerable type for option pricing models
"""
Black76 = 0
Bachelier = 1
class SwaptionSettlementMethod(Enum):
"""
Enumerable type for swaption settlement methods.
"""
Physical = 0
CashParTenor = 1
CashCollateralized = 2
class LegMtm(Enum):
"""
Enumerable type to define :class:`~rateslib.data.fixings.FXFixing` dates for non-deliverable
*Legs*.
For further information see non-deliverability **Notes** of :class:`~rateslib.legs.FixedLeg`.
"""
Initial = 0
XCS = 1
Payment = 2
class IndexMethod(Enum):
"""
Enumerable type to define determining the index value on some reference value date.
Notes
-----
``Curve`` variant derives an index value directly from a *Curve* by using its discount factors
and its index base date.
"""
Daily = 0
Monthly = 1
Curve = 2
def __str__(self) -> str:
return self.name
class SpreadCompoundMethod(Enum):
"""
Enumerable type to define spread compounding methods for floating rates.
"""
NoneSimple = 0
ISDACompounding = 1
ISDAFlatCompounding = 2
def __str__(self) -> str:
return self.name
class FXDeltaMethod(Enum):
"""
Enumerable type to define the delta expression of an FX option.
"""
Forward = 0
Spot = 1
ForwardPremiumAdjusted = 2
SpotPremiumAdjusted = 3
def __str__(self) -> str:
return self.name
_OPTION_PRICING_MAP = {
"black76": OptionPricingModel.Black76,
"bachelier": OptionPricingModel.Bachelier,
# aliases
"black": OptionPricingModel.Black76,
"log_normal": OptionPricingModel.Black76,
"normal": OptionPricingModel.Bachelier,
"normal_vol": OptionPricingModel.Bachelier,
"log_normal_vol": OptionPricingModel.Black76,
"black_vol": OptionPricingModel.Black76,
"black_vol_shift": OptionPricingModel.Black76,
}
def _get_option_pricing_model(
method: str | OptionPricingModel,
) -> OptionPricingModel:
if isinstance(method, OptionPricingModel):
return method
else:
try:
return _OPTION_PRICING_MAP[method.lower()]
except KeyError:
raise ValueError(
f"`pricing_model` as string: '{method}' is not a valid option. Please consult docs."
)
_SWAPTION_SETTLEMENT_MAP = {
"physical": SwaptionSettlementMethod.Physical,
"cash_par_tenor": SwaptionSettlementMethod.CashParTenor,
"cash_collateralized": SwaptionSettlementMethod.CashCollateralized,
# aliases
"cashcollateralized": SwaptionSettlementMethod.CashCollateralized,
"cashpartenor": SwaptionSettlementMethod.CashParTenor,
}
def _get_swaption_settlement_method(
method: str | SwaptionSettlementMethod,
) -> SwaptionSettlementMethod:
if isinstance(method, SwaptionSettlementMethod):
return method
else:
try:
return _SWAPTION_SETTLEMENT_MAP[method.lower()]
except KeyError:
raise ValueError(
f"`swaption_settlement_method` as string: '{method}' is not a valid option. "
f"Please consult docs."
)
_LEG_MTM_MAP = {
"initial": LegMtm.Initial,
"xcs": LegMtm.XCS,
"payment": LegMtm.Payment,
}
def _get_leg_mtm(leg_mtm: str | LegMtm) -> LegMtm:
if isinstance(leg_mtm, LegMtm):
return leg_mtm
else:
try:
return _LEG_MTM_MAP[leg_mtm.lower()]
except KeyError:
raise ValueError(
f"`mtm` as string: '{leg_mtm}' is not a valid option. Please consult docs."
)
_LEG_INDEX_BASE_MAP = {
"initial": LegIndexBase.Initial,
"periodonperiod": LegIndexBase.PeriodOnPeriod,
"period_on_period": LegIndexBase.PeriodOnPeriod,
}
def _get_leg_index_base(leg_index: str | LegIndexBase) -> LegIndexBase:
if isinstance(leg_index, LegIndexBase):
return leg_index
else:
try:
return _LEG_INDEX_BASE_MAP[leg_index.lower()]
except KeyError:
raise ValueError(
f"`leg_index_base` as string: '{leg_index}' is not a valid option. "
f"Please consult docs."
)
_INDEX_METHOD_MAP = {
"daily": IndexMethod.Daily,
"monthly": IndexMethod.Monthly,
"curve": IndexMethod.Curve,
}
def _get_index_method(index_method: str | IndexMethod) -> IndexMethod:
if isinstance(index_method, IndexMethod):
return index_method
else:
try:
return _INDEX_METHOD_MAP[index_method.lower()]
except KeyError:
raise ValueError(
f"`index_method` as string: '{index_method}' is not a valid option. "
f"Please consult docs."
)
_FIXING_METHOD_MAP: dict[str, type[FloatFixingMethod]] = {
"ibor": FloatFixingMethod.IBOR,
"rfrpaymentdelay": FloatFixingMethod.RFRPaymentDelay,
"rfrobservationshift": FloatFixingMethod.RFRObservationShift,
"rfrlockout": FloatFixingMethod.RFRLockout,
"rfrlookback": FloatFixingMethod.RFRLookback,
"rfrpaymentdelayaverage": FloatFixingMethod.RFRPaymentDelayAverage,
"rfrobservationshiftaverage": FloatFixingMethod.RFRObservationShiftAverage,
"rfrlockoutaverage": FloatFixingMethod.RFRLockoutAverage,
"rfrlookbackaverage": FloatFixingMethod.RFRLookbackAverage,
# legacy compatibility
"rfr_payment_delay": FloatFixingMethod.RFRPaymentDelay,
"rfr_observation_shift": FloatFixingMethod.RFRObservationShift,
"rfr_lockout": FloatFixingMethod.RFRLockout,
"rfr_lookback": FloatFixingMethod.RFRLookback,
"rfr_payment_delay_avg": FloatFixingMethod.RFRPaymentDelayAverage,
"rfr_observation_shift_avg": FloatFixingMethod.RFRObservationShiftAverage,
"rfr_lockout_avg": FloatFixingMethod.RFRLockoutAverage,
"rfr_lookback_avg": FloatFixingMethod.RFRLookbackAverage,
}
def _get_float_fixing_method(method: str | FloatFixingMethod) -> FloatFixingMethod:
if isinstance(method, FloatFixingMethod):
return method
else:
if method.lower() in ["rfrpaymentdelay", "rfr_payment_delay"]:
return FloatFixingMethod.RFRPaymentDelay()
elif method.lower() in ["rfrpaymentdelayaverage", "rfr_payment_delay_avg"]:
return FloatFixingMethod.RFRPaymentDelayAverage()
if not ("(" in method and method[-1] == ")"):
raise ValueError(
f"`fixing_method` as string: '{method}' must have an associated parameter "
f"contained in parentheses, for example 'ibor(2)' or 'rfr_observation_shift(5)'. "
)
method_, number_part = method[:-1].split("(")
number = int(number_part)
try:
enum_ = _FIXING_METHOD_MAP[method_.lower()]
except KeyError:
raise ValueError(
f"`fixing_method` as string: '{method_}' is not a valid FloatFixingMethod."
)
return enum_(number) # type: ignore[call-arg]
_SPREAD_COMPOUNDING_METHOD_MAP = {
"nonesimple": SpreadCompoundMethod.NoneSimple,
"isdacompounding": SpreadCompoundMethod.ISDACompounding,
"isdaflatcompounding": SpreadCompoundMethod.ISDAFlatCompounding,
# legacy compatibility
"none_simple": SpreadCompoundMethod.NoneSimple,
"isda_compounding": SpreadCompoundMethod.ISDACompounding,
"isda_flat_compounding": SpreadCompoundMethod.ISDAFlatCompounding,
}
def _get_spread_compound_method(method: str | SpreadCompoundMethod) -> SpreadCompoundMethod:
if isinstance(method, SpreadCompoundMethod):
return method
else:
try:
return _SPREAD_COMPOUNDING_METHOD_MAP[method.lower()]
except KeyError:
raise ValueError(
f"`spread_compound_method` as string: '{method}' is not a valid option. "
f"Please consult docs."
)
_FX_DELTA_TYPE_MAP = {
"forward": FXDeltaMethod.Forward,
"spot": FXDeltaMethod.Spot,
"forward_pa": FXDeltaMethod.ForwardPremiumAdjusted,
"spot_pa": FXDeltaMethod.SpotPremiumAdjusted,
"forwardpremkiumadjusted": FXDeltaMethod.ForwardPremiumAdjusted,
"spotpremiumadjusted": FXDeltaMethod.SpotPremiumAdjusted,
}
def _get_fx_delta_type(method: str | FXDeltaMethod) -> FXDeltaMethod:
if isinstance(method, FXDeltaMethod):
return method
else:
try:
return _FX_DELTA_TYPE_MAP[method.lower()]
except KeyError:
raise ValueError(
f"`delta_type` as string: '{method}' is not a valid option. Please consult docs."
)
_FX_METRIC_MAP = {
"pips": FXOptionMetric.Pips,
"percent": FXOptionMetric.Percent,
}
def _get_fx_option_metric(method: str | FXOptionMetric) -> FXOptionMetric:
if isinstance(method, FXOptionMetric):
return method
else:
try:
return _FX_METRIC_MAP[method.lower()]
except KeyError:
raise ValueError(
f"FXOption `metric` as string: '{method}' is not a valid option. Please consult "
f"docs."
)
_IR_METRIC_MAP: dict[str, type[IROptionMetric]] = {
"normal_vol": IROptionMetric.NormalVol,
"premium": IROptionMetric.Premium,
"percent_notional": IROptionMetric.PercentNotional,
"black_vol_shift": IROptionMetric.BlackVolShift,
# aliases
"normalvol": IROptionMetric.NormalVol,
"percentnotional": IROptionMetric.PercentNotional,
"blackvolshift": IROptionMetric.BlackVolShift,
}
def _get_ir_option_metric(method: str | IROptionMetric) -> IROptionMetric:
if isinstance(method, IROptionMetric):
return method
else:
method = method.lower()
if "shift" in method:
idx = method.rfind("_")
if idx < 0:
raise ValueError(
"The 'BlackVolShift' metric must have an underscore and shift, e.g. "
"'black_vol_shift_100"
)
else:
args: tuple[NoReturn, ...] | tuple[int] = (int(method[idx + 1 :]),)
method = method[:idx]
else:
args = tuple()
try:
return _IR_METRIC_MAP[method](*args)
except KeyError:
raise ValueError(
f"IROption `metric` as string: '{method}' is not a valid option. Please consult "
f"documentation."
)
__all__ = [
"SpreadCompoundMethod",
"FloatFixingMethod",
"FXDeltaMethod",
"FXOptionMetric",
"IROptionMetric",
"LegMtm",
"LegIndexBase",
"OptionType",
"OptionPricingModel",
"IndexMethod",
]
================================================
FILE: python/rateslib/errors.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
# Arg Parsing
VE_NEEDS_FREQUENCY = "`frequency` as string or Frequency is needed to perform tenor calculations."
VE_NEEDS_FIXEDRATE = "A `fixed_rate` must be set for a cashflow to be determined."
VE_ATTRIBUTE_IS_IMMUTABLE = (
"The '{}' attribute is immutable to avoid conflicting calculations. Re-initialize the instance."
)
VE_ND_LEG_NEEDS_NO_EXCHANGES = (
"An Leg defined as non-deliverable by some parameter, e.g. `pair` cannot have "
"notional exchanges."
)
VE_PAIR_AND_LEG_MTM = "Setting `mtm` on a Leg requires a non-deliverable `pair` input."
# Curve Parsing
NI_NO_DISC_FROM_DICT = "`disc_curve` cannot currently be parsed from a dictionary of curves."
VE_NEEDS_DISC_CURVE = (
"`disc_curve` is required but it has not been provided, or cannot be parsed from an external "
"`curves` argument."
)
VE_NO_DISC_FROM_VALUES = "`disc_curve` cannot be inferred from a non-DF based curve."
VE_BEFORE_INITIAL = "The Curve initial node date is after the required forecasting date."
# Period Parameters
VE_NEEDS_INDEX_PARAMS = (
"`{0}` must be initialised with index parameters, i.e. those for `_IndexParams`. See docs."
)
VE_HAS_INDEX_PARAMS = (
"`{0}` must not be initialised with index parameters, i.e. those for `_IndexParams`. See docs."
)
VE_NEEDS_ND_CURRENCY_PARAMS = (
"`{0}` must be initialised with non-deliverable currency parameters, i.e. those for "
"`_CurrencyParams`. See docs."
)
VE_HAS_ND_CURRENCY_PARAMS = (
"`{0}` must not be initialised with non-deliverable currency parameters, i.e. those for "
"`_CurrencyParams`. See docs."
)
VE_MISMATCHED_FX_PAIR_ND_PAIR = (
"Non-deliverable FXOptions into a third currency are not allowed.\n"
"Got nd-currency: '{0}' and option index pair: '{1}'.\n"
"FXOptions of this nature require quanto volatility adjustements that the basic models"
"do not include."
)
# Fixings
# Tenors are now derived from a `fixing_series` and not a fixings timeseries
# UW_NO_TENORS = (
# "The IBORStubFixing has not detected any tenors under the identifier: '{0}' and "
# "will therefore never obtain any fixing value."
# )
TE_NO_FIXING_EXPOSURE_ON_OBJ = (
"The object type '{0}' does not contain or have available methods to calculate fixings "
"exposure."
)
VE01_1 = (
"Fixing data for the index '{0}' has been attempted, but none found.\nEither there "
"is no data file ('{0}.csv') located in the searched data directory,\nor a Series "
"has not been added manually by performing `fixings.add"
"('{0}', some_series)`.\nTo create a CSV file in the searched data directory "
"use the exact template structure for the file between the hashes:\n"
"###################\n"
"reference_date,rate\n26-08-2023,5.6152\n27-08-2023,5.6335\n##################\n"
"For further info see 'Working with Fixings' in the documentation cookbook.",
)
AE_NEEDS_PAIR_TO_FORECAST = (
"A currency `pair` is required for non-deliverable `fx_fixing` forecasting."
)
VE_NEEDS_FX_FORWARDS = (
"An FXForwards object for `fx` is required for instrument pricing.\n"
"If this instrument is part of a Solver, have you omitted the `fx` input?",
)
VE_NEEDS_FX_FORWARDS_BAD_TYPE = (
"An FXForwards object for `fx` is required for instrument pricing.\n"
"The given type, '{0}', cannot be used here."
)
FW_FIXINGS_AS_SERIES = (
"Setting any `fixings` argument as a Series directly is currently supported, but not "
"recommended and may be removed in future versions.\n"
"Best practice is to add the fixings object to the default _BaseFixingsLoader and then "
"reference that object by Series name.\n"
"For example, change: `rate_fixings`=my_series_object` to\n"
"`fixings.add('EURIBOR_3M', my_series_object)`\n"
"`fixings.add('EURIBOR_6M', another_series_object)`\n"
"`rate_fixings='EURIBOR'`\n"
"See cookbook article 'Working with Fixings' for more information."
)
VE_INDEX_FIXINGS_AS_STR_OR_VALUE = (
"`index_fixings` must be specified either as a scalar value or a string identifier for a "
"fixings set in the _BaseFixingsLoader. Got type: {0}."
)
VE_INDEX_LAG_MUST_BE_ZERO = (
"`index_lag` must be zero when using a 'Curve' `index_method`.\n"
"`index_date`: {0}, is in Series but got `index_lag`: {1}."
)
VE_EMPTY_SERIES = "An fixing value cannot be derived from an `fixings` Series having no entries."
VE_INDEX_BASE_NO_STR = (
"`index_base` argument cannot be initialised as string.\n If seeking to determine its "
"value with a Fixings series then do not provide any `index_base` value and use "
"`index_fixings` instead.\nOr use the 'index_value' method to separately determine a "
"scalar value to enter directly as the `index_base` argument."
)
# VE_NEEDS_INDEX_BASE_DATE = (
# "An `index_base` forecast value requires an `index_base_date` to be provided."
# )
# 08: periods/components/parameters.py
VE08_0 = (
"The `index_base` is not an explicitly provided value for the Period.\n"
"`index_base_date` must therefore be provided to forecast `index_base` from an `index_curve` "
"or `index_fixings`."
)
VE08_1 = (
"Must supply an `index_date` from which to forecast if `index_fixings` is not provided.\n"
"This error usually arises when an `index_base` value is not provided for a Period and "
"there is no `index_base_date`,\nor if there are no `index_fixings` and there is no "
"`index_reference_date` is combination."
)
VE_NEEDS_STRIKE = "An FXOptionPeriod cashflow cannot be determined without setting a `strike`."
# VE_NEEDS_FIXING_SERIES = (
# "A `fixing_series` must be supplied for floating rate parameters."
# )
# VE_NEEDS_FIXING_FREQUENCY = "A `fixing_frequency` must be supplied for floating rate parameters."
# 02: periods/components/float_rate.py
VE_NEEDS_RATE_CURVE = "A `rate_curve` must be provided to this method."
VE_MISMATCHED_ND_PAIR = (
"A non-deliverable pair must contain the settlement currency.\nGot '{0}' and '{1}'."
)
MISMATCH_RATE_INDEX_PARAMETERS = (
"A `rate_curve` and `rate_index` have been supplied with conflicting parameters.\n"
"Specifically for the attribute '{0}'\n"
"Got: '{1}' and '{2}'."
)
VE_NEEDS_CURVE_OR_INDEX = (
"Either `rate_curve` or `rate_index` must be provided so that the "
"conventions for the floating rate, such as the fixing calendar and the accrual "
"convention can be determined."
)
VE_NEEDS_RATE_TO_FORECAST_RFR = (
"A `rate_curve` is required to forecast missing RFR rates.\n"
"This may be observed as a direct argument input, or this error may by a result of "
"incorrectly supplying the `curves` argument to any Instrument class."
)
VE_NEEDS_RATE_TO_FORECAST_STUB_IBOR = (
"A `rate_curve` is required to forecast missing IBOR rate.\n"
"`rate_curve` might be specifically omitted or an external `curves` argument may be "
"malformed.\nNote that forecasting an IBOR stub from a single curve is bad practice and "
"a more accurate calculation will likely be obtained from a dict of curves, e.g.\n"
"'{'1m': curve1, '3m': curve2, '6m': curve3}'"
)
VE_NEEDS_RATE_TO_FORECAST_TENOR_IBOR = (
"A `rate_curve` is required to forecast missing IBOR rate.\n"
"`rate_curve` might be specifically omitted or an external `curves` argument may be "
"malformed."
)
VE_FIXINGS_BAD_TYPE = (
"`.._fixings` should be a single value or a string labelling a fixing set in the "
"`fixings` container. It cannot be a list or Series.\n"
"To migrate from the legacy implementation where a Series could be supplied directly "
"use the following:\nAdd your Series to defaults: `default.fixings.add('EURIBOR_3M', "
"my_series_obj)`\nAnd then reference this fixing set directly: `rate_fixings='EURIBOR'`.\nThe"
"suffix '_3M' will be added directly internally (based on the Frequency) and will adjust for "
"stub fixings. RFR fixings will have the '_1B' suffix added, so use for example:\n"
"Add an RFR Series: `fixings.add('SOFR_1B', my_series_obj)`\n"
"And reference this set directly: `rate_fixings='SOFR'`.\n"
"For further details see the cookbook documentation entitled 'Working with Fixings'."
)
VE02_1 = (
"RFR Observation and Accrual DCF dates do not align.\nThis is usually the result of a "
"'rfr_lookback' Period which does not adhere to the holiday calendar of the `curve`.\n"
"start date: {0} is curve holiday? => {1}\nend date: {2} is curve holiday? => {3}\n"
)
VE02_2 = (
"The accrual `start` and `end` dates ({0} and/or {1}) for the period do not align with "
"business days under the `fixing_calendar`.\nRFR Periods need to align with valid fixing"
"days."
)
VE02_3 = (
"Providing `rate_fixings` as a scalar value for an RFR type `fixing_method` is not "
"permitted due to ambiguity, particularly in combination with the `float_spread`.\n"
"Consider adding a Series to `defaults`: `fixings.add('MY_RFR_1B', "
"some_series)`\nAnd then referencing this fixings collection: `rate_fixings='MY_RFR'\n"
"For an RFR type fixing method the suffix added internally is always '_1B'."
)
VE_SPREAD_METHOD_RFR = (
"The `spread_compound_method` must be the 'NoneSimple' variant when using a "
"`fixing_method` which defines an RFR Averaging type calculation.\nGot: {0}"
)
VE02_5 = (
"The fixings series '{0}' for the RFR 1B rates is missing a value expected by the fixings "
"calendar.\n"
"Specifically '{1}' is expected, yet '{2}' is provided implying a data entry is missing."
)
VE_NEEDS_RATE_POPULATE_FIXINGS = (
"A `rate_curve` is required to forecast missing RFR fixings in a floating rate calculation.\n"
"This may be a direct input or the input to an Instrument's `curves` argument may be incorrect."
"\nThe missing data is shown below for this calculation:\n"
"{0}"
)
VE_LOCKOUT_METHOD_PARAM = (
"The `method_param` for an RFR Lockout type `fixing_method` must not exceed the length of the "
"period.\nGot: '{0}' for the following fixing rates:\n{1}"
)
W02_0 = (
"The fixings series '{0}' for the RFR 1B rates contains more fixings than are expected from "
"the fixings calendar.\n"
"Specifically, the extra data item lies within the fixings window: '{1}':'{2}'."
)
================================================
FILE: python/rateslib/fx/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from rateslib.fx.fx_forwards import FXForwards, forward_fx
from rateslib.fx.fx_rates import FXRates
__all__ = ("FXForwards", "forward_fx", "FXRates")
================================================
FILE: python/rateslib/fx/fx_forwards.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import json
import warnings
from dataclasses import replace
from datetime import datetime, timedelta
from itertools import combinations, product
from typing import TYPE_CHECKING, Any, TypeAlias
import numpy as np
from pandas import DataFrame, Series
from rateslib import defaults
from rateslib.curves import Curve, MultiCsaCurve, ProxyCurve
from rateslib.curves.utils import _CurveType
from rateslib.data.fixings import FXIndex
from rateslib.default import PlotOutput, plot
from rateslib.dual import Dual, Dual2, Variable, gradient
from rateslib.enums.generics import NoInput, _drb
from rateslib.fx.fx_rates import FXRates
from rateslib.mutability import (
_clear_cache_post,
_new_state_post,
_validate_states,
_WithCache,
_WithState,
)
from rateslib.scheduling import add_tenor
if TYPE_CHECKING:
from rateslib.local_types import Number, _BaseCurve, datetime_
DualTypes: TypeAlias = (
"Dual | Dual2 | Variable | float" # required for non-cyclic import on _WithCache
)
"""
.. ipython:: python
:suppress:
from rateslib.curves import Curve
from datetime import datetime as dt
"""
# Licence: Creative Commons - Attribution-NonCommercial-NoDerivatives 4.0 International
# Commercial use of this code, and/or copying and redistribution is prohibited.
# Contact rateslib at gmail.com if this code is observed outside its intended sphere.
class FXForwards(_WithState, _WithCache[tuple[str, datetime], DualTypes]):
"""
Class for storing and calculating FX forward rates.
Parameters
----------
fx_rates : FXRates, or list of such
An ``FXRates`` object with an associated settlement date. If multiple settlement
dates are relevant, e.g. GBPUSD (T+2) and USDCAD(T+1), then a list of
``FXRates`` object is allowed to create a no arbitrage framework.
fx_curves : dict
A dict of DF ``Curve`` objects defined by keys of two currency labels. First, by
the currency in which cashflows occur (3-digit code), combined with the
currency by which the future cashflow is collateralised in a derivatives sense
(3-digit code). There must also be a curve in each currency for
local discounting, i.e. where the cashflow and collateral currency are the
same. See examples.
base : str, optional
The base currency (3-digit code). If not given defaults to the base currency
of the first ``fx_rates`` object.
Notes
-----
.. math::
f_{DOMFOR,i} &= \\text{Forward domestic-foreign FX rate fixing on maturity date, }m_i \\\\
F_{DOMFOR,0} &= \\text{Immediate settlement market domestic-foreign FX rate} \\\\
v_{dom:dom,i} &= \\text{Local domestic-currency DF on maturity date, }m_i \\\\
w_{dom:for,i} &= \\text{XCS adjusted domestic-currency DF on maturity date, }m_i \\\\
Examples
--------
The most basic ``FXForwards`` object is created from a spot ``FXRates`` object and
two local currency discount curves.
.. ipython:: python
from rateslib.fx import FXRates, FXForwards
from rateslib.curves import Curve
.. ipython:: python
fxr = FXRates({"eurusd": 1.1}, settlement=dt(2022, 1, 3))
eur_local = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.91})
usd_local = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.95})
fxf = FXForwards(fxr, {"usdusd": usd_local, "eureur": eur_local, "eurusd": eur_local})
Note that in the above the ``eur_local`` curve has also been used as the curve
for EUR cashflows collateralised in USD, which is necessary for calculation
of forward FX rates and cross-currency basis. With this assumption the
cross-currency basis is implied to be zero at all points along the curve.
Attributes
----------
fx_rates : FXRates or list
fx_curves : dict
immediate : datetime
currencies: dict
q : int
currencies_list : list
transform : ndarray
base : str
fx_rates_immediate : FXRates
"""
_mutable_by_association = True
# @_new_state_post # handled internally
@_clear_cache_post
def update(self, fx_rates: list[dict[str, float]] | NoInput = NoInput(0)) -> None:
"""
Update the FXForward object with the latest FX rates and FX curves values.
The update method is primarily used to allow synchronous updating within a
``Solver``.
Parameters
----------
fx_rates: list of dict, optional
A list of dictionaries with new rates to update the associated
:class:`~rateslib.fx.FXRates` objects associated with the *FXForwards* object.
Returns
-------
None
Notes
-----
An *FXForwards* object contains associations to external objects, those being
:class:`~rateslib.fx.FXRates` and :class:`~rateslib.curves.Curve`, and its purpose is
to be able to combine those objects to yield FX forward rates.
When those external objects have themselves been updated the *FXForwards* class
will detect this via *rateslib's* cache management and will automatically update
the *FXForwards* object. Manually calling this update on the *FXForwards* class
also allows those associated *FXRates* classes to be updated with new market data.
.. ipython:: python
fxr = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3), base="usd")
fx_curves = {
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.965}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.985}),
"eurusd": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.985}),
}
fxf = FXForwards(fxr, fx_curves)
fxf.rate("eurusd", dt(2022, 8, 15))
.. ipython:: python
fxr.update({"eurusd": 2.0}) # <-- update the associated FXRates object.
fxf.rate("eurusd", dt(2022, 8, 15)) # <-- rate has changed, fxf has auto-updated.
It is possible to update an *FXRates* object directly from the *FXForwards* object, via
the ``fx_rates`` argument.
.. ipython:: python
fxf.update([{"eurusd": 1.50}])
fxf.rate("eurusd", dt(2022, 8, 15))
The :class:`~rateslib.solver.Solver` also automatically updates *FXForwards* objects
when it mutates and solves the *Curves*.
"""
# does not require cache validation because resets the cache_id at end of method.
if not isinstance(fx_rates, NoInput):
self_fx_rates = self.fx_rates if isinstance(self.fx_rates, list) else [self.fx_rates]
if not isinstance(fx_rates, list) or len(self_fx_rates) != len(fx_rates):
raise ValueError(
"`fx_rates` must be a list of dicts with length equal to the number of FXRates "
f"objects associated with the *FXForwards* object: {len(self_fx_rates)}."
)
for fxr_obj, fxr_up in zip(self_fx_rates, fx_rates, strict=True):
fxr_obj.update(fxr_up)
if self._state != self._get_composited_state():
self._calculate_immediate_rates(base=self.base, init=False)
self._set_new_state()
@_new_state_post
@_clear_cache_post
def __init__(
self,
fx_rates: FXRates | list[FXRates],
fx_curves: dict[str, _BaseCurve],
base: str | NoInput = NoInput(0),
) -> None:
self._ad = 1
self._validate_fx_curves(fx_curves)
self._fx_proxy_curves: dict[str, ProxyCurve] = {}
self.fx_rates: FXRates | list[FXRates] = fx_rates
self._calculate_immediate_rates(base, init=True)
assert self.currencies_list == self.fx_rates_immediate.currencies_list # noqa: S101
@property
def fx_proxy_curves(self) -> dict[str, ProxyCurve]:
"""
A dict of cached :class:`~rateslib.curves.ProxyCurve` associated with this object.
"""
return self._fx_proxy_curves
def _get_composited_state(self) -> int:
self_fx_rates = [self.fx_rates] if not isinstance(self.fx_rates, list) else self.fx_rates
total = sum(curve._state for curve in self.fx_curves.values()) + sum(
fxr._state for fxr in self_fx_rates
)
return hash(total)
def _validate_state(self) -> None:
if self._state != self._get_composited_state():
self.update()
def _validate_fx_curves(self, fx_curves: dict[str, _BaseCurve]) -> None:
self.fx_curves: dict[str, _BaseCurve] = {k.lower(): v for k, v in fx_curves.items()}
self.terminal: datetime = datetime(2200, 1, 1)
for flag, (k, curve) in enumerate(self.fx_curves.items()):
try: # to label curve meta with collateral
curve._meta = replace(curve._meta, _collateral=k[3:6]) # type: ignore[misc]
except AttributeError:
if curve._meta.collateral is not None and curve._meta.collateral != k[3:6]:
warnings.warn(
"Constructing an FXForwards with curve operation objects is possible.\n"
"However, these objects reference other curve meta data, and a collateral "
f"clash has been detected.\n "
f"Curve.meta.collateral: '{curve._meta.collateral}'\n"
f"Actual collateral: '{k[3:6]}'",
UserWarning,
)
else:
# collateral is None so ignore, or it is correct anyway so pass
pass
if flag == 0:
self.immediate: datetime = curve.nodes.keys[0]
elif self.immediate != curve.nodes.keys[0]:
raise ValueError("`fx_curves` do not have the same initial date.")
if curve._base_type == _CurveType.values:
raise TypeError("`fx_curves` must be DF based, not type LineCurve.")
if curve.nodes.final < self.terminal:
self.terminal = curve.nodes.final
def _calculate_immediate_rates(self, base: str | NoInput, init: bool) -> None:
if not isinstance(self.fx_rates, list):
# if in initialisation phase (and not update phase) populate immutable values
if init:
self.currencies = self.fx_rates.currencies
self.q = len(self.currencies.keys())
self.currencies_list: list[str] = self.fx_rates.currencies_list
self.transform = _get_curves_indicator_array(
self.q,
self.currencies,
self.fx_curves,
)
self._paths = _create_initial_mapping(self.transform)
self.base: str = self.fx_rates.base if isinstance(base, NoInput) else base
self.pairs = self.fx_rates.pairs
self.variables = tuple(f"fx_{pair}" for pair in self.pairs)
self.pairs_settlement = self.fx_rates.pairs_settlement
self.fx_rates_immediate = self._calculate_immediate_rates_same_settlement_frame()
else:
# Get values for the first FXRates in the list
sub_curves = self._get_curves_for_currencies(
self.fx_curves,
self.fx_rates[0].currencies_list,
)
acyclic_fxf: FXForwards = FXForwards(
fx_rates=self.fx_rates[0],
fx_curves=sub_curves,
)
settlement_pairs = dict.fromkeys(self.fx_rates[0].pairs, self.fx_rates[0].settlement)
# Now iterate through the remaining FXRates objects and patch them into the fxf
for fx_rates_obj in self.fx_rates[1:]:
# create sub FXForwards for each FXRates instance and re-combine.
# This reuses the arg validation of a single FXRates object and
# dependency of FXRates with fx_curves.
# calculate additional FX rates from previous objects
# in the same settlement frame.
overlapping_currencies = [
ccy
for ccy in fx_rates_obj.currencies_list
if ccy in acyclic_fxf.currencies_list
]
pre_currencies = [
ccy
for ccy in acyclic_fxf.currencies_list
if ccy not in fx_rates_obj.currencies_list
]
pre_rates = {
f"{overlapping_currencies[0]}{ccy}": acyclic_fxf._rate_without_validation(
f"{overlapping_currencies[0]}{ccy}",
fx_rates_obj.settlement,
)
for ccy in pre_currencies
}
combined_fx_rates = FXRates(
fx_rates={**fx_rates_obj.fx_rates, **pre_rates},
settlement=fx_rates_obj.settlement,
)
sub_curves = self._get_curves_for_currencies(
self.fx_curves,
fx_rates_obj.currencies_list + pre_currencies,
)
acyclic_fxf = FXForwards(fx_rates=combined_fx_rates, fx_curves=sub_curves)
settlement_pairs.update(
dict.fromkeys(fx_rates_obj.pairs, fx_rates_obj.settlement),
)
if not isinstance(base, NoInput):
acyclic_fxf.base = base.lower()
for attr in [
"currencies",
"q",
"currencies_list",
"transform",
"base",
"fx_rates_immediate",
"pairs",
"_paths",
]:
setattr(self, attr, getattr(acyclic_fxf, attr))
self.pairs_settlement = settlement_pairs
def _calculate_immediate_rates_same_settlement_frame(self) -> FXRates:
"""
Calculate the immediate FX rates values given current Curves and input FXRates obj.
Notes
-----
Searches the non-diagonal elements of transformation matrix, once it has
found a pair uses the relevant curves and the FX rate to determine the
immediate FX rate for that pair.
"""
# this method can only be performed on an FXForwards object that is associated to a
# single FXRates obj (hence the use of the acyclic_fxf)
# since this is an internal method this line is used for testing
assert not isinstance(self.fx_rates, list) # noqa: S101
fx_rates_immediate: dict[str, DualTypes] = {}
for row in range(self.q):
for col in range(self.q):
if row == col or self.transform[row, col] == 0:
continue
cash_ccy = self.currencies_list[row]
coll_ccy = self.currencies_list[col]
settlement = self.fx_rates.settlement
if isinstance(settlement, NoInput) or settlement is None:
raise ValueError(
"`fx_rates` as FXRates supplied to FXForwards must contain a "
"`settlement` argument.",
)
v_i = self.fx_curves[f"{coll_ccy}{coll_ccy}"][settlement]
v_0 = self.fx_curves[f"{coll_ccy}{coll_ccy}"][self.immediate]
w_i = self.fx_curves[f"{cash_ccy}{coll_ccy}"][settlement]
w_0 = self.fx_curves[f"{cash_ccy}{coll_ccy}"][self.immediate]
pair = f"{cash_ccy}{coll_ccy}"
fx_rates_immediate.update(
{pair: self.fx_rates.fx_array[row, col] * v_i / w_i * w_0 / v_0}
)
fx_rates_immediate_ = FXRates(fx_rates_immediate, self.immediate, self.currencies_list[0])
return fx_rates_immediate_.restate(self.fx_rates.pairs, keep_ad=True)
def __repr__(self) -> str:
if len(self.currencies_list) > 5:
return (
f""
)
else:
return f""
@staticmethod
def _get_curves_for_currencies(
fx_curves: dict[str, _BaseCurve], currencies: list[str]
) -> dict[str, _BaseCurve]:
"""produces a complete subset of fx curves given a list of currencies"""
ps = product(currencies, currencies)
ret = {p[0] + p[1]: fx_curves[p[0] + p[1]] for p in ps if p[0] + p[1] in fx_curves}
return ret
# Licence: Creative Commons - Attribution-NonCommercial-NoDerivatives 4.0 International
# Commercial use of this code, and/or copying and redistribution is prohibited.
# Contact rateslib at gmail.com if this code is observed outside its intended sphere.
@_validate_states
def rate(
self,
pair: FXIndex | str,
settlement: datetime_ = NoInput(0),
) -> DualTypes:
"""
Return the fx forward rate for a currency pair.
Parameters
----------
pair : FXIndex, str
The FX pair in usual domestic:foreign convention (6 digit code).
settlement : datetime, optional
The settlement date of currency exchange. If not given defaults to
immediate settlement.
Returns
-------
float, Dual, Dual2
Notes
-----
Uses the formula,
.. math::
f_{DOMFOR, i} = \\frac{w_{dom:for, i}}{v_{for:for, i}} F_{DOMFOR,0} = \\frac{v_{dom:dom, i}}{w_{for:dom, i}} F_{DOMFOR,0}
where :math:`v` is a local currency discount curve and :math:`w` is a discount
curve collateralised with an alternate currency.
If required curves do not exist in the relevant currencies then forwards rates are chained
using those calculable from available curves. The chain is found using a search algorithm.
.. math::
f_{DOMFOR, i} = f_{DOMALT, i} ... f_{ALTFOR, i}
""" # noqa: E501
if isinstance(pair, FXIndex):
pair = pair.pair
return self._rate_without_validation(pair, settlement)
def _rate_without_validation(self, pair: str, settlement: datetime_ = NoInput(0)) -> DualTypes:
settlement_: datetime = _drb(self.immediate, settlement)
if defaults.curve_caching and (pair, settlement_) in self._cache:
return self._cache[(pair, settlement_)]
if settlement_ < self.immediate:
raise ValueError("`settlement` cannot be before immediate FX rate date.")
if settlement_ == self.immediate:
# get FX rate directly from the immediate object
return self._cached_value((pair, settlement_), self.fx_rates_immediate.rate(pair))
elif isinstance(self.fx_rates, FXRates) and settlement_ == self.fx_rates.settlement:
# get FX rate directly from the spot object
return self._cached_value((pair, settlement_), self.fx_rates.rate(pair))
ccy_lhs = pair[0:3].lower()
ccy_rhs = pair[3:6].lower()
if ccy_lhs == ccy_rhs:
return 1.0 # then return identity
if (self.currencies[ccy_lhs], self.currencies[ccy_rhs]) not in self._paths:
# then paths have not been recursively determined, so determine them and cache now.
self._paths = _recursive_pair_population(self.transform, self._paths)[1]
via_idx = self._paths[(self.currencies[ccy_lhs], self.currencies[ccy_rhs])]
if via_idx == -1:
# then a rate is directly available
return self._rate_direct(ccy_lhs, ccy_rhs, settlement_)
else:
# recursively determine from FX-crosses
via_ccy = self.currencies_list[via_idx]
ret = self.rate(f"{ccy_lhs}{via_ccy}", settlement_) * self.rate(
f"{via_ccy}{ccy_rhs}", settlement_
)
return self._cached_value((pair, settlement_), ret)
def _rate_direct(
self,
ccy_lhs: str,
ccy_rhs: str,
settlement: datetime,
) -> DualTypes:
"""Return a forward FX rate conditional on curves existing directly between the
given currency indexes."""
ccy_lhs_idx = self.currencies[ccy_lhs]
ccy_rhs_idx = self.currencies[ccy_rhs]
if self.transform[ccy_lhs_idx, ccy_rhs_idx] == 1:
# f_ab = w_ab / v_bb * F_ab
w_ab = self.fx_curves[f"{ccy_lhs}{ccy_rhs}"][settlement]
v_bb = self.fx_curves[f"{ccy_rhs}{ccy_rhs}"][settlement]
scalar = w_ab / v_bb
elif self.transform[ccy_rhs_idx, ccy_lhs_idx] == 1:
# f_ab = v_aa / w_ba * F_ab
v_aa = self.fx_curves[f"{ccy_lhs}{ccy_lhs}"][settlement]
w_ba = self.fx_curves[f"{ccy_rhs}{ccy_lhs}"][settlement]
scalar = v_aa / w_ba
else:
raise ValueError("`fx_curves` do not exist to create a direct FX rate for the pair.")
f = self.fx_rates_immediate.rate(f"{ccy_lhs}{ccy_rhs}")
return self._cached_value((f"{ccy_lhs}{ccy_rhs}", settlement), scalar * f)
@_validate_states
def positions(
self, value: Number, base: str | NoInput = NoInput(0), aggregate: bool = False
) -> Series[float] | DataFrame:
"""
Convert a base value with FX rate sensitivities into an array of cash positions
by settlement date.
Parameters
----------
value : float or Dual
The amount expressed in base currency to convert to cash positions.
base : str, optional
The base currency in which ``value`` is given (3-digit code). If not given
assumes the ``base`` of the object.
aggregate : bool, optional
Whether to aggregate positions across all settlement dates and yield
a single column Series.
Returns
-------
DataFrame or Series
Examples
--------
.. ipython:: python
fxr1 = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxr2 = FXRates({"usdcad": 1.1}, settlement=dt(2022, 1, 2))
fxf = FXForwards(
fx_rates=[fxr1, fxr2],
fx_curves={
"usdusd": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),
"eureur": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),
"cadcad": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),
"usdeur": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),
"cadusd": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),
}
)
fxf.positions(
value=Dual(100000, ["fx_eurusd", "fx_usdcad"], [-100000, -150000]),
base="usd",
)
"""
if isinstance(value, float | int):
value = Dual(value, [], [])
base_: str = self.base if isinstance(base, NoInput) else base.lower()
_ = np.array(
[0 if ccy != base_ else float(value) for ccy in self.currencies_list],
) # this is an NPV so is assumed to be immediate settlement
if isinstance(self.fx_rates, list):
fx_rates = self.fx_rates
else:
fx_rates = [self.fx_rates]
dates = list({fxr.settlement for fxr in fx_rates})
if self.immediate not in dates:
dates.insert(0, self.immediate)
df = DataFrame(0.0, index=self.currencies_list, columns=dates)
df.loc[base_, self.immediate] = float(value)
for pair in value.vars:
if pair[:3] == "fx_":
dom_, for_ = pair[3:6], pair[6:9]
for fxr in fx_rates:
if dom_ in fxr.currencies_list and for_ in fxr.currencies_list:
delta = gradient(value, [pair])[0]
_ = fxr._get_positions_from_delta(delta, pair[3:], base_)
_ = Series(_, index=fxr.currencies_list, name=fxr.settlement)
df = df.add(_.to_frame(), fill_value=0.0)
if aggregate:
_s: Series[float] = df.sum(axis=1).rename(dates[0])
return _s
else:
_d: DataFrame = df.sort_index(axis=1)
return _d
@_validate_states
def convert(
self,
value: DualTypes,
domestic: str,
foreign: str | NoInput = NoInput(0),
settlement: datetime | NoInput = NoInput(0),
value_date: datetime | NoInput = NoInput(0),
collateral: str | NoInput = NoInput(0),
on_error: str = "ignore",
) -> DualTypes | None:
"""
Convert an amount of a domestic currency, as of a settlement date
into a foreign currency, valued on another date.
Parameters
----------
value : float or Dual
The amount of the domestic currency to convert.
domestic : str
The domestic currency (3-digit code).
foreign : str, optional
The foreign currency to convert to (3-digit code). Uses instance
``base`` if not given.
settlement : datetime, optional
The date of the assumed domestic currency cashflow. If not given is
assumed to be ``immediate`` settlement.
value_date : datetime, optional
The date for which the domestic cashflow is to be projected to. If not
given is assumed to be equal to the ``settlement``.
collateral : str, optional
The collateral currency to project the cashflow if ``value_date`` is
different to ``settlement``. If they are the same this is not needed.
If not given defaults to ``domestic``.
on_error : str in {"ignore", "warn", "raise"}
The action taken if either ``domestic`` or ``foreign`` are not contained
in the FX framework. `"ignore"` and `"warn"` will still return `None`.
Returns
-------
Dual or None
Examples
--------
.. ipython:: python
fxr1 = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxr2 = FXRates({"usdcad": 1.1}, settlement=dt(2022, 1, 2))
fxf = FXForwards(
fx_rates=[fxr1, fxr2],
fx_curves={
"usdusd": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),
"eureur": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),
"cadcad": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),
"usdeur": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),
"cadusd": Curve({dt(2022, 1, 1):1.0, dt(2022, 2, 1): 0.999}),
}
)
fxf.convert(1000, "usd", "cad")
"""
foreign_ = _drb(self.base, foreign).lower()
domestic_ = domestic.lower()
collateral_ = _drb(domestic_, collateral).lower()
for ccy in [domestic_, foreign_]:
if ccy not in self.currencies:
if on_error == "ignore":
return None
elif on_error == "warn":
warnings.warn(
f"'{ccy}' not in FXForwards.currencies: returning None.",
UserWarning,
)
return None
else:
raise ValueError(f"'{ccy}' not in FXForwards.currencies.")
settlement_: datetime = _drb(self.immediate, settlement)
value_date_: datetime = _drb(settlement_, value_date)
fx_rate: DualTypes = self.rate(domestic_ + foreign_, settlement_)
if value_date_ == settlement_:
return fx_rate * value
else:
crv = self.curve(foreign_, collateral_)
return fx_rate * value * crv[settlement_] / crv[value_date_]
@_validate_states
# this is technically unnecessary since calls pre-cached method: convert
def convert_positions(
self,
array: np.ndarray[tuple[int], np.dtype[np.float64]]
| list[float]
| DataFrame
| Series[float],
base: str | NoInput = NoInput(0),
) -> DualTypes:
"""
Convert an input of currency cash positions into a single base currency value.
Parameters
----------
array : list, 1d ndarray of floats, or Series, or DataFrame
The cash positions to simultaneously convert to base currency value.
If a DataFrame, must be indexed by currencies (3-digit lowercase) and the
column headers must be settlement dates.
If a Series, must be indexed by currencies (3-digit lowercase).
If a 1d array or sequence, must
be ordered by currency as defined in the attribute ``FXForward.currencies``.
base : str, optional
The currency to convert to (3-digit code). Uses instance ``base`` if not
given.
Returns
-------
Dual
Examples
--------
.. ipython:: python
:suppress:
from pandas import DataFrame
.. ipython:: python
fxr = FXRates({"usdnok": 8.0}, settlement=dt(2022, 1, 1))
usdusd = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99})
noknok = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.995})
fxf = FXForwards(fxr, {"usdusd": usdusd, "noknok": noknok, "nokusd": noknok})
fxf.currencies
fxf.convert_positions([0, 1000000], "usd")
.. ipython:: python
fxr.convert_positions(Series([1000000, 0], index=["nok", "usd"]), "usd")
.. ipython:: python
positions = DataFrame(index=["usd", "nok"], data={
dt(2022, 6, 2): [0, 1000000],
dt(2022, 9, 7): [0, -1000000],
})
fxf.convert_positions(positions, "usd")
"""
base = _drb(self.base, base).lower()
if isinstance(array, Series):
array_: DataFrame = array.to_frame(name=self.immediate)
elif isinstance(array, DataFrame):
array_ = array
else:
array_ = DataFrame({self.immediate: np.asarray(array)}, index=self.currencies_list)
# j = self.currencies[base]
# return np.sum(array_ * self.fx_array[:, j])
sum_: DualTypes = 0.0
for d in array_.columns:
d_sum: DualTypes = 0.0
for ccy in array_.index:
# typing d is a datetime by default.
value_: DualTypes | None = self.convert(array_.loc[ccy, d], ccy, base, d) # type: ignore[arg-type]
d_sum += 0.0 if value_ is None else value_
if abs(d_sum) < 1e-2:
sum_ += d_sum
else: # only discount if there is a real value
value_ = self.convert(d_sum, base, base, d, self.immediate) # type: ignore[arg-type]
sum_ += 0.0 if value_ is None else value_
return sum_
@_validate_states
def swap(
self,
pair: FXIndex | str,
settlements: list[datetime],
) -> DualTypes:
"""
Return the FXSwap mid-market rate for the given currency pair.
Parameters
----------
pair : FXIndex, str
The FX pair in usual domestic:foreign convention (6-digit code).
settlements : list of datetimes,
The settlement date of currency exchanges.
Returns
-------
Dual
"""
if isinstance(pair, FXIndex):
pair = pair.pair
fx0 = self._rate_without_validation(pair, settlements[0])
fx1 = self._rate_without_validation(pair, settlements[1])
return (fx1 - fx0) * 10000
@_validate_states
def _full_curve(self, cashflow: str, collateral: str) -> _BaseCurve:
"""
Calculate a cash collateral curve.
Parameters
----------
cashflow : str
The currency in which cashflows are represented (3-digit code).
collateral : str
The currency of the CSA against which cashflows are collateralised (3-digit
code).
Returns
-------
Curve
Notes
-----
Uses the formula,
.. math::
w_{DOM:FOR,i} = \\frac{f_{DOMFOR,i}}{F_{DOMFOR,0}} v_{FOR:FOR,i}
The returned curve has each DF uniquely specified on each date.
"""
cash_ccy, coll_ccy = cashflow.lower(), collateral.lower()
cash_idx, coll_idx = self.currencies[cash_ccy], self.currencies[coll_ccy]
end = self.fx_curves[f"{coll_ccy}{coll_ccy}"].nodes.final
days = (end - self.immediate).days
nodes = {
k: (
self._rate_without_validation(f"{cash_ccy}{coll_ccy}", k)
/ self.fx_rates_immediate.fx_array[cash_idx, coll_idx]
* self.fx_curves[f"{coll_ccy}{coll_ccy}"][k]
)
for k in [self.immediate + timedelta(days=i) for i in range(days + 1)]
}
c_: _BaseCurve = Curve(nodes)
return c_
# Licence: Creative Commons - Attribution-NonCommercial-NoDerivatives 4.0 International
# Commercial use of this code, and/or copying and redistribution is prohibited.
# Contact rateslib at gmail.com if this code is observed outside its intended sphere.
# @_validate_states: function does not determine values, just links to contained objects.
def curve(
self,
cashflow: str,
collateral: str | list[str] | tuple[str, ...],
id: str | NoInput = NoInput(0), # noqa: A002
) -> _BaseCurve:
"""
Return a cash collateral *Curve*.
Parameters
----------
cashflow : str
The currency in which cashflows are represented (3-digit code).
collateral : str, or list/tuple of such
The currency of the CSA against which cashflows are collateralised (3-digit
code). If a list or tuple will return a CompositeCurve in multi-CSA mode.
id : str, optional
The identifier attached to any constructed :class:`~rateslib.fx.ProxyCurve`.
Returns
-------
Curve, ProxyCurve or MultiCsaCurve
Notes
-----
If the :class:`~rateslib.curves.Curve` already exists within the attribute
``fx_curves`` that *Curve* will be returned directly.
If a :class:`~rateslib.curves.ProxyCurve` already exists with the attribute
``fx_proxy_curves`` that *Curve* will be returned.
Otherwise, creates and returns a :class:`~rateslib.curves.ProxyCurve` which determines rates
and DFs via the chaining method and the below formula,
.. math::
w_{dom:for,i} = \\frac{f_{DOMFOR,i}}{F_{DOMFOR,0}} v_{for:for,i}
For multiple collateral currencies returns a :class:`~rateslib.curves.MultiCsaCurve`.
The :class:`~rateslib.curves._CurveMeta` inherits values from the local cash
:class:`~rateslib.curves.Curve`, and the ``collateral`` value is set as the defined
collateral currency.
"""
if isinstance(collateral, list | tuple):
# TODO add this curve to fx_proxy_curves and lexsort the collateral
curves = []
for coll in collateral:
curves.append(self.curve(cashflow, coll))
curve: _BaseCurve = MultiCsaCurve(curves=curves, id=id)
curve._meta = replace(curve.meta, _collateral=",".join([_.lower() for _ in collateral])) # type: ignore[misc]
return curve
cash_ccy, coll_ccy = cashflow.lower(), collateral.lower()
pair = f"{cash_ccy}{coll_ccy}"
if pair in self.fx_curves:
return self.fx_curves[pair]
elif pair in self._fx_proxy_curves:
return self._fx_proxy_curves[pair]
else:
curve_: ProxyCurve = ProxyCurve(
cashflow=cash_ccy,
collateral=coll_ccy,
fx_forwards=self,
id=id,
)
self._fx_proxy_curves[pair] = curve_
return curve_
@_validate_states
def plot(
self,
pair: FXIndex | str,
right: datetime | str | NoInput = NoInput(0),
left: datetime | str | NoInput = NoInput(0),
fx_swap: bool = False,
) -> PlotOutput:
"""
Plot given forward FX rates.
Parameters
----------
pair : FXIndex, str
The FX pair to determine rates for (6-digit code).
right : datetime or str, optional
The right bound of the graph. If given as str should be a tenor format
defining a point measured from the initial node date of the curve.
Defaults to the terminal date of the FXForwards object.
left : datetime or str, optional
The left bound of the graph. If given as str should be a tenor format
defining a point measured from the initial node date of the curve.
Defaults to the immediate FX settlement date.
fx_swap : bool
Whether to plot as the FX rate or as FX swap points relative to the
initial FX rate on the left side of the chart.
Default is `False`.
Returns
-------
(fig, ax, line) : Matplotlib.Figure, Matplotplib.Axes, Matplotlib.Lines2D
"""
if isinstance(pair, FXIndex):
pair = pair.pair
if isinstance(left, NoInput):
left_: datetime = self.immediate
elif isinstance(left, str):
left_ = add_tenor(self.immediate, left, "NONE", NoInput(0))
elif isinstance(left, datetime):
left_ = left
else:
raise ValueError("`left` must be supplied as datetime or tenor string.")
if isinstance(right, NoInput):
right_: datetime = self.terminal
elif isinstance(right, str):
right_ = add_tenor(self.immediate, right, "NONE", NoInput(0))
elif isinstance(right, datetime):
right_ = right
else:
raise ValueError("`right` must be supplied as datetime or tenor string.")
points: int = (right_ - left_).days
x = [left_ + timedelta(days=i) for i in range(points)]
rates: list[DualTypes] = [self._rate_without_validation(pair, _) for _ in x]
if not fx_swap:
y: list[list[DualTypes]] = [rates]
else:
y = [[(rate - rates[0]) * 10000 for rate in rates]]
return plot([x] * len(y), y)
@_clear_cache_post
def _set_ad_order(self, order: int) -> None:
# does not require cache validation because updates the cache_id at end of method
self._ad = order
for curve in self.fx_curves.values():
curve._set_ad_order(order)
if isinstance(self.fx_rates, list):
for fx_rates in self.fx_rates:
fx_rates._set_ad_order(order)
else:
self.fx_rates._set_ad_order(order)
self.fx_rates_immediate._set_ad_order(order)
@_validate_states
def to_json(self) -> str:
if isinstance(self.fx_rates, list):
fx_rates: list[str] | str = [_.to_json() for _ in self.fx_rates]
else:
fx_rates = self.fx_rates.to_json()
container = {
"base": self.base,
"fx_rates": fx_rates,
"fx_curves": {k: v.to_json() for k, v in self.fx_curves.items()}, # type: ignore[attr-defined]
}
return json.dumps(container, default=str)
@classmethod
def from_json(cls, fx_forwards: str, **kwargs) -> FXForwards: # type: ignore[no-untyped-def]
"""
Loads an FXForwards object from JSON.
Parameters
----------
fx_forwards : str
JSON string describing the FXForwards class. Typically constructed with
:meth:`to_json`.
Returns
-------
FXForwards
Notes
-----
This method also creates new ``FXRates`` and ``Curve`` objects from JSON.
These new objects can be accessed from the attributes of the ``FXForwards``
instance.
"""
from rateslib.serialization import from_json
serial = json.loads(fx_forwards)
if isinstance(serial["fx_rates"], list):
fx_rates = [from_json(_) for _ in serial["fx_rates"]]
else:
fx_rates = from_json(serial["fx_rates"])
fx_curves = {k: from_json(v) for k, v in serial["fx_curves"].items()}
base = serial["base"]
return FXForwards(fx_rates, fx_curves, base)
def __eq__(self, other: Any) -> bool:
"""Test two FXForwards are identical"""
if type(self) is not type(other):
return False
for attr in ["base"]:
if getattr(self, attr, None) != getattr(other, attr, None):
return False
if self.fx_rates_immediate != other.fx_rates_immediate:
return False
# it is sufficient to check that FX immediate and curves are equivalent.
# if type(self.fx_rates) != type(other.fx_rates):
# return False
# if isinstance(self.fx_rates, list):
# if len(self.fx_rates) != len(other.fx_rates):
# return False
# for i in range(len(self.fx_rates)):
# # this tests FXRates are also ordered in the same on each object
# if self.fx_rates[i] != other.fx_rates[i]:
# return False
# else:
# if self.fx_rates != other.fx_rates:
# return False
for k, curve in self.fx_curves.items():
if k not in other.fx_curves:
return False
if curve != other.fx_curves[k]:
return False
return True
def __ne__(self, other: Any) -> bool:
return not self.__eq__(other)
# @_validate_state: unused because it is redirected to a cache_validated method (to_json)
def copy(self) -> FXForwards:
"""
An FXForwards copy creates a new object with copied references.
"""
return self.from_json(self.to_json())
# Licence: Creative Commons - Attribution-NonCommercial-NoDerivatives 4.0 International
# Commercial use of this code, and/or copying and redistribution is prohibited.
# Contact rateslib at gmail.com if this code is observed outside its intended sphere.
def forward_fx(
date: datetime,
curve_domestic: _BaseCurve,
curve_foreign: _BaseCurve,
fx_rate: DualTypes,
fx_settlement: datetime | NoInput = NoInput(0),
) -> DualTypes:
"""
Return a forward FX rate based on interest rate parity.
Parameters
----------
date : datetime
The target date to determine the adjusted FX rate for.
curve_domestic : Curve
The discount curve for the domestic currency. Should be collateral adjusted.
curve_foreign : Curve
The discount curve for the foreign currency. Should be collateral consistent
with ``domestic curve``.
fx_rate : float or Dual
The known FX rate, typically spot FX given with a spot settlement date.
fx_settlement : datetime, optional
The date the given ``fx_rate`` will settle, i.e. spot T+2. If `None` is assumed
to be immediate settlement, i.e. date upon which both ``curves`` have a DF
of precisely 1.0. Method is more efficient if ``fx_rate`` is given for
immediate settlement.
Returns
-------
float, Dual, Dual2
Notes
-----
We use the formula,
.. math::
(EURUSD) f_i = \\frac{(EUR:USD-CSA) w^*_i}{(USD:USD-CSA) v_i} F_0 = \\frac{(EUR:EUR-CSA) v^*_i}{(USD:EUR-CSA) w_i} F_0
where :math:`w` is a collateral adjusted discount curve and :math:`v` is the
locally derived discount curve in a given currency, and `*` denotes the domestic
currency. :math:`F_0` is the immediate FX rate, i.e. aligning with the initial date
on curves such that discounts factors are precisely 1.0.
This implies that given the dates and rates supplied,
.. math::
f_i = \\frac{w^*_iv_j}{v_iw_j^*} f_j = \\frac{v^*_iw_j}{w_iv_j^*} f_j
where `j` denotes the settlement date provided.
Examples
--------
Using this function directly.
.. ipython:: python
:suppress:
from rateslib.fx.fx_forwards import forward_fx
.. ipython:: python
domestic_curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.96})
foreign_curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99})
forward_fx(
date=dt(2022, 7, 1),
curve_domestic=domestic_curve,
curve_foreign=foreign_curve,
fx_rate=2.0,
fx_settlement=dt(2022, 1, 3)
)
Best practice is to use :class:`FXForwards` classes but this method provides
an efficient alternative and is occasionally used internally in the library.
.. ipython:: python
fxr = FXRates({"usdgbp": 2.0}, settlement=dt(2022, 1, 3))
fxf = FXForwards(fxr, {
"usdusd": domestic_curve,
"gbpgbp": foreign_curve,
"gbpusd": foreign_curve,
})
fxf.rate("usdgbp", dt(2022, 7, 1))
""" # noqa: E501
if date == fx_settlement: # noqa: SIM114
return fx_rate # noqa: SIM114
elif date == curve_domestic.nodes.initial and isinstance(fx_settlement, NoInput): # noqa: SIM114
return fx_rate # noqa: SIM114
_: DualTypes = curve_domestic[date] / curve_foreign[date]
if not isinstance(fx_settlement, NoInput):
_ *= curve_foreign[fx_settlement] / curve_domestic[fx_settlement]
# else: fx_settlement is deemed to be immediate hence DF are both equal to 1.0
_ *= fx_rate
return _
def _get_curves_indicator_array(
q: int, currencies: dict[str, int], fx_curves: dict[str, _BaseCurve]
) -> np.ndarray[tuple[int, int], np.dtype[np.int_]]:
"""
Constructs an indicator array identifying which cash-collateral curves are available in the
`fx_curves` dictionary.
"""
# Define the transformation matrix with unit elements in each valid pair.
T = np.zeros((q, q), dtype=int)
for k, _ in fx_curves.items():
cash, coll = k[:3].lower(), k[3:].lower()
try:
cash_idx, coll_idx = currencies[cash], currencies[coll]
except KeyError:
raise ValueError(f"`fx_curves` contains an unexpected currency: {cash} or {coll}")
T[cash_idx, coll_idx] = 1
_validate_curves_indicator_array(T)
return T
def _validate_curves_indicator_array(T: np.ndarray[tuple[int, int], np.dtype[np.int_]]) -> None:
"""
Performs checks to ensure the indicator array of cash-collateral curves contains the
appropriate number of curves required by an FXForwards object.
"""
q = T.shape[0]
if T.sum() > (2 * q) - 1:
raise ValueError(
f"`fx_curves` is overspecified. {2 * q - 1} curves are expected "
f"but {T.sum()} provided.",
)
elif T.sum() < (2 * q) - 1:
raise ValueError(
f"`fx_curves` is underspecified. {2 * q - 1} curves are expected "
f"but {T.sum()} provided.",
)
elif T.diagonal().sum() != q:
raise ValueError(
"`fx_curves` must contain local cash-collateral curves for each and every currency."
)
elif np.linalg.matrix_rank(T) != q:
raise ValueError("`fx_curves` contains co-dependent rates.")
def _recursive_pair_population(
arr: np.ndarray[tuple[int, int], np.dtype[np.int_]],
mapping: dict[tuple[int, int], int] | None = None,
) -> tuple[np.ndarray[tuple[int, int], np.dtype[np.int_]], dict[tuple[int, int], int]]:
"""
Recursively scan through an indicator matrix and populate new entries.
This identifies existing FX pairs and attempts to derive new FX pairs from those values.
Parameters
----------
arr: 2d-ndarray
An square indicator matrix consisting only of zeros and ones.
Notes
-----
``arr`` should satify the following:
- be a square matrix,
- be an indicator matrix containing only zero and ones,
- have unit diagonal,
- sum to 2n - 1, so that the correct number of prior rates are supplied,
- be a full rank matrix so no pairs are degenerate
"""
# Build the initial mapping if none exists
if mapping is None:
_mapping: dict[tuple[int, int], int] = _create_initial_mapping(arr)
else:
_mapping = mapping
# loop through currencies and find new pairs
_arr = arr.copy()
for i in range(len(_arr)):
ccy_idxs = [_ for _ in range(len(_arr)) if _arr[i, _] == 1]
pairs = combinations(ccy_idxs, 2)
for pair in pairs:
if _arr[pair[0], pair[1]] == 1 and _arr[pair[1], pair[0]] == 1:
# then the rate and its inverse are already attainable
continue
elif _arr[pair[0], pair[1]] == 1:
# then the inverse is directly attainable
_mapping[pair[1], pair[0]] = _mapping[pair[0], pair[1]]
_arr[pair[1], pair[0]] = 1
elif _arr[pair[1], pair[0]] == 1:
# then the inverse is directly attainable
_mapping[pair[0], pair[1]] = _mapping[pair[1], pair[0]]
_arr[pair[0], pair[1]] = 1
else:
_arr[pair[0], [pair[1]]] = 1
_arr[pair[1], [pair[0]]] = 1
_mapping[(pair[0], pair[1])] = i
_mapping[(pair[1], pair[0])] = i
if np.all(_arr == arr) or np.sum(_arr, axis=None) == len(_arr) ** 2:
return _arr, _mapping
else:
return _recursive_pair_population(_arr, _mapping)
def _create_initial_mapping(
arr: np.ndarray[tuple[int, int], np.dtype[np.int_]],
) -> dict[tuple[int, int], int]:
"""Detect the mappings immediately available and denote these with the value '-1'."""
_mapping: dict[tuple[int, int], int] = {}
for i in range(len(arr)):
for j in range(len(arr)):
if i == j:
continue
if arr[i, j] == 1:
_mapping[(i, j)] = -1
_mapping[(j, i)] = -1
return _mapping
================================================
FILE: python/rateslib/fx/fx_rates.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import warnings
from datetime import datetime
from functools import cached_property
from typing import TYPE_CHECKING, Any
import numpy as np
from pandas import DataFrame, Series
from rateslib import defaults
from rateslib.data.fixings import FXIndex
from rateslib.default import (
_make_py_json,
)
from rateslib.dual import Dual, gradient
from rateslib.dual.utils import _get_adorder
from rateslib.enums.generics import NoInput, _drb
from rateslib.mutability import (
_clear_cache_post,
_new_state_post,
_WithState,
)
from rateslib.rs import Ccy, FXRate
from rateslib.rs import FXRates as FXRatesObj
if TYPE_CHECKING:
from rateslib.local_types import Arr1dF64, Arr1dObj, Arr2dObj, DualTypes, Number
"""
.. ipython:: python
:suppress:
from rateslib.curves import Curve
from rateslib.fx import FXRates
from datetime import datetime as dt
"""
# Licence: Creative Commons - Attribution-NonCommercial-NoDerivatives 4.0 International
# Commercial use of this code, and/or copying and redistribution is prohibited.
# Contact rateslib at gmail.com if this code is observed outside its intended sphere.
class FXRates(_WithState):
"""
Object to store and calculate FX rates for a consistent settlement date.
Parameters
----------
fx_rates : dict[str, float]
Dict whose keys are 6-character currency pairs, and whose
values are the relevant rates.
settlement : datetime, optional
The settlement date for the FX rates.
base : str, optional
The base currency (3-digit code). If not given defaults to either:
- the base currency defined in `defaults`, if it is present in the list of currencies,
- the first currency detected.
Notes
-----
.. note::
When this class uses ``Dual`` numbers to represent sensitivities of values to
certain FX rates the variable names are called `"fx_cc1cc2"` where `"cc1"`
is left hand currency and `"cc2"` is the right hand currency in the currency pair.
See the examples contained in class methods for clarification.
Examples
--------
An FX rates market of *n* currencies is completely defined by *n-1*
independent FX pairs.
Below we define an FX rates market in 4 currencies with 3 FX pairs,
.. ipython:: python
fxr = FXRates({"eurusd": 1.1, "gbpusd": 1.25, "usdjpy": 100})
fxr.currencies
fxr.rate("gbpjpy")
Ill defined FX markets will raise ``ValueError`` and are either **overspecified**,
.. ipython:: python
try:
FXRates({"eurusd": 1.1, "gbpusd": 1.25, "usdjpy": 100, "gbpjpy": 125})
except ValueError as e:
print(e)
or are **underspecified**,
.. ipython:: python
try:
FXRates({"eurusd": 1.1, "gbpjpy": 125})
except ValueError as e:
print(e)
or use redundant, co-dependent information,
.. ipython:: python
try:
FXRates({"eurusd": 1.1, "usdeur": 0.90909, "gbpjpy": 125})
except ValueError as e:
print(e)
"""
def __init__(
self,
fx_rates: dict[str, DualTypes],
settlement: datetime | NoInput = NoInput(0),
base: str | NoInput = NoInput(0),
):
# Temporary declaration - will be overwritten
self._currencies: dict[str, int] = {}
settlement_: datetime | None = _drb(None, settlement)
fx_rates_ = [FXRate(k[0:3], k[3:6], v, settlement_) for k, v in fx_rates.items()]
if isinstance(base, NoInput):
default_ccy = defaults.base_currency.lower()
if any(default_ccy in k.lower() for k in fx_rates):
base_ = Ccy(defaults.base_currency)
else:
base_ = None
else:
base_ = Ccy(base)
self.obj = FXRatesObj(fx_rates_, base_)
self.__init_post_obj__()
self._clear_cache()
self._set_new_state()
@classmethod
def __init_from_obj__(cls, obj: FXRatesObj) -> FXRates:
"""Construct the class instance from a given rust object which is wrapped."""
# create a default instance and overwrite it
new = cls({"usdeur": 1.0}, datetime(2000, 1, 1))
new.obj = obj
new.__init_post_obj__()
return new
def __init_post_obj__(self) -> None:
self._currencies = {ccy.name: i for (i, ccy) in enumerate(self.obj.currencies)}
def __eq__(self, other: Any) -> bool:
if isinstance(other, FXRates):
return self.obj == other.obj
return False
def __ne__(self, other: Any) -> bool:
return not self.__eq__(other)
def __copy__(self) -> FXRates:
obj = FXRates.__init_from_obj__(self.obj.__copy__())
obj.__init_post_obj__()
return obj
def __repr__(self) -> str:
if len(self.currencies_list) > 5:
return (
f""
)
else:
return f""
@cached_property
def fx_array(self) -> Arr2dObj:
"""An array containing all of the FX pairs/crosses available on the object."""
# caching this prevents repetitive data transformations between Rust/Python
return np.array(self.obj.fx_array)
def _fx_array_el(self, i: int, j: int) -> Number:
# this is for typing since this numpy object array can only hold float | Dual | Dual2
return self.fx_array[i, j] # type: ignore
@property
def base(self) -> str:
"""The assumed base currency of the object which may be used as the default ``base``
currency in ``npv`` calculations when otherwise omitted.
The base currency has index 0 in the ``currencies`` dict and is that which the ``fx_vector``
is defined relative to.
"""
return self.obj.base.name
@property
def settlement(self) -> datetime:
"""The settlement date of the FX rates that define the object."""
return self.obj.fx_rates[0].settlement
@property
def pairs(self) -> list[str]:
"""A list of the currency pairs that define the object. The number of pairs is one
less than ``q``."""
return [fxr.pair for fxr in self.obj.fx_rates]
@property
def fx_rates(self) -> dict[str, DualTypes]:
"""The dict of currency pairs and their FX rates that define the object."""
return {fxr.pair: fxr.rate for fxr in self.obj.fx_rates}
@property
def currencies_list(self) -> list[str]:
"""An list of currencies available in the object. Aligns with ``currencies``."""
return [ccy.name for ccy in self.obj.currencies]
@property
def currencies(self) -> dict[str, int]:
"""A dict whose keys are the currencies contained in the object and the value is the
ordered index of that currencies in other attributes such as ``fx_array`` and
``currencies_list``."""
return self._currencies
@property
def q(self) -> int:
"""The number of currencies contained in the object."""
return len(self.obj.currencies)
@property
def fx_vector(self) -> Arr1dObj:
"""A vector of currency FX rates all relative to the stated ``base`` currency."""
return self.fx_array[0, :]
@property
def pairs_settlement(self) -> dict[str, datetime]:
"""A dict aggregating each FX pair and its settlement date. In an *FXRates* object
all pairs settle on the same settlement date."""
return dict.fromkeys(self.pairs, self.settlement)
@property
def variables(self) -> tuple[str, ...]:
"""The names of the variables associated with the object for automatic differentiation (AD)
purposes."""
return tuple(f"fx_{pair}" for pair in self.pairs)
@property
def _ad(self) -> int:
return self.obj.ad
def rate(self, pair: FXIndex | str) -> Number:
"""
Return a specified FX rate for a given currency pair.
Parameters
----------
pair : FXIndex, str
The FX pair in usual domestic:foreign convention (6 digit code).
Returns
-------
Dual
Examples
--------
.. ipython:: python
fxr = FXRates({"usdeur": 2.0, "usdgbp": 2.5})
fxr.rate("eurgbp")
"""
if isinstance(pair, FXIndex):
pair = pair.pair
domi, fori = self.currencies[pair[:3].lower()], self.currencies[pair[3:].lower()]
return self._fx_array_el(domi, fori)
def restate(self, pairs: list[str], keep_ad: bool = False) -> FXRates:
"""
Create a new :class:`FXRates` class using other (or fewer) currency pairs as majors.
Parameters
----------
pairs : list of str
The new currency pairs with which to define the ``FXRates`` class.
keep_ad : bool, optional
Keep the original derivative exposures defined by ``Dual``, instead
of redefinition. It is advised against setting this to *True*, it is mainly used
internally.
Returns
--------
FXRates
Notes
-----
This will redefine the pairs to which delta risks are expressed in ``Dual``
outputs.
If ``pairs`` match the existing object and ``keep_ad`` is
requested then the existing object is returned unchanged as new copy.
Examples
--------
Re-expressing an *FXRates* class with new majors, to which *Dual* sensitivities are
measured.
.. ipython:: python
fxr = FXRates({"eurgbp": 0.9, "gbpjpy": 125, "usdjpy": 100})
fxr.convert(100, "gbp", "usd")
fxr2 = fxr.restate(["eurusd", "gbpusd", "usdjpy"])
fxr2.convert(100, "gbp", "usd")
Extracting an *FXRates* subset from a larger object.
.. ipython:: python
fxr = FXRates({"eurgbp": 0.9, "gbpjpy": 125, "usdjpy": 100, "audusd": 0.85})
fxr2 = fxr.restate({"eurusd", "gbpusd"})
fxr2.rates_table()
"""
if pairs == self.pairs and keep_ad:
return self.__copy__() # no restate needed but return new instance
restated_fx_rates = FXRates(
{pair: self.rate(pair) if keep_ad else self.rate(pair).real for pair in pairs},
settlement=self.settlement,
base=self.base,
)
return restated_fx_rates
def convert(
self,
value: DualTypes,
domestic: str,
foreign: str | NoInput = NoInput(0),
on_error: str = "ignore",
) -> DualTypes | None:
"""
Convert an amount of a domestic currency into a foreign currency.
Parameters
----------
value : float or Dual
The amount of the domestic currency to convert.
domestic : str
The domestic currency (3-digit code).
foreign : str, optional
The foreign currency to convert to (3-digit code). Uses instance
``base`` if not given.
on_error : str in {"ignore", "warn", "raise"}
The action taken if either ``domestic`` or ``foreign`` are not contained
in the FX framework. `"ignore"` and `"warn"` will still return `None`.
Returns
-------
Dual or None
Examples
--------
.. ipython:: python
fxr = FXRates({"usdnok": 8.0})
fxr.convert(1000000, "nok", "usd")
fxr.convert(1000000, "nok", "inr") # <- returns None, "inr" not in fxr.
"""
foreign = self.base if isinstance(foreign, NoInput) else foreign.lower()
domestic = domestic.lower()
for ccy in [domestic, foreign]:
if ccy not in self.currencies:
if on_error == "ignore":
return None
elif on_error == "warn":
warnings.warn(
f"'{ccy}' not in FXRates.currencies: returning None.",
UserWarning,
)
return None
else:
raise ValueError(f"'{ccy}' not in FXRates.currencies.")
i, j = self.currencies[domestic.lower()], self.currencies[foreign.lower()]
return value * self._fx_array_el(i, j)
def convert_positions(
self,
array: Arr1dF64 | list[float],
base: str | NoInput = NoInput(0),
) -> Number:
"""
Convert an array of currency cash positions into a single base currency.
Parameters
----------
array : list, 1d ndarray of floats, or Series
The cash positions to simultaneously convert in the base currency. **Must**
be ordered by currency as defined in the attribute ``FXRates.currencies``.
base : str, optional
The currency to convert to (3-digit code). Uses instance ``base`` if not
given.
Returns
-------
Dual
Examples
--------
.. ipython:: python
fxr = FXRates({"usdnok": 8.0})
fxr.currencies
fxr.convert_positions([0, 1000000], "usd")
"""
base = self.base if isinstance(base, NoInput) else base.lower()
array_ = np.asarray(array)
j = self.currencies[base]
return np.sum(array_ * self.fx_array[:, j]) # type: ignore[no-any-return]
def positions(
self,
value: DualTypes,
base: str | NoInput = NoInput(0),
) -> Series[float]:
"""
Convert a base value with FX rate sensitivities into an array of cash positions.
Parameters
----------
value : float or Dual
The amount expressed in base currency to convert to cash positions.
base : str, optional
The base currency in which ``value`` is given (3-digit code). If not given
assumes the ``base`` of the object.
Returns
-------
Series
Examples
--------
.. ipython:: python
fxr = FXRates({"usdnok": 8.0})
fxr.positions(Dual(125000, ["fx_usdnok"], [-15625]), "usd")
fxr.positions(100, base="nok")
"""
if isinstance(value, float | int):
value = Dual(value, [], [])
base_: str = self.base if isinstance(base, NoInput) else base.lower()
_ = np.array([0 if ccy != base_ else value.real for ccy in self.currencies_list])
for pair in value.vars:
if pair[:3] == "fx_":
delta = gradient(value, [pair])[0]
_ += self._get_positions_from_delta(delta, pair[3:], base_)
return Series(_, index=self.currencies_list)
def _get_positions_from_delta(
self, delta: float, pair: str, base: str
) -> np.ndarray[tuple[int], np.dtype[np.float64]]:
"""Return an array of cash positions determined from an FX pair delta risk."""
b_idx = self.currencies[base]
domestic, foreign = pair[:3], pair[3:]
d_idx, f_idx = self.currencies[domestic], self.currencies[foreign]
_: np.ndarray[tuple[int], np.dtype[np.float64]] = np.zeros(self.q, dtype=np.float64)
# f_val = -delta * float(self.fx_array[b_idx, d_idx]) * float(self.fx_array[d_idx,f_idx])**2
# _[f_idx] = f_val
# _[d_idx] = -f_val / float(self.fx_array[d_idx, f_idx])
# return _
f_val = delta * float(self._fx_array_el(b_idx, f_idx))
_[d_idx] = f_val
_[f_idx] = -f_val / float(self._fx_array_el(f_idx, d_idx))
return _ # calculation is more efficient from a domestic pov than foreign
def rates_table(self) -> DataFrame:
"""
Return a DataFrame of all FX rates in the object.
Returns
-------
DataFrame
"""
return DataFrame(
np.vectorize(float)(self.fx_array),
index=self.currencies_list,
columns=self.currencies_list,
)
# Cache management
def _clear_cache(self) -> None:
"""
Clear the cache ID so the fx_array can be fetched and cached from Rust object.
"""
# the fx_array is a cached property.
self.__dict__.pop("fx_array", None)
# Mutation
@_new_state_post
@_clear_cache_post
def update(self, fx_rates: dict[str, float] | NoInput = NoInput(0)) -> None:
"""
Update all or some of the FX rates of the instance with new market data.
Parameters
----------
fx_rates : dict, optional
Dict whose keys are 6-character domestic-foreign currency pairs and
which are present in FXRates.pairs, and whose
values are the relevant rates to update. An empty dict will be ignored and
perform no update.
Returns
-------
None
Notes
-----
.. warning::
*Rateslib* is an object-oriented library that uses complex associations. Although
Python may not object to directly mutating attributes of an *FXRates* instance, this
should be avoided in *rateslib*. Only use official ``update`` methods to mutate the
values of an existing *FXRates* instance.
This class is labelled as a **mutable on update** object.
Suppose an *FXRates* class has been instantiated and resides in memory.
.. ipython:: python
fxr = FXRates({"eurusd": 1.05, "gbpusd": 1.25}, settlement=dt(2022, 1, 3), base="usd")
id(fxr)
This object may be linked to others, probably an :class:`~rateslib.fx.FXForwards` class.
It can be updated with some new market data. This will preserve its memory id and
association with other objects. Any :class:`~rateslib.fx.FXForwards` objects referencing
this will detect this change and will also lazily update via *rateslib's* state
management.
.. ipython:: python
linked_obj = fxr
fxr.update({"eurusd": 1.06})
id(fxr) # <- SAME as above
linked_obj.rate("eurusd")
Examples
--------
.. ipython:: python
fxr = FXRates({"usdeur": 0.9, "eurnok": 8.5})
fxr.rate("usdnok")
fxr.update({"usdeur": 1.0})
fxr.rate("usdnok")
"""
if isinstance(fx_rates, NoInput) or len(fx_rates) == 0:
return None
fx_rates_ = [FXRate(k[0:3], k[3:6], v, self.settlement) for k, v in fx_rates.items()]
self.obj.update(fx_rates_)
@_clear_cache_post
def _set_ad_order(self, order: int) -> None:
"""
Change the node values to float, Dual or Dual2 based on input parameter.
"""
self.obj.set_ad_order(_get_adorder(order))
# Serialization
def to_json(self) -> str:
"""Return a JSON representation of the object.
Returns
-------
str
"""
return _make_py_json(self.obj.to_json(), "FXRates")
# Licence: Creative Commons - Attribution-NonCommercial-NoDerivatives 4.0 International
# Commercial use of this code, and/or copying and redistribution is prohibited.
# Contact rateslib at gmail.com if this code is observed outside its intended sphere.
================================================
FILE: python/rateslib/instruments/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from rateslib.instruments.bonds import (
Bill,
BillCalcMode,
BondCalcMode,
BondFuture,
FixedRateBond,
FloatRateNote,
IndexFixedRateBond,
_BaseBondInstrument,
)
from rateslib.instruments.cds import CDS
from rateslib.instruments.fee import Fee
from rateslib.instruments.fly import Fly
from rateslib.instruments.fra import FRA
from rateslib.instruments.fx_forward import FXForward
from rateslib.instruments.fx_options import (
FXBrokerFly,
FXCall,
FXPut,
FXRiskReversal,
FXStraddle,
FXStrangle,
FXVolValue,
_BaseFXOption,
_BaseFXOptionStrat,
)
from rateslib.instruments.fx_swap import FXSwap
from rateslib.instruments.iirs import IIRS
from rateslib.instruments.ir_options import (
IRSCall,
IRSPut,
IRSRiskReversal,
IRSStraddle,
IRSStrangle,
IRVolValue,
_BaseIRSOption,
_BaseIRSOptionStrat,
)
from rateslib.instruments.irs import IRS
from rateslib.instruments.loan import Loan
from rateslib.instruments.ndf import NDF
from rateslib.instruments.ndxcs import NDXCS
from rateslib.instruments.portfolio import Portfolio
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.sbs import SBS
from rateslib.instruments.spread import Spread
from rateslib.instruments.stir_future import STIRFuture
from rateslib.instruments.value import Value
from rateslib.instruments.xcs import XCS
from rateslib.instruments.yoyis import YoYIS
from rateslib.instruments.zcis import ZCIS
from rateslib.instruments.zcs import ZCS
__all__ = [
# derivatives
"IRS",
"FRA",
"SBS",
"STIRFuture",
"ZCS",
# cross currency
"XCS",
"NDXCS",
"NDF",
"FXSwap",
"FXForward",
# inflation
"ZCIS",
"IIRS",
"YoYIS",
# credit
"CDS",
# securities
"FixedRateBond",
"FloatRateNote",
"IndexFixedRateBond",
"BondFuture",
"Bill",
"Fee",
"Loan",
# fx options
"FXPut",
"FXCall",
"FXRiskReversal",
"FXStraddle",
"FXStrangle",
"FXBrokerFly",
# ir options
"IRSPut",
"IRSCall",
"IRSRiskReversal",
"IRSStraddle",
"IRSStrangle",
# generics
"Portfolio",
"Fly",
"Spread",
"Value",
"FXVolValue",
"IRVolValue",
"BondCalcMode",
"BillCalcMode",
"_BaseInstrument",
"_BaseBondInstrument",
"_BaseFXOption",
"_BaseFXOptionStrat",
"_BaseIRSOption",
"_BaseIRSOptionStrat",
]
================================================
FILE: python/rateslib/instruments/bonds/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from rateslib.instruments.bonds.bill import Bill
from rateslib.instruments.bonds.bond_future import BondFuture
from rateslib.instruments.bonds.conventions import BillCalcMode, BondCalcMode
from rateslib.instruments.bonds.fixed_rate_bond import FixedRateBond
from rateslib.instruments.bonds.float_rate_note import FloatRateNote
from rateslib.instruments.bonds.index_fixed_rate_bond import IndexFixedRateBond
from rateslib.instruments.bonds.protocols import _BaseBondInstrument
__all__ = [
"FixedRateBond",
"IndexFixedRateBond",
"BondFuture",
"Bill",
"FloatRateNote",
"BillCalcMode",
"BondCalcMode",
"_BaseBondInstrument",
]
================================================
FILE: python/rateslib/instruments/bonds/bill.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.dual import Variable, gradient
from rateslib.dual.utils import _dual_float, _to_number
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.bonds.conventions import (
BillCalcMode,
_get_bill_calc_mode,
)
from rateslib.instruments.bonds.fixed_rate_bond import FixedRateBond
from rateslib.instruments.bonds.protocols import _BaseBondInstrument
from rateslib.instruments.protocols.kwargs import _convert_to_schedule_kwargs, _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_parse_curves,
_Vol,
)
from rateslib.legs import FixedLeg
from rateslib.scheduling import Schedule
from rateslib.scheduling.frequency import _get_frequency
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CalInput,
CurvesT_,
DualTypes,
DualTypes_,
FXForwards_,
Number,
RollDay,
Sequence,
Solver_,
VolT_,
_BaseLeg,
bool_,
datetime,
datetime_,
int_,
str_,
)
class Bill(_BaseBondInstrument):
"""
A *bill*, or discount security, composed of a :class:`~rateslib.legs.FixedLeg`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.instruments import Bill
from datetime import datetime as dt
.. ipython:: python
bill = Bill(
effective=dt(2000, 1, 1),
termination="3y",
spec="us_gbb",
)
bill.cashflows()
.. rubric:: Pricing
A *Bill* requires one *disc curve*. The following input formats are
allowed:
.. code-block:: python
curves = curve | [curve] # a single curve is repeated for all required curves
curves = {"disc_curve": disc_curve} # dict form is explicit
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **scheduling** parameters.
effective : datetime, :red:`required`
The unadjusted effective date. If given as adjusted, unadjusted alternatives may be
inferred.
termination : datetime, str, :red:`required`
The unadjusted termination date. If given as adjusted, unadjusted alternatives may be
inferred. If given as string tenor will be calculated from ``effective``.
frequency : Frequency, str, :red:`required`
The frequency of the schedule.
If given as string will derive a :class:`~rateslib.scheduling.Frequency` aligning with:
monthly ("M"), quarterly ("Q"), semi-annually ("S"), annually("A") or zero-coupon ("Z"), or
a set number of calendar or business days ("_D", "_B"), weeks ("_W"), months ("_M") or
years ("_Y").
Where required, the :class:`~rateslib.scheduling.RollDay` is derived as per ``roll``
and business day calendar as per ``calendar``.
roll : RollDay, int in [1, 31], str in {"eom", "imm", "som"}, :green:`optional`
The roll day of the schedule. If not given or not available in ``frequency`` will be
inferred for monthly frequency variants.
eom : bool, :green:`optional`
Use an end of month preference rather than regular rolls for ``roll`` inference. Set by
default. Not required if ``roll`` is defined.
modifier : Adjuster, str in {"NONE", "F", "MF", "P", "MP"}, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` used for adjusting unadjusted schedule dates
into adjusted dates. If given as string must define simple date rolling rules.
calendar : calendar, str, :green:`optional`
The business day calendar object to use. If string will call
:meth:`~rateslib.scheduling.get_calendar`.
payment_lag: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
a payment date. If given as integer will define the number of business days to
lag payments by.
ex_div: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional dates, which may be used, for example by fixings schedules. If given as integer
will define the number of business days to lag dates by.
convention: str, :green:`optional (set by 'defaults')`
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the *Instrument* (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The initial leg notional, defined in units of *reference currency*.
.. note::
The following are **meta parameters**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
calc_mode : str or BillCalcMode
A calculation mode for dealing with bonds under different conventions. See notes.
settle: int
The number of days by which to lag 'today' to arrive at standard settlement.
metric : str, :green:`optional` (set as 'price')
The pricing metric returned by :meth:`~rateslib.instruments.FixedRateBond.rate`.
spec: str, :green:`optional`
A collective group of parameters. See
:ref:`default argument specifications `.
"""
_rate_scalar = 1.0
@property
def leg1(self) -> FixedLeg:
"""The :class:`~rateslib.legs.FixedLeg` of the *Instrument*."""
return self._leg1
@property
def legs(self) -> Sequence[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
def __init__(
self,
effective: datetime_ = NoInput(0),
termination: datetime | str_ = NoInput(0),
frequency: str_ = NoInput(0),
roll: int | RollDay | str_ = NoInput(0),
eom: bool_ = NoInput(0),
modifier: str_ = NoInput(0),
calendar: CalInput = NoInput(0),
payment_lag: int_ = NoInput(0),
notional: DualTypes_ = NoInput(0),
currency: str_ = NoInput(0),
convention: str_ = NoInput(0),
ex_div: int_ = NoInput(0),
settle: int_ = NoInput(0),
calc_mode: BillCalcMode | str_ = NoInput(0),
curves: CurvesT_ = NoInput(0),
spec: str_ = NoInput(0),
metric: str = "price",
):
user_args = dict(
effective=effective,
termination=termination,
frequency=frequency,
modifier=modifier,
calendar=calendar,
payment_lag=payment_lag,
ex_div=ex_div,
roll=roll,
eom=eom,
notional=notional,
currency=currency,
convention=convention,
settle=settle,
calc_mode=calc_mode,
curves=self._parse_curves(curves),
metric=metric,
)
instrument_args = dict( # these are hard coded arguments specific to this instrument
initial_exchange=False,
final_exchange=True,
fixed_rate=0.0,
vol=_Vol(),
)
default_args = dict(
notional=defaults.notional,
calc_mode=defaults.calc_mode[type(self).__name__],
payment_lag=defaults.payment_lag_specific[type(self).__name__],
payment_lag_exchange=defaults.payment_lag_specific[type(self).__name__],
ex_div=defaults.ex_div,
settle=defaults.settle,
)
self._kwargs = _KWArgs(
spec=spec,
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=["curves", "calc_mode", "settle", "metric", "frequency", "vol"],
)
self.kwargs.meta["calc_mode"] = _get_bill_calc_mode(self.kwargs.meta["calc_mode"])
if isinstance(self.kwargs.leg1["termination"], str):
s_ = Schedule(
effective=self.kwargs.leg1["effective"],
termination=self.kwargs.leg1["termination"],
frequency=self.kwargs.leg1["termination"],
modifier=self.kwargs.leg1["modifier"],
calendar=self.kwargs.leg1["calendar"],
roll=self.kwargs.leg1["roll"],
eom=self.kwargs.leg1["eom"],
)
self._kwargs.leg1["termination"] = s_.termination
self._kwargs.leg1["frequency"] = "Z"
self._kwargs.meta["frequency"] = _drb(
self.kwargs.meta["calc_mode"]._ytm_clone_kwargs["frequency"],
self.kwargs.meta["frequency"],
)
self._leg1 = FixedLeg(**_convert_to_schedule_kwargs(self.kwargs.leg1, 1))
self._legs = [self.leg1]
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
A Bill has one curve requirements: a disc_curve.
When given as only 1 element this curve is applied to all of the those components
When given as 2 elements the first is treated as the rate curve and the 2nd as disc curve.
"""
if isinstance(curves, NoInput):
return _Curves()
if isinstance(curves, dict):
return _Curves(
disc_curve=curves.get("disc_curve", NoInput(0)),
)
elif isinstance(curves, list | tuple):
if len(curves) == 1:
return _Curves(
disc_curve=curves[0],
)
elif len(curves) == 2:
return _Curves(
disc_curve=curves[1],
)
else:
raise ValueError(
f"{type(self).__name__} requires only 1 curve types. Got {len(curves)}."
)
elif isinstance(curves, _Curves):
return curves
else: # `curves` is just a single input which is copied across all curves
return _Curves(
disc_curve=curves, # type: ignore[arg-type]
)
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
"""
Return various pricing metrics of the security calculated from
:class:`~rateslib.curves.Curve` s.
Parameters
----------
curves : Curve, str or list of such
A single :class:`Curve` or id or a list of such. A list defines the
following curves in the order:
- Forecasting :class:`Curve` for ``leg1``.
- Discounting :class:`Curve` for ``leg1``.
solver : Solver, optional
The numerical :class:`Solver` that constructs ``Curves`` from calibrating
instruments.
fx : float, FXRates, FXForwards, optional
The immediate settlement FX rate that will be used to convert values
into another currency. A given `float` is used directly. If giving a
``FXRates`` or ``FXForwards`` object, converts from local currency
into ``base``.
base : str, optional
The base currency to convert cashflows into (3-digit code), set by default.
Only used if ``fx`` is an ``FXRates`` or ``FXForwards`` object.
metric : str in {"price", "discount_rate", "ytm", "simple_rate"}
Metric returned by the method. Uses the *Instrument* default if not given.
Returns
-------
float, Dual, Dual2
"""
c = _parse_curves(self, curves, solver)
disc_curve_ = _get_curve("disc_curve", False, False, *c)
settlement_ = self._maybe_get_settlement(settlement=settlement, disc_curve=disc_curve_)
# scale price to par 100 and make a fwd adjustment according to curve
price = (
self.npv(curves=curves, solver=solver, local=False) # type: ignore[operator]
* 100
/ (-self.leg1.settlement_params.notional * disc_curve_[settlement_])
)
metric_ = _drb(self.kwargs.meta["metric"], metric).lower()
if metric_ in ["price", "clean_price", "dirty_price"]:
return price
elif metric_ == "discount_rate":
return self.discount_rate(price, settlement_)
elif metric_ == "simple_rate":
return self.simple_rate(price, settlement_)
elif metric_ == "ytm":
return self.ytm(price, settlement_, NoInput(0))
raise ValueError("`metric` must be in {'price', 'discount_rate', 'ytm', 'simple_rate'}")
def simple_rate(self, price: DualTypes, settlement: datetime) -> DualTypes:
"""
Return the simple rate of the security from its ``price``.
Parameters
----------
price : float, Dual, or Dual2
The price of the security.
settlement : datetime
The settlement date of the security.
Returns
-------
float, Dual, or Dual2
"""
acc_frac = self.kwargs.meta["calc_mode"]._settle_accrual(self, settlement, 0)
dcf = (1 - acc_frac) * self.leg1._regular_periods[0].period_params.dcf
return ((100 / price - 1) / dcf) * 100 # type: ignore[no-any-return]
def discount_rate(self, price: DualTypes, settlement: datetime) -> DualTypes:
"""
Return the discount rate of the security from its ``price``.
Parameters
----------
price : float, Dual, or Dual2
The price of the security.
settlement : datetime
The settlement date of the security.
Returns
-------
float, Dual, or Dual2
"""
acc_frac = self.kwargs.meta["calc_mode"]._settle_accrual(self, settlement, 0)
dcf = (1 - acc_frac) * self.leg1._regular_periods[0].period_params.dcf
rate = ((1 - price / 100) / dcf) * 100
return rate # type: ignore[no-any-return]
def price(
self,
rate: DualTypes,
settlement: datetime,
dirty: bool = False,
calc_mode: str_ = NoInput(0),
) -> DualTypes:
"""
Return the price of the bill given the ``discount_rate``.
Parameters
----------
rate : float
The rate used by the pricing formula.
settlement : datetime
The settlement date.
dirty : bool, not required
Discount securities have no coupon, the concept of clean or dirty is not
relevant. Argument is included for signature consistency with
:meth:`FixedRateBond.price`.
calc_mode : str, optional
A calculation mode to force, which is used instead of that attributed the
*Bill* instance.
Returns
-------
float, Dual, Dual2
"""
calc_mode_ = _get_bill_calc_mode(_drb(self.kwargs.meta["calc_mode"], calc_mode))
price_func = getattr(self, f"_price_{calc_mode_._price_type}")
return price_func(rate, settlement) # type: ignore[no-any-return]
def _price_discount(self, rate: DualTypes, settlement: datetime) -> DualTypes:
acc_frac = self.kwargs.meta["calc_mode"]._settle_accrual(self, settlement, 0)
dcf = (1 - acc_frac) * self.leg1._regular_periods[0].period_params.dcf
return 100 - rate * dcf # type: ignore[no-any-return]
def _price_simple(self, rate: DualTypes, settlement: datetime) -> DualTypes:
acc_frac = self.kwargs.meta["calc_mode"]._settle_accrual(self, settlement, 0)
dcf = (1 - acc_frac) * self.leg1._regular_periods[0].period_params.dcf
return 100 / (1 + rate * dcf / 100) # type: ignore[no-any-return]
def ytm( # type: ignore[override]
self,
price: DualTypes,
settlement: datetime,
calc_mode: BillCalcMode | str_ = NoInput(0),
) -> Number:
"""
Calculate the yield-to-maturity on an equivalent bond with a coupon of 0%.
Parameters
----------
price: float, Dual, Dual2
The price of the *Bill*.
settlement: datetime
The settlement date of the *Bill*.
calc_mode : str, optional
A calculation mode to force, which is used instead of that attributed the
*Bill* instance.
Notes
-----
Maps the following *Bill* ``calc_mode`` to the following *Bond* specifications:
- *NoInput* -> "ust"
- *"ustb"* -> "ust"
- *"uktb"* -> "ukt"
- *"sgbb"* -> "sgb"
This method calculates by constructing a :class:`~rateslib.instruments.FixedRateBond`
with a regular 0% coupon measured from the termination date of the bill.
"""
calc_mode_ = _get_bill_calc_mode(_drb(self.kwargs.meta["calc_mode"], calc_mode))
freq = calc_mode_._ytm_clone_kwargs["frequency"]
frequency = _get_frequency(
freq, self.leg1.schedule.utermination.day, self.leg1.schedule.calendar
)
quasi_ustart = frequency.uprevious(self.leg1.schedule.uschedule[-1])
while quasi_ustart > settlement:
quasi_ustart = frequency.uprevious(quasi_ustart)
equiv_bond = FixedRateBond( # type: ignore[abstract]
effective=quasi_ustart,
termination=self.leg1.schedule.utermination,
fixed_rate=0.0,
**calc_mode_._ytm_clone_kwargs, # type: ignore[arg-type]
)
return equiv_bond.ytm(price, settlement)
def duration(self, ytm: DualTypes, settlement: datetime, metric: str = "risk") -> float:
"""
Return the duration of the *Bill*. See
:class:`~rateslib.instruments.FixedRateBond.duration` for arguments.
Notes
------
.. warning::
This function returns a *duration* that is consistent with a
*FixedRateBond* yield-to-maturity definition. It currently does not use the
specified ``convention`` of the *Bill*, and can be sensitive to the
``frequency`` of the representative *FixedRateBond* equivalent.
.. ipython:: python
bill = Bill(effective=dt(2024, 2, 29), termination=dt(2024, 8, 29), spec="us_gbb")
bill.duration(settlement=dt(2024, 5, 30), ytm=5.2525, metric="duration")
bill = Bill(effective=dt(2024, 2, 29), termination=dt(2024, 8, 29), spec="us_gbb", frequency="A")
bill.duration(settlement=dt(2024, 5, 30), ytm=5.2525, metric="duration")
""" # noqa: E501
# TODO: this is not AD safe: returns only float
ytm_: float = _dual_float(ytm)
if metric == "duration":
price_ = _to_number(self.price(Variable(ytm_, ["y"]), settlement, dirty=True))
freq = _get_frequency(
self.kwargs.meta["frequency"],
self.leg1.schedule.utermination.day,
self.leg1.schedule.calendar,
)
f = freq.periods_per_annum()
v = 1 + ytm_ / (100 * f)
_: float = -gradient(price_, ["y"])[0] / _dual_float(price_) * v * 100
return _
else:
return super().duration(ytm, settlement, metric)
================================================
FILE: python/rateslib/instruments/bonds/bond_future.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from collections.abc import Sequence
from datetime import datetime
from typing import TYPE_CHECKING, Protocol
from pandas import DataFrame
from rateslib import defaults
from rateslib.curves import Curve
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.protocols import _BaseInstrument, _KWArgs
from rateslib.instruments.protocols.pricing import _Curves, _get_curve, _parse_curves, _Vol
from rateslib.periods.utils import (
_maybe_local,
)
from rateslib.rs import Adjuster, Cal, RollDay
from rateslib.scheduling import add_tenor
from rateslib.scheduling.calendars import _get_years_and_months
from rateslib.solver import Solver
if TYPE_CHECKING:
from rateslib.local_types import (
Any,
CurvesT_,
DualTypes,
FixedRateBond,
FXForwards_,
Solver_,
VolT_,
datetime_,
float_,
int_,
str_,
)
class ConversionFactorFunction(Protocol):
# Callable type for Conversion Factor Functions
def __call__(self, bond: FixedRateBond) -> DualTypes: ...
class BondFuture(_BaseInstrument):
"""
A *bond future* derivative containing a basket of :class:`~rateslib.instruments.FixedRateBond`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import BondFuture, dt, FixedRateBond
.. ipython:: python
bf = BondFuture(
delivery=(dt(2000, 6, 1), dt(2000, 6, 30)),
coupon=7.0,
basket=[
FixedRateBond(dt(1999, 1, 1), dt(2009, 12, 7), spec="uk_gb", fixed_rate=5.75),
FixedRateBond(dt(1999, 1, 1), dt(2011, 7, 12), spec="uk_gb", fixed_rate=9.00),
],
nominal=100000,
currency="gbp",
calc_mode="ytm"
)
bf.cfs
.. rubric:: Pricing
The ``curves`` on individual bonds can be set directly on those *Instruments*, or the
``curves`` for the *BondFuture* will act, if given, as an override.
Any *FixedRateBond* requires one *disc curve*. The following input formats are
allowed:
.. code-block:: python
curves = curve | [curve] # a single curve is repeated for all required curves
curves = {"disc_curve": disc_curve} # dict form is explicit
Parameters
----------
coupon: float
The nominal coupon rate set on the contract specifications.
delivery: datetime or 2-tuple of datetimes
The delivery window first and last delivery day, or a single delivery day.
basket: tuple of FixedRateBond
The bonds that are available as deliverables.
nominal: float, optional
The nominal amount of the contract.
contracts: int, optional
The number of contracts owned or short.
calendar: str, optional
The calendar to define delivery days within the delivery window.
currency: str, optional
The currency (3-digit code) of the settlement contract.
calc_mode : str or BondCalcMode
A calculation mode for determining conversion factors. See notes.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* digital methods' ``curves`` argument.
See **Pricing**.
metric : str, :green:`optional` (set as 'clean_price')
The pricing metric returned by :meth:`~rateslib.instruments.FixedRateBond.rate`.
spec: str, :green:`optional`
A collective group of parameters. See
:ref:`default argument specifications `.
Notes
-----
Conversion factors (CFs) ``calc_mode`` are:
- *"ytm"* which calculates the CF as the clean price percent of par with the bond having a
yield-to-maturity on the first delivery day in the delivery window.
- *"ust_short"* which applies to CME 2y, 3y and 5y treasury futures. See
:download:`CME Treasury Conversion Factors<_static/us-treasury-cfs.pdf>`.
- *"ust_long"* which applies to CME 10y and 30y treasury futures.
- *"eurex_eur"* which applies to EUREX EUR denominated government bond futures, except
Italian BTPs which require a different CF formula.
- *"eurex_chf"* which applies to EUREX CHF denominated government bond futures.
- *"ice_gbp"* which applies to ICE Gilt futures.
""" # noqa: E501
def __init__(
self,
coupon: float_ = NoInput(0),
delivery: datetime_ | tuple[datetime, datetime] = NoInput(0),
basket: tuple[FixedRateBond] | NoInput = NoInput(0),
nominal: float_ = NoInput(0),
contracts: int_ = NoInput(0),
calendar: str_ = NoInput(0),
currency: str_ = NoInput(0),
calc_mode: str_ = NoInput(0),
# meta
curves: CurvesT_ = NoInput(0),
spec: str_ = NoInput(0),
metric: str_ = NoInput(0),
):
user_args = dict(
coupon=coupon,
delivery=delivery,
basket=basket,
nominal=nominal,
contracts=contracts,
calendar=calendar,
currency=currency,
calc_mode=calc_mode,
metric=metric,
curves=self._parse_curves(curves),
)
instrument_args: dict[str, Any] = dict(
vol=_Vol(),
)
# set defaults for missing values
default_args = dict(
calc_mode=defaults.calc_mode_futures,
currency=defaults.base_currency,
nominal=defaults.notional,
contracts=1,
metric="future_price",
)
self._kwargs = _KWArgs(
spec=spec,
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=[
"coupon",
"delivery",
"basket",
"nominal",
"contracts",
"calendar",
"currency",
"calc_mode",
"metric",
"curves",
"vol",
],
)
kw = self.kwargs.meta
if isinstance(kw["delivery"], datetime):
kw["delivery"] = (kw["delivery"], kw["delivery"])
elif isinstance(kw["delivery"], NoInput):
raise ValueError("`delivery` must be a datetime or sequence of datetimes.")
else:
kw["delivery"] = tuple(kw["delivery"])
if isinstance(kw["coupon"], NoInput):
raise ValueError("`coupon` must be value.")
self._cfs: tuple[DualTypes, ...] | NoInput = NoInput(0)
def __repr__(self) -> str:
return f""
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
An FRB has one curve requirements: a disc_curve.
When given as only 1 element this curve is applied to all of the those components
When given as 2 elements the first is treated as the rate curve and the 2nd as disc curve.
"""
if isinstance(curves, NoInput):
return _Curves()
if isinstance(curves, dict):
return _Curves(
disc_curve=curves.get("disc_curve", NoInput(0)),
)
elif isinstance(curves, list | tuple):
if len(curves) == 1:
return _Curves(
disc_curve=curves[0],
)
elif len(curves) == 2:
return _Curves(
disc_curve=curves[1],
)
else:
raise ValueError(
f"{type(self).__name__} requires only 1 curve types. Got {len(curves)}."
)
elif isinstance(curves, _Curves):
return curves
else: # `curves` is just a single input which is copied across all curves
return _Curves(
disc_curve=curves, # type: ignore[arg-type]
)
@property
def notional(self) -> DualTypes:
"""
The effective notional: the number of contracts multiplied by contract nominal.
Returns
-------
float, Dual, Dual2, Variable
"""
nominal: DualTypes = self.kwargs.meta["nominal"]
contracts: DualTypes = self.kwargs.meta["contracts"]
_: DualTypes = nominal * contracts * -1
return _ # long positions is negative notn
@property
def cfs(self) -> tuple[DualTypes, ...]:
"""
Return the conversion factors for each bond in the ordered ``basket``.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import dt, BondFuture, FixedRateBond
.. ipython:: python
bf = BondFuture(
delivery=(dt(2000, 6, 1), dt(2000, 6, 30)),
coupon=7.0,
basket=[
FixedRateBond(dt(1999, 1, 1), dt(2009, 12, 7), fixed_rate=5.75, spec="uk_gb"),
FixedRateBond(dt(1999, 1, 1), dt(2011, 7, 12), fixed_rate=9.00, spec="uk_gb"),
FixedRateBond(dt(1999, 1, 1), dt(2010, 11, 25), fixed_rate=6.25, spec="uk_gb"),
FixedRateBond(dt(1999, 1, 1), dt(2012, 8, 6), fixed_rate=9.00, spec="uk_gb"),
]
)
bf.cfs
Returns
-------
tuple
Notes
-----
The determination of conversion factors depend upon the ``calc_mode`` given
at initialization. These values, under the appropriate method, can be compared with
officially published exchange data such as that for UK gilts under the "ytm" method:
:download:`ICE-LIFFE Jun23 Long Gilt<_static/long_gilt_initial_jun23.pdf>`, and values
under the 'eurex_eur' see
:download:`EUREX Jun23 Bond Futures<_static/eurex_bond_conversion_factors.csv>`.
"""
if isinstance(self._cfs, NoInput):
self._cfs = self._conversion_factors()
return self._cfs
@property
def _cf_funcs(self) -> dict[str, ConversionFactorFunction]:
return {
"ytm": self._cfs_ytm,
"ust_short": self._cfs_ust_short,
"ust_long": self._cfs_ust_long,
"eurex_eur": self._cfs_eurex_eur,
"eurex_chf": self._cfs_eurex_chf,
"ice_gbp": self._cfs_ice_gbp,
}
def _conversion_factors(self) -> tuple[DualTypes, ...]:
calc_mode: str = self.kwargs.meta["calc_mode"].lower()
basket: tuple[FixedRateBond, ...] = self.kwargs.meta["basket"]
try:
return tuple(self._cf_funcs[calc_mode](bond) for bond in basket)
except KeyError:
raise ValueError("`calc_mode` must be in {'ytm', 'ust_short', 'ust_long'}")
def _cfs_ytm(self, bond: FixedRateBond) -> DualTypes:
coupon: DualTypes = self.kwargs.meta["coupon"]
delivery: tuple[datetime, datetime] = self.kwargs.meta["delivery"]
return bond.price(coupon, delivery[0]) / 100
def _cfs_ust(self, bond: FixedRateBond, short: bool) -> float:
# TODO: This method is not AD safe: it uses "round" function which destroys derivatives
# See CME pdf in doc Notes for formula.
coupon = _dual_float(bond.fixed_rate / 100.0) # type: ignore[operator] # fixed rate is given
delivery: datetime = self.kwargs.meta["delivery"][0]
n, z = _get_years_and_months(delivery, bond.leg1.schedule.termination)
if not short:
mapping = {
0: 0,
1: 0,
2: 0,
3: 3,
4: 3,
5: 3,
6: 6,
7: 6,
8: 6,
9: 9,
10: 9,
11: 9,
}
z = mapping[z] # round down number of months to quarters
if z < 7:
v = z
elif short:
v = z - 6
else:
v = 3
a = 1 / 1.03 ** (v / 6.0)
b = (coupon / 2) * (6 - v) / 6.0
if z < 7:
c = 1 / 1.03 ** (2 * n)
else:
c = 1 / 1.03 ** (2 * n + 1)
d = (coupon / 0.06) * (1 - c)
factor = a * ((coupon / 2) + c + d) - b
_: float = round(factor, 4)
return _
def _cfs_ust_short(self, bond: FixedRateBond) -> float:
return self._cfs_ust(bond, True)
def _cfs_ust_long(self, bond: FixedRateBond) -> float:
return self._cfs_ust(bond, False)
def _cfs_eurex_eur(self, bond: FixedRateBond) -> float:
# TODO: This method is not AD safe: it uses "round" function which destroys derivatives
# See EUREX specs
dd: datetime = self.kwargs.meta["delivery"][1]
i = bond.leg1._period_index(dd)
ncd = bond.leg1._regular_periods[i].period_params.end
ncd1y = add_tenor(ncd, "-1y", "none")
ncd2y = add_tenor(ncd, "-2y", "none")
lcd = bond.leg1._regular_periods[i].period_params.start
d_e = float((ncd1y - dd).days)
if d_e < 0:
act1 = float((ncd - ncd1y).days)
else:
act1 = float((ncd1y - ncd2y).days)
d_i = float((ncd1y - lcd).days)
if d_i < 0:
act2 = float((ncd - ncd1y).days)
else:
act2 = float((ncd1y - ncd2y).days)
f = 1.0 + d_e / act1
c: DualTypes = bond.fixed_rate # type: ignore[assignment]
n = round((bond.leg1.schedule.termination - ncd).days / 365.25)
not_: DualTypes = self.kwargs.meta["coupon"]
_ = 1.0 + not_ / 100
cf = 1 / _**f * (c / 100.0 * d_i / act2 + c / not_ * (_ - 1 / _**n) + 1 / _**n)
cf -= c / 100.0 * (d_i / act2 - d_e / act1)
return round(_dual_float(cf), 6)
def _cfs_eurex_chf(self, bond: FixedRateBond) -> float:
# TODO: This method is not AD safe: it uses "round" function which destroys derivatives
# See EUREX specs
dd: datetime = self.kwargs.meta["delivery"][1]
mat = bond.leg1.schedule.termination
# get full years and full months
cal = Cal([], [])
n = mat.year - dd.year - 1
_date = datetime(dd.year + n, dd.month, dd.day)
f = -1.0
while _date < mat:
f += 1
_date = cal.add_months(_date, 1, Adjuster.Actual(), RollDay.Day(dd.day))
if f == 12:
f = 0
n += 1
## Using only Python calendar methods
# n = mat.year - dd.year
# f = (mat.month - dd.month)
# if f < 0:
# n = n - 1
# f = f % 12
#
# if f < 0:
# n = n - 1
# f = f % 12
#
# if mat.day < dd.day:
# if f == 0:
# n = n - 1
# f = 11
# else:
# f = f - 1
#
# if f == 0:
# f = 12
# n = n - 1
f = f / 12.0
c: DualTypes = bond.fixed_rate # type: ignore[assignment]
not_: DualTypes = self.kwargs.meta["coupon"]
v = 1.0 / (1.0 + not_ / 100.0)
cf = v**f * (c / not_ * (1.0 + not_ / 100.0 - v**n) + v**n) - c * (1 - f) / 100.0
return round(_dual_float(cf), 6)
def _cfs_ice_gbp(self, bond: FixedRateBond) -> float:
# TODO: This method is not AD safe: it uses "round" function which destroys derivatives
# See ICE specs: uses a YTM method for the first delivery date as settlement, rounded
d: datetime = self.kwargs.meta["delivery"][0]
price = bond.price(ytm=self.kwargs.meta["coupon"], settlement=datetime(d.year, d.month, 1))
return round(_dual_float(price / 100.0), 7)
def dlv(
self,
future_price: DualTypes,
prices: list[DualTypes],
repo_rate: DualTypes | tuple[DualTypes, ...],
settlement: datetime,
delivery: datetime | NoInput = NoInput(0),
convention: str | NoInput = NoInput(0),
dirty: bool = False,
) -> DataFrame:
"""
Return an aggregated DataFrame of deliverable (dlv) metrics.
.. rubric:: Examples
This example replicates the screen print in the publication
*The Futures Bond Basis: Second Edition (p77)* by Moorad Choudhry. To replicate
that publication exactly no calendar has been provided. Using the London business day
calendar and would affect the metrics of the third bond to a small degree (i.e.
set `calendar="ldn"`)
.. ipython:: python
:suppress:
from rateslib import BondFuture, Solver, FixedRateBond, dt
.. ipython:: python
future = BondFuture(
delivery=(dt(2000, 6, 1), dt(2000, 6, 30)),
coupon=7.0,
basket=[
FixedRateBond(dt(1999, 1, 1), dt(2009, 12, 7), spec="uk_gb", fixed_rate=5.75, calendar="bus"),
FixedRateBond(dt(1999, 1, 1), dt(2011, 7, 12), spec="uk_gb", fixed_rate=9.00, calendar="bus"),
FixedRateBond(dt(1999, 1, 1), dt(2010, 11, 25), spec="uk_gb", fixed_rate=6.25, calendar="bus"),
FixedRateBond(dt(1999, 1, 1), dt(2012, 8, 6), spec="uk_gb", fixed_rate=9.00, calendar="bus"),
],
nominal=100000,
contracts=10,
currency="gbp",
)
future.dlv(
future_price=112.98,
prices=[102.732, 131.461, 107.877, 134.455],
repo_rate=6.24,
settlement=dt(2000, 3, 16),
convention="Act365f",
)
Parameters
----------
future_price: float, Dual, Dual2
The price of the future.
prices: sequence of float, Dual, Dual2
The prices of the bonds in the deliverable basket (ordered).
repo_rate: float, Dual, Dual2 or list/tuple of such
The repo rates of the bonds to delivery.
settlement: datetime
The settlement date of the bonds.
delivery: datetime, optional
The date of the futures delivery. If not given uses the final delivery
day.
convention: str, optional
The day count convention applied to the repo rates.
dirty: bool
Whether the bond prices are given including accrued interest. Default is *False*.
Returns
-------
DataFrame
""" # noqa: E501
basket: tuple[FixedRateBond, ...] = self.kwargs.meta["basket"]
if not isinstance(repo_rate, tuple | list):
r_ = (repo_rate,) * len(basket)
else:
r_ = tuple(repo_rate)
df = DataFrame(
columns=[
"Bond",
"Price",
"YTM",
"C.Factor",
"Gross Basis",
"Implied Repo",
"Actual Repo",
"Net Basis",
],
index=range(len(basket)),
)
df["Price"] = prices # type: ignore[assignment]
df["YTM"] = [bond.ytm(prices[i], settlement, dirty=dirty) for i, bond in enumerate(basket)] # type: ignore[assignment]
df["C.Factor"] = self.cfs # type: ignore[assignment]
df["Gross Basis"] = self.gross_basis(future_price, prices, settlement, dirty=dirty) # type: ignore[assignment]
df["Implied Repo"] = self.implied_repo( # type: ignore[assignment]
future_price,
prices,
settlement,
delivery,
convention,
dirty=dirty,
)
df["Actual Repo"] = r_ # type: ignore[assignment]
df["Net Basis"] = self.net_basis( # type: ignore[assignment]
future_price,
prices,
r_,
settlement,
delivery,
convention,
dirty=dirty,
)
df["Bond"] = [
f"{bond.fixed_rate:,.3f}% {bond.leg1.schedule.termination.strftime('%d-%m-%Y')}"
for bond in basket
]
return df
def cms(
self,
prices: Sequence[float],
settlement: datetime,
shifts: Sequence[float],
delivery: datetime | NoInput = NoInput(0),
dirty: bool = False,
) -> DataFrame:
"""
Perform CTD multi-security analysis.
Parameters
----------
prices: sequence of float, Dual, Dual2
The prices of the bonds in the deliverable basket (ordered).
settlement: datetime
The settlement date of the bonds.
shifts : Sequence[float]
The scenarios to analyse.
delivery: datetime, optional
The date of the futures delivery. If not given uses the final delivery
day.
dirty: bool
Whether the bond prices are given including accrued interest. Default is *False*.
Returns
-------
DataFrame
Notes
-----
This method only operates when the CTD basket has multiple securities
"""
basket: tuple[FixedRateBond, ...] = self.kwargs.meta["basket"]
if len(basket) == 1:
raise ValueError("Multi-security analysis cannot be performed with one security.")
delivery = _drb(self.kwargs.meta["delivery"][1], delivery)
# build a curve for pricing
today = basket[0].leg1.schedule.calendar.lag_bus_days(
settlement,
-basket[0].kwargs.meta["settle"],
False,
)
unsorted_nodes = {
today: 1.0,
**{_.leg1.schedule.termination: 1.0 for _ in basket},
}
bcurve = Curve(
nodes=dict(sorted(unsorted_nodes.items(), key=lambda _: _[0])),
convention="act365f", # use the most natural DCF without scaling
)
if dirty:
metric = "dirty_price"
else:
metric = "clean_price"
solver = Solver(
curves=[bcurve],
instruments=[(_, {"curves": bcurve, "metric": metric}) for _ in basket], # type: ignore[misc]
s=prices,
)
if solver.result["status"] != "SUCCESS":
raise ValueError(
"A bond curve could not be solved for analysis. "
"See 'Cookbook: Bond Future CTD Multi-Security Analysis'.",
)
bcurve._set_ad_order(order=0) # turn off AD for efficiency
data: dict[str | float, Any] = {
"Bond": [
f"{bond.fixed_rate:,.3f}% {bond.leg1.schedule.termination.strftime('%d-%m-%Y')}"
for bond in basket
],
}
for shift in shifts:
_curve = bcurve.shift(shift)
future_price = self.rate(curves=_curve, metric="future_price")
data.update(
{
shift: tuple(
bond.rate(curves=_curve, metric="clean_price", settlement=delivery)
- self.cfs[i] * future_price
for i, bond in enumerate(basket)
),
},
)
_: DataFrame = DataFrame(data=data)
return _
def gross_basis(
self,
future_price: DualTypes,
prices: list[DualTypes],
settlement: datetime | NoInput = NoInput(0),
dirty: bool = False,
) -> tuple[DualTypes, ...]:
"""
Calculate the gross basis of each bond in the basket.
.. rubric:: Exmaples
.. ipython:: python
:suppress:
from rateslib import BondFuture, Solver, FixedRateBond, dt
.. ipython:: python
bf = BondFuture(
delivery=(dt(2000, 6, 1), dt(2000, 6, 30)),
coupon=7.0,
basket=[
FixedRateBond(dt(1999, 1, 1), dt(2009, 12, 7), spec="uk_gb", fixed_rate=5.75, calendar="bus"),
FixedRateBond(dt(1999, 1, 1), dt(2011, 7, 12), spec="uk_gb", fixed_rate=9.00, calendar="bus"),
FixedRateBond(dt(1999, 1, 1), dt(2010, 11, 25), spec="uk_gb", fixed_rate=6.25, calendar="bus"),
FixedRateBond(dt(1999, 1, 1), dt(2012, 8, 6), spec="uk_gb", fixed_rate=9.00, calendar="bus"),
],
nominal=100000,
contracts=10,
currency="gbp",
)
bf.gross_basis(
future_price=112.98,
prices=[102.732, 131.461, 107.877, 134.455],
settlement=dt(2000, 3, 16),
)
Parameters
----------
future_price: float, Dual, Dual2
The price of the future.
prices: sequence of float, Dual, Dual2
The prices of the bonds in the deliverable basket (ordered).
settlement: datetime
The settlement date of the bonds, required only if ``dirty`` is *True*.
dirty: bool
Whether the bond prices are given including accrued interest.
Returns
-------
tuple
""" # noqa: E501
basket: tuple[FixedRateBond, ...] = self.kwargs.meta["basket"]
if dirty:
if isinstance(settlement, NoInput):
raise ValueError("`settlement` must be specified if `dirty` is True.")
prices_: Sequence[DualTypes] = tuple(
prices[i] - bond.accrued(settlement) for i, bond in enumerate(basket)
)
else:
prices_ = prices
return tuple(prices_[i] - self.cfs[i] * future_price for i in range(len(basket)))
def net_basis(
self,
future_price: DualTypes,
prices: Sequence[DualTypes],
repo_rate: DualTypes | Sequence[DualTypes],
settlement: datetime,
delivery: datetime | NoInput = NoInput(0),
convention: str | NoInput = NoInput(0),
dirty: bool = False,
) -> tuple[DualTypes, ...]:
"""
Calculate the net basis of each bond in the basket via the proceeds
method of repo.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import dt, BondFuture, FixedRateBond
.. ipython:: python
bf = BondFuture(
delivery=(dt(2000, 6, 1), dt(2000, 6, 30)),
coupon=7.0,
basket=[
FixedRateBond(dt(1999, 1, 1), dt(2009, 12, 7), fixed_rate=5.75, spec="uk_gb"),
FixedRateBond(dt(1999, 1, 1), dt(2011, 7, 12), fixed_rate=9.00, spec="uk_gb"),
FixedRateBond(dt(1999, 1, 1), dt(2010, 11, 25), fixed_rate=6.25, spec="uk_gb"),
FixedRateBond(dt(1999, 1, 1), dt(2012, 8, 6), fixed_rate=9.00, spec="uk_gb"),
]
)
bf.net_basis(
future_price=112.98,
prices=[102.732, 131.461, 107.877, 134.455],
settlement=dt(2000, 3, 16),
repo_rate=6.24,
convention="Act365F",
)
Parameters
----------
future_price: float, Dual, Dual2
The price of the future.
prices: sequence of float, Dual, Dual2
The prices of the bonds in the deliverable basket (ordered).
repo_rate: float, Dual, Dual2 or list/tuple of such
The repo rates of the bonds to delivery.
settlement: datetime
The settlement date of the bonds, required only if ``dirty`` is *True*.
delivery: datetime, optional
The date of the futures delivery. If not given uses the final delivery
day.
convention: str, optional
The day count convention applied to the repo rates.
dirty: bool
Whether the bond prices are given including accrued interest.
Returns
-------
tuple
""" # noqa: E501
basket: tuple[FixedRateBond, ...] = self.kwargs.meta["basket"]
f_settlement: datetime = _drb(self.kwargs.meta["delivery"][1], delivery)
if not isinstance(repo_rate, Sequence):
r_: Sequence[DualTypes] = (repo_rate,) * len(basket)
else:
r_ = repo_rate
if dirty:
net_basis_ = tuple(
bond.fwd_from_repo(
prices[i],
settlement,
f_settlement,
r_[i],
convention,
dirty=dirty,
)
- self.cfs[i] * future_price
- bond.accrued(f_settlement)
for i, bond in enumerate(basket)
)
else:
net_basis_ = tuple(
bond.fwd_from_repo(
prices[i],
settlement,
f_settlement,
r_[i],
convention,
dirty=dirty,
)
- self.cfs[i] * future_price
for i, bond in enumerate(basket)
)
return net_basis_
def implied_repo(
self,
future_price: DualTypes,
prices: Sequence[DualTypes],
settlement: datetime,
delivery: datetime | NoInput = NoInput(0),
convention: str | NoInput = NoInput(0),
dirty: bool = False,
) -> tuple[DualTypes, ...]:
"""
Calculate the implied repo of each bond in the basket using the proceeds
method.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import BondFuture, Solver, FixedRateBond, dt
.. ipython:: python
bf = BondFuture(
delivery=(dt(2000, 6, 1), dt(2000, 6, 30)),
coupon=7.0,
basket=[
FixedRateBond(dt(1999, 1, 1), dt(2009, 12, 7), spec="uk_gb", fixed_rate=5.75),
FixedRateBond(dt(1999, 1, 1), dt(2011, 7, 12), spec="uk_gb", fixed_rate=9.00),
FixedRateBond(dt(1999, 1, 1), dt(2010, 11, 25), spec="uk_gb", fixed_rate=6.25),
FixedRateBond(dt(1999, 1, 1), dt(2012, 8, 6), spec="uk_gb", fixed_rate=9.00),
],
)
future.implied_repo(
future_price=112.98,
prices=[102.732, 131.461, 107.877, 134.455],
settlement=dt(2000, 3, 16),
convention="Act365F",
)
Parameters
----------
future_price: float, Dual, Dual2
The price of the future.
prices: sequence of float, Dual, Dual2
The prices of the bonds in the deliverable basket (ordered).
settlement: datetime
The settlement date of the bonds.
delivery: datetime, optional
The date of the futures delivery. If not given uses the final delivery
day.
convention: str, optional
The day count convention used in the rate.
dirty: bool
Whether the bond prices are given including accrued interest.
Returns
-------
tuple
""" # noqa: E501
basket: tuple[FixedRateBond, ...] = self.kwargs.meta["basket"]
f_settlement: datetime = _drb(self.kwargs.meta["delivery"][1], delivery)
implied_repos: tuple[DualTypes, ...] = tuple()
for i, bond in enumerate(basket):
invoice_price = future_price * self.cfs[i]
implied_repos += (
bond.repo_from_fwd(
price=prices[i],
settlement=settlement,
forward_settlement=f_settlement,
forward_price=invoice_price,
convention=convention,
dirty=dirty,
),
)
return implied_repos
def ytm(
self,
future_price: DualTypes,
delivery: datetime | NoInput = NoInput(0),
) -> tuple[DualTypes, ...]:
"""
Calculate the yield-to-maturity of the bond future.
The relevant ytm should be selected according to the CTD index.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import BondFuture, Solver, FixedRateBond, dt
.. ipython:: python
bf = BondFuture(
delivery=(dt(2000, 6, 1), dt(2000, 6, 30)),
coupon=7.0,
basket=[
FixedRateBond(dt(1999, 1, 1), dt(2009, 12, 7), spec="uk_gb", fixed_rate=5.75),
FixedRateBond(dt(1999, 1, 1), dt(2011, 7, 12), spec="uk_gb", fixed_rate=9.00),
FixedRateBond(dt(1999, 1, 1), dt(2010, 11, 25), spec="uk_gb", fixed_rate=6.25),
FixedRateBond(dt(1999, 1, 1), dt(2012, 8, 6), spec="uk_gb", fixed_rate=9.00),
],
)
bf.ytm(future_price=112.98)
Parameters
----------
future_price : float, Dual, Dual2
The price of the future.
delivery : datetime, optional
The future delivery day on which to calculate the yield. If not given aligns
with the last delivery day specified on the future.
Returns
-------
tuple
"""
basket: tuple[FixedRateBond, ...] = self.kwargs.meta["basket"]
settlement: datetime = _drb(self.kwargs.meta["delivery"][1], delivery)
adjusted_prices = [future_price * cf for cf in self.cfs]
yields = tuple(bond.ytm(adjusted_prices[i], settlement) for i, bond in enumerate(basket))
return yields
def duration(
self,
future_price: DualTypes,
metric: str = "risk",
delivery: datetime | NoInput = NoInput(0),
) -> tuple[float, ...]:
"""
Return the (negated) derivative of ``price`` w.r.t. ``ytm`` .
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import BondFuture, Solver, FixedRateBond, dt
.. ipython:: python
bf = BondFuture(
delivery=(dt(2000, 6, 1), dt(2000, 6, 30)),
coupon=7.0,
basket=[
FixedRateBond(dt(1999, 1, 1), dt(2009, 12, 7), spec="uk_gb", fixed_rate=5.75),
FixedRateBond(dt(1999, 1, 1), dt(2011, 7, 12), spec="uk_gb", fixed_rate=9.00),
FixedRateBond(dt(1999, 1, 1), dt(2010, 11, 25), spec="uk_gb", fixed_rate=6.25),
FixedRateBond(dt(1999, 1, 1), dt(2012, 8, 6), spec="uk_gb", fixed_rate=9.00),
],
)
bf.duration(future_price=112.98)
Parameters
----------
future_price : float
The price of the future.
metric : str
The specific duration calculation to return. See notes.
delivery : datetime, optional
The delivery date of the contract.
Returns
-------
float
See Also
--------
FixedRateBond.duration: Calculation the risk of a FixedRateBond.
Example
-------
.. ipython:: python
risk = future.duration(112.98)
risk
The difference in yield is shown to be 1bp for the CTD (index: 0)
when the futures price is adjusted by the risk amount.
.. ipython:: python
future.ytm(112.98)
future.ytm(112.98 + risk[0] / 100)
"""
basket: tuple[FixedRateBond, ...] = self.kwargs.meta["basket"]
f_settlement: datetime = _drb(self.kwargs.meta["delivery"][1], delivery)
_: tuple[float, ...] = ()
for i, bond in enumerate(basket):
invoice_price = future_price * self.cfs[i]
ytm = bond.ytm(invoice_price, f_settlement)
if metric == "risk":
_ += (_dual_float(bond.duration(ytm, f_settlement, "risk") / self.cfs[i]),)
else:
__ = (bond.duration(ytm, f_settlement, metric),)
_ += __
return _
def convexity(
self,
future_price: DualTypes,
delivery: datetime | NoInput = NoInput(0),
) -> tuple[float, ...]:
"""
Return the second derivative of ``price`` w.r.t. ``ytm`` .
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import BondFuture, Solver, FixedRateBond, dt
.. ipython:: python
bf = BondFuture(
delivery=(dt(2000, 6, 1), dt(2000, 6, 30)),
coupon=7.0,
basket=[
FixedRateBond(dt(1999, 1, 1), dt(2009, 12, 7), spec="uk_gb", fixed_rate=5.75),
FixedRateBond(dt(1999, 1, 1), dt(2011, 7, 12), spec="uk_gb", fixed_rate=9.00),
FixedRateBond(dt(1999, 1, 1), dt(2010, 11, 25), spec="uk_gb", fixed_rate=6.25),
FixedRateBond(dt(1999, 1, 1), dt(2012, 8, 6), spec="uk_gb", fixed_rate=9.00),
],
)
bf.convexity(future_price=112.98)
Parameters
----------
future_price : float
The price of the future.
delivery : datetime, optional
The delivery date of the contract. If not given uses the last delivery day
in the delivery window.
Returns
-------
float
See Also
--------
FixedRateBond.convexity: Calculate the convexity of a FixedRateBond.
"""
# TODO: Not AD safe becuase dependent convexity method is not AD safe. Returns float.
basket: tuple[FixedRateBond, ...] = self.kwargs.meta["basket"]
f_settlement: datetime = _drb(self.kwargs.meta["delivery"][1], delivery)
_: tuple[float, ...] = ()
for i, bond in enumerate(basket):
invoice_price = future_price * self.cfs[i]
ytm = bond.ytm(invoice_price, f_settlement)
_ += (_dual_float(bond.convexity(ytm, f_settlement) / self.cfs[i]),)
return _
def ctd_index(
self,
future_price: DualTypes,
prices: Sequence[DualTypes],
settlement: datetime,
delivery: datetime | NoInput = NoInput(0),
dirty: bool = False,
ordered: bool = False,
) -> int | list[int]:
"""
Determine the index (base 0) of the CTD in the basket from implied repo rate.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import BondFuture, Solver, FixedRateBond, dt
.. ipython:: python
future = BondFuture(
delivery=(dt(2000, 6, 1), dt(2000, 6, 30)),
coupon=7.0,
basket=[
FixedRateBond(dt(1999, 1, 1), dt(2009, 12, 7), spec="uk_gb", fixed_rate=5.75),
FixedRateBond(dt(1999, 1, 1), dt(2011, 7, 12), spec="uk_gb", fixed_rate=9.00),
FixedRateBond(dt(1999, 1, 1), dt(2010, 11, 25), spec="uk_gb", fixed_rate=6.25),
FixedRateBond(dt(1999, 1, 1), dt(2012, 8, 6), spec="uk_gb", fixed_rate=9.00),
],
nominal=100000,
contracts=10,
currency="gbp",
)
future.ctd_index(
future_price=112.98,
prices=[102.732, 131.461, 107.877, 134.455],
settlement=dt(2000, 3, 16),
ordered=True,
)
Parameters
----------
future_price : float
The price of the future.
prices: sequence of float, Dual, Dual2
The prices of the bonds in the deliverable basket (ordered).
settlement: datetime
The settlement date of the bonds.
delivery: datetime, optional
The date of the futures delivery. If not given uses the final delivery
day.
dirty: bool
Whether the bond prices are given including accrued interest.
ordered : bool, optional
Whether to return the sorted order of CTD indexes and not just a single index for
the specific CTD.
Returns
-------
int or list[int]
"""
implied_repo = self.implied_repo(
future_price,
prices,
settlement,
delivery,
"Act365F", # to determine CTD only require a consistent comparison
dirty,
)
if not ordered:
ctd_index_ = implied_repo.index(max(implied_repo))
return ctd_index_
else:
_: dict[int, DualTypes] = dict(zip(range(len(implied_repo)), implied_repo, strict=True))
_ = dict(sorted(_.items(), key=lambda item: -item[1]))
return list(_.keys())
# Digital Methods
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
"""
Return various pricing metrics of the security calculated from
:class:`~rateslib.curves.Curve` s.
Parameters
----------
curves : Curve, str or list of such
A single :class:`Curve` or id or a list of such. A list defines the
following curves in the order:
- Forecasting :class:`Curve` for ``leg1``.
- Discounting :class:`Curve` for ``leg1``.
solver : Solver, optional
The numerical :class:`Solver` that constructs ``Curves`` from calibrating
instruments.
fx : float, FXRates, FXForwards, optional
The immediate settlement FX rate that will be used to convert values
into another currency. A given `float` is used directly. If giving a
``FXRates`` or ``FXForwards`` object, converts from local currency
into ``base``.
base : str, optional
The base currency to convert cashflows into (3-digit code), set by default.
Only used if ``fx`` is an ``FXRates`` or ``FXForwards`` object.
metric : str in {"future_price", "ytm"}, optional
Metric returned by the method.
delivery: datetime, optional
The date of the futures delivery. If not given uses the final delivery
day.
Returns
-------
float, Dual, Dual2
Notes
-----
This method determines the *'futures_price'* and *'ytm'* by assuming a net
basis of zero and pricing from the cheapest to delivery (CTD).
"""
c = _parse_curves(self, curves, solver)
disc_curve = _get_curve("disc_curve", False, True, *c)
basket: tuple[FixedRateBond, ...] = self.kwargs.meta["basket"]
metric_ = _drb(self.kwargs.meta["metric"], metric).lower()
if metric_ not in ["future_price", "ytm"]:
raise ValueError("`metric` must be in {'future_price', 'ytm'}.")
f_settlement = _drb(self.kwargs.meta["delivery"][1], settlement)
prices_: list[DualTypes] = [
bond.rate(
curves={"disc_curve": disc_curve}, # type: ignore[arg-type]
solver=solver,
fx=fx,
base=base,
metric="clean_price",
settlement=f_settlement,
)
for bond in basket
]
future_prices_: list[DualTypes] = [price / self.cfs[i] for i, price in enumerate(prices_)]
future_price: DualTypes = min(future_prices_)
ctd_index: int = future_prices_.index(min(future_prices_))
if metric_ == "future_price":
return future_price
else: # metric == "ytm":
return basket[ctd_index].ytm(future_price * self.cfs[ctd_index], f_settlement)
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
"""
Determine the monetary value of the bond future position.
This method is mainly included to calculate risk sensitivities. The
monetary value of bond futures is not usually a metric worth considering.
The profit or loss of a position based on entry level is a more common
metric, however the initial value of the position does not affect the risk.
See :meth:`BaseDerivative.npv`.
"""
future_price = self.rate(
curves=curves, solver=solver, fx=fx, base=base, metric="future_price"
)
local_npv = future_price / 100 * -self.notional
return _maybe_local(
value=local_npv,
local=local,
currency=self.kwargs.meta["currency"].lower(),
fx=fx,
base=base,
forward=forward,
)
================================================
FILE: python/rateslib/instruments/bonds/conventions/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.instruments.bonds.conventions.accrued import ACC_FRAC_FUNCS
from rateslib.instruments.bonds.conventions.discounting import (
C_FUNCS,
V1_FUNCS,
V2_FUNCS,
V3_FUNCS,
)
if TYPE_CHECKING:
from rateslib.instruments.bonds.conventions.accrued import AccrualFunction # pragma: no cover
from rateslib.instruments.bonds.conventions.discounting import ( # pragma: no cover
CashflowFunction,
YtmDiscountFunction,
YtmStubDiscountFunction,
)
from rateslib.local_types import ( # pragma: no cover
Any,
)
class BondCalcMode:
"""
Define calculation conventions for :class:`~rateslib.instruments.FixedRateBond`,
:class:`~rateslib.instruments.IndexFixedRateBond` and
:class:`~rateslib.instruments.FloatRateNote` types.
For a list of :class:`~rateslib.instruments.BondCalcMode` that have already
been pre-defined see :ref:`Securities Defaults `.
Parameters
----------
settle_accrual: str or Callable
The calculation type for accrued interest for physical settlement. See notes.
ytm_accrual: str or Callable
The calculation method for accrued interest used in the YTM formula. Often the same
as above but not always (e.g. Canadian GBs). See notes.
v1: str or Callable
The calculation function that defines discounting of the first period of the YTM formula.
v2: str or Callable
The calculation function that defines discounting of the regular periods of the YTM formula.
v3: str or Callable
The calculation function that defines discounting of the last period of the YTM formula.
c1: str or Callable
The calculation function that determines the cashflow amount in the first period of the
YTM formula.
ci: str or Callable
The calculation function that determines the cashflow amount in the interim periods of the
YTM formula.
cn: str or Callable
The calculation function that determines the cashflow amount in the final period of the
YTM formula.
Notes
-------
For an example custom implementation of a *BondCalcMode* see the cookbook article:
`Cookbook: Understanding and Customising FixedRateBond Conventions <../z_bond_conventions.html>`_
The :class:`~rateslib.instruments.BondCalcMode` is used to configure the calculations for
**accrued interest** and **yield-to-maturity** for a variety of different bonds.
The bottom of this page enumerates all of the notation for formulae.
Accrued Interest
****************
*Rateslib* makes two types of accrued interest calculations:
- Physically settleable accrued interest, *AI*, returned from the
:meth:`~rateslib.instruments.FixedRateBond.accrued` method by default.
.. math::
&AI = \\xi c_i \\qquad \\text{if not ex-dividend} \\\\
&AI = (\\xi - 1) c_i \\qquad \\text{if ex-dividend} \\\\
- Accrued interest for the purpose of determining accurate YTM calculations.
.. math::
&AI_y = \\xi_y c_i \\qquad \\text{if not ex-dividend} \\\\
&AI_y = (\\xi_y - 1) c_i \\qquad \\text{if ex-dividend} \\\\
Where in both these formulae :math:`c_i` currently always uses the real ``cashflow`` method
(see below).
These two methods are almost always the same, but for an example where they differ consider
Canadian government bonds. The calculation mode relies on determining the :math:`\\xi` and
:math:`\\xi_y` values, known as the **accrual fraction**. This is achieved by using the
following functions:
**Accrual Functions**
Accrual functions must be supplied to the ``settle_accrual`` and ``ytm_accrual``
arguments. The available values are:
- ``linear_days``: A calendar day, linear proportion used in any period.
.. math::
\\xi = r_u / s_u
- ``linear_days_long_front_split``: A modified version of the above which, **only for long
stub** periods, uses a different formula treating the first quasi period as part of the
long stub differently. This adjustment is then scaled according to the length of the period.
(Used by UK and German GBs and is the Treasury method for US Treasuries,
see Section 31B ii A.356, Code of Federal Regulations)
.. math::
\\xi = (\\bar{r}_u / \\bar{s}_u + r_u / s_u) / ( d_i * f )
- ``30e360_backward``: For **stubs** this method reverts to ``linear_days``. Otherwise,
determines the DCF, under *'30e360'* convention, of the remaining part of the coupon
period from settlement and deducts this from the full accrual fraction.
.. math::
\\xi = 1 - \\bar{d_u} f
- ``30u360_forward``: Calculates the DCF between last accrual coupon and settlement,
and compares this with DCF between accrual coupon dates, both measured using *'30u360'*
(See MSRB Rule G-33):
.. math::
\\xi = DCF(prior, settlement) / DCF(prior, next)
- ``act365f_1y``: For **stubs** this method reverts to ``linear_days``. Otherwise,
determines the accrual fraction using an approach that uses ACT365F convention.
(Used by Canadian GBs)
.. math::
\\xi = \\left \\{ \\begin{matrix} 1.0 & \\text{if, } r_u = s_u \\\\ 1.0 - f(s_u - r_u) / 365 & \\text{if, } r_u \\ge 365 / f \\\\ fr_u / 365 & \\text{if, } r_u < 365 / f \\\\ \\end{matrix} \\right .
**Custom accrual functions** can also be supplied where the input arguments signature should
accept the bond object, the settlement date, and the index relating to the period in which
the relevant coupon period falls. It should return an accrual fraction upto settlement.
As an example the code below shows the implementation of the
*"linear_days"* accrual function:
.. ipython:: python
def _linear_days(obj, settlement, acc_idx, *args) -> float:
sch = obj.leg1.schedule # <- obj is always the Bond itself
r_u = (settlement - sch.aschedule[acc_idx]).days # <- acc_idx accesses the correct date
s_u = (sch.aschedule[acc_idx + 1] - sch.aschedule[acc_idx]).days
return r_u / s_u
Yield-To-Maturity
-----------------
Yield-to-maturity in *rateslib*, for **every bond**, is calculated using the below formula.
The specific discounting and cashflow generating functions must be provided to determine
values based on the conventions of that specific bond. The cases where the number of remaining
coupons are 1, 2, or generically >2 are outlined explicitly:
.. math::
P &= v_1 \\left ( c_1 + 100 \\right ), \\quad n = 1 \\\\
P &= v_1 \\left ( c_1 + v_3 (c_n + 100) \\right ), \\quad n = 2 \\\\
P &= v_1 \\left ( c_1 + \\sum_{i=2}^{n-1} c_i v_2^{i-2} v_{2,i} + c_nv_2^{n-2}v_3 + 100 v_2^{n-2}v_3 \\right ), \\quad n > 2 \\\\
Q &= P - AI_y
where,
.. math::
P &= \\text{Dirty price}, \\; Q = \\text{Clean Price} \\\\
n &= \\text{Coupon periods remaining} \\\\
c_1 &= \\text{Cashflow (per 100) on next coupon date (may be zero if ex-dividend)} \\\\
c_i &= i \\text{'th cashflow (per 100) on subsequent coupon dates} \\\\
v_1 &= \\text{Discount value for the initial, possibly stub, period} \\\\
v_2 &= \\text{General discount value for the interim regular periods} \\\\
v_{2,i} &= \\text{Specific discount value for the i'th interim regular period} \\\\
v_3 &= \\text{Discount value for the final, possibly stub, period} \\\\
**v2 Functions**
*v2* forms the core, regular part of discounting the cashflows. *v2* functions are required when
a bond has more than two coupon remaining. This reflects coupon periods that are
never stubs. The available functions are described below:
- ``regular``: uses the traditional discounting function matching the actual frequency of
coupons:
.. math::
v_2 = \\frac{1}{1 + y/f}
- ``annual``: assumes an annually expressed YTM disregarding the actual coupon frequency:
.. math::
v_2 = \\left ( \\frac{1}{1 + y} \\right ) ^ {1/f}
- ``annual_pay_adjust``: an extension to ``annual`` that adjusts the period in scope to
account for a delay between its unadjusted coupon end date and the actual payment date. (Used
by Italian BTPs)
.. math::
v_2 = \\left ( \\frac{1}{1 + y} \\right ) ^ {1/f}, \\qquad \\text{and in the current period} \\qquad v_{2,i} = v_2 ^ {(1 + p_d / p_D)}
**v1 Functions**
*v1* functions are required for every bond. Its value may, or may not, be dependent upon *v2*.
*v1* functions have to handle the cases whereby the coupon period in which *settlement* falls
is
- The first coupon period, **and** it may be a **stub**,
- A regular interim coupon period,
- The final coupon period **and** it may be a **stub**.
The two most common functions for determining *v1* are described below:
- ``compounding``: If a **stub** then scaled by the length of
the stub. At issue, or on a coupon date, for a regular period, *v1* converges to *v2*.
.. math::
v_1 = v_2^{g(\\xi_y)} \\quad \\text{where,} \\quad g(\\xi_y) = \\left \\{ \\begin{matrix} 1-\\xi_y & \\text{if regular,} \\\\ (1-\\xi_y) f d_i & \\text{if stub,} \\\\ \\end{matrix} \\right . \\\\
- ``simple``: calculation uses a simple interest formula. At issue, or on a coupon date,
for a regular period, *v1* converges to a *'regular'* style *v2*.
.. math::
v_1 = \\frac{1}{1 + g(\\xi_y) y / f} \\quad \\text{where, } g(\\xi_y) \\text{ defined as above}
Combinations, or extensions, of the two above functions are also required for some
bond conventions:
- ``simple_act365f``: uses simple interest with a DCF calculated under Act365F convention,
irrespective of the bond’s underlying convention.
.. math::
v_1 = \\frac{1}{1 + \\bar{d_u} y}
- ``compounding_final_simple``: uses ``compounding``, unless settlement occurs in the final
period of the bond (and in which case n=1) and then the ``simple`` method is applied.
- ``compounding_final_simple_act365f``: uses ``compounding``, unless settlement occurs in the
final period of the bond (and in which case n=1) and then the ``simple_act365f`` method is
applied.
- ``compounding_stub_act365f``: uses ``compounding``, unless settlement occurs in a stub
period in which case Act365F convention derives the exponent.
.. math::
v_1 = v_2^{\\bar{d}_u} \\qquad \\text{if stub.}
- ``simple_long_stub_compounding``: uses ``simple`` formula **except** for long stubs,
and the calculation is only different if settlement falls before the quasi-coupon.
If settlement occurs before the quasi-coupon date then the entire quasi-coupon period
applies regular *v2* discounting, and the preliminary component has *simple* method
applied.
.. math::
v_1 = v_2 \\frac{1}{1 + [f d_i(1 - \\xi_y) - 1] y / f} \\qquad \\text{if settlement before quasi-coupon in long stub}
- ``simple_pay_adjust``: adjusts the *'simple'* method to account for the payment date.
.. math::
v_1 = \\frac{1}{1 + g_p(\\xi_y) y / f} \\quad \\text{where,} \\quad g_p(\\xi_y) = \\left \\{ \\begin{matrix} 1-\\xi_y + p_d / p_D & \\text{if regular,} \\\\ (1-\\xi_y + p_d / p_D) f d_i & \\text{if stub,} \\\\ \\end{matrix} \\right .
- ``compounding_pay_adjust``: adjusts the *'compounding'* method to account for payment date.
.. math::
v_1 = v_2^{g_p(\\xi_y)} \\quad \\text{where, } g_p(\\xi_y) \\text{ defined as above}
- ``compounding_final_simple_pay_adjust``: uses ``compounding`` unless settlement
occurs in the final period of the bond (and in which case n=1) and then the
``simple_pay_adjust`` method is applied.
**v3 Functions**
*v3* functions will never have a settlement mid period, and are only used in the case
of 2 or more remaining coupon periods. The available functions are:
- ``compounding``: is identical to *v1 'compounding'* where :math:`\\xi_y` is set to zero.
- ``compounding_pay_adjust``: is identical to *v1 'compounding_pay_adjust'* where :math:`\\xi_y` is set to zero.
- ``simple``: is identical to *v1 'simple'* where :math:`\\xi_y` is set to zero.
- ``simple_pay_adjust``: is identical to *v1 'simple_pay_adjust'* where :math:`\\xi_y`
is set to zero.
- ``simple_30e360``: uses simple interest with a DCF calculated
under 30e360 convention, irrespective of the bond's underlying convention.
.. math::
v_3 = \\frac{1}{1+\\bar{d}_n y}
**Custom discount functions** can also be supplied where the input arguments signature
is shown in the below example. It should return a discount factor. The example
shows the implementation of the *"regular"* discount function:
.. ipython:: python
def _v2_(
obj, # the bond object
ytm, # y as defined
f, # f as defined
settlement, # datetime
acc_idx, # the index of the period in which settlement occurs
v2, # the numeric value of v2 already calculated
accrual, # the ytm_accrual function to return accrual fractions
):
return 1 / (1 + ytm / (100 * f))
**Cashflow Generating Functions**
Most of the time, for the cashflows shown above in the YTM formula, the actual cashflows, as
determined by the native *schedule* and *convention* on the bond itself, can be used.
This is because the cashflow often aligns with a *typical* expected amount,
i.e. *coupon / frequency*. Since this is by definition under the *ActActICMA* convention
and unadjusted *30360* will also tend to return standardised coupons.
However, some bonds use a *convention* which does not lead to standardised
coupons, but have YTM formula definitions which do require standardised coupons. An example
is Thai Government Bonds.
The available functions here are:
- ``cashflow``: determine the cashflow for the period by using the native cashflow calculation
under the *schedule* and *convention* on the bond.
- ``full_coupon``: determine the cashflow as a full coupon payment, irrespective of period
dates, based on the notional of the period and the coupon rate of the bond. This method is
only for fixed rate bonds.
.. math::
c_i = \\frac{-N_i C}{f}
Notation
--------
The notation used above is described:
- :math:`\\xi`: The **accrual fraction** is a float, typically, in [0, 1] which defines the
amount of a bond's current cashflow period that is paid at *settlement* as accrued interest.
- :math:`\\xi_y`: The **accrual fraction** determined in a secondary method, used only in YTM
calculations and **not** for physical settlement.
(Almost always :math:`\\xi_y` and :math:`\\xi` are the same, for an exception see
Canadian GBs)
- :math:`r_u`: The number of calendar days between the last accrual coupon date and
settlement. If a **long stub** this is either; zero if settlement falls before the
accrual quasi-coupon date, or the number of calendar days between
those dates.
- :math:`s_u`: The number of calendar days between the last accrual coupon date and the
next accrual coupon date, i.e the number of calendar days in the accrual coupon
period. If a **long stub** this is the number of calendar days in the accrual
quasi-coupon period.
- :math:`\\bar{r}_u`: If a **long stub**, the number of calendar days between the accrual
effective date and either; the next accrual quasi-coupon date, or settlement date,
whichever is earliest.
- :math:`\\bar{s}_u`: If a **long stub**, the number of calendar days between the prior
accrual quasi-coupon date and the accrual next quasi-coupon date surrounding the
accrual effective date.
- :math:`d_i`: The full DCF of coupon period, *i*, calculated with the convention which
determines the physical cashflows.
- :math:`f`: The number of coupon periods per annum, e.g. 1-annually, 2-semi, 3-tertiary,
4-quarterly, 6-bi-monthly, 12-monthly.
- :math:`\\bar{d}_u`: The DCF between settlement and the next accrual coupon date
determined with the convention of the accrual function (which may be different to the
convention for determining physical bond cashflows)
- :math:`c_i`: A coupon cashflow monetary amount, **per 100 nominal**, for coupon period, *i*.
- :math:`p_d`: Number of days between unadjusted coupon date and payment date in a coupon
period, i.e. the pay delay.
- :math:`p_D` = Number of days between previous payment date and current payment date, in a
coupon period.
- :math:`C`: The nominal annual coupon rate for the bond.
- :math:`y`: The yield-to-maturity for a given bond. The expression of which, i.e. annually
or semi-annually is derived from the calculation context.
""" # noqa: E501, W293
_settle_accrual: AccrualFunction
# _settle_accrual_rounding: int | None
_ytm_accrual: AccrualFunction
_v1: YtmStubDiscountFunction
_v2: YtmDiscountFunction
_v3: YtmStubDiscountFunction
_c1: CashflowFunction
_ci: CashflowFunction
_cn: CashflowFunction
def __init__(
self,
settle_accrual: str | AccrualFunction,
ytm_accrual: str | AccrualFunction,
v1: str | YtmStubDiscountFunction,
v2: str | YtmDiscountFunction,
v3: str | YtmStubDiscountFunction,
c1: str | CashflowFunction,
ci: str | CashflowFunction,
cn: str | CashflowFunction,
# settle_accrual_rounding: int_ = NoInput(0),
):
self._kwargs: dict[str, str] = {}
for name, func, _map in zip(
["settle_accrual", "ytm_accrual", "v1", "v2", "v3", "c1", "ci", "cn"],
[settle_accrual, ytm_accrual, v1, v2, v3, c1, ci, cn],
[
ACC_FRAC_FUNCS,
ACC_FRAC_FUNCS,
V1_FUNCS,
V2_FUNCS,
V3_FUNCS,
C_FUNCS,
C_FUNCS,
C_FUNCS,
],
strict=False,
):
if isinstance(func, str):
setattr(self, f"_{name}", _map[func.lower()]) # type: ignore[index]
self._kwargs[name] = func
else:
setattr(self, f"_{name}", func)
self._kwargs[name] = "custom"
# if isinstance(settle_accrual_rounding, NoInput):
# self._settle_accrual_rounding = None
# self._kwargs["settle_accrual_rounding"] = "none"
# else:
# self._settle_accrual_rounding = settle_accrual_rounding
# self._kwargs["settle_accrual_rounding"] = str(settle_accrual_rounding)
@property
def kwargs(self) -> dict[str, str]:
"""String representation of the parameters for the calculation convention."""
return self._kwargs
class BillCalcMode:
"""
Define calculation conventions for :class:`~rateslib.instruments.Bill` type.
Parameters
----------
price_type: str in {"simple", "discount"}
The default calculation convention for the rate of the bill.
ytm_clone_kwargs: dict | str,
A list of bond keyword arguments, or the ``spec`` for a given bond for which
a replicable zero coupon bond is constructed and its YTM calculated as comparison.
Notes
------
- *"simple"*: uses simple interest formula:
.. math::
P = \\frac{100}{1+r_{simple}d}
- *"discount*": uses a discount rate:
.. math::
P = 100 ( 1 - r_{discount} d )
"""
def __init__(
self,
price_type: str,
# price_accrual_type: str,
# accrual type uses "linear days" by default. This correctly scales ACT365f and ACT360
# DCF conventions and prepares for any non-standard DCFs.
# currently no identified cases where anything else is needed. Revise as necessary.
ytm_clone_kwargs: dict[str, str] | str,
):
self._price_type = price_type
price_accrual_type = "linear_days"
self._settle_accrual = ACC_FRAC_FUNCS[price_accrual_type.lower()]
if isinstance(ytm_clone_kwargs, dict):
self._ytm_clone_kwargs = ytm_clone_kwargs
else:
self._ytm_clone_kwargs = defaults.spec[ytm_clone_kwargs]
self._kwargs: dict[str, str] = {
"price_type": price_type,
"price_accrual_type": price_accrual_type,
"ytm_clone": "Custom dict" if isinstance(ytm_clone_kwargs, dict) else ytm_clone_kwargs,
}
@property
def kwargs(self) -> dict[str, str]:
"""String representation of the parameters for the calculation convention."""
return self._kwargs
UK_GB = BondCalcMode(
# UK government bond conventions
settle_accrual="linear_days_long_front_split",
ytm_accrual="linear_days_long_front_split",
v1="compounding",
v2="regular",
v3="compounding",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
CN_GB = BondCalcMode(
# Chinese government bond conventions
settle_accrual="linear_days",
ytm_accrual="linear_days",
v1="compounding",
v2="regular",
v3="compounding",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
NZ_GB = BondCalcMode(
# New Zealand government bond conventions
settle_accrual="linear_days",
ytm_accrual="linear_days",
v1="compounding_final_simple_act365f",
v2="regular",
v3="compounding",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
AU_GB = BondCalcMode(
# Australian government bond conventions
settle_accrual="linear_days",
# settle_accrual_rounding=3,
ytm_accrual="linear_days",
v1="compounding_final_simple_act365f",
v2="regular",
v3="compounding",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
US_GB = BondCalcMode(
# US Treasury street convention
settle_accrual="linear_days_long_front_split",
ytm_accrual="linear_days_long_front_split",
v1="compounding_final_simple",
v2="regular",
v3="compounding",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
US_GB_TSY = BondCalcMode(
# US Treasury treasury convention
settle_accrual="linear_days_long_front_split",
ytm_accrual="linear_days_long_front_split",
v1="simple_long_stub_compounding",
v2="regular",
v3="compounding",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
US_CORP = BondCalcMode(
# US Corporate bond street convention
settle_accrual="30u360_forward",
ytm_accrual="30u360_forward",
v1="compounding_final_simple",
v2="regular",
v3="compounding",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
US_MUNI = BondCalcMode(
# US Corporate bond street convention
settle_accrual="30u360_forward",
ytm_accrual="30u360_forward",
v1="compounding_final_simple",
v2="regular",
v3="compounding",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
SE_GB = BondCalcMode(
# Swedish government bonds
settle_accrual="30e360_backward",
ytm_accrual="30e360_backward",
v1="compounding_final_simple",
v2="regular",
v3="simple_30e360",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
CA_GB = BondCalcMode(
# Canadian government bonds
settle_accrual="act365f_1y",
ytm_accrual="linear_days",
v1="compounding",
v2="regular",
v3="simple_30e360",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
DE_GB = BondCalcMode(
# German government bonds
settle_accrual="linear_days_long_front_split",
ytm_accrual="linear_days_long_front_split",
v1="compounding_final_simple",
v2="regular",
v3="compounding",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
FR_GB = BondCalcMode(
# French OATs
settle_accrual="linear_days",
ytm_accrual="linear_days",
v1="compounding",
v2="regular",
v3="compounding",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
IT_GB = BondCalcMode(
# Italian GBs
settle_accrual="linear_days",
ytm_accrual="linear_days",
v1="compounding_final_simple_pay_adjust",
v2="annual_pay_adjust",
v3="compounding_pay_adjust",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
NO_GB = BondCalcMode(
# Norwegian GBs
settle_accrual="act365f_1y",
ytm_accrual="act365f_1y",
v1="compounding_stub_act365f",
v2="regular",
v3="compounding",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
NL_GB = BondCalcMode(
# Dutch GBs
settle_accrual="linear_days_long_front_split",
ytm_accrual="linear_days_long_front_split",
v1="compounding_final_simple",
v2="regular",
v3="compounding",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
CH_GB = BondCalcMode(
# Swiss GBs
settle_accrual="30e360_backward",
ytm_accrual="30e360_backward",
v1="compounding",
v2="regular",
v3="compounding",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
UK_GBB = BillCalcMode(
# UK T-bills
price_type="simple",
# price_accrual_type="linear_days",
ytm_clone_kwargs="uk_gb",
)
US_GBB = BillCalcMode(
# US T-bills
price_type="discount",
# price_accrual_type="linear_days",
ytm_clone_kwargs="us_gb",
)
SE_GBB = BillCalcMode(
# Swedish T-bills
price_type="simple",
# price_accrual_type="linear_days",
ytm_clone_kwargs="se_gb",
)
NO_GBB = BillCalcMode(
# Norwegian T-bills
price_type="discount",
# price_accrual_type="linear_days",
ytm_clone_kwargs="no_gb",
)
BOND_MODE_MAP = {
"uk_gb": UK_GB,
"nz_gb": NZ_GB,
"au_gb": AU_GB,
"cn_gb": CN_GB,
"us_gb": US_GB,
"de_gb": DE_GB,
"fr_gb": FR_GB,
"nl_gb": NL_GB,
"ch_gb": CH_GB,
"no_gb": NO_GB,
"se_gb": SE_GB,
"us_gb_tsy": US_GB_TSY,
"us_corp": US_CORP,
"us_muni": US_MUNI,
"it_gb": IT_GB,
"ca_gb": CA_GB,
# aliases
"ukg": UK_GB,
"cadgb": CA_GB,
"ust": US_GB,
"ust_31bii": US_GB_TSY,
"sgb": SE_GB,
}
BILL_MODE_MAP = {
"uk_gbb": UK_GBB,
"us_gbb": US_GBB,
"se_gbb": SE_GBB,
"no_gbb": NO_GBB,
# aliases
"ustb": US_GBB,
"uktb": UK_GBB,
"sgbb": SE_GBB,
}
def _get_bond_calc_mode(calc_mode: str | BondCalcMode) -> BondCalcMode:
if isinstance(calc_mode, str):
return BOND_MODE_MAP[calc_mode.lower()]
return calc_mode
def _get_bill_calc_mode(calc_mode: str | BillCalcMode) -> BillCalcMode:
if isinstance(calc_mode, str):
return BILL_MODE_MAP[calc_mode.lower()]
return calc_mode
def _get_calc_mode_for_class(
obj: Any, calc_mode: str | BondCalcMode | BillCalcMode
) -> BondCalcMode | BillCalcMode:
if isinstance(calc_mode, str):
map_: dict[str, dict[str, BondCalcMode] | dict[str, BillCalcMode]] = {
"FixedRateBond": BOND_MODE_MAP,
"Bill": BILL_MODE_MAP,
"FloatRateNote": BOND_MODE_MAP,
"IndexFixedRateBond": BOND_MODE_MAP,
}
klass: str = type(obj).__name__
return map_[klass][calc_mode.lower()]
return calc_mode
================================================
FILE: python/rateslib/instruments/bonds/conventions/accrued.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from datetime import datetime
from typing import TYPE_CHECKING, Protocol
from rateslib.scheduling import dcf
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
_SupportsFixedFloatLeg1,
)
"""
All functions in this module are designed to take a Bond object and return the **fraction**
of the current coupon period associated with the given settlement.
This fraction is used to assess the total accrued calculation at a subsequent stage.
"""
class AccrualFunction(Protocol):
# Callable type for Accrual Functions
def __call__(
self, obj: _SupportsFixedFloatLeg1, settlement: datetime, acc_idx: int, *args: Any
) -> float: ...
def _acc_linear_proportion_by_days(
obj: _SupportsFixedFloatLeg1, settlement: datetime, acc_idx: int, *args: Any
) -> float:
"""
Return the fraction of an accrual period between start and settlement.
Method: a linear proportion of actual days between start, settlement and end.
Measures between unadjusted coupon dates.
This is a general method, used by many types of bonds, for example by UK Gilts,
German Bunds.
"""
r = (settlement - obj.leg1.schedule.aschedule[acc_idx]).days
s = (obj.leg1.schedule.aschedule[acc_idx + 1] - obj.leg1.schedule.aschedule[acc_idx]).days
return float(r / s)
def _acc_linear_proportion_by_days_long_stub_split(
obj: _SupportsFixedFloatLeg1,
settlement: datetime,
acc_idx: int,
*args: Any,
) -> float:
"""
For long stub periods this splits the accrued interest into two components.
Otherwise, returns the regular linear proportion.
[Designed primarily for US Treasuries]
"""
# TODO: handle this union attribute by segregating Securities periods into different
# categories, perhaps when also integrating deterministic amortised bonds.
if obj.leg1._regular_periods[acc_idx].period_params.stub:
f = obj.leg1.schedule.periods_per_annum
freq = obj.leg1.schedule.frequency_obj
adjuster = obj.leg1.schedule.accrual_adjuster
calendar = obj.leg1.schedule.calendar
if obj.leg1._regular_periods[acc_idx].period_params.dcf * f > 1:
# long stub
if acc_idx > 0:
# then stub is implied to be at the back, must roll forwards
ustart = obj.leg1.schedule.uschedule[acc_idx]
astart = obj.leg1.schedule.aschedule[acc_idx]
quasi_ucoupon = freq.unext(ustart)
quasi_acoupon = adjuster.adjust(quasi_ucoupon, calendar)
quasi_uend = freq.unext(quasi_ucoupon)
quasi_aend = adjuster.adjust(quasi_uend, calendar)
s_bar_u = (quasi_acoupon - astart).days
if settlement <= quasi_acoupon:
#
# |--------------------------|-----------------|---------|
# s * qc e qe
# <-----------s_bar_u-------->
# <---r_bar_u-----------> ==> (r_bar_u / s_bar_u) / (df)
r_bar_u = (settlement - astart).days
r_u = 0.0
s_u = 1.0
else:
#
# |--------------------------|-----------------|---------|
# s qc * e qe
# <-----------s_bar_u--------><------s_u----------------->
# <--------r_bar_u-----------><----r_u------>
# ==> (r_bar_u / s_bar_u + r_u / s_u) / (df)
r_u = (settlement - quasi_acoupon).days
s_u = (quasi_aend - quasi_acoupon).days
r_bar_u = (quasi_acoupon - astart).days
else:
# then stub is implied to be at the front, must roll backwards
uend = obj.leg1.schedule.uschedule[acc_idx + 1]
aend = obj.leg1.schedule.aschedule[acc_idx + 1]
quasi_ucoupon = freq.uprevious(uend)
quasi_acoupon = adjuster.adjust(quasi_ucoupon, calendar)
quasi_ustart = freq.uprevious(quasi_ucoupon)
quasi_astart = adjuster.adjust(quasi_ustart, calendar)
s_bar_u = (quasi_acoupon - quasi_astart).days
if settlement <= quasi_acoupon:
#
# |--------|-------------------|--------------------------|
# qs s * qc e
# <-----------s_bar_u--------->
# <---r_bar_u---> ==> (r_bar_u / s_bar_u) / (df)
r_bar_u = (settlement - obj.leg1.schedule.aschedule[acc_idx]).days
r_u = 0.0
s_u = 1.0
else:
#
# |--------|-------------------|--------------------------|
# qs s qc * e
# <-----------s_bar_u---------><------------s_u----------->
# <-------r_bar_u----><------r_u----->
#
# ==> (r_bar_u / s_bar_u + r_u / s_u) / (df)
r_u = (settlement - quasi_acoupon).days
s_u = (aend - quasi_acoupon).days
r_bar_u = (quasi_acoupon - obj.leg1.schedule.aschedule[acc_idx]).days
return (r_bar_u / s_bar_u + r_u / s_u) / (
obj.leg1._regular_periods[acc_idx].period_params.dcf * f
)
return _acc_linear_proportion_by_days(obj, settlement, acc_idx, *args)
def _acc_30e360_backward(
obj: _SupportsFixedFloatLeg1, settlement: datetime, acc_idx: int, *args: Any
) -> float:
"""
Ignoring the convention on the leg uses "30E360" to determine the accrual fraction.
Measures between unadjusted date and settlement.
[Designed primarily for Swedish Government Bonds]
If stub revert to linear proportioning.
"""
if obj.leg1._regular_periods[acc_idx].period_params.stub:
return _acc_linear_proportion_by_days(obj, settlement, acc_idx)
f = obj.leg1.schedule.periods_per_annum
_: float = (
dcf(
start=settlement,
end=obj.leg1.schedule.aschedule[acc_idx + 1],
convention="30e360",
frequency=obj.leg1.schedule.frequency_obj,
)
* f
)
_ = 1 - _
return _
def _acc_30u360_forward(
obj: _SupportsFixedFloatLeg1, settlement: datetime, acc_idx: int, *args: Any
) -> float:
"""
Ignoring the convention on the leg uses "30U360" to determine the accrual fraction.
Measures between unadjusted dates and settlement.
[Designed primarily for US Corporate/Muni Bonds]
"""
sch = obj.leg1.schedule
accrued = dcf(
start=sch.aschedule[acc_idx],
end=settlement,
convention="30u360",
frequency=sch.frequency_obj,
)
period = dcf(
start=sch.aschedule[acc_idx],
end=sch.aschedule[acc_idx + 1],
convention="30u360",
frequency=sch.frequency_obj,
)
return accrued / period
def _acc_act365_with_1y_and_stub_adjustment(
obj: _SupportsFixedFloatLeg1, settlement: datetime, acc_idx: int, *args: Any
) -> float:
"""
Ignoring the convention on the leg uses "Act365f" to determine the accrual fraction.
Measures between unadjusted date and settlement.
Special adjustment if number of days is greater than 365.
If the period is a stub reverts to a straight line interpolation
[this is primarily designed for Canadian Government Bonds]
"""
if obj.leg1._regular_periods[acc_idx].period_params.stub:
return _acc_linear_proportion_by_days(obj, settlement, acc_idx)
f = obj.leg1.schedule.periods_per_annum
r = (settlement - obj.leg1.schedule.aschedule[acc_idx]).days
s = (obj.leg1.schedule.aschedule[acc_idx + 1] - obj.leg1.schedule.aschedule[acc_idx]).days
if r == s:
_: float = 1.0 # then settlement falls on the coupon date
elif r >= 365.0 / f:
_ = 1.0 - ((s - r) * f) / 365.0 # counts remaining days
else:
_ = f * r / 365.0
return _
ACC_FRAC_FUNCS: dict[str, AccrualFunction] = {
"linear_days": _acc_linear_proportion_by_days,
"linear_days_long_front_split": _acc_linear_proportion_by_days_long_stub_split,
"30e360_backward": _acc_30e360_backward,
"30u360_forward": _acc_30u360_forward,
"act365f_1y": _acc_act365_with_1y_and_stub_adjustment,
}
================================================
FILE: python/rateslib/instruments/bonds/conventions/discounting.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from datetime import datetime
from typing import TYPE_CHECKING, Protocol
from rateslib.scheduling import dcf
if TYPE_CHECKING:
from rateslib.instruments.bonds.conventions.accrued import AccrualFunction
from rateslib.local_types import (
CurveOption_,
DualTypes,
_SupportsFixedFloatLeg1,
)
"""
The calculations for v2 (the interim, regular period discount value) are more standardised
than the other calculations because they exclude the scenarios for stub handling.
"""
class YtmDiscountFunction(Protocol):
# Callable Type for discount functions in YTM formula
def __call__(
self,
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes | None,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes: ...
class YtmStubDiscountFunction(Protocol):
# Callable Type for discount functions in YTM formula
# This is same as above, except v2 must be pre-calculated and cannot be None
def __call__(
self,
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes: ...
class CashflowFunction(Protocol):
# Callable Type for cashflow generation in YTM formula
def __call__(
self,
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
acc_idx: int,
p_idx: int,
n: int,
curve: CurveOption_,
) -> DualTypes: ...
"""
The calculations for v2:
"""
def _v2_(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes | None,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes:
"""
Default method for a single regular period discounted in the regular portion of bond.
Implies compounding at the same frequency as the coupons.
"""
if v2 is None:
return 1 / (1 + ytm / (100 * f))
else:
return v2
def _v2_annual(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes | None,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes:
"""
ytm is expressed annually but coupon payments are on another frequency
"""
if v2 is None:
return (1 / (1 + ytm / 100)) ** (1 / f)
else:
return v2
def _v2_annual_pay_adjust(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes | None,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes:
if v2 is None:
# This is the initial, regular determination of v2
return (1 / (1 + ytm / 100)) ** (1 / f)
else:
return v2 ** (1.0 + _pay_adj(obj, period_idx))
"""
The calculations for v1:
"""
def _v1_compounded(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes:
"""
Determine the discount factor for the first cashflow after settlement.
The parameter "v2" is a generic discount function which is normally :math:`1/(1+y/f)`
Method: compounds "v2" with exponent in terms of the accrual fraction of the period.
"""
acc_frac = accrual(obj, settlement, acc_idx)
if obj.leg1.periods[acc_idx].period_params.stub: # type: ignore[attr-defined]
# If it is a stub then the remaining fraction must be scaled by the relative size of the
# stub period compared with a regular period.
fd0 = obj.leg1.periods[acc_idx].period_params.dcf * f * (1 - acc_frac) # type: ignore[attr-defined]
else:
# 1 minus acc_fra is the fraction of the period remaining until the next cashflow.
fd0 = 1 - acc_frac
return v2**fd0 # type: ignore[no-any-return]
def _v1_simple(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes:
"""
Use simple rates with a yield which matches the frequency of the coupon.
"""
acc_frac = accrual(obj, settlement, acc_idx)
if obj.leg1.periods[acc_idx].period_params.stub: # type: ignore[attr-defined]
# is a stub so must account for discounting in a different way.
fd0 = obj.leg1.periods[acc_idx].period_params.dcf * f * (1 - acc_frac) # type: ignore[attr-defined]
else:
fd0 = 1 - acc_frac
v_ = 1 / (1 + fd0 * ytm / (100 * f))
return v_ # type: ignore[no-any-return]
def _v1_simple_act365f(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes:
"""
Use simple rates with the DCF determined by Act365F.
"""
fd0 = dcf(settlement, obj.leg1.schedule.aschedule[acc_idx + 1], "Act365F")
v_ = 1 / (1 + fd0 * ytm / 100.0)
return v_
def _v1_simple_pay_adjust(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes:
acc_frac = accrual(obj, settlement, acc_idx)
if obj.leg1._regular_periods[acc_idx].period_params.stub:
# is a stub so must account for discounting in a different way.
fd0 = (
obj.leg1.periods[acc_idx].period_params.dcf # type: ignore[attr-defined]
* f
* (1 - acc_frac + _pay_adj(obj, period_idx))
)
else:
fd0 = 1 - acc_frac + _pay_adj(obj, period_idx)
v_ = 1 / (1 + fd0 * ytm / (100 * f))
return v_ # type: ignore[no-any-return]
def _v1_compounded_pay_adjust(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes:
acc_frac = accrual(obj, settlement, acc_idx)
if obj.leg1._regular_periods[acc_idx].period_params.stub:
# If it is a stub then the remaining fraction must be scaled by the relative size of the
# stub period compared with a regular period.
fd0 = (
obj.leg1.periods[acc_idx].period_params.dcf # type: ignore[attr-defined]
* f
* (1 - acc_frac + _pay_adj(obj, period_idx))
)
else:
# 1 minus acc_fra is the fraction of the period remaining until the next cashflow.
fd0 = 1 - acc_frac + _pay_adj(obj, period_idx)
return v2**fd0 # type: ignore[no-any-return]
def _v1_compounded_final_simple(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes:
"""
Uses regular fractional compounding except if it is last period, when simple money-mkt
yield is used instead.
Introduced for German Bunds.
"""
if acc_idx == obj.leg1.schedule.n_periods - 1:
# or \
# settlement == self.leg1.schedule.aschedule[acc_idx + 1]:
# then settlement is in last period use simple interest.
return _v1_simple(obj, ytm, f, settlement, acc_idx, v2, accrual, period_idx)
else:
return _v1_compounded(obj, ytm, f, settlement, acc_idx, v2, accrual, period_idx)
def _v1_compounded_final_simple_act365f(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes:
"""
Uses regular fractional compounding except if it is last period, when simple money-mkt
yield is used instead with discounting Act365F.
Introduced for New Zealand Government Bonds.
"""
if acc_idx == obj.leg1.schedule.n_periods - 1:
# or \
# settlement == self.leg1.schedule.aschedule[acc_idx + 1]:
# then settlement is in last period use simple interest.
return _v1_simple_act365f(obj, ytm, f, settlement, acc_idx, v2, accrual, period_idx)
else:
return _v1_compounded(obj, ytm, f, settlement, acc_idx, v2, accrual, period_idx)
def _v1_compounded_final_simple_pay_adjust(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes:
"""
Uses regular fractional compounding except if it is last period, when simple money-mkt
yield is used instead.
Both methods are adjusted to account for pay delays.
"""
if acc_idx == obj.leg1.schedule.n_periods - 1:
return _v1_simple_pay_adjust(obj, ytm, f, settlement, acc_idx, v2, accrual, period_idx)
else:
# Pay adjustment is not applied if it is not the final period
return _v1_compounded(obj, ytm, f, settlement, acc_idx, v2, accrual, period_idx)
def _v1_comp_stub_act365f(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes:
"""Compounds the yield. In a stub period the act365f DCF is used"""
if not obj.leg1.periods[acc_idx].period_params.stub: # type: ignore[attr-defined]
return _v1_compounded(obj, ytm, f, settlement, acc_idx, v2, accrual, period_idx)
else:
fd0 = dcf(settlement, obj.leg1.schedule.aschedule[acc_idx + 1], "Act365F")
return v2**fd0
def _v1_simple_long_stub(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes:
"""
Use simple rates with a yield which matches the frequency of the coupon.
If the stub period is long, then discount the regular part of the stub with the regular
discount param ``v``.
"""
if (
obj.leg1._regular_periods[acc_idx].period_params.stub
and obj.leg1._regular_periods[acc_idx].period_params.dcf * f > 1
):
# long stub
acc_frac = accrual(obj, settlement, acc_idx)
fd0 = obj.leg1.periods[acc_idx].period_params.dcf * f * (1 - acc_frac) # type: ignore[attr-defined]
if fd0 > 1.0:
# then there is a whole quasi-coupon period until payment of next cashflow
v_ = v2 * 1 / (1 + (fd0 - 1) * ytm / (100 * f))
else:
# this is standard _v1_simple formula
v_ = 1 / (1 + fd0 * ytm / (100 * f))
return v_ # type: ignore[no-any-return]
else:
return _v1_simple(obj, ytm, f, settlement, acc_idx, v2, accrual, period_idx)
"""
The calculations for v3:
"""
def _v3_compounded(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes:
"""
Final period uses a compounding approach where the power is determined by the DCF of that
period under the bond's specified convention.
"""
if obj.leg1.periods[acc_idx].period_params.stub: # type: ignore[attr-defined]
# If it is a stub then the remaining fraction must be scaled by the relative size of the
# stub period compared with a regular period.
fd0 = obj.leg1.periods[acc_idx].period_params.dcf * f # type: ignore[attr-defined]
else:
fd0 = 1
return v2**fd0 # type: ignore[no-any-return]
def _v3_compounded_pay_adjust(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes:
"""
Final period uses a compounding approach where the power is determined by the DCF of that
period under the bond's specified convention.
"""
regular_v3 = _v3_compounded(obj, ytm, f, settlement, acc_idx, v2, accrual, period_idx)
return regular_v3 ** (1.0 + _pay_adj(obj, period_idx))
def _v3_30e360_u_simple(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes:
"""
The final period is discounted by a simple interest method under a 30E360 convention.
The YTM is assumed to have the same frequency as the coupons.
"""
d_ = dcf(
obj.leg1._regular_periods[acc_idx].period_params.start,
obj.leg1._regular_periods[acc_idx].period_params.end,
"30E360",
)
return 1 / (1 + d_ * ytm / 100) # simple interest
def _v3_simple(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes:
if obj.leg1.periods[acc_idx].period_params.stub: # type: ignore[attr-defined]
# is a stub so must account for discounting in a different way.
fd0 = obj.leg1.periods[acc_idx].period_params.dcf * f # type: ignore[attr-defined]
else:
fd0 = 1.0
v_ = 1 / (1 + fd0 * ytm / (100 * f))
return v_ # type: ignore[no-any-return]
def _v3_simple_pay_adjust(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
settlement: datetime,
acc_idx: int,
v2: DualTypes,
accrual: AccrualFunction,
period_idx: int,
) -> DualTypes:
if obj.leg1.periods[acc_idx].period_params.stub: # type: ignore[attr-defined]
# is a stub so must account for discounting in a different way.
fd0 = (1.0 + _pay_adj(obj, period_idx)) * obj.leg1.periods[acc_idx].period_params.dcf * f # type: ignore[attr-defined]
else:
fd0 = 1.0 + _pay_adj(obj, period_idx)
v_ = 1 / (1 + fd0 * ytm / (100 * f))
return v_ # type: ignore[no-any-return]
V1_FUNCS: dict[str, YtmStubDiscountFunction] = {
"compounding": _v1_compounded,
"compounding_pay_adjust": _v1_compounded_pay_adjust,
"simple": _v1_simple,
"simple_pay_adjust": _v1_simple_pay_adjust,
"compounding_final_simple": _v1_compounded_final_simple,
"compounding_final_simple_pay_adjust": _v1_compounded_final_simple_pay_adjust, # noqa: E501
"compounding_stub_act365f": _v1_comp_stub_act365f,
"simple_long_stub_compounding": _v1_simple_long_stub,
"compounding_final_simple_act365f": _v1_compounded_final_simple_act365f,
"simple_act365f": _v1_simple_act365f,
}
V2_FUNCS: dict[str, YtmDiscountFunction] = {
"regular": _v2_,
"annual": _v2_annual,
"annual_pay_adjust": _v2_annual_pay_adjust,
}
V3_FUNCS: dict[str, YtmStubDiscountFunction] = {
"compounding": _v3_compounded,
"compounding_pay_adjust": _v3_compounded_pay_adjust,
"simple": _v3_simple,
"simple_pay_adjust": _v3_simple_pay_adjust,
"simple_30e360": _v3_30e360_u_simple,
}
def _c_from_obj(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
acc_idx: int,
p_idx: int,
n: int,
curve: CurveOption_,
) -> DualTypes:
"""
Return the cashflow as it has been calculated directly on the object according to the
native schedule and conventions, for the specified period index.
"""
return obj._period_cashflow(obj.leg1._regular_periods[p_idx], curve) # type: ignore[no-any-return, attr-defined]
def _c_full_coupon(
obj: _SupportsFixedFloatLeg1,
ytm: DualTypes,
f: float,
acc_idx: int,
p_idx: int,
n: int,
curve: CurveOption_,
) -> DualTypes:
"""
Ignore the native schedule and conventions and return an amount based on the period
notional, the bond coupon, and the bond frequency.
"""
return -obj.leg1._regular_periods[p_idx].settlement_params.notional * obj.fixed_rate / (100 * f) # type: ignore[attr-defined, no-any-return]
C_FUNCS: dict[str, CashflowFunction] = {
"cashflow": _c_from_obj,
"full_coupon": _c_full_coupon,
}
def _pay_adj(obj: _SupportsFixedFloatLeg1, period_idx: int) -> float:
sch = obj.leg1.schedule
pd = (sch.pschedule[period_idx + 1] - sch.aschedule[period_idx + 1]).days
PD = (sch.pschedule[period_idx + 1] - sch.pschedule[period_idx]).days
return pd / PD
================================================
FILE: python/rateslib/instruments/bonds/fixed_rate_bond.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.bonds.conventions import (
BondCalcMode,
_get_bond_calc_mode,
)
from rateslib.instruments.bonds.protocols import _BaseBondInstrument
from rateslib.instruments.protocols.kwargs import _convert_to_schedule_kwargs, _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_parse_curves,
_Vol,
)
from rateslib.legs import FixedLeg
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CalInput,
CurvesT_,
DualTypes,
DualTypes_,
Frequency,
FXForwards_,
RollDay,
Sequence,
Solver_,
VolT_,
_BaseLeg,
bool_,
datetime,
datetime_,
float_,
int_,
str_,
)
class FixedRateBond(_BaseBondInstrument):
"""
A *fixed rate bond* composed of a :class:`~rateslib.legs.FixedLeg`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.instruments import FixedRateBond, BondCalcMode
from datetime import datetime as dt
.. ipython:: python
frb = FixedRateBond(
effective=dt(2000, 1, 1),
termination="2y",
spec="us_gb",
fixed_rate=2.0,
)
frb.cashflows()
.. rubric:: Pricing
A *FixedRateBond* requires one *disc curve*. The following input formats are
allowed:
.. code-block:: python
curves = curve | [curve] # a single curve is repeated for all required curves
curves = {"disc_curve": disc_curve} # dict form is explicit
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **scheduling** parameters.
effective : datetime, :red:`required`
The unadjusted effective date. If given as adjusted, unadjusted alternatives may be
inferred.
termination : datetime, str, :red:`required`
The unadjusted termination date. If given as adjusted, unadjusted alternatives may be
inferred. If given as string tenor will be calculated from ``effective``.
frequency : Frequency, str, :red:`required`
The frequency of the schedule.
If given as string will derive a :class:`~rateslib.scheduling.Frequency` aligning with:
monthly ("M"), quarterly ("Q"), semi-annually ("S"), annually("A") or zero-coupon ("Z"), or
a set number of calendar or business days ("_D", "_B"), weeks ("_W"), months ("_M") or
years ("_Y").
Where required, the :class:`~rateslib.scheduling.RollDay` is derived as per ``roll``
and business day calendar as per ``calendar``.
stub : StubInference, str in {"ShortFront", "LongFront", "ShortBack", "LongBack"}, :green:`optional`
The stub type used if stub inference is required. If given as string will derive a
:class:`~rateslib.scheduling.StubInference`.
front_stub : datetime, :green:`optional`
The unadjusted date for the start stub period. If given as adjusted, unadjusted
alternatives may be inferred.
back_stub : datetime, :green:`optional`
The unadjusted date for the back stub period. If given as adjusted, unadjusted
alternatives may be inferred.
See notes for combining ``stub``, ``front_stub`` and ``back_stub``
and any automatic stub inference.
roll : RollDay, int in [1, 31], str in {"eom", "imm", "som"}, :green:`optional`
The roll day of the schedule. If not given or not available in ``frequency`` will be
inferred for monthly frequency variants.
eom : bool, :green:`optional`
Use an end of month preference rather than regular rolls for ``roll`` inference. Set by
default. Not required if ``roll`` is defined.
modifier : Adjuster, str in {"NONE", "F", "MF", "P", "MP"}, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` used for adjusting unadjusted schedule dates
into adjusted dates. If given as string must define simple date rolling rules.
calendar : calendar, str, :green:`optional`
The business day calendar object to use. If string will call
:meth:`~rateslib.scheduling.get_calendar`.
payment_lag: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
a payment date. If given as integer will define the number of business days to
lag payments by.
payment_lag_exchange: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional payment date. If given as integer will define the number of business days to
lag payments by.
ex_div: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional dates, which may be used, for example by fixings schedules. If given as integer
will define the number of business days to lag dates by.
convention: str, :green:`optional (set by 'defaults')`
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the *Instrument* (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The initial leg notional, defined in units of *reference currency*.
.. note::
The following are **rate parameters**.
fixed_rate : float or None
The fixed rate applied to the :class:`~rateslib.legs.FixedLeg`. If `None`
will be set to mid-market when curves are provided.
.. note::
The following are **meta parameters**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
calc_mode : str or BondCalcMode
A calculation mode for dealing with bonds under different conventions. See notes.
settle: int
The number of days by which to lag 'today' to arrive at standard settlement.
metric : str, :green:`optional` (set as 'clean_price')
The pricing metric returned by :meth:`~rateslib.instruments.FixedRateBond.rate`.
spec: str, :green:`optional`
A collective group of parameters. See
:ref:`default argument specifications `.
Notes
------
The ``calc_mode``, which creates a :class:`~rateslib.instruments.BondCalcMode` defines the
specifications for YTM and accrued interest calculations. Examples of these values
are shown on the :ref:`FixedRateBond defaults ` page.
One can also create their own mixing-and-matching some presets already designed, e.g.:
.. ipython:: python
mode = BondCalcMode(
settle_accrual="linear_days_long_front_split",
ytm_accrual="linear_days_long_front_split",
v1="simple_long_stub_compounding",
v2="annual",
v3="compounding",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
All of the arguments allow callables so it is technically possible to re-write any types of
calculations that fit into the framework. A cookbook page which demonstrates doing this
is :ref:`Understanding and Customising FixedRateBond Conventions `.
""" # noqa: E501
_rate_scalar = 1.0
@property
def fixed_rate(self) -> DualTypes_:
"""The fixed rate parameter of the composited
:class:`~rateslib.legs.FixedLeg`."""
return self.leg1.fixed_rate
@fixed_rate.setter
def fixed_rate(self, value: DualTypes_) -> None:
self.kwargs.leg1["fixed_rate"] = value
self.leg1.fixed_rate = value
@property
def leg1(self) -> FixedLeg:
"""The :class:`~rateslib.legs.FixedLeg` of the *Instrument*."""
return self._leg1
@property
def legs(self) -> Sequence[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
def __init__(
self,
# scheduling
effective: datetime_ = NoInput(0),
termination: datetime | str_ = NoInput(0),
frequency: Frequency | str_ = NoInput(0),
*,
stub: str_ = NoInput(0),
front_stub: datetime_ = NoInput(0),
back_stub: datetime_ = NoInput(0),
roll: int | RollDay | str_ = NoInput(0),
eom: bool_ = NoInput(0),
modifier: str_ = NoInput(0),
calendar: CalInput = NoInput(0),
payment_lag: int_ = NoInput(0),
payment_lag_exchange: int_ = NoInput(0),
ex_div: int_ = NoInput(0),
convention: str_ = NoInput(0),
# settlement parameters
currency: str_ = NoInput(0),
notional: float_ = NoInput(0),
# amortization: float_ = NoInput(0),
# rate parameters
fixed_rate: DualTypes_ = NoInput(0),
# meta parameters
curves: CurvesT_ = NoInput(0),
calc_mode: BondCalcMode | str_ = NoInput(0),
settle: int_ = NoInput(0),
spec: str_ = NoInput(0),
metric: str = "clean_price",
) -> None:
user_args = dict(
# scheduling
effective=effective,
termination=termination,
frequency=frequency,
stub=stub,
front_stub=front_stub,
back_stub=back_stub,
roll=roll,
eom=eom,
modifier=modifier,
calendar=calendar,
payment_lag=payment_lag,
payment_lag_exchange=payment_lag_exchange,
ex_div=ex_div,
convention=convention,
# settlement
currency=currency,
notional=notional,
# amortization=amortization,
# rate
fixed_rate=fixed_rate,
# meta
curves=self._parse_curves(curves),
calc_mode=calc_mode,
settle=settle,
metric=metric,
)
instrument_args = dict( # these are hard coded arguments specific to this instrument
initial_exchange=False,
final_exchange=True,
vol=_Vol(),
)
default_args = dict(
notional=defaults.notional,
calc_mode=defaults.calc_mode[type(self).__name__],
payment_lag=defaults.payment_lag_specific[type(self).__name__],
payment_lag_exchange=defaults.payment_lag_specific[type(self).__name__],
ex_div=defaults.ex_div,
settle=defaults.settle,
)
self._kwargs = _KWArgs(
spec=spec,
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=["curves", "calc_mode", "settle", "metric", "vol"],
)
self.kwargs.meta["calc_mode"] = _get_bond_calc_mode(self.kwargs.meta["calc_mode"])
if isinstance(self.kwargs.leg1["fixed_rate"], NoInput):
raise ValueError(f"`fixed_rate` must be provided for {type(self).__name__}.")
self._leg1 = FixedLeg(**_convert_to_schedule_kwargs(self.kwargs.leg1, 1))
self._legs = [self.leg1]
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
An FRB has one curve requirements: a disc_curve.
When given as only 1 element this curve is applied to all of the those components
When given as 2 elements the first is treated as the rate curve and the 2nd as disc curve.
"""
if isinstance(curves, NoInput):
return _Curves()
if isinstance(curves, dict):
return _Curves(
disc_curve=curves.get("disc_curve", NoInput(0)),
)
elif isinstance(curves, list | tuple):
if len(curves) == 1:
return _Curves(
disc_curve=curves[0],
)
elif len(curves) == 2:
return _Curves(
disc_curve=curves[1],
)
else:
raise ValueError(
f"{type(self).__name__} requires only 1 curve types. Got {len(curves)}."
)
elif isinstance(curves, _Curves):
return curves
else: # `curves` is just a single input which is copied across all curves
return _Curves(
disc_curve=curves, # type: ignore[arg-type]
)
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
disc_curve = _get_curve("disc_curve", False, False, *c)
metric_ = _drb(self.kwargs.meta["metric"], metric).lower()
settlement_ = self._maybe_get_settlement(settlement=settlement, disc_curve=disc_curve)
npv = self.leg1.local_npv(
disc_curve=disc_curve,
settlement=settlement_,
forward=settlement_,
)
# scale price to par 100 (npv is already projected forward to settlement)
dirty_price = npv * 100 / -self.leg1.settlement_params.notional
if metric_ == "dirty_price":
return dirty_price
elif metric_ == "clean_price":
return dirty_price - self.accrued(settlement_)
elif metric_ == "ytm":
return self.ytm(dirty_price, settlement_, True)
else:
raise ValueError("`metric` must be in {'dirty_price', 'clean_price', 'ytm'}.")
================================================
FILE: python/rateslib/instruments/bonds/float_rate_note.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import FloatFixingMethod
from rateslib.instruments.bonds.conventions import (
BondCalcMode,
_get_bond_calc_mode,
)
from rateslib.instruments.bonds.protocols import _BaseBondInstrument
from rateslib.instruments.protocols.kwargs import _convert_to_schedule_kwargs, _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_parse_curves,
_Vol,
)
from rateslib.legs import FloatLeg
from rateslib.periods import FloatPeriod
from rateslib.scheduling import Frequency
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CalInput,
CurveOption_,
CurvesT_,
DualTypes,
DualTypes_,
FloatRateSeries,
FXForwards_,
LegFixings,
Sequence,
Solver_,
VolT_,
_BaseLeg,
datetime,
datetime_,
int_,
str_,
)
class FloatRateNote(_BaseBondInstrument):
"""
A *floating rate note (FRN)* composed of a :class:`~rateslib.legs.FloatLeg`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.instruments import FloatRateNote
from datetime import datetime as dt
.. ipython:: python
frn = FloatRateNote(
effective=dt(2000, 1, 1),
termination="2y",
frequency="A",
currency="usd",
fixing_method="rfr_observation_shift(5)",
convention="Act360",
calendar="nyc|fed",
float_spread=25.0,
)
frn.cashflows()
.. rubric:: Pricing
A *FloatRateNote* requires a *disc curve* and a *rate curve*. The following input formats are
allowed:
.. code-block:: python
curves = curve | [curve] # a single curve is repeated for all required curves
curves = [rate_curve, disc_curve] # a sequence of two curves
curves = { # dict form is explicit
"disc_curve": disc_curve,
"rate_curve": rate_curve,
}
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **scheduling** parameters.
effective : datetime, :red:`required`
The unadjusted effective date. If given as adjusted, unadjusted alternatives may be
inferred.
termination : datetime, str, :red:`required`
The unadjusted termination date. If given as adjusted, unadjusted alternatives may be
inferred. If given as string tenor will be calculated from ``effective``.
frequency : Frequency, str, :red:`required`
The frequency of the schedule.
If given as string will derive a :class:`~rateslib.scheduling.Frequency` aligning with:
monthly ("M"), quarterly ("Q"), semi-annually ("S"), annually("A") or zero-coupon ("Z"), or
a set number of calendar or business days ("_D", "_B"), weeks ("_W"), months ("_M") or
years ("_Y").
Where required, the :class:`~rateslib.scheduling.RollDay` is derived as per ``roll``
and business day calendar as per ``calendar``.
stub : StubInference, str in {"ShortFront", "LongFront", "ShortBack", "LongBack"}, :green:`optional`
The stub type used if stub inference is required. If given as string will derive a
:class:`~rateslib.scheduling.StubInference`.
front_stub : datetime, :green:`optional`
The unadjusted date for the start stub period. If given as adjusted, unadjusted
alternatives may be inferred.
back_stub : datetime, :green:`optional`
The unadjusted date for the back stub period. If given as adjusted, unadjusted
alternatives may be inferred.
See notes for combining ``stub``, ``front_stub`` and ``back_stub``
and any automatic stub inference.
roll : RollDay, int in [1, 31], str in {"eom", "imm", "som"}, :green:`optional`
The roll day of the schedule. If not given or not available in ``frequency`` will be
inferred for monthly frequency variants.
eom : bool, :green:`optional`
Use an end of month preference rather than regular rolls for ``roll`` inference. Set by
default. Not required if ``roll`` is defined.
modifier : Adjuster, str in {"NONE", "F", "MF", "P", "MP"}, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` used for adjusting unadjusted schedule dates
into adjusted dates. If given as string must define simple date rolling rules.
calendar : calendar, str, :green:`optional`
The business day calendar object to use. If string will call
:meth:`~rateslib.scheduling.get_calendar`.
payment_lag: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
a payment date. If given as integer will define the number of business days to
lag payments by.
payment_lag_exchange: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional payment date. If given as integer will define the number of business days to
lag payments by.
ex_div: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional dates, which may be used, for example by fixings schedules. If given as integer
will define the number of business days to lag dates by.
convention: str, :green:`optional (set by 'defaults')`
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the *Instrument* (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The initial leg notional, defined in units of *reference currency*.
.. note::
The following are **rate parameters**.
fixing_method: FloatFixingMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.FloatFixingMethod` describing the determination
of the floating rate for each period.
fixing_frequency: Frequency, str, :green:`optional (set by 'frequency' or '1B')`
The :class:`~rateslib.scheduling.Frequency` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given is assumed to match the
frequency of the schedule for an IBOR type ``fixing_method`` or '1B' if RFR type.
fixing_series: FloatRateSeries, str, :green:`optional (implied by other parameters)`
The :class:`~rateslib.data.fixings.FloatRateSeries` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given inherits attributes given
such as the ``calendar``, ``convention``, ``fixing_method`` etc.
float_spread: float, Dual, Dual2, Variable, :green:`optional (set as 0.0)`
The amount (in bps) added to the rate in each period rate determination.
spread_compound_method: SpreadCompoundMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.SpreadCompoundMethod` used in the calculation
of the period rate when combining a ``float_spread``. Used **only** with RFR type
``fixing_method``.
rate_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
See :ref:`Fixings `.
The value of the rate fixing. If a scalar, is used directly. If a string identifier, links
to the central ``fixings`` object and data loader.
.. note::
The following are **meta parameters**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
calc_mode : str or BondCalcMode
A calculation mode for dealing with bonds under different conventions. See notes.
settle: int
The number of days by which to lag 'today' to arrive at standard settlement.
metric : str, :green:`optional` (set as 'clean_price')
The pricing metric returned by :meth:`~rateslib.instruments.FixedRateBond.rate`.
spec: str, :green:`optional`
A collective group of parameters. See
:ref:`default argument specifications `.
""" # noqa: E501
_rate_scalar = 1.0
@property
def float_spread(self) -> DualTypes:
"""The float spread parameter of the composited
:class:`~rateslib.legs.FloatLeg`."""
return self.leg1.float_spread
@float_spread.setter
def float_spread(self, value: DualTypes) -> None:
self.kwargs.leg1["float_spread"] = value
self.leg1.float_spread = value
@property
def leg1(self) -> FloatLeg:
"""The :class:`~rateslib.legs.FloatLeg` of the *Instrument*."""
return self._leg1
@property
def legs(self) -> Sequence[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
def __init__(
self,
effective: datetime_ = NoInput(0),
termination: datetime | str_ = NoInput(0),
frequency: int_ = NoInput(0),
*,
stub: str_ = NoInput(0),
front_stub: datetime_ = NoInput(0),
back_stub: datetime_ = NoInput(0),
roll: str | int_ = NoInput(0),
eom: bool | NoInput = NoInput(0),
modifier: str_ = NoInput(0),
calendar: CalInput = NoInput(0),
payment_lag: int_ = NoInput(0),
payment_lag_exchange: int_ = NoInput(0),
ex_div: int_ = NoInput(0),
convention: str_ = NoInput(0),
# settlement params
currency: str_ = NoInput(0),
notional: DualTypes_ = NoInput(0),
amortization: DualTypes_ = NoInput(0),
# rate params
float_spread: DualTypes_ = NoInput(0),
spread_compound_method: str_ = NoInput(0),
rate_fixings: LegFixings = NoInput(0),
fixing_method: str_ = NoInput(0),
fixing_frequency: Frequency | str_ = NoInput(0),
fixing_series: FloatRateSeries | str_ = NoInput(0),
# meta parameters
curves: CurvesT_ = NoInput(0),
calc_mode: BondCalcMode | str_ = NoInput(0),
settle: int_ = NoInput(0),
spec: str_ = NoInput(0),
metric: str = "clean_price",
) -> None:
user_args = dict(
# scheduling
effective=effective,
termination=termination,
frequency=frequency,
stub=stub,
front_stub=front_stub,
back_stub=back_stub,
roll=roll,
eom=eom,
modifier=modifier,
calendar=calendar,
payment_lag=payment_lag,
payment_lag_exchange=payment_lag_exchange,
ex_div=ex_div,
convention=convention,
# settlement
currency=currency,
notional=notional,
amortization=amortization,
# rate
float_spread=float_spread,
spread_compound_method=spread_compound_method,
rate_fixings=rate_fixings,
fixing_method=fixing_method,
fixing_frequency=fixing_frequency,
fixing_series=fixing_series,
# meta
curves=self._parse_curves(curves),
calc_mode=calc_mode,
settle=settle,
metric=metric,
)
instrument_args = dict( # these are hard coded arguments specific to this instrument
initial_exchange=False,
final_exchange=True,
vol=_Vol(),
)
default_args = dict(
notional=defaults.notional,
calc_mode=defaults.calc_mode[type(self).__name__],
payment_lag=defaults.payment_lag_specific[type(self).__name__],
payment_lag_exchange=defaults.payment_lag_specific[type(self).__name__],
ex_div=defaults.ex_div,
settle=defaults.settle,
)
self._kwargs = _KWArgs(
spec=spec,
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=["curves", "calc_mode", "settle", "metric", "vol"],
)
self.kwargs.meta["calc_mode"] = _get_bond_calc_mode(self.kwargs.meta["calc_mode"])
self._leg1 = FloatLeg(**_convert_to_schedule_kwargs(self.kwargs.leg1, 1))
if self._leg1.schedule.frequency_obj == Frequency.Zero():
raise ValueError("A `FloatRateNote` cannot have a 'zero' frequency.")
self._legs = [self.leg1]
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
An FRN has two curve requirements: a leg2_rate_curve and a disc_curve used by both legs.
When given as only 1 element this curve is applied to all of the those components
When given as 2 elements the first is treated as the rate curve and the 2nd as disc curve.
"""
if isinstance(curves, NoInput):
return _Curves()
if isinstance(curves, dict):
return _Curves(
rate_curve=curves.get("rate_curve", NoInput(0)),
disc_curve=curves.get("disc_curve", NoInput(0)),
)
elif isinstance(curves, list | tuple):
if len(curves) == 2:
return _Curves(
rate_curve=curves[0],
disc_curve=curves[1],
)
elif len(curves) == 1:
return _Curves(
rate_curve=curves[0],
disc_curve=curves[0],
)
else:
raise ValueError(
f"{type(self).__name__} requires only 2 curve types. Got {len(curves)}."
)
elif isinstance(curves, _Curves):
return curves
else: # `curves` is just a single input which is copied across all curves
return _Curves(
rate_curve=curves, # type: ignore[arg-type]
disc_curve=curves, # type: ignore[arg-type]
)
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
disc_curve = _get_curve("disc_curve", False, False, *c)
rate_curve = _get_curve("rate_curve", True, True, *c)
metric = _drb(self.kwargs.meta["metric"], metric).lower()
if metric in ["clean_price", "dirty_price", "spread", "ytm"]:
settlement_ = self._maybe_get_settlement(settlement, disc_curve)
if metric == "spread":
_: DualTypes = self.leg1.spread(
# target_npv=-(npv + self.leg1.settlement_params.notional),
target_npv=-(self.leg1.settlement_params.notional),
rate_curve=rate_curve,
disc_curve=disc_curve,
settlement=settlement_,
forward=settlement_,
)
return _
else:
npv = self.leg1.local_npv(
rate_curve=rate_curve,
disc_curve=disc_curve,
settlement=settlement_,
forward=settlement_,
)
# scale price to par 100 (npv is already projected forward to settlement)
dirty_price = npv * 100 / -self.leg1.settlement_params.notional
if metric == "dirty_price":
return dirty_price
elif metric == "clean_price":
return dirty_price - self.accrued(settlement_, rate_curve=rate_curve)
elif metric == "ytm":
return self.ytm(
price=dirty_price, settlement=settlement_, dirty=True, rate_curve=rate_curve
)
raise ValueError("`metric` must be in {'dirty_price', 'clean_price', 'spread', 'ytm'}.")
def accrued(
self,
settlement: datetime,
rate_curve: CurveOption_ = NoInput(0),
) -> DualTypes:
acc_idx = self.leg1._period_index(settlement)
if isinstance(self.leg1.rate_params.fixing_method, FloatFixingMethod.IBOR):
frac = self.kwargs.meta["calc_mode"]._settle_accrual(self, settlement, acc_idx)
if self.ex_div(settlement):
frac = frac - 1 # accrued is negative in ex-div period
rate = self.leg1._regular_periods[acc_idx].rate(rate_curve=rate_curve)
cashflow = (
-self.leg1._regular_periods[acc_idx].settlement_params.notional
* self.leg1._regular_periods[acc_idx].period_params.dcf
* rate
/ 100.0
)
return frac * cashflow / -self.leg1.settlement_params.notional * 100.0 # type: ignore[no-any-return]
else: # is "rfr"
p = FloatPeriod(
start=self.leg1.schedule.aschedule[acc_idx],
end=settlement,
payment=settlement,
termination=self.leg1.schedule.aschedule[acc_idx + 1],
stub=True,
frequency=self.leg1.schedule.frequency_obj,
notional=-100,
currency=self.leg1.settlement_params.currency,
convention=self.leg1._regular_periods[acc_idx].period_params.convention,
float_spread=self.float_spread,
fixing_method=self.leg1.rate_params.fixing_method,
rate_fixings=self.leg1.rate_params.fixing_identifier,
spread_compound_method=self.leg1.rate_params.spread_compound_method,
fixing_series=self.leg1.rate_params.fixing_series,
fixing_frequency=self.leg1.rate_params.fixing_frequency,
# roll=self.leg1.schedule.roll,
calendar=self.leg1.schedule.calendar,
adjuster=self.leg1.schedule.accrual_adjuster,
)
if p.period_params.start == p.period_params.end and acc_idx == 0:
# bond settlement on issue date so there is no accrued
return 0.0
is_ex_div = self.ex_div(settlement)
if is_ex_div and settlement == self.leg1._regular_periods[acc_idx].period_params.end:
# then settlement is on a coupon date so no accrued
return 0.0
rate_to_settle = p.rate(rate_curve)
accrued_to_settle = 100.0 * p.period_params.dcf * rate_to_settle / 100.0
if is_ex_div:
rate_to_end = self.leg1._regular_periods[acc_idx].rate(rate_curve=rate_curve)
accrued_to_end = (
100.0
* self.leg1._regular_periods[acc_idx].period_params.dcf
* rate_to_end
/ 100.0
)
return accrued_to_settle - accrued_to_end
else:
return accrued_to_settle
================================================
FILE: python/rateslib/instruments/bonds/index_fixed_rate_bond.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.dual import Dual, gradient
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.bonds.conventions import (
BondCalcMode,
_get_bond_calc_mode,
)
from rateslib.instruments.bonds.protocols import _BaseBondInstrument
from rateslib.instruments.protocols.kwargs import _convert_to_schedule_kwargs, _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_parse_curves,
_Vol,
)
from rateslib.legs import FixedLeg
from rateslib.periods.parameters import _IndexParams
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CalInput,
CurveOption_,
CurvesT_,
DualTypes,
DualTypes_,
Frequency,
FXForwards_,
IndexMethod,
LegFixings,
Number,
RollDay,
Sequence,
Solver_,
VolT_,
_BaseCurve_,
_BaseLeg,
bool_,
datetime,
datetime_,
float_,
int_,
str_,
)
class IndexFixedRateBond(_BaseBondInstrument):
"""
An *index-linked fixed rate bond* composed of a :class:`~rateslib.legs.FixedLeg`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.instruments import IndexFixedRateBond
from datetime import datetime as dt
from rateslib import fixings
.. ipython:: python
fixings.add("RPI_series", Series(index=[dt(2024, 4, 1), dt(2024, 5, 1)], data=[385.0, 386.4]))
ifrb = IndexFixedRateBond(
effective=dt(2024, 7, 12),
termination="2y",
fixed_rate=2.25,
spec="us_gbi",
index_fixings="RPI_series",
)
ifrb.cashflows()
.. ipython:: python
:suppress:
fixings.pop("RPI_series")
.. rubric:: Pricing
An *IndexFixedRateBond* requires an *index curve* and a *disc curve*. The following input
formats are allowed:
.. code-block:: python
curves = [index_curve, disc_curve] # two curves as a list
curves = {"index_curve": index_curve, "disc_curve": disc_curve} # dict form is explicit
The available ``metric`` for the :meth:`~rateslib.instruments.IndexFixedRateBond.rate`
are in *{'clean_price', 'dirty_price', 'ytm', 'indexed_ytm', 'indexed_clean_price',
'indexed_dirty_price'}*.
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **scheduling** parameters.
effective : datetime, :red:`required`
The unadjusted effective date. If given as adjusted, unadjusted alternatives may be
inferred.
termination : datetime, str, :red:`required`
The unadjusted termination date. If given as adjusted, unadjusted alternatives may be
inferred. If given as string tenor will be calculated from ``effective``.
frequency : Frequency, str, :red:`required`
The frequency of the schedule.
If given as string will derive a :class:`~rateslib.scheduling.Frequency` aligning with:
monthly ("M"), quarterly ("Q"), semi-annually ("S"), annually("A") or zero-coupon ("Z"), or
a set number of calendar or business days ("_D", "_B"), weeks ("_W"), months ("_M") or
years ("_Y").
Where required, the :class:`~rateslib.scheduling.RollDay` is derived as per ``roll``
and business day calendar as per ``calendar``.
stub : StubInference, str in {"ShortFront", "LongFront", "ShortBack", "LongBack"}, :green:`optional`
The stub type used if stub inference is required. If given as string will derive a
:class:`~rateslib.scheduling.StubInference`.
front_stub : datetime, :green:`optional`
The unadjusted date for the start stub period. If given as adjusted, unadjusted
alternatives may be inferred.
back_stub : datetime, :green:`optional`
The unadjusted date for the back stub period. If given as adjusted, unadjusted
alternatives may be inferred.
See notes for combining ``stub``, ``front_stub`` and ``back_stub``
and any automatic stub inference.
roll : RollDay, int in [1, 31], str in {"eom", "imm", "som"}, :green:`optional`
The roll day of the schedule. If not given or not available in ``frequency`` will be
inferred for monthly frequency variants.
eom : bool, :green:`optional`
Use an end of month preference rather than regular rolls for ``roll`` inference. Set by
default. Not required if ``roll`` is defined.
modifier : Adjuster, str in {"NONE", "F", "MF", "P", "MP"}, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` used for adjusting unadjusted schedule dates
into adjusted dates. If given as string must define simple date rolling rules.
calendar : calendar, str, :green:`optional`
The business day calendar object to use. If string will call
:meth:`~rateslib.scheduling.get_calendar`.
payment_lag: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
a payment date. If given as integer will define the number of business days to
lag payments by.
payment_lag_exchange: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional payment date. If given as integer will define the number of business days to
lag payments by.
ex_div: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional dates, which may be used, for example by fixings schedules. If given as integer
will define the number of business days to lag dates by.
convention: str, :green:`optional (set by 'defaults')`
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the *Instrument* (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The initial leg notional, defined in units of *reference currency*.
.. note::
The following are **rate parameters**.
fixed_rate : float or None
The fixed rate applied to the :class:`~rateslib.legs.FixedLeg`. If `None`
will be set to mid-market when curves are provided.
.. note::
The following parameters define **indexation**.
index_method : IndexMethod, str, :green:`optional (set by 'defaults')`
The interpolation method, or otherwise, to determine index values from reference dates.
index_lag: int, :green:`optional (set by 'defaults')`
The indexation lag, in months, applied to the determination of index values.
index_base: float, Dual, Dual2, Variable, :green:`optional`
The specific value applied as the base index value for all *Periods*.
If not given and ``index_fixings`` is a string fixings identifier that will be
used to determine the base index value.
index_fixings: float, Dual, Dual2, Variable, Series, str, 2-tuple or list, :green:`optional`
The index value for the reference date.
Best practice is to supply this value as string identifier relating to the global
``fixings`` object.
.. note::
The following are **meta parameters**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
calc_mode : str or BondCalcMode
A calculation mode for dealing with bonds under different conventions. See notes.
settle: int
The number of days by which to lag 'today' to arrive at standard settlement.
metric : str, :green:`optional` (set as 'clean_price')
The pricing metric returned by :meth:`~rateslib.instruments.IndexFixedRateBond.rate`.
spec: str, :green:`optional`
A collective group of parameters. See
:ref:`default argument specifications `.
""" # noqa: E501
_rate_scalar = 1.0
@property
def fixed_rate(self) -> DualTypes_:
"""The fixed rate parameter of the composited
:class:`~rateslib.legs.FixedLeg`."""
return self.leg1.fixed_rate
@fixed_rate.setter
def fixed_rate(self, value: DualTypes_) -> None:
self.kwargs.leg1["fixed_rate"] = value
self.leg1.fixed_rate = value
@property
def leg1(self) -> FixedLeg:
"""The :class:`~rateslib.legs.FixedLeg` of the *Instrument*."""
return self._leg1
@property
def legs(self) -> Sequence[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
def __init__(
self,
# scheduling
effective: datetime_ = NoInput(0),
termination: datetime | str_ = NoInput(0),
frequency: Frequency | str_ = NoInput(0),
*,
stub: str_ = NoInput(0),
front_stub: datetime_ = NoInput(0),
back_stub: datetime_ = NoInput(0),
roll: int | RollDay | str_ = NoInput(0),
eom: bool_ = NoInput(0),
modifier: str_ = NoInput(0),
calendar: CalInput = NoInput(0),
payment_lag: int_ = NoInput(0),
payment_lag_exchange: int_ = NoInput(0),
ex_div: int_ = NoInput(0),
convention: str_ = NoInput(0),
# settlement parameters
currency: str_ = NoInput(0),
notional: float_ = NoInput(0),
# amortization: float_ = NoInput(0),
# index params
index_base: DualTypes_ = NoInput(0),
index_lag: int_ = NoInput(0),
index_method: IndexMethod | str_ = NoInput(0),
index_fixings: LegFixings = NoInput(0),
# rate parameters
fixed_rate: DualTypes_ = NoInput(0),
# meta parameters
curves: CurvesT_ = NoInput(0),
calc_mode: BondCalcMode | str_ = NoInput(0),
settle: int_ = NoInput(0),
spec: str_ = NoInput(0),
metric: str = "clean_price",
) -> None:
user_args = dict(
# scheduling
effective=effective,
termination=termination,
frequency=frequency,
stub=stub,
front_stub=front_stub,
back_stub=back_stub,
roll=roll,
eom=eom,
modifier=modifier,
calendar=calendar,
payment_lag=payment_lag,
payment_lag_exchange=payment_lag_exchange,
ex_div=ex_div,
convention=convention,
# settlement
currency=currency,
notional=notional,
# amortization=amortization,
# index_params
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings,
# rate
fixed_rate=fixed_rate,
# meta
curves=self._parse_curves(curves),
calc_mode=calc_mode,
settle=settle,
metric=metric,
)
instrument_args = dict( # these are hard coded arguments specific to this instrument
initial_exchange=False,
final_exchange=True,
vol=_Vol(),
)
default_args = dict(
notional=defaults.notional,
calc_mode=defaults.calc_mode[type(self).__name__],
initial_exchange=False,
final_exchange=True,
payment_lag=defaults.payment_lag_specific[type(self).__name__],
payment_lag_exchange=defaults.payment_lag_specific[type(self).__name__],
ex_div=defaults.ex_div,
settle=defaults.settle,
index_lag=defaults.index_lag,
index_method=defaults.index_method,
)
self._kwargs = _KWArgs(
spec=spec,
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=["curves", "calc_mode", "settle", "metric", "vol"],
)
self.kwargs.meta["calc_mode"] = _get_bond_calc_mode(self.kwargs.meta["calc_mode"])
if isinstance(self.kwargs.leg1["fixed_rate"], NoInput):
raise ValueError(f"`fixed_rate` must be provided for {type(self).__name__}.")
self._leg1 = FixedLeg(**_convert_to_schedule_kwargs(self.kwargs.leg1, 1))
self._legs = [self.leg1]
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
An IFRB has two curve requirements: an index_curve and a disc_curve.
No available index curve can be input as None or NoInput
"""
if isinstance(curves, NoInput):
return _Curves()
if isinstance(curves, dict):
return _Curves(
disc_curve=curves.get("disc_curve", NoInput(0)),
index_curve=curves.get("index_curve", NoInput(0)),
)
elif isinstance(curves, list | tuple):
if len(curves) == 2:
return _Curves(
index_curve=curves[0] if curves[0] is not None else NoInput(0),
disc_curve=curves[1],
)
else:
raise ValueError(
f"{type(self).__name__} requires 2 curve types. Got {len(curves)}."
)
elif isinstance(curves, _Curves):
return curves
else:
raise ValueError(f"{type(self).__name__} requires 2 curve types. Got 1.")
def index_ratio(self, settlement: datetime, index_curve: _BaseCurve_ = NoInput(0)) -> DualTypes:
"""
Return the index ratio assigned to an *IndexFixedRateBond* for a given settlement.
.. rubric:: Examples
.. ipython:: python
:suppress:
from pandas import Series
from datetime import datetime as dt
from rateslib import fixings
from rateslib.instruments import IndexFixedRateBond
.. ipython:: python
fixings.add("UK_RPI", Series(index=[dt(2025, 3, 1), dt(2025, 4, 1), dt(2025, 5, 1)], data=[395.3, 402.2, 402.9]))
ukti = IndexFixedRateBond( # ISIN: GB00BMY62Z61
effective=dt(2025, 6, 11),
termination=dt(2038, 9, 22),
fixed_rate=1.75,
spec="uk_gbi",
index_fixings="UK_RPI"
)
ukti.index_ratio(settlement=dt(2025, 7, 29))
.. ipython:: python
:suppress:
fixings.pop("UK_RPI")
Parameters
----------
settlement: datetime
The settlement date of the bond.
index_curve: _BaseCurve, optional
A curve capable of forecasting index values.
Returns
-------
float, Dual, Dual2, Variable
""" # noqa: E501
left_index = self.leg1._period_index(settlement)
period_index_params: _IndexParams = self.leg1._regular_periods[left_index].index_params # type: ignore[assignment]
new_index_params = _IndexParams(
_index_method=period_index_params.index_method,
_index_lag=period_index_params.index_lag,
_index_base=period_index_params.index_base.value,
_index_base_date=period_index_params.index_base.date,
_index_reference_date=settlement,
_index_fixings=period_index_params.index_fixing.identifier,
_index_only=False,
)
return new_index_params.index_ratio(index_curve=index_curve)[0]
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
"""
Calculate some pricing rate metric for the *Instrument*.
.. rubric:: Examples
.. ipython:: python
:suppress:
from pandas import Series
from datetime import datetime as dt
from rateslib import fixings, Curve
from rateslib.instruments import IndexFixedRateBond
.. ipython:: python
disc_curve = Curve(
nodes={dt(2025, 7, 28): 1.0, dt(2045, 7, 25): 1.0},
convention="act365f"
).shift(250) # curve begins at 0% and gets shifted by 250 Act365F O/N basis points
index_curve = Curve(
nodes={dt(2025, 5, 1): 1.0, dt(2045, 5, 1): 1.0},
convention="act365f", index_lag=0, index_base=402.9
).shift(100) # curves begins at 0% and gets shifted by 100 Ac6t365f O/N basis points
fixings.add(
"UK_RPI",
Series(index=[dt(2025, 3, 1), dt(2025, 4, 1), dt(2025, 5, 1)], data=[395.3, 402.2, 402.9]),
)
ukti = IndexFixedRateBond( # ISIN: GB00BMY62Z61
effective=dt(2025, 6, 11),
termination=dt(2038, 9, 22),
fixed_rate=1.75,
spec="uk_gbi",
index_fixings="UK_RPI"
)
ukti.rate(curves=[index_curve, disc_curve], metric="clean_price") # settles T+1 i.e. 29th July
ukti.rate(curves=[index_curve, disc_curve], metric="dirty_price")
ukti.rate(curves=[index_curve, disc_curve], metric="indexed_clean_price")
ukti.rate(curves=[index_curve, disc_curve], metric="indexed_dirty_price")
ukti.rate(curves=[index_curve, disc_curve], metric="ytm")
ukti.rate(curves=[index_curve, disc_curve], metric="indexed_ytm")
.. ipython:: python
:suppress:
fixings.pop("UK_RPI")
Parameters
----------
curves: _Curves, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
solver: Solver, :green:`optional`
A :class:`~rateslib.solver.Solver` object containing *Curve*, *Smile*, *Surface*, or
*Cube* mappings for pricing.
fx: FXForwards, :green:`optional`
The :class:`~rateslib.fx.FXForwards` object used for forecasting FX rates, if necessary.
vol: _Vol, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
base: str, :green:`optional (set to settlement currency)`
The currency to convert the *local settlement* NPV to.
local: bool, :green:`optional (set as False)`
An override flag to return a dict of NPV values indexed by string currency.
settlement: datetime, :green:`optional`
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, :green:`optional`
The future date to project the *PV* to using the ``disc_curve``.
metric: str, :green:`optional`
The specific calculation to perform and the value to return.
See **Pricing** on each *Instrument* for details of allowed inputs.
Returns
-------
float, Dual, Dual2, Variable
""" # noqa: E501
c = _parse_curves(self, curves, solver)
disc_curve = _get_curve("disc_curve", False, False, *c)
index_curve = _get_curve("index_curve", False, True, *c)
metric_ = _drb(self.kwargs.meta["metric"], metric).lower()
if isinstance(settlement, NoInput):
settlement_ = self.leg1.schedule.calendar.lag_bus_days(
disc_curve.nodes.initial,
self.kwargs.meta["settle"],
True,
)
else:
settlement_ = settlement
npv = self.leg1.local_npv(
index_curve=index_curve,
disc_curve=disc_curve,
settlement=settlement_,
forward=settlement_,
)
# scale price to par 100 (npv is already projected forward to settlement)
index_dirty_price = npv * 100 / -self.leg1.settlement_params.notional
index_ratio = self.index_ratio(settlement_, index_curve)
dirty_price = index_dirty_price / index_ratio
if metric_ == "dirty_price":
return dirty_price
elif metric_ == "clean_price":
return dirty_price - self.accrued(settlement_)
elif metric_ == "ytm":
return self.ytm(dirty_price, settlement_, True)
elif metric_ == "index_dirty_price" or metric_ == "indexed_dirty_price":
return index_dirty_price
elif metric_ == "index_clean_price" or metric_ == "indexed_clean_price":
return index_dirty_price - self.accrued(settlement_) * index_ratio
elif metric_ == "index_ytm" or metric_ == "indexed_ytm":
return self.ytm(
price=index_dirty_price,
settlement=settlement_,
dirty=True,
indexed_price=True,
indexed_ytm=True,
index_curve=index_curve,
)
else:
raise ValueError(
"`metric` must be in {'dirty_price', 'clean_price', 'ytm', "
"'indexed_dirty_price', 'indexed_clean_price', 'indexed_ytm'}.",
)
def accrued(
self, settlement: datetime, indexed: bool = False, index_curve: _BaseCurve_ = NoInput(0)
) -> DualTypes:
"""
Calculate the accrued amount per nominal par value of 100.
.. rubric:: Examples
.. ipython:: python
:suppress:
from pandas import Series
from datetime import datetime as dt
from rateslib import fixings
from rateslib.instruments import IndexFixedRateBond
.. ipython:: python
fixings.add("UK_RPI", Series(index=[dt(2025, 3, 1), dt(2025, 4, 1), dt(2025, 5, 1)], data=[395.3, 402.2, 402.9]))
ukti = IndexFixedRateBond( # ISIN: GB00BMY62Z61
effective=dt(2025, 6, 11),
termination=dt(2038, 9, 22),
fixed_rate=1.75,
spec="uk_gbi",
index_fixings="UK_RPI"
)
ukti.accrued(settlement=dt(2025, 7, 29))
ukti.accrued(settlement=dt(2025, 7, 29), indexed=True)
.. ipython:: python
:suppress:
fixings.pop("UK_RPI")
Parameters
----------
settlement : datetime
The settlement date which to measure accrued interest against.
indexed : bool
Whether to calculate the accrued amount indexed up according to settlement.
index_curve : _BaseCurve, optional
The curve used to forecast index values if required.
Notes
-----
Calculation depends upon the
:class:`~rateslib.instruments.bonds.conventions.BondCalcMode` of the
*Instrument*.
""" # noqa: E501
unindexed_accrued = super().accrued(settlement=settlement)
if indexed:
index_ratio = self.index_ratio(settlement=settlement, index_curve=index_curve)
return unindexed_accrued * index_ratio
else:
return unindexed_accrued
def fwd_from_repo(
self,
price: DualTypes,
settlement: datetime,
forward_settlement: datetime,
repo_rate: DualTypes,
convention: str_ = NoInput(0),
dirty: bool = False,
method: str = "proceeds",
indexed: bool = False,
index_curve: _BaseCurve_ = NoInput(0),
) -> DualTypes:
"""
Return a forward price implied by a given repo rate.
Parameters
----------
price : float, Dual, or Dual2
The initial price of the security at ``settlement``.
settlement : datetime
The settlement date of the bond
forward_settlement : datetime
The forward date for which to calculate the forward price.
repo_rate : float, Dual or Dual2
The rate which is used to calculate values.
convention : str, optional
The day count convention applied to the rate. If not given uses default
values.
dirty : bool, optional
Whether the input and output price are specified including accrued interest.
method : str in {"proceeds", "compounded"}, optional
The method for determining the forward price.
indexed : bool, optional
Whether the given price is expressed with indexation.
index_curve : _BaseCurve, optional
The curve for forecasting index values if required.
Returns
-------
float, Dual or Dual2
Notes
-----
Any intermediate (non ex-dividend) cashflows between ``settlement`` and
``forward_settlement`` will also be assumed to accrue at ``repo_rate``.
"""
match (indexed, dirty):
# need to adjust any input to yield an indexed_dirty_price
case (True, True):
indexed_dirty_price = price
case (False, True):
indexed_dirty_price = price * self.index_ratio(
settlement=settlement, index_curve=index_curve
)
case (True, False):
indexed_dirty_price = price + self.accrued(
settlement, indexed=True, index_curve=index_curve
)
case (False, False):
indexed_dirty_price = (
price + self.accrued(settlement, indexed=False)
) * self.index_ratio(settlement=settlement, index_curve=index_curve)
forward_indexed_dirty_price = super().fwd_from_repo(
price=indexed_dirty_price,
settlement=settlement,
forward_settlement=forward_settlement,
repo_rate=repo_rate,
convention=convention,
dirty=True,
method=method,
)
match (indexed, dirty):
# reverse adjust the forward indexed_dirty_price to suit the input arguments
case (True, True):
forward_price = forward_indexed_dirty_price
case (False, True):
forward_price = forward_indexed_dirty_price / self.index_ratio(
forward_settlement, index_curve=index_curve
)
case (True, False):
forward_price = forward_indexed_dirty_price - self.accrued(
forward_settlement, indexed=True, index_curve=index_curve
)
case (False, False):
forward_price = forward_indexed_dirty_price / self.index_ratio(
forward_settlement, index_curve=index_curve
) - self.accrued(forward_settlement, indexed=False)
return forward_price
def repo_from_fwd(
self,
price: DualTypes,
settlement: datetime,
forward_settlement: datetime,
forward_price: DualTypes,
convention: str_ = NoInput(0),
dirty: bool = False,
indexed: bool = False,
index_curve: _BaseCurve_ = NoInput(0),
) -> DualTypes:
"""
Return an implied repo rate from a forward price.
Parameters
----------
price : float, Dual, or Dual2
The initial price of the security at ``settlement``.
settlement : datetime
The settlement date of the bond
forward_settlement : datetime
The forward date for which to calculate the forward price.
forward_price : float, Dual or Dual2
The forward price which implies the repo rate
convention : str, optional
The day count convention applied to the rate. If not given uses default
values.
dirty : bool, optional
Whether the input and output price are specified including accrued interest.
indexed : bool, optional
Whether the given price is expressed with indexation.
index_curve : _BaseCurve, optional
The curve for forecasting index values if required.
Returns
-------
float, Dual or Dual2
Notes
-----
Any intermediate (non ex-dividend) cashflows between ``settlement`` and
``forward_settlement`` will also be assumed to accrue at ``repo_rate``.
"""
match (indexed, dirty):
# must convert input price to indexed_dirty_price equivalents
case (True, True):
indexed_dirty_price = price
forward_indexed_dirty_price = forward_price
case (False, True):
indexed_dirty_price = price * self.index_ratio(
settlement=settlement, index_curve=index_curve
)
forward_indexed_dirty_price = forward_price * self.index_ratio(
settlement=forward_settlement, index_curve=index_curve
)
case (True, False):
indexed_dirty_price = price + self.accrued(
settlement, indexed=True, index_curve=index_curve
)
forward_indexed_dirty_price = forward_price + self.accrued(
forward_settlement, indexed=True, index_curve=index_curve
)
case (False, False):
indexed_dirty_price = (
price + self.accrued(settlement, indexed=False)
) * self.index_ratio(settlement=settlement, index_curve=index_curve)
forward_indexed_dirty_price = (
forward_price + self.accrued(forward_settlement, indexed=False)
) * self.index_ratio(settlement=forward_settlement, index_curve=index_curve)
repo = super().repo_from_fwd(
price=indexed_dirty_price,
settlement=settlement,
forward_settlement=forward_settlement,
forward_price=forward_indexed_dirty_price,
convention=convention,
dirty=True,
)
return repo
def duration(
self,
ytm: DualTypes,
settlement: datetime,
metric: str = "risk",
indexed_price: bool = False,
indexed_ytm: bool = False,
index_curve: _BaseCurve_ = NoInput(0),
) -> float:
"""
Return the (negated) derivative of ``price`` w.r.t. ``ytm``.
Parameters
----------
ytm : float
The yield-to-maturity for the bond.
settlement : datetime
The settlement date of the bond.
metric : str
The specific duration calculation to return. See notes.
indexed_price: bool, :green:`optional (set as False)`
Indicated whether the returned price should be indexed or not.
indexed_ytm: bool, :green:`optional (set as False)`
Indicates if the given ``ytm`` is expressed indexed or not.
index_curve : _BaseCurve, optional
If either the ytm or the price are indicated as indexed then an index curve may be
required to forecast index values.
Returns
-------
float
Notes
-----
For an *IndexFixedRateBond* both the price and the ytm are expressible unindexed or
indexed. The below notation :math:`P_i` and :math:`y_j` describes either of these
varieties provided they align with the ``indexed_price`` and ``indexed_ytm`` arguments.
The available metrics are:
- *"risk"*: the derivative of price w.r.t. ytm, scaled to -1bp.
.. math::
risk = - \\frac{\\partial P_i }{\\partial y_j}
- *"modified"*: the modified duration which is *risk* divided by dirty price.
.. math::
mod \\; duration = \\frac{risk}{P_i} = - \\frac{1}{P_i} \\frac{\\partial P_i }{\\partial y_j}
- *"duration"* (or *"macaulay"*): the duration which is modified duration reverse modified.
.. math::
duration = mod \\; duration \\times (1 + y_j / f)
""" # noqa: E501
# TODO: this is not AD safe: returns only float
ytm_: Dual = Dual(_dual_float(ytm), ["__y__§"], [])
dirty_price: Dual = self.price( # type: ignore[assignment]
ytm=ytm_,
settlement=settlement,
dirty=True,
indexed_price=indexed_price,
indexed_ytm=indexed_ytm,
index_curve=index_curve,
)
if metric == "risk":
ret: float = -gradient(dirty_price, ["__y__§"])[0]
elif metric == "modified":
ret = -gradient(dirty_price, ["__y__§"])[0] / _dual_float(dirty_price) * 100
elif metric == "duration" or metric == "macaulay":
f = self.leg1.schedule.periods_per_annum
v = _dual_float(1 + ytm_ / (100 * f))
ret = -gradient(dirty_price, ["__y__§"])[0] / _dual_float(dirty_price) * v * 100
else:
raise ValueError(
"`metric` must be one of {'risk', 'modified', 'duration'}."
) # pragma: no cover
return ret
def ytm(
self,
price: DualTypes,
settlement: datetime,
dirty: bool = False,
rate_curve: CurveOption_ = NoInput(0),
calc_mode: BondCalcMode | str_ = NoInput(0),
indexed_price: bool = False,
indexed_ytm: bool = False,
index_curve: _BaseCurve_ = NoInput(0),
) -> Number:
# overloaded ytm by IndexFixedRateBond
"""
Calculate the yield-to-maturity of the security given its price.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import FixedRateBond, dt, Dual, Dual2
.. ipython:: python
aapl_bond = FixedRateBond(dt(2013, 5, 4), dt(2043, 5, 4), fixed_rate=3.85, spec="us_corp")
aapl_bond.ytm(price=87.24, settlement=dt(2014, 3, 5))
aapl_bond.ytm(price=87.24, settlement=dt(2014, 3, 5), calc_mode="us_gb_tsy")
.. role:: red
.. role:: green
Parameters
----------
price: float, Dual, Dual2, Variable, :red:`required`
The price, per 100 nominal, against which to determine the yield. Can be given as
either clean or dirty, and either unindexed or indexed.
settlement: datetime, :red:`required`
The settlement date on which to determine the price.
dirty: bool, :green:`optional (set as False)`
If `True` will assume the (settlement)
:meth:`~rateslib.instruments.FixedRateBond.accrued` is included in the price.
rate_curve: _BaseCurve or dict of such, :green:`optional`
Used to forecast floating rates if required.
calc_mode: str or BondCalcMode, :green:`optional`
An alternative calculation mode to use. The ``calc_mode`` is typically set at
*Instrument* initialisation and is not required, but is useful as an override to
allow comparisons, e.g. of *"us_gb"* street convention versus *"us_gb_tsy"* treasury
convention.
indexed_price: bool, :green:`optional (set as False)`
Indicates whether the input price is indexed or not.
indexed_ytm: bool, :green:`optional (set as False)`
Indicates whether the returned ``ytm`` is expressed indexed or not.
index_curve: _BaseCurve :green:`optional`
If any element is ``indexed`` then a *Curve* may be required to determine
index ratio's in order to properly index up cashflows.
Returns
-------
float, Dual, Dual2
Notes
-----
If ``price`` is given as :class:`~rateslib.dual.Dual` or
:class:`~rateslib.dual.Dual2` input the result of the yield will be output
as the same type with the variables passed through accordingly.
.. ipython:: python
aapl_bond.ytm(price=Dual(87.24, ["price", "a"], [1, -0.75]), settlement=dt(2014, 3, 5))
aapl_bond.ytm(price=Dual2(87.24, ["price", "a"], [1, -0.75], []), settlement=dt(2014, 3, 5))
""" # noqa: E501
match (indexed_price, indexed_ytm):
case (False, False) | (True, True):
# when both price and yield are expressed in the same indexation this will be
# handled directly
adjusted_price = price
case (False, True):
# if the ytm is requested indexed but the price is given unindexed then it
# must be indexed-up for calculation
adjusted_price = price * self.index_ratio(
settlement=settlement, index_curve=index_curve
)
case (True, False):
# if the ytm is requested unindexed but the price is given as indexed then it must
# be indexed down for calculation
adjusted_price = price / self.index_ratio(
settlement=settlement, index_curve=index_curve
)
case _: # pragma: no cover
raise ValueError(
"`indexed_price` and `indexed_ytm` must each be given as a boolean."
)
return self._ytm(
price=adjusted_price,
settlement=settlement,
dirty=dirty,
rate_curve=rate_curve,
calc_mode=calc_mode,
indexed=indexed_ytm,
index_curve=index_curve,
)
def price(
self,
ytm: DualTypes,
settlement: datetime,
dirty: bool = False,
indexed_price: bool = False,
indexed_ytm: bool = False,
index_curve: _BaseCurve_ = NoInput(0),
) -> DualTypes:
"""
Calculate the price of the security per nominal value of 100, given
yield-to-maturity.
.. role:: red
.. role:: green
Parameters
----------
ytm : float, :red:`required`
The yield-to-maturity against which to determine the price. If ``indexed`` this
should be given as a nominal ytm.
settlement : datetime, :red:`required`
The settlement date on which to determine the price.
dirty : bool, optional, :green:`optional (set as False)`
If `True` will include the
:meth:`rateslib.instruments.FixedRateBond.accrued` in the price.
indexed_price: bool, :green:`optional (set as False)`
Indicated whether the returned price should be indexed or not.
indexed_ytm: bool, :green:`optional (set as False)`
Indicates if the given ``ytm`` is expressed indexed or not.
index_curve: _BaseCurve, :green:`optional`
An inflation curve to forecast index ratios if required.
Returns
-------
float, Dual, Dual2
Examples
--------
.. ipython:: python
:suppress:
from pandas import Series
from datetime import datetime as dt
from rateslib import fixings, Curve
from rateslib.instruments import IndexFixedRateBond
.. ipython:: python
index_curve = Curve(
nodes={dt(2025, 5, 1): 1.0, dt(2045, 5, 1): 1.0},
convention="act365f", index_lag=0, index_base=402.9
).shift(100) # curves begins at 0% and gets shifted by 100 Act365f O/N basis points
ukti = IndexFixedRateBond( # ISIN: GB00BMY62Z61
effective=dt(2025, 6, 11),
termination=dt(2038, 9, 22),
fixed_rate=1.75,
spec="uk_gbi",
index_base=397.6,
)
ukti.index_ratio(index_curve=index_curve, settlement=dt(2025, 8, 5))
ukti.price(ytm=2.5, settlement=dt(2025, 8, 5), indexed_ytm=True, index_curve=index_curve)
ukti.price(ytm=1.5, settlement=dt(2025, 8, 5), indexed_ytm=False)
ukti.price(ytm=2.5, settlement=dt(2025, 8, 5), dirty=True, indexed_ytm=True, index_curve=index_curve)
ukti.price(ytm=1.5, settlement=dt(2025, 8, 5), dirty=True, indexed_ytm=False)
""" # noqa: E501
_price = self._price_from_ytm(
ytm=ytm,
settlement=settlement,
calc_mode=NoInput(0), # will be set to kwargs.meta
dirty=dirty,
rate_curve=NoInput(0),
indexed=indexed_ytm,
index_curve=index_curve,
)
match (indexed_price, indexed_ytm):
case (True, True) | (False, False):
# then both price and ytm has the same indexation expression
return _price
case (True, False):
# then the yield is given unindexed but the returned price must be indexed-up
return _price * self.index_ratio(settlement, index_curve)
case (False, True):
# then the yield is given unindexed but the returned price requires indexing-down
return _price / self.index_ratio(settlement, index_curve)
case _: # pragma: no cover
raise ValueError(
"`indexed_price` and `indexed_ytm` must each be given as a boolean."
)
================================================
FILE: python/rateslib/instruments/bonds/protocols/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.bonds.protocols.accrued import _WithAccrued
from rateslib.instruments.bonds.protocols.cashflows import _WithExDiv
from rateslib.instruments.bonds.protocols.duration import _WithDuration
from rateslib.instruments.bonds.protocols.oaspread import _WithOASpread
from rateslib.instruments.bonds.protocols.repo import _WithRepo
from rateslib.instruments.bonds.protocols.ytm import _WithYTM
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.pricing import (
_get_curve,
_parse_curves,
)
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CurvesT_,
DataFrame,
DualTypes,
FXForwards_,
Solver_,
VolT_,
_BaseCurve,
datetime,
datetime_,
str_,
)
class _BaseBondInstrument(
_BaseInstrument,
_WithExDiv,
_WithDuration,
_WithRepo,
_WithYTM,
_WithOASpread,
):
"""Abstract base class used in the construction of bond type *Instruments*"""
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
if isinstance(settlement, NoInput):
c = _parse_curves(self, curves, solver)
disc_curve = _get_curve("disc_curve", False, False, *c)
settlement_ = self.leg1.schedule.calendar.lag_bus_days(
disc_curve.nodes.initial,
self.kwargs.meta["settle"],
True,
)
forward_ = _drb(disc_curve.nodes.initial, forward)
else:
settlement_ = settlement
forward_ = forward # if NoInput adopts the usual default settings from 'settlement'
return super().npv(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=local,
settlement=settlement_,
forward=forward_,
)
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return super()._cashflows_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return self._local_analytic_rate_fixings_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
)
def analytic_delta(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
leg: int = 1,
) -> DualTypes | dict[str, DualTypes]:
c = _parse_curves(self, curves, solver)
settlement_ = self._maybe_get_settlement(
settlement=settlement,
disc_curve=_get_curve("disc_curve", False, False, *c),
)
return super().analytic_delta(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=local,
settlement=settlement_,
forward=forward,
leg=leg,
)
def price(self, ytm: DualTypes, settlement: datetime, dirty: bool = False) -> DualTypes:
# overloaded by IndexFixedRateBond
"""
Calculate the price of the security per nominal value of 100, given
yield-to-maturity.
Parameters
----------
ytm : float
The yield-to-maturity against which to determine the price.
settlement : datetime
The settlement date on which to determine the price.
dirty : bool, optional
If `True` will include the
:meth:`rateslib.instruments.FixedRateBond.accrued` in the price.
Returns
-------
float, Dual, Dual2
Examples
--------
This example is taken from the UK debt management office website.
The result should be `141.070132` and the bond is ex-div.
.. ipython:: python
gilt = FixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=8.0
)
gilt.ex_div(dt(1999, 5, 27))
gilt.price(
ytm=4.445,
settlement=dt(1999, 5, 27),
dirty=True
)
This example is taken from the Swedish national debt office website.
The result of accrued should, apparently, be `0.210417` and the clean
price should be `99.334778`.
.. ipython:: python
bond = FixedRateBond(
effective=dt(2017, 5, 12),
termination=dt(2028, 5, 12),
frequency="A",
calendar="stk",
currency="sek",
convention="ActActICMA",
ex_div=5,
fixed_rate=0.75
)
bond.ex_div(dt(2017, 8, 23))
bond.accrued(dt(2017, 8, 23))
bond.price(
ytm=0.815,
settlement=dt(2017, 8, 23),
dirty=False
)
"""
return self._price_from_ytm(
ytm=ytm,
settlement=settlement,
calc_mode=NoInput(0), # will be set to kwargs.meta
dirty=dirty,
rate_curve=NoInput(0),
indexed=False,
index_curve=NoInput(0),
)
def _maybe_get_settlement(
self,
settlement: datetime_,
disc_curve: _BaseCurve,
) -> datetime:
if isinstance(settlement, NoInput):
return self.leg1.schedule.calendar.lag_bus_days(
disc_curve.nodes.initial,
self.kwargs.meta["settle"],
True,
)
else:
return settlement
__all__ = [
"_WithYTM",
"_WithExDiv",
"_WithAccrued",
"_WithDuration",
"_WithRepo",
"_WithOASpread",
]
================================================
FILE: python/rateslib/instruments/bonds/protocols/accrued.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from rateslib.enums.generics import NoInput
if TYPE_CHECKING:
from rateslib.instruments.bonds.conventions.accrued import AccrualFunction # pragma: no cover
from rateslib.local_types import ( # pragma: no cover
Cashflow,
DualTypes,
FixedLeg,
FixedPeriod,
FloatLeg,
FloatPeriod,
ZeroFloatPeriod,
_BaseCurveOrDict_,
_KWArgs,
datetime,
)
class _WithAccrued(Protocol):
"""
Protocol to determine the *yield-to-maturity* of a bond type *Instrument*.
"""
def _period_cashflow(
self,
period: Cashflow | FixedPeriod | FloatPeriod | ZeroFloatPeriod,
rate_curve: _BaseCurveOrDict_,
) -> DualTypes: ...
@property
def leg1(self) -> FixedLeg | FloatLeg: ...
@property
def kwargs(self) -> _KWArgs: ...
def _accrued(self, settlement: datetime, func: AccrualFunction) -> DualTypes:
"""func is the specific accrued function associated with the bond ``calc_mode``"""
acc_idx = self.leg1._period_index(settlement)
frac = func(self, settlement, acc_idx)
if self.leg1.ex_div(settlement):
frac = frac - 1 # accrued is negative in ex-div period
_: DualTypes = self._period_cashflow(self.leg1._regular_periods[acc_idx], NoInput(0))
return frac * _ / -self.leg1._regular_periods[acc_idx].settlement_params.notional * 100
def accrued(self, settlement: datetime) -> DualTypes:
"""
Calculate the accrued amount per nominal par value of 100.
Parameters
----------
settlement : datetime
The settlement date which to measure accrued interest against.
Notes
-----
The amount of accrued interest is calculated using the following formula:
.. math::
&AI = \\xi c_i \\qquad \\text{if not ex-dividend} \\\\
&AI = (\\xi - 1) c_i \\qquad \\text{if ex-dividend} \\\\
where :math:`c_i` is the physical ``cashflow`` related to the period in which ``settlement``
falls, and :math:`\\xi` is a fraction of that amount determined according to the
calculation mode specific to the :class:`~rateslib.instruments.BondCalcMode`.
""" # noqa: E501
value = self._accrued(settlement, self.kwargs.meta["calc_mode"]._settle_accrual)
return value
================================================
FILE: python/rateslib/instruments/bonds/protocols/cashflows.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
FixedLeg,
FloatLeg,
datetime,
)
class _WithExDiv(Protocol):
"""
Protocol to determine the *yield-to-maturity* of a bond type *Instrument*.
"""
@property
def leg1(self) -> FixedLeg | FloatLeg: ...
def ex_div(self, settlement: datetime) -> bool:
"""
Return a boolean whether the security is ex-div at the given settlement.
Parameters
----------
settlement : datetime
The settlement date to test.
Returns
-------
bool
Notes
-----
Uses the UK DMO convention of returning *False* if ``settlement``
**is on or before** the ex-div date for a regular coupon period.
This is evaluated by analysing the attribute ``pschedule3`` of the associated
:class:`~rateslib.scheduling.Schedule` object of the *Leg*.
"""
return self.leg1.ex_div(settlement)
================================================
FILE: python/rateslib/instruments/bonds/protocols/duration.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from rateslib.dual import Dual, Dual2, gradient
from rateslib.dual.utils import _dual_float
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
DualTypes,
FixedLeg,
FloatLeg,
datetime,
)
class _WithDuration(Protocol):
"""
Protocol to determine the *yield-to-maturity* of a bond type *Instrument*.
"""
def price(self, *args: Any, **kwargs: Any) -> DualTypes: ...
@property
def leg1(self) -> FixedLeg | FloatLeg: ...
def duration(self, ytm: DualTypes, settlement: datetime, metric: str = "risk") -> float:
"""
Return the (negated) derivative of ``price`` w.r.t. ``ytm``.
Parameters
----------
ytm : float
The yield-to-maturity for the bond.
settlement : datetime
The settlement date of the bond.
metric : str
The specific duration calculation to return. See notes.
Returns
-------
float
Notes
-----
The available metrics are:
- *"risk"*: the derivative of price w.r.t. ytm, scaled to -1bp.
.. math::
risk = - \\frac{\\partial P }{\\partial y}
- *"modified"*: the modified duration which is *risk* divided by dirty price.
.. math::
mod \\; duration = \\frac{risk}{P} = - \\frac{1}{P} \\frac{\\partial P }{\\partial y}
- *"duration"* (or *"macaulay"*): the duration which is modified duration reverse modified.
.. math::
duration = mod \\; duration \\times (1 + y / f)
Examples
--------
.. ipython:: python
gilt = FixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=8.0
)
gilt.duration(4.445, dt(1999, 5, 27), "risk")
gilt.duration(4.445, dt(1999, 5, 27), "modified")
gilt.duration(4.445, dt(1999, 5, 27), "duration")
This result is interpreted as cents. If the yield is increased by 1bp the price
will fall by 14.65 cents.
.. ipython:: python
gilt.price(4.445, dt(1999, 5, 27))
gilt.price(4.455, dt(1999, 5, 27))
"""
# TODO: this is not AD safe: returns only float
ytm_: float = _dual_float(ytm)
if metric == "risk":
price_dual: Dual = self.price(Dual(ytm_, ["__y__§"], []), settlement) # type: ignore[assignment]
_: float = -gradient(price_dual, ["__y__§"])[0]
elif metric == "modified":
price_dual = -self.price(Dual(ytm_, ["__y__§"], []), settlement, dirty=True) # type: ignore[assignment]
_ = -gradient(price_dual, ["__y__§"])[0] / float(price_dual) * 100
elif metric == "duration" or metric == "macaulay":
price_dual = self.price(Dual(ytm_, ["__y__§"], []), settlement, dirty=True) # type: ignore[assignment]
f = self.leg1.schedule.periods_per_annum
v = 1 + ytm_ / (100 * f)
_ = -gradient(price_dual, ["__y__§"])[0] / float(price_dual) * v * 100
return _
def convexity(self, ytm: DualTypes, settlement: datetime, metric: str = "risk") -> float:
"""
Return the second derivative of ``price`` w.r.t. ``ytm``.
Parameters
----------
ytm : float
The yield-to-maturity for the bond.
settlement : datetime
The settlement date of the bond.
metric: str, optional
Returns
-------
float
Notes
------
The default metric is similar to the :meth:`duration` method and is *'risk'* based,
but the traditional calculation is available.
- *"risk"*: the second derivative of price w.r.t. ytm, scaled to -1bp.
.. math::
risk = \\frac{\\partial^2 P }{\\partial y^2}
- *"convexity"*: the standard formula for convexity which is the above scaled by price.
.. math::
convexity = \\frac{1}{P} \\frac{\\partial P^2 }{\\partial y^2}
Examples
--------
.. ipython:: python
:suppress:
from rateslib import FixedRateBond
.. ipython:: python
gilt = FixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=8.0
)
gilt.convexity(4.445, dt(1999, 5, 27))
This number is interpreted as hundredths of a cent. For a 1bp increase in
yield the duration will decrease by 2 hundredths of a cent.
.. ipython:: python
gilt.duration(4.445, dt(1999, 5, 27))
gilt.duration(4.455, dt(1999, 5, 27))
"""
# TODO: method is not AD safe: returns float
ytm_: float = _dual_float(ytm)
d = self.price(Dual2(ytm_, ["_ytm__§"], [], []), settlement, dirty=True)
ret: float = gradient(d, ["_ytm__§"], 2)[0][0]
if metric == "risk":
return ret
elif metric == "convexity":
return ret * 100.0 / _dual_float(d)
return ret
================================================
FILE: python/rateslib/instruments/bonds/protocols/oaspread.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from functools import partial
from typing import TYPE_CHECKING, Protocol
from rateslib import defaults
from rateslib.curves._parsers import (
_maybe_set_ad_order,
)
from rateslib.dual import ift_1dim
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.bonds.protocols import _WithAccrued
from rateslib.instruments.protocols.pricing import (
_get_curve,
_parse_curves,
)
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CurvesT_,
DualTypes,
DualTypes_,
FXForwards_,
Solver_,
VolT_,
_BaseCurve,
_BaseCurveOrDict_,
_Curves,
datetime_,
float_,
str_,
)
class _WithOASpread(_WithAccrued, Protocol):
"""
Protocol to determine the *yield-to-maturity* of a bond type *Instrument*.
"""
def _parse_curves(self, curves: CurvesT_) -> _Curves: ...
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes: ...
def oaspread(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
price: DualTypes_ = NoInput(0),
metric: str_ = NoInput(0),
func_tol: float_ = NoInput(0),
conv_tol: float_ = NoInput(0),
) -> DualTypes:
"""
The option adjusted spread added to the discounting *Curve* to value the security
at ``price``.
Parameters
----------
curves : Curve, str or list of such
A single :class:`Curve` or id or a list of such. A list defines the
following curves in the order:
- Forecasting :class:`Curve` for ``leg1``.
- Discounting :class:`Curve` for ``leg1``.
solver : Solver, optional
The numerical :class:`Solver` that constructs ``Curves`` from calibrating
instruments.
fx : float, FXRates, FXForwards, optional
The immediate settlement FX rate that will be used to convert values
into another currency. A given `float` is used directly. If giving a
``FXRates`` or ``FXForwards`` object, converts from local currency
into ``base``.
base : str, optional
The base currency to convert cashflows into (3-digit code), set by default.
Only used if ``fx`` is an ``FXRates`` or ``FXForwards`` object.
price : float, Dual, Dual2
The price of the bond to match.
metric : str, optional
The metric to use when evaluating the price/rate of the instrument. If not
given uses the instrument's :meth:`~rateslib.instruments.FixedRateBond.rate` method
default.
func_tol: float, optional
The tolerance for the objective function value when iteratively solving. If not given
uses `defaults.oaspread_func_tol`.
conv_tol: float, optional
The tolerance used for stopping criteria of successive iteration values. If not
given uses `defaults.oaspread_conv_tol`.
Returns
-------
float, Dual, Dual2
Notes
------
The discount curve must be of type :class:`~rateslib.curves._BaseCurve` with a
provided :meth:`~rateslib.curves._BaseCurve.shift` method available.
.. warning::
The sensitivity of variables is preserved for the input argument ``price``, but this
function does **not** preserve AD towards variables associated with the ``curves`` or
``solver``.
Examples
--------
.. ipython:: python
:suppress:
from rateslib import Variable
.. ipython:: python
bond = FixedRateBond(dt(2000, 1, 1), "3Y", fixed_rate=2.5, spec="us_gb")
curve = Curve({dt(2000, 7, 1): 1.0, dt(2005, 7, 1): 0.80})
# Add AD variables to the curve without a Solver
curve._set_ad_order(1)
bond.oaspread(curves=curve, price=Variable(95.0, ["price"], []))
This result excludes curve sensitivities but includes sensitivity to the
constructed *'price'* variable. Accuracy can be observed through numerical simulation.
.. ipython:: python
bond.oaspread(curves=curve, price=96.0)
bond.oaspread(curves=curve, price=94.0)
"""
if isinstance(price, NoInput):
raise ValueError("`price` must be supplied in order to derive the `oaspread`.")
c = _parse_curves(self, curves, solver) # type: ignore[arg-type]
disc_curve_ = _get_curve("disc_curve", False, False, *c)
rate_curve_ = _get_curve("rate_curve", True, True, *c)
_ad_disc = _maybe_set_ad_order(disc_curve_, 0)
_ad_fore = _maybe_set_ad_order(rate_curve_, 0)
def s_with_args(
g: DualTypes, curve: _BaseCurveOrDict_, disc_curve: _BaseCurve, metric: str_
) -> DualTypes:
"""
Return the price of a bond given an OASpread.
Parameters
----------
g: DualTypes
The OASpread value in basis points.
curve:
The forecasting curve.
disc_curve:
The discount curve.
Returns
-------
DualTypes
"""
_shifted_discount_curve = disc_curve.shift(g)
return self.rate(curves=[curve, _shifted_discount_curve], metric=metric) # type: ignore[list-item]
s = partial(
s_with_args,
curve=rate_curve_,
disc_curve=disc_curve_,
metric=metric,
)
result = ift_1dim(
s,
price,
"ytm_quadratic",
(-300, 200, 1200),
func_tol=_drb(defaults.oaspread_func_tol, func_tol),
conv_tol=_drb(defaults.oaspread_conv_tol, conv_tol),
)
_maybe_set_ad_order(disc_curve_, _ad_disc)
_maybe_set_ad_order(rate_curve_, _ad_fore)
ret: DualTypes = result["g"]
return ret
================================================
FILE: python/rateslib/instruments/bonds/protocols/repo.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from rateslib import defaults
from rateslib.curves import index_left
from rateslib.curves.utils import average_rate
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.bonds.protocols import _WithAccrued
from rateslib.legs.amortization import _AmortizationType
from rateslib.scheduling import dcf
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
DualTypes,
datetime,
str_,
)
class _WithRepo(_WithAccrued, Protocol):
"""
Protocol to determine the *yield-to-maturity* of a bond type *Instrument*.
"""
def fwd_from_repo(
self,
price: DualTypes,
settlement: datetime,
forward_settlement: datetime,
repo_rate: DualTypes,
convention: str_ = NoInput(0),
dirty: bool = False,
method: str = "proceeds",
) -> DualTypes:
"""
Return a forward price implied by a given repo rate.
Parameters
----------
price : float, Dual, or Dual2
The initial price of the security at ``settlement``.
settlement : datetime
The settlement date of the bond
forward_settlement : datetime
The forward date for which to calculate the forward price.
repo_rate : float, Dual or Dual2
The rate which is used to calculate values.
convention : str, optional
The day count convention applied to the rate. If not given uses default
values.
dirty : bool, optional
Whether the input and output price are specified including accrued interest.
method : str in {"proceeds", "compounded"}, optional
The method for determining the forward price.
Returns
-------
float, Dual or Dual2
Notes
-----
Any intermediate (non ex-dividend) cashflows between ``settlement`` and
``forward_settlement`` will also be assumed to accrue at ``repo_rate``.
"""
convention_ = _drb(defaults.convention, convention)
dcf_ = dcf(settlement, forward_settlement, convention_)
if not dirty:
d_price = price + self._accrued(
settlement=settlement, func=self.kwargs.meta["calc_mode"]._settle_accrual
)
else:
d_price = price
if self.leg1.amortization._type != _AmortizationType.NoAmortization:
raise NotImplementedError(
"method for forward price not available with amortization",
) # pragma: no cover
total_rtn = (
d_price * (1 + repo_rate * dcf_ / 100) * -self.leg1.settlement_params.notional / 100
)
# now systematically deduct coupons paid between settle and forward settle
settlement_idx = index_left(
self.leg1.schedule.aschedule,
self.leg1.schedule.n_periods + 1,
settlement,
)
fwd_settlement_idx = index_left(
self.leg1.schedule.aschedule,
self.leg1.schedule.n_periods + 1,
forward_settlement,
)
# do not accrue a coupon not received
settlement_idx += 1 if self.leg1.ex_div(settlement) else 0
# deduct final coupon if received within period
fwd_settlement_idx += 1 if self.leg1.ex_div(forward_settlement) else 0
for p_idx in range(settlement_idx, fwd_settlement_idx):
# deduct accrued coupon from dirty price
c_period = self.leg1._regular_periods[p_idx]
c_cashflow: DualTypes = c_period.cashflow()
# TODO handle FloatPeriod cashflow fetch if need a curve.
if method.lower() == "proceeds":
dcf_ = dcf(c_period.settlement_params.payment, forward_settlement, convention_)
accrued_coup = c_cashflow * (1 + dcf_ * repo_rate / 100)
total_rtn -= accrued_coup
elif method.lower() == "compounded":
r_bar, d, _ = average_rate(
settlement, forward_settlement, convention_, repo_rate, dcf_
)
n = (forward_settlement - c_period.settlement_params.payment).days
accrued_coup = c_cashflow * (1 + d * r_bar / 100) ** n
total_rtn -= accrued_coup
else:
raise ValueError("`method` must be in {'proceeds', 'compounded'}.")
forward_price: DualTypes = total_rtn / -self.leg1.settlement_params.notional * 100
if dirty:
return forward_price
else:
return forward_price - self._accrued(
settlement=forward_settlement, func=self.kwargs.meta["calc_mode"]._settle_accrual
)
def repo_from_fwd(
self,
price: DualTypes,
settlement: datetime,
forward_settlement: datetime,
forward_price: DualTypes,
convention: str_ = NoInput(0),
dirty: bool = False,
) -> DualTypes:
"""
Return an implied repo rate from a forward price.
Parameters
----------
price : float, Dual, or Dual2
The initial price of the security at ``settlement``.
settlement : datetime
The settlement date of the bond
forward_settlement : datetime
The forward date for which to calculate the forward price.
forward_price : float, Dual or Dual2
The forward price which iplies the repo rate
convention : str, optional
The day count convention applied to the rate. If not given uses default
values.
dirty : bool, optional
Whether the input and output price are specified including accrued interest.
Returns
-------
float, Dual or Dual2
Notes
-----
Any intermediate (non ex-dividend) cashflows between ``settlement`` and
``forward_settlement`` will also be assumed to accrue at ``repo_rate``.
"""
convention_ = _drb(defaults.convention, convention)
# forward price from repo is linear in repo_rate so reverse calculate with AD
if not dirty:
p_t = forward_price + self._accrued(
settlement=forward_settlement, func=self.kwargs.meta["calc_mode"]._settle_accrual
)
p_0 = price + self._accrued(
settlement=settlement, func=self.kwargs.meta["calc_mode"]._settle_accrual
)
else:
p_t, p_0 = forward_price, price
dcf_ = dcf(settlement, forward_settlement, convention_)
numerator = p_t - p_0
denominator = p_0 * dcf_
# now systematically deduct coupons paid between settle and forward settle
settlement_idx = index_left(
self.leg1.schedule.aschedule,
self.leg1.schedule.n_periods + 1,
settlement,
)
fwd_settlement_idx = index_left(
self.leg1.schedule.aschedule,
self.leg1.schedule.n_periods + 1,
forward_settlement,
)
# do not accrue a coupon not received
settlement_idx += 1 if self.leg1.ex_div(settlement) else 0
# deduct final coupon if received within period
fwd_settlement_idx += 1 if self.leg1.ex_div(forward_settlement) else 0
for p_idx in range(settlement_idx, fwd_settlement_idx):
# deduct accrued coupon from dirty price
c_period = self.leg1._regular_periods[p_idx]
c_cashflow: DualTypes = c_period.cashflow()
# TODO handle FloatPeriod if it needs a Curve to forecast cashflow
dcf_ = dcf(
start=c_period.settlement_params.payment,
end=forward_settlement,
convention=convention_,
)
numerator += 100 * c_cashflow / -self.leg1.settlement_params.notional
denominator -= 100 * dcf_ * c_cashflow / -self.leg1.settlement_params.notional
return numerator / denominator * 100
================================================
FILE: python/rateslib/instruments/bonds/protocols/ytm.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from rateslib.dual import ift_1dim
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.bonds.conventions import BOND_MODE_MAP
from rateslib.instruments.bonds.protocols.accrued import _WithAccrued
if TYPE_CHECKING:
from rateslib.instruments.bonds.conventions import ( # pragma: no cover
BondCalcMode,
)
from rateslib.instruments.bonds.conventions.accrued import ( # pragma: no cover
AccrualFunction,
)
from rateslib.instruments.bonds.conventions.discounting import ( # pragma: no cover
CashflowFunction,
YtmDiscountFunction,
)
from rateslib.local_types import ( # pragma: no cover
Cashflow,
CurveOption_,
DualTypes,
FixedLeg,
FixedPeriod,
FloatLeg,
FloatPeriod,
Number,
ZeroFloatPeriod,
_BaseCurve_,
_KWArgs,
datetime,
str_,
)
class _WithYTM(_WithAccrued, Protocol):
"""
Protocol to determine the *yield-to-maturity* of a bond type *Instrument*.
"""
@property
def kwargs(self) -> _KWArgs: ...
@property
def leg1(self) -> FixedLeg | FloatLeg: ...
def _price_from_ytm(
self,
ytm: DualTypes,
settlement: datetime,
calc_mode: BondCalcMode | str_,
dirty: bool,
rate_curve: CurveOption_,
index_curve: _BaseCurve_,
indexed: bool,
) -> DualTypes:
"""
Loop through all future cashflows and discount them with ``ytm`` to achieve
correct price.
"""
calc_mode_ = _drb(self.kwargs.meta["calc_mode"], calc_mode)
if isinstance(calc_mode_, str):
calc_mode_ = BOND_MODE_MAP[calc_mode_]
try:
if indexed:
q = self._generic_price_from_ytm_indexed(
ytm=ytm,
settlement=settlement,
f1=calc_mode_._v1,
f2=calc_mode_._v2,
f3=calc_mode_._v3,
c1=calc_mode_._c1,
ci=calc_mode_._ci,
cn=calc_mode_._cn,
accrual=calc_mode_._ytm_accrual,
rate_curve=rate_curve,
index_curve=index_curve,
)
if dirty:
return q + self.accrued(settlement, indexed=True, index_curve=index_curve) # type: ignore[call-arg]
else:
return q
else:
q = self._generic_price_from_ytm(
ytm=ytm,
settlement=settlement,
f1=calc_mode_._v1,
f2=calc_mode_._v2,
f3=calc_mode_._v3,
c1=calc_mode_._c1,
ci=calc_mode_._ci,
cn=calc_mode_._cn,
accrual=calc_mode_._ytm_accrual,
rate_curve=rate_curve,
)
if dirty:
return q + self._accrued(settlement, calc_mode_._settle_accrual)
else:
return q
except KeyError:
raise ValueError(f"Cannot calculate with `calc_mode`: {calc_mode}")
def _generic_price_from_ytm(
self,
ytm: DualTypes,
settlement: datetime,
f1: YtmDiscountFunction,
f2: YtmDiscountFunction,
f3: YtmDiscountFunction,
c1: CashflowFunction,
ci: CashflowFunction,
cn: CashflowFunction,
accrual: AccrualFunction,
rate_curve: CurveOption_,
) -> DualTypes:
"""
Refer to supplementary material.
Note: `curve` is only needed for FloatRate Periods on `_period_cashflow`
"""
f: float = self.leg1.schedule.frequency_obj.periods_per_annum()
acc_idx: int = self.leg1._period_index(settlement)
_is_ex_div: bool = self.leg1.ex_div(settlement)
if settlement == self.leg1.schedule.aschedule[acc_idx + 1]:
# then settlement aligns with a cashflow: manually adjust to next period
_is_ex_div = False
acc_idx += 1
v2 = f2(self, ytm, f, settlement, acc_idx, None, accrual, -100000)
v1 = f1(self, ytm, f, settlement, acc_idx, v2, accrual, acc_idx)
v3 = f3(
self,
ytm,
f,
settlement,
self.leg1.schedule.n_periods - 1,
v2,
accrual,
self.leg1.schedule.n_periods - 1,
)
# Sum up the coupon cashflows discounted by the calculated factors
d: DualTypes = 0.0
n = self.leg1.schedule.n_periods
for i, p_idx in enumerate(range(acc_idx, n)):
if i == 0 and _is_ex_div:
# no coupon cashflow is received so no addition to the sum
continue
elif i == 0:
# then this is the first period: c1 and v1 are used
cf1 = c1(self, ytm, f, acc_idx, p_idx, n, rate_curve)
d += cf1 * v1
elif p_idx == (self.leg1.schedule.n_periods - 1):
# then this is last period, but it is not the first (i>0).
# cn and v3 are relevant, but v1 is also used, and if i > 1 then v2 is also used.
cfn = cn(self, ytm, f, acc_idx, p_idx, n, rate_curve)
d += cfn * v2 ** (i - 1) * v3 * v1
else:
# this is not the first and not the last period.
# ci and v2i are relevant, but v1 is also required and v2 may also be used if i > 1.
# v2i allows for a per-period adjustment to the v2 discount factor, e.g. BTPs.
cfi = ci(self, ytm, f, acc_idx, p_idx, n, rate_curve)
v2i = f2(self, ytm, f, settlement, acc_idx, v2, accrual, p_idx)
d += cfi * v2 ** (i - 1) * v2i * v1
# Add the redemption payment discounted by relevant factors
redemption: Cashflow = self.leg1._exchange_periods[1] # type: ignore[assignment]
if i == 0: # only looped 1 period, only use the last discount
d += self._period_cashflow(redemption, rate_curve) * v1
elif i == 1: # only looped 2 periods, no need for v2
d += self._period_cashflow(redemption, rate_curve) * v3 * v1
else: # looped more than 2 periods, regular formula applied
d += self._period_cashflow(redemption, rate_curve) * v2 ** (i - 1) * v3 * v1
# discount all by the first period factor and scaled to price
p = d / -self.leg1.settlement_params.notional * 100
return p - self._accrued(settlement, accrual) # always return the clean price due to
# the possibility of different accrual functions for physical settlement vs YTM calc.
def _generic_price_from_ytm_indexed(
self,
ytm: DualTypes,
settlement: datetime,
f1: YtmDiscountFunction,
f2: YtmDiscountFunction,
f3: YtmDiscountFunction,
c1: CashflowFunction,
ci: CashflowFunction,
cn: CashflowFunction,
accrual: AccrualFunction,
rate_curve: CurveOption_,
index_curve: _BaseCurve_,
) -> DualTypes:
"""
Very similar to `_generic_price_from_ytm` except every cashflow is indexed by the
index ratio.
"""
assert hasattr(self, "index_ratio") # noqa: S101 # i.e. object is an IndexFixedRatedBond
f: float = self.leg1.schedule.frequency_obj.periods_per_annum()
acc_idx: int = self.leg1._period_index(settlement)
_is_ex_div: bool = self.leg1.ex_div(settlement)
if settlement == self.leg1.schedule.aschedule[acc_idx + 1]:
# then settlement aligns with a cashflow: manually adjust to next period
_is_ex_div = False
acc_idx += 1
v2 = f2(self, ytm, f, settlement, acc_idx, None, accrual, -100000)
v1 = f1(self, ytm, f, settlement, acc_idx, v2, accrual, acc_idx)
v3 = f3(
self,
ytm,
f,
settlement,
self.leg1.schedule.n_periods - 1,
v2,
accrual,
self.leg1.schedule.n_periods - 1,
)
# Sum up the coupon cashflows discounted by the calculated factors
d: DualTypes = 0.0
n = self.leg1.schedule.n_periods
for i, p_idx in enumerate(range(acc_idx, n)):
irn = self.index_ratio(self.leg1.schedule.aschedule[p_idx + 1], index_curve=index_curve)
if i == 0 and _is_ex_div:
# no coupon cashflow is received so no addition to the sum
continue
elif i == 0:
# then this is the first period: c1 and v1 are used
cf1 = c1(self, ytm, f, acc_idx, p_idx, n, rate_curve)
d += cf1 * v1 * irn
elif p_idx == (self.leg1.schedule.n_periods - 1):
# then this is last period, but it is not the first (i>0).
# cn and v3 are relevant, but v1 is also used, and if i > 1 then v2 is also used.
cfn = cn(self, ytm, f, acc_idx, p_idx, n, rate_curve)
d += cfn * v2 ** (i - 1) * v3 * v1 * irn
else:
# this is not the first and not the last period.
# ci and v2i are relevant, but v1 is also required and v2 may also be used if i > 1.
# v2i allows for a per-period adjustment to the v2 discount factor, e.g. BTPs.
cfi = ci(self, ytm, f, acc_idx, p_idx, n, rate_curve)
v2i = f2(self, ytm, f, settlement, acc_idx, v2, accrual, p_idx)
d += cfi * v2 ** (i - 1) * v2i * v1 * irn
# Add the redemption payment discounted by relevant factors
redemption: Cashflow = self.leg1._exchange_periods[1] # type: ignore[assignment]
if i == 0: # only looped 1 period, only use the last discount
d += self._period_cashflow(redemption, rate_curve) * v1 * irn
elif i == 1: # only looped 2 periods, no need for v2
d += self._period_cashflow(redemption, rate_curve) * v3 * v1 * irn
else: # looped more than 2 periods, regular formula applied
d += self._period_cashflow(redemption, rate_curve) * v2 ** (i - 1) * v3 * v1 * irn
# discount all by the first period factor and scaled to price
p = d / -self.leg1.settlement_params.notional * 100
settle_ir: DualTypes = self.index_ratio(settlement=settlement, index_curve=index_curve)
return p - self._accrued(settlement, accrual) * settle_ir # return the clean indexed price
def _ytm(
self,
price: DualTypes,
settlement: datetime,
rate_curve: CurveOption_,
dirty: bool,
indexed: bool,
calc_mode: BondCalcMode | str_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
) -> Number:
"""
Calculate the yield-to-maturity of the security given its price.
Parameters
----------
price : float, Dual, Dual2
The price, per 100 nominal, against which to determine the yield.
settlement : datetime
The settlement date on which to determine the price.
dirty : bool, optional
If `True` will assume the
:meth:`~rateslib.instruments.FixedRateBond.accrued` is included in the price.
Returns
-------
float, Dual, Dual2
Notes
-----
If ``price`` is given as :class:`~rateslib.dual.Dual` or
:class:`~rateslib.dual.Dual2` input the result of the yield will be output
as the same type with the variables passed through accordingly.
""" # noqa: E501
def s(g: DualTypes) -> DualTypes:
return self._price_from_ytm(
ytm=g,
settlement=settlement,
calc_mode=calc_mode,
dirty=dirty,
rate_curve=rate_curve,
index_curve=index_curve,
indexed=indexed,
)
result = ift_1dim(
s,
s_tgt=price,
h="ytm_quadratic",
ini_h_args=(-3.0, 2.0, 12.0),
func_tol=1e-9,
conv_tol=1e-9,
raise_on_fail=True,
)
return result["g"] # type: ignore[no-any-return]
def ytm(
self,
price: DualTypes,
settlement: datetime,
dirty: bool = False,
rate_curve: CurveOption_ = NoInput(0),
calc_mode: BondCalcMode | str_ = NoInput(0),
) -> Number:
# overloaded ytm by IndexFixedRateBond
"""
Calculate the yield-to-maturity of the security given its price.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import FixedRateBond, dt, Dual, Dual2
.. ipython:: python
aapl_bond = FixedRateBond(dt(2013, 5, 4), dt(2043, 5, 4), fixed_rate=3.85, spec="us_corp")
aapl_bond.ytm(price=87.24, settlement=dt(2014, 3, 5))
aapl_bond.ytm(price=87.24, settlement=dt(2014, 3, 5), calc_mode="us_gb_tsy")
.. image:: https://ebrary.net/imag/econom/smith_bondm/image232.jpg
:align: center
:alt: Image from ebrary.net
:height: 310
:width: 433
.. role:: red
.. role:: green
Parameters
----------
price: float, Dual, Dual2, Variable, :red:`required`
The price, per 100 nominal, against which to determine the yield.
settlement: datetime, :red:`required`
The settlement date on which to determine the price.
dirty: bool, :green:`optional (set as False)`
If `True` will assume the (settlement)
:meth:`~rateslib.instruments.FixedRateBond.accrued` is included in the price.
rate_curve: _BaseCurve or dict of such, :green:`optional`
Used to forecast floating rates if required.
calc_mode: str or BondCalcMode, :green:`optional`
An alternative calculation mode to use. The ``calc_mode`` is typically set at
*Instrument* initialisation and is not required, but is useful as an override to
allow comparisons, e.g. of *"us_gb"* street convention versus *"us_gb_tsy"* treasury
convention.
Returns
-------
float, Dual, Dual2
Notes
-----
If ``price`` is given as :class:`~rateslib.dual.Dual` or
:class:`~rateslib.dual.Dual2` input the result of the yield will be output
as the same type with the variables passed through accordingly.
.. ipython:: python
aapl_bond.ytm(price=Dual(87.24, ["price", "a"], [1, -0.75]), settlement=dt(2014, 3, 5))
aapl_bond.ytm(price=Dual2(87.24, ["price", "a"], [1, -0.75], []), settlement=dt(2014, 3, 5))
""" # noqa: E501
return self._ytm(
price=price,
settlement=settlement,
dirty=dirty,
rate_curve=rate_curve,
calc_mode=calc_mode,
indexed=False,
)
def _period_cashflow(
self,
period: Cashflow | FixedPeriod | FloatPeriod | ZeroFloatPeriod,
rate_curve: CurveOption_,
) -> DualTypes:
"""Nominal fixed rate bonds use the known "cashflow" attribute on the *Period*."""
return period.unindexed_cashflow(rate_curve=rate_curve)
================================================
FILE: python/rateslib/instruments/cds.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.kwargs import _convert_to_schedule_kwargs, _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_get_fx_maybe_from_solver,
_parse_curves,
_Vol,
)
from rateslib.legs import CreditPremiumLeg, CreditProtectionLeg
from rateslib.scheduling import Frequency
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CalInput,
CurvesT_,
DataFrame,
DualTypes,
DualTypes_,
FXForwards_,
RollDay,
Solver_,
VolT_,
_BaseLeg,
bool_,
datetime,
datetime_,
float_,
int_,
str_,
)
class CDS(_BaseInstrument):
"""
A *credit default swap (CDS)* composing a :class:`~rateslib.legs.CreditPremiumLeg`
and a :class:`~rateslib.legs.CreditProtectionLeg`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.instruments import CDS
from datetime import datetime as dt
.. ipython:: python
irs = CDS(
effective=dt(2001, 12, 20),
termination="2y",
spec="us_ig_cds",
)
irs.cashflows()
.. rubric:: Pricing
A *CDS* requires a hazard *rate curve* and a *disc curve* on both legs
(which should be the same). The following input formats are
allowed:
.. code-block:: python
curves = [rate_curve, disc_curve] # two curves are applied in the given order
curves = [rate_curve, disc_curve, rate_curve, disc_curve] # four curves applied to each leg
curves = {"rate_curve": rate_curve, "disc_curve": disc_curve}
curves = { # dict form is explicit
"rate_curve": rate_curve,
"disc_curve": disc_curve
"leg2_rate_curve": rate_curve,
"leg2_disc_curve": rate_curve,
}
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **scheduling** parameters.
effective : datetime, :red:`required`
The unadjusted effective date. If given as adjusted, unadjusted alternatives may be
inferred.
termination : datetime, str, :red:`required`
The unadjusted termination date. If given as adjusted, unadjusted alternatives may be
inferred. If given as string tenor will be calculated from ``effective``.
frequency : Frequency, str, :red:`required`
The frequency of the schedule.
If given as string will derive a :class:`~rateslib.scheduling.Frequency` aligning with:
monthly ("M"), quarterly ("Q"), semi-annually ("S"), annually("A") or zero-coupon ("Z"), or
a set number of calendar or business days ("_D", "_B"), weeks ("_W"), months ("_M") or
years ("_Y").
Where required, the :class:`~rateslib.scheduling.RollDay` is derived as per ``roll``
and business day calendar as per ``calendar``.
stub : StubInference, str in {"ShortFront", "LongFront", "ShortBack", "LongBack"}, :green:`optional`
The stub type used if stub inference is required. If given as string will derive a
:class:`~rateslib.scheduling.StubInference`.
front_stub : datetime, :green:`optional`
The unadjusted date for the start stub period. If given as adjusted, unadjusted
alternatives may be inferred.
back_stub : datetime, :green:`optional`
The unadjusted date for the back stub period. If given as adjusted, unadjusted
alternatives may be inferred.
See notes for combining ``stub``, ``front_stub`` and ``back_stub``
and any automatic stub inference.
roll : RollDay, int in [1, 31], str in {"eom", "imm", "som"}, :green:`optional`
The roll day of the schedule. If not given or not available in ``frequency`` will be
inferred for monthly frequency variants.
eom : bool, :green:`optional`
Use an end of month preference rather than regular rolls for ``roll`` inference. Set by
default. Not required if ``roll`` is defined.
modifier : Adjuster, str in {"NONE", "F", "MF", "P", "MP"}, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` used for adjusting unadjusted schedule dates
into adjusted dates. If given as string must define simple date rolling rules.
calendar : calendar, str, :green:`optional`
The business day calendar object to use. If string will call
:meth:`~rateslib.scheduling.get_calendar`.
payment_lag: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
a payment date. If given as integer will define the number of business days to
lag payments by.
payment_lag_exchange: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional payment date. If given as integer will define the number of business days to
lag payments by.
ex_div: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional dates, which may be used, for example by fixings schedules. If given as integer
will define the number of business days to lag dates by.
convention: str, :green:`optional (set by 'defaults')`
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
leg2_effective : datetime, :green:`optional (inherited from leg1)`
leg2_termination : datetime, str, :green:`optional (inherited from leg1)`
leg2_frequency : Frequency, str, :green:`optional (inherited from leg1)`
leg2_stub : StubInference, str, :green:`optional (inherited from leg1)`
leg2_front_stub : datetime, :green:`optional (inherited from leg1)`
leg2_back_stub : datetime, :green:`optional (inherited from leg1)`
leg2_roll : RollDay, int, str, :green:`optional (inherited from leg1)`
leg2_eom : bool, :green:`optional (inherited from leg1)`
leg2_modifier : Adjuster, str, :green:`optional (inherited from leg1)`
leg2_calendar : calendar, str, :green:`optional (inherited from leg1)`
leg2_payment_lag: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_payment_lag_exchange: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_ex_div: Adjuster, int, :green:`optional (inherited from leg1)`
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the *Instrument* (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The initial leg notional, defined in units of *reference currency*.
amortization: float, Dual, Dual2, Variable, str, Amortization, :green:`optional (set as zero)`
Set a non-constant notional per *Period*. If a scalar value, adjusts the ``notional`` of
each successive period by that same value. Should have
sign equal to that of notional if the notional is to reduce towards zero.
leg2_notional : float, Dual, Dual2, Variable, :green:`optional (negatively inherited from leg1)`
leg2_amortization : float, Dual, Dual2, Variable, str, Amortization, :green:`optional (negatively inherited from leg1)`
.. note::
The following are **rate parameters**.
fixed_rate : float or None
The fixed rate applied to the :class:`~rateslib.legs.FixedLeg`. If `None`
will be set to mid-market when curves are provided.
.. note::
The following parameters define **credit specific** elements.
premium_accrued: bool, :green:`optional (set by 'defaults')`
Whether an accrued premium is paid on the event of mid-period credit default.
.. note::
The following are **meta parameters**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
spec: str, :green:`optional`
A collective group of parameters. See
:ref:`default argument specifications `.
""" # noqa: E501
_rate_scalar = 1.0
@property
def fixed_rate(self) -> DualTypes_:
return self.leg1.fixed_rate
@fixed_rate.setter
def fixed_rate(self, value: DualTypes_) -> None:
self.kwargs.leg1["fixed_rate"] = value
self.leg1.fixed_rate = value
@property
def leg1(self) -> CreditPremiumLeg:
"""The :class:`~rateslib.legs.CreditPremiumLeg` of the *Instrument*."""
return self._leg1
@property
def leg2(self) -> CreditProtectionLeg:
"""The :class:`~rateslib.legs.CreditProtectionLeg` of the *Instrument*."""
return self._leg2
@property
def legs(self) -> list[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
def __init__(
self,
effective: datetime_ = NoInput(0),
termination: datetime | str_ = NoInput(0),
frequency: Frequency | str_ = NoInput(0),
*,
stub: str_ = NoInput(0),
front_stub: datetime_ = NoInput(0),
back_stub: datetime_ = NoInput(0),
roll: int | RollDay | str_ = NoInput(0),
eom: bool_ = NoInput(0),
modifier: str_ = NoInput(0),
calendar: CalInput = NoInput(0),
payment_lag: int_ = NoInput(0),
payment_lag_exchange: int_ = NoInput(0),
ex_div: int_ = NoInput(0),
convention: str_ = NoInput(0),
leg2_effective: datetime_ = NoInput(1),
leg2_termination: datetime | str_ = NoInput(1),
leg2_frequency: Frequency | str_ = NoInput(0),
leg2_stub: str_ = NoInput(1),
leg2_front_stub: datetime_ = NoInput(1),
leg2_back_stub: datetime_ = NoInput(1),
leg2_roll: int | RollDay | str_ = NoInput(1),
leg2_eom: bool_ = NoInput(1),
leg2_modifier: str_ = NoInput(1),
leg2_calendar: CalInput = NoInput(1),
leg2_payment_lag: int_ = NoInput(1),
leg2_payment_lag_exchange: int_ = NoInput(1),
# leg2_convention: str_ = NoInput(1),
leg2_ex_div: int_ = NoInput(1),
# settlement
notional: float_ = NoInput(0),
currency: str_ = NoInput(0),
amortization: float_ = NoInput(0),
leg2_notional: float_ = NoInput(-1),
leg2_amortization: float_ = NoInput(-1),
# rate and credit params
premium_accrued: bool_ = NoInput(0),
fixed_rate: DualTypes_ = NoInput(0),
# meta params
curves: CurvesT_ = NoInput(0),
spec: str_ = NoInput(0),
) -> None:
user_args = dict(
effective=effective,
termination=termination,
frequency=frequency,
stub=stub,
front_stub=front_stub,
back_stub=back_stub,
roll=roll,
eom=eom,
modifier=modifier,
calendar=calendar,
payment_lag=payment_lag,
payment_lag_exchange=payment_lag_exchange,
ex_div=ex_div,
notional=notional,
currency=currency,
amortization=amortization,
convention=convention,
leg2_effective=leg2_effective,
leg2_termination=leg2_termination,
leg2_frequency=leg2_frequency,
leg2_stub=leg2_stub,
leg2_front_stub=leg2_front_stub,
leg2_back_stub=leg2_back_stub,
leg2_roll=leg2_roll,
leg2_eom=leg2_eom,
leg2_modifier=leg2_modifier,
leg2_calendar=leg2_calendar,
leg2_payment_lag=leg2_payment_lag,
leg2_payment_lag_exchange=leg2_payment_lag_exchange,
leg2_ex_div=leg2_ex_div,
leg2_notional=leg2_notional,
leg2_amortization=leg2_amortization,
# leg2_convention=leg2_convention,
# rate and credit
premium_accrued=premium_accrued,
fixed_rate=fixed_rate,
# meta
curves=self._parse_curves(curves),
)
instrument_args = dict( # these are hard coded arguments specific to this instrument
leg2_currency=NoInput(1),
vol=_Vol(),
)
default_args = dict(
notional=defaults.notional,
payment_lag=defaults.payment_lag_specific[type(self).__name__],
payment_lag_exchange=defaults.payment_lag_exchange,
premium_accrued=defaults.cds_premium_accrued,
leg2_frequency=Frequency.Zero(),
)
self._kwargs = _KWArgs(
spec=spec,
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=["curves", "vol"],
)
self._leg1 = CreditPremiumLeg(**_convert_to_schedule_kwargs(self.kwargs.leg1, 1))
self._leg2 = CreditProtectionLeg(**_convert_to_schedule_kwargs(self.kwargs.leg2, 1))
self._legs = [self._leg1, self._leg2]
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
leg2_npv: DualTypes = self.leg2.local_npv(
rate_curve=_get_curve("leg2_rate_curve", True, True, *c),
disc_curve=_get_curve("leg2_disc_curve", False, True, *c),
index_curve=NoInput(0),
settlement=settlement,
forward=forward,
)
return (
self.leg1.spread(
target_npv=-leg2_npv,
rate_curve=_get_curve("rate_curve", True, True, *c),
disc_curve=_get_curve("disc_curve", False, True, *c),
index_curve=NoInput(0),
settlement=settlement,
forward=forward,
)
/ 100
)
def accrued(self, settlement: datetime) -> DualTypes:
"""
Calculate the amount of premium accrued until a specific date within the relevant *Period*.
Parameters
----------
settlement: datetime
The date against which accrued is measured.
Returns
-------
float, Dual, Dual2, Variable
Notes
------
Will raise an exception if there is no set ``fixed_rate``.
"""
return self.leg1.accrued(settlement=settlement)
def spread(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
return (
self.rate(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
* 100.0
)
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
self._set_pricing_mid(
curves=curves,
solver=solver,
settlement=settlement,
forward=forward,
)
return super().npv(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=local,
settlement=settlement,
forward=forward,
)
def _set_pricing_mid(
self,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> None:
# the test for an unpriced IRS is that its fixed rate is not set.
if isinstance(self.kwargs.leg1["fixed_rate"], NoInput):
# set a fixed rate for the purpose of generic methods NPV will be zero.
mid_market_rate = self.rate(
curves=curves,
solver=solver,
settlement=settlement,
forward=forward,
)
self.leg1.fixed_rate = _dual_float(mid_market_rate)
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
A CDS has two curve requirements: a hazard_curve and a disc_curve used by both legs.
When given as anything other than two curves will raise an Exception.
"""
if isinstance(curves, NoInput):
return _Curves()
if isinstance(curves, dict):
return _Curves(
rate_curve=curves.get("rate_curve", NoInput(0)),
disc_curve=curves.get("disc_curve", NoInput(0)),
leg2_rate_curve=_drb(
curves.get("rate_curve", NoInput(0)),
curves.get("leg2_rate_curve", NoInput(0)),
),
leg2_disc_curve=_drb(
curves.get("disc_curve", NoInput(0)),
curves.get("leg2_disc_curve", NoInput(0)),
),
)
elif isinstance(curves, list | tuple):
if len(curves) == 2:
return _Curves(
rate_curve=curves[0],
leg2_rate_curve=curves[0],
disc_curve=curves[1],
leg2_disc_curve=curves[1],
)
elif len(curves) == 4:
return _Curves(
rate_curve=curves[0],
leg2_rate_curve=curves[2],
disc_curve=curves[1],
leg2_disc_curve=curves[3],
)
else:
raise ValueError(f"{type(self).__name__} requires 2 `curves`. Got {len(curves)}.")
else: # `curves` is just a single input
raise ValueError(f"{type(self).__name__} requires 2 `curves`. Got 1.")
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return super()._cashflows_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return self._local_analytic_rate_fixings_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
)
def analytic_rec_risk(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
return self.leg2.analytic_rec_risk(
rate_curve=_get_curve("leg2_rate_curve", False, True, *c),
disc_curve=_get_curve("leg2_disc_curve", False, True, *c),
fx=_get_fx_maybe_from_solver(solver=solver, fx=fx),
base=base,
)
================================================
FILE: python/rateslib/instruments/fee.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from datetime import datetime as dt
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.kwargs import _convert_to_schedule_kwargs, _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_Vol,
)
from rateslib.legs import CustomLeg
from rateslib.periods import Cashflow
from rateslib.scheduling import Frequency
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Adjuster,
CalInput,
CurvesT_,
DataFrame,
DualTypes,
DualTypes_,
FXForwards_,
IndexMethod,
PeriodFixings,
Solver_,
VolT_,
_BaseLeg,
bool_,
datetime,
datetime_,
float_,
int_,
str_,
)
class Fee(_BaseInstrument):
"""
A single :class:`~rateslib.periods.Cashflow` payable on a payment date.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.instruments import Fee
from datetime import datetime as dt
.. ipython:: python
fee = Fee(dt(2022, 1, 4), notional=2e6, calendar="nyc", payment_lag=0)
fee.cashflows()
.. rubric:: Pricing
A *Fee* requires just one *Curve* for discounting, unless it is also indexed, in which
case it may also require an additional index *Curve*
.. code-block:: python
curves = curve | [curve] # a single curve is repeated for all required curves
curves = [index_curve, disc_curve] # two curves given the specific order
curves = { # dict form is explicit
"disc_curve": disc_curve,
"index_curve": index_curve,
}
The concept of *rate* is alien to a *Fee*, and these are not *Instruments* that would
typically be expected to form part of a *Solver* framework. However, for flexibility,
two *rate* ``metric`` that are available are:
- *'npv'*: returns the result of the :meth:`~rateslib.instruments.Fee.npv` method.
- *'payment'*: returns the physical settlement amount.
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **settlement** parameters.
effective : datetime, :red:`required`
The datetime index for which the `rate`, which is just the curve value, is
returned.
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The initial leg notional, defined in units of *reference currency*.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the *Instrument* (3-digit code).
calendar : calendar, str, :green:`optional`
The business day calendar object to use. If string will call
:meth:`~rateslib.scheduling.get_calendar`.
calendar : calendar, str, :green:`optional`
The business day calendar object to use for date manipulation. If string will call
:meth:`~rateslib.scheduling.get_calendar`.
payment_lag: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` used to modify the ``effective`` payment date
according to a given ``calendar``.
ex_div: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map the adjusted payment date into
an additional date acting an ex-dividend indicator. If given as integer
will define the number of business days to lag dates by.
.. note::
The following define **non-deliverability** parameters. If the fee is
directly deliverable do not use these parameters.
pair: FXIndex, str, :green:`optional`
The currency pair for :class:`~rateslib.data.fixings.FXFixing` that determines *Period*
settlement. The *reference currency* is implied from ``pair``. Must include ``currency``.
fx_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of the :class:`~rateslib.data.fixings.FXFixing` according
to non-deliverability.
.. note::
The following parameters define **indexation**. The *Period* will be considered
indexed if any of ``index_method``, ``index_lag``, ``index_base``, ``index_fixings``
are given.
index_method : IndexMethod, str, :green:`optional (set by 'defaults')`
The interpolation method, or otherwise, to determine index values from reference dates.
index_lag: int, :green:`optional (set by 'defaults')`
The indexation lag, in months, applied to the determination of index values.
index_base: float, Dual, Dual2, Variable, :green:`optional`
The specific value set of the base index value.
If not given and ``index_fixings`` is a str fixings identifier that will be
used to determine the base index value.
index_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The index value for the reference date.
If a scalar value this is used directly. If a string identifier will link to the
central ``fixings`` object and data loader. See :ref:`fixings `.
index_base_date: datetime, :green:`optional`
The reference date for determining the base index value. Not required if ``index_base``
value is given directly, but required for indexation in all other cases.
index_reference_date: datetime, :green:`optional (set as 'payment')`
The reference date for determining the index value. Not required if ``_index_fixings``
is given as a scalar value.
index_only: bool, :green:`optional (set as False)`
A flag which determines non-payment of notional on supported *Periods*.
.. note::
The following are **meta parameters**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
metric : str, :green:`optional` (set as 'curve_value')
The pricing metric returned by :meth:`~rateslib.instruments.Value.rate`. See
**Pricing**.
"""
_rate_scalar = 1.0
@property
def leg1(self) -> CustomLeg:
"""The :class:`~rateslib.legs.CustomLeg` of the *Instrument*."""
return self._leg1
@property
def legs(self) -> list[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs # type: ignore[return-value]
def __init__(
self,
# settlement
effective: datetime,
notional: float_ = NoInput(0),
*,
currency: str_ = NoInput(0),
calendar: CalInput = NoInput(0),
payment_lag: Adjuster | str | int_ = NoInput(0),
ex_div: Adjuster | str | int_ = NoInput(0),
# non-deliverability
pair: str_ = NoInput(0),
fx_fixings: PeriodFixings = NoInput(0),
# index-args:
index_base: DualTypes_ = NoInput(0),
index_lag: int_ = NoInput(0),
index_method: IndexMethod | str_ = NoInput(0),
index_fixings: PeriodFixings = NoInput(0),
index_only: bool_ = NoInput(0),
index_base_date: datetime_ = NoInput(0),
index_reference_date: datetime_ = NoInput(0),
# meta
metric: str_ = NoInput(0),
curves: CurvesT_ = NoInput(0),
) -> None:
user_args = dict(
effective=effective,
notional=notional,
ex_div=ex_div,
currency=currency,
calendar=calendar,
payment_lag=payment_lag,
# non-deliverable
pair=pair,
fx_fixings=fx_fixings,
# indexation
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings,
index_only=index_only,
index_base_date=index_base_date,
index_reference_date=index_reference_date,
# meta
curves=self._parse_curves(curves),
metric=metric,
vol=_Vol(),
)
default_args = dict(
metric="npv",
notional=defaults.notional,
payment_lag=defaults.payment_lag_specific[type(self).__name__],
calendar="all",
)
self._kwargs = _KWArgs(
spec=NoInput(0),
user_args=user_args,
default_args=default_args,
meta_args=["curves", "metric", "vol"],
)
_ = _convert_to_schedule_kwargs(
dict(
effective=dt(1600, 1, 1),
termination=effective,
frequency=Frequency.Zero(),
payment_lag=self.kwargs.leg1["payment_lag"],
calendar=self.kwargs.leg1["calendar"],
ex_div=self.kwargs.leg1["ex_div"],
),
1,
)["schedule"]
self._leg1 = CustomLeg(
periods=[
Cashflow(
payment=_.pschedule[-1],
notional=self.kwargs.leg1["notional"],
currency=self.kwargs.leg1["currency"],
ex_dividend=_.pschedule3[-1],
# non-deliverable
pair=self.kwargs.leg1["pair"],
fx_fixings=self.kwargs.leg1["fx_fixings"],
delivery=NoInput(0), # set as payment
# indexation
index_base=self.kwargs.leg1["index_base"],
index_lag=self.kwargs.leg1["index_lag"],
index_method=self.kwargs.leg1["index_method"],
index_fixings=self.kwargs.leg1["index_fixings"],
index_only=self.kwargs.leg1["index_only"],
index_base_date=self.kwargs.leg1["index_base_date"],
index_reference_date=self.kwargs.leg1["index_reference_date"],
)
]
)
self._legs = [self._leg1]
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
@classmethod
def _parse_curves(cls, curves: CurvesT_) -> _Curves:
"""
A Value requires only one 1 curve, if not indexed, which is set as all element values.
If the fee is indexed then an `index_curve` may be required.
"""
if isinstance(curves, NoInput):
return _Curves()
elif isinstance(curves, dict):
return _Curves(
disc_curve=curves.get("disc_curve", NoInput(0)),
index_curve=_drb(
curves.get("disc_curve", NoInput(0)),
curves.get("index_curve", NoInput(0)),
),
)
elif isinstance(curves, list | tuple):
if len(curves) == 1:
return _Curves(
disc_curve=curves[0],
index_curve=curves[0],
)
elif len(curves) == 2:
return _Curves(
disc_curve=curves[1],
index_curve=curves[0],
)
else:
raise ValueError(
f"{type(cls).__name__} requires upto 2 curve type. Got {len(curves)}."
)
elif isinstance(curves, _Curves):
return curves
else: # `curves` is just a single input
return _Curves(
disc_curve=curves, # type: ignore[arg-type]
index_curve=curves, # type: ignore[arg-type]
)
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
metric_ = _drb(self.kwargs.meta["metric"], metric).lower()
if metric_ == "npv":
return self.npv( # type: ignore[return-value]
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
local=False,
)
elif metric_ == "payment":
return -1 * self.settlement_params.notional
else:
raise ValueError("`metric`must be in {'npv', 'cashflow'}.")
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
return super().npv(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=local,
settlement=settlement,
forward=forward,
)
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return super()._cashflows_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
def analytic_delta(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
leg: int = 1,
) -> DualTypes | dict[str, DualTypes]:
return super().analytic_delta(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=local,
settlement=settlement,
forward=forward,
leg=leg,
)
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return self._local_analytic_rate_fixings_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
)
================================================
FILE: python/rateslib/instruments/fly.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from collections.abc import Sequence
from typing import TYPE_CHECKING, NoReturn
from pandas import DataFrame, DatetimeIndex
from rateslib.enums.generics import NoInput
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.pricing import (
_get_fx_maybe_from_solver,
)
from rateslib.periods.utils import _maybe_fx_converted
if TYPE_CHECKING:
from rateslib.local_types import (
Any,
CurvesT_,
DualTypes,
FXForwards_,
Solver_,
VolT_,
datetime_,
str_,
)
def _composit_fixings_table(df_result: DataFrame, df: DataFrame) -> DataFrame:
"""
Add a DataFrame to an existing fixings table by extending or adding to relevant columns.
Parameters
----------
df_result: The main DataFrame that will be updated
df: The incoming DataFrame with new data to merge
Returns
-------
DataFrame
"""
# reindex the result DataFrame
if df_result.empty:
return df
else:
df_result = df_result.reindex(index=df_result.index.union(df.index))
# # update existing columns with missing data from the new available data
# for c in [c for c in df.columns if c in df_result.columns and c[1] in ["dcf", "rates"]]:
# df_result[c] = df_result[c].combine_first(df[c])
# merge by addition existing values with missing filled to zero
m = [c for c in df.columns if c in df_result.columns]
if len(m) > 0:
df_result[m] = df_result[m].add(df[m], fill_value=0.0)
# append new columns without additional calculation
a = [c for c in df.columns if c not in df_result.columns]
if len(a) > 0:
df_result[a] = df[a]
# df_result.columns = MultiIndex.from_tuples(df_result.columns)
return df_result
class Fly(_BaseInstrument):
"""
A *Butterfly* of :class:`~rateslib.instruments.protocols._BaseInstrument`.
.. rubric:: Examples
The following initialises a *Butterfly* of *IRSs*.
.. ipython:: python
:suppress:
from rateslib.instruments import Fly, IRS
from datetime import datetime as dt
.. ipython:: python
fly = Fly(
instrument1=IRS(dt(2000, 1, 1), "1y", notional=10e6, spec="eur_irs", curves=["estr"]),
instrument2=IRS(dt(2000, 1, 1), "2y", notional=-5e6, spec="eur_irs", curves=["estr"]),
instrument3=IRS(dt(2000, 1, 1), "3y", notional=1.75e6, spec="eur_irs", curves=["estr"]),
)
fly.cashflows()
.. rubric:: Pricing
Each :class:`~rateslib.instruments.protocols._BaseInstrument` should have
its own ``curves`` and ``vol`` objects set at its initialisation, according to the
documentation for that *Instrument*. For the pricing methods ``curves`` and ``vol`` objects,
these can be universally passed to each *Instrument* but in many cases that would be
technically impossible since each *Instrument* might require difference pricing objects, e.g.
if the *Instruments* have difference currencies. For a *Fly*
of three *IRS* in the same currency this would be possible, however.
Parameters
----------
instrument1 : _BaseInstrument
The *Instrument* with the shortest maturity.
instrument2 : _BaseInstrument
The *Instrument* with the intermediate maturity.
instrument3 : _BaseInstrument
The *Instrument* with the longest maturity.
Notes
-----
A *Fly* is just a container for three
:class:`~rateslib.instruments.protocols._BaseInstrument`, with an overload
for the :meth:`~rateslib.instruments.Spread.rate` method to calculate twice the
belly rate minus the wings (whatever metric is in use for each *Instrument*), which allows
it to offer a lot of flexibility in *pseudo Instrument* creation.
"""
_instruments: Sequence[_BaseInstrument]
@property
def instruments(self) -> Sequence[_BaseInstrument]:
"""The *Instruments* contained within the *Portfolio*."""
return self._instruments
def __init__(
self,
instrument1: _BaseInstrument,
instrument2: _BaseInstrument,
instrument3: _BaseInstrument,
) -> None:
self._instruments = [instrument1, instrument2, instrument3]
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
"""
Return the NPV of the *Portfolio* by summing individual *Instrument* NPVs.
"""
local_npv = self._npv_single_core(curves=curves, solver=solver, fx=fx, vol=vol, base=base)
if not local:
single_value: DualTypes = 0.0
for k, v in local_npv.items():
single_value += _maybe_fx_converted(
value=v,
currency=k,
fx=_get_fx_maybe_from_solver(fx=fx, solver=solver),
base=base,
forward=forward,
)
return single_value
else:
return local_npv
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
"""
TBD
"""
df_result = DataFrame(index=DatetimeIndex([], name="obs_dates"))
for inst in self.instruments:
try:
df = inst.local_analytic_rate_fixings(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
forward=forward,
settlement=settlement,
)
except AttributeError:
continue
df_result = _composit_fixings_table(df_result, df)
return df_result
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return self._cashflows_from_instruments(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
base=base,
)
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
rates: list[DualTypes] = []
for inst in self.instruments:
rates.append(
inst.rate(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
metric=metric,
)
)
return (-rates[0] + 2 * rates[1] - rates[2]) * 100.0
def analytic_delta(self, *args: Any, **kwargs: Any) -> NoReturn:
raise NotImplementedError("`analytic_delta` is not defined for Portfolio.")
================================================
FILE: python/rateslib/instruments/fra.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, Ok, Result, _drb
from rateslib.enums.parameters import FloatFixingMethod, SpreadCompoundMethod
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.kwargs import _convert_to_schedule_kwargs, _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_parse_curves,
_Vol,
)
from rateslib.legs import FixedLeg, FloatLeg
from rateslib.scheduling import Adjuster
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CalInput,
CurvesT_,
DataFrame,
DualTypes,
DualTypes_,
FixingsRates_,
FloatRateSeries,
Frequency,
FXForwards_,
RollDay,
Solver_,
VolT_,
_BaseCurveOrDict_,
_BaseLeg,
bool_,
datetime,
datetime_,
str_,
)
class FRA(_BaseInstrument):
"""
A *forward rate agreement (FRA)* compositing a
:class:`~rateslib.legs.FixedLeg` and :class:`~rateslib.legs.FloatLeg`.
These *Legs* have *Instrument* level overloads in order to satisfy the cashflow determination
conventions of a *FRA* instruments.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.instruments import FRA
from datetime import datetime as dt
.. ipython:: python
fra = FRA(
effective=dt(2000, 1, 1),
termination="6m",
spec="eur_fra6",
fixed_rate=2.0,
)
fra.cashflows()
.. rubric:: Pricing
An *FRA* requires a *disc curve* on both legs (which should be the same *Curve*) and a
*leg2 rate curve* to forecast the IBOR type rate on the *FloatLeg*. The following input
formats are allowed:
.. code-block:: python
curves = curve | [curve] # a single curve is repeated for all required curves
curves = [rate_curve, disc_curve] # two curves are applied in the given order
curves = [None, disc_curve, rate_curve, disc_curve] # four curves applied to each leg
curves = {"leg2_rate_curve": rate_curve, "disc_curve": disc_curve} # dict form is explicit
The only ``metric`` is *'rate'*.
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **scheduling** parameters.
effective : datetime, :red:`required`
The unadjusted effective date. If given as adjusted, unadjusted alternatives may be
inferred.
termination : datetime, str, :red:`required`
The unadjusted termination date. If given as adjusted, unadjusted alternatives may be
inferred. If given as string tenor will be calculated from ``effective``.
frequency : Frequency, str, :red:`required`
The frequency of the schedule.
If given as string will derive a :class:`~rateslib.scheduling.Frequency` aligning with:
monthly ("M"), quarterly ("Q"), semi-annually ("S"), annually("A") or zero-coupon ("Z"), or
a set number of calendar or business days ("_D", "_B"), weeks ("_W"), months ("_M") or
years ("_Y").
Where required, the :class:`~rateslib.scheduling.RollDay` is derived as per ``roll``
and business day calendar as per ``calendar``.
stub : StubInference, str in {"ShortFront", "LongFront", "ShortBack", "LongBack"}, :green:`optional`
The stub type used if stub inference is required. If given as string will derive a
:class:`~rateslib.scheduling.StubInference`.
front_stub : datetime, :green:`optional`
The unadjusted date for the start stub period. If given as adjusted, unadjusted
alternatives may be inferred.
back_stub : datetime, :green:`optional`
The unadjusted date for the back stub period. If given as adjusted, unadjusted
alternatives may be inferred.
See notes for combining ``stub``, ``front_stub`` and ``back_stub``
and any automatic stub inference.
roll : RollDay, int in [1, 31], str in {"eom", "imm", "som"}, :green:`optional`
The roll day of the schedule. If not given or not available in ``frequency`` will be
inferred for monthly frequency variants.
eom : bool, :green:`optional`
Use an end of month preference rather than regular rolls for ``roll`` inference. Set by
default. Not required if ``roll`` is defined.
modifier : Adjuster, str in {"NONE", "F", "MF", "P", "MP"}, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` used for adjusting unadjusted schedule dates
into adjusted dates. If given as string must define simple date rolling rules.
calendar : calendar, str, :green:`optional`
The business day calendar object to use. If string will call
:meth:`~rateslib.scheduling.get_calendar`.
payment_lag: int, :green:`optional (set as 0)`
A number of business days by which to lag a traditional *FRA* payment date.
.. warning::
*FRAs* are defined by a payment structure that has a cashflow at the accrual start
date and an amount adjusted by the rate fixing. An input to this parameter, say 5,
will apply an adjuster: `Adjuster.BusDaysLagSettleInAdvance(5)`.
ex_div: int, :green:`optional (set as 0)`
Applied in the same manner as the ``payment_lag``, except negated. An input of 1 will
apply an adjuster: `Adjuster.BusDaysLagSettleInAdvance(-1)`.
convention: str, :green:`optional (set by 'defaults')`
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the *Instrument* (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The initial leg notional, defined in units of *reference currency*.
.. note::
The following are **rate parameters**.
fixed_rate : float or None
The fixed rate applied to the :class:`~rateslib.legs.FixedLeg`. If `None`
will be set to mid-market when curves are provided.
leg2_fixing_method: int, :green:`optional (set by 'defaults')`
The ``fixing_method`` used by the *Instrument*. This will be IBOR with a defined
publication lag. The default is "IBOR(2)" with a two-day lag.
leg2_fixing_frequency: Frequency, str, :green:`optional (set by 'frequency' or '1B')`
The :class:`~rateslib.scheduling.Frequency` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given is assumed to match the
frequency of the schedule.
leg2_fixing_series: FloatRateSeries, str, :green:`optional (implied by other parameters)`
The :class:`~rateslib.data.fixings.FloatRateSeries` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given inherits attributes given
such as the ``calendar``, ``convention``, ``fixing_method`` etc.
leg2_rate_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
See :ref:`Fixings `.
The value of the rate fixing. If a scalar, is used directly. If a string identifier, links
to the central ``fixings`` object and data loader.
.. note::
The following are **meta parameters**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
spec: str, :green:`optional`
A collective group of parameters. See
:ref:`default argument specifications `.
metric : str, :green:`optional` (set as 'rate')`
The pricing metric returned by :meth:`~rateslib.instruments.FRA.rate`.
Notes
-----
A *FRA* is modelled as a single period *IRS* whose payment date is overloaded to be
based on the 'accrual' effective date, and whose cashflow values are adjusted by a scaling
factor related to the floating rate, i.e. :math:`\\frac{1}{1 + d r}`, thus replicating the
payoff calculation for a traditional *FRA*.
""" # noqa: E501
_rate_scalar = 1.0
@property
def fixed_rate(self) -> DualTypes_:
"""The fixed rate parameter of the composited
:class:`~rateslib.legs.FixedLeg`."""
return self.leg1.fixed_rate
@fixed_rate.setter
def fixed_rate(self, value: DualTypes_) -> None:
self.kwargs.leg1["fixed_rate"] = value
self.leg1.fixed_rate = value
@property
def leg1(self) -> FixedLeg:
"""The :class:`~rateslib.legs.FixedLeg` of the *Instrument*."""
return self._leg1
@property
def leg2(self) -> FloatLeg:
"""The :class:`~rateslib.legs.FloatLeg` of the *Instrument*."""
return self._leg2
@property
def legs(self) -> list[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
An STIRFuture has two curve requirements: a leg2_rate_curve and a disc_curve used by
both legs.
When given as only 1 element this curve is applied to all of the those components
When given as 2 elements the first is treated as the rate curve and the 2nd as disc curve.
"""
if isinstance(curves, NoInput):
return _Curves()
if isinstance(curves, dict):
return _Curves(
rate_curve=curves.get("rate_curve", NoInput(0)),
disc_curve=curves.get("disc_curve", NoInput(0)),
leg2_rate_curve=_drb(
curves.get("rate_curve", NoInput(0)),
curves.get("leg2_rate_curve", NoInput(0)),
),
leg2_disc_curve=_drb(
curves.get("disc_curve", NoInput(0)),
curves.get("leg2_disc_curve", NoInput(0)),
),
)
elif isinstance(curves, list | tuple):
if len(curves) == 2:
return _Curves(
leg2_rate_curve=curves[0],
disc_curve=curves[1],
leg2_disc_curve=curves[1],
)
elif len(curves) == 1:
return _Curves(
leg2_rate_curve=curves[0],
disc_curve=curves[0],
leg2_disc_curve=curves[0],
)
elif len(curves) == 4:
return _Curves(
rate_curve=curves[0],
disc_curve=curves[1],
leg2_rate_curve=curves[2],
leg2_disc_curve=curves[3],
)
else:
raise ValueError(
f"{type(self).__name__} requires only 2 curve types. Got {len(curves)}."
)
elif isinstance(curves, _Curves):
return curves
else: # `curves` is just a single input which is copied across all curves
return _Curves(
leg2_rate_curve=curves, # type: ignore[arg-type]
disc_curve=curves, # type: ignore[arg-type]
leg2_disc_curve=curves, # type: ignore[arg-type]
)
def __init__(
self,
# scheduling
effective: datetime_ = NoInput(0),
termination: datetime | str_ = NoInput(0),
frequency: Frequency | str_ = NoInput(0),
*,
roll: int | RollDay | str_ = NoInput(0),
eom: bool_ = NoInput(0),
modifier: str_ = NoInput(0),
calendar: CalInput = NoInput(0),
payment_lag: int = 0,
ex_div: int = 0,
convention: str_ = NoInput(0),
# settlement parameters
currency: str_ = NoInput(0),
notional: DualTypes_ = NoInput(0),
# rate parameters
fixed_rate: DualTypes_ = NoInput(0),
leg2_rate_fixings: FixingsRates_ = NoInput(0),
leg2_fixing_method: FloatFixingMethod | str_ = NoInput(0),
leg2_fixing_frequency: Frequency | str_ = NoInput(0),
leg2_fixing_series: FloatRateSeries | str_ = NoInput(0),
# meta parameters
curves: CurvesT_ = NoInput(0),
spec: str_ = NoInput(0),
metric: str_ = NoInput(0),
) -> None:
user_args = dict(
# scheduling
effective=effective,
termination=termination,
frequency=frequency,
roll=roll,
eom=eom,
modifier=modifier,
calendar=calendar,
convention=convention,
# settlement
currency=currency,
notional=notional,
# rate
fixed_rate=fixed_rate,
leg2_rate_fixings=leg2_rate_fixings,
leg2_fixing_series=leg2_fixing_series,
leg2_fixing_frequency=leg2_fixing_frequency,
leg2_fixing_method=leg2_fixing_method,
# meta
curves=self._parse_curves(curves),
metric=metric,
)
instrument_args = dict(
leg2_effective=NoInput.inherit,
leg2_termination=NoInput.inherit,
leg2_frequency=NoInput.inherit,
leg2_roll=NoInput.inherit,
leg2_eom=NoInput.inherit,
leg2_modifier=NoInput.inherit,
leg2_calendar=NoInput.inherit,
leg2_payment_lag=NoInput.inherit,
leg2_ex_div=NoInput.inherit,
leg2_convention=NoInput.inherit,
leg2_float_spread=0.0,
leg2_spread_compound_method=SpreadCompoundMethod.NoneSimple,
leg2_notional=NoInput.negate,
leg2_currency=NoInput.inherit,
payment_lag=Adjuster.BusDaysLagSettleInAdvance(payment_lag),
ex_div=Adjuster.BusDaysLagSettleInAdvance(-ex_div),
initial_exchange=False,
final_exchange=False,
leg2_initial_exchange=False,
leg2_final_exchange=False,
vol=_Vol(),
)
default_args = dict(
notional=defaults.notional,
leg2_fixing_method=FloatFixingMethod.IBOR(2),
metric="rate",
)
self._kwargs = _KWArgs(
spec=spec,
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=["curves", "metric", "vol"],
)
self._leg1 = FixedLeg(**_convert_to_schedule_kwargs(self.kwargs.leg1, 1))
self._leg2 = FloatLeg(**_convert_to_schedule_kwargs(self.kwargs.leg2, 1))
self._legs = [self.leg1, self.leg2]
if self._leg1.schedule.n_periods != 1:
raise ValueError(
"The scheduling parameters of the STIRFuture must define exactly "
f"one regular period. Got '{self.leg1.schedule.n_periods}'."
)
def _fra_rate_scalar(self, leg2_rate_curve: _BaseCurveOrDict_) -> DualTypes:
r = self.leg2._regular_periods[0].rate(rate_curve=leg2_rate_curve)
return 1 / (1 + self.leg2._regular_periods[0].period_params.dcf * r / 100.0)
def _try_fra_rate_scalar(self, leg2_rate_curve: _BaseCurveOrDict_) -> Result[DualTypes]:
r = self.leg2._regular_periods[0].try_rate(rate_curve=leg2_rate_curve)
if r.is_err:
return r
else:
return Ok(
1 / (1 + self.leg2._regular_periods[0].period_params.dcf * r.unwrap() / 100.0)
)
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
self._set_pricing_mid(
curves=curves,
solver=solver,
settlement=settlement,
forward=forward,
)
c = _parse_curves(self, curves, solver)
fra_scalar = self._fra_rate_scalar(
leg2_rate_curve=_get_curve("leg2_rate_curve", True, True, *c)
)
npv = super().npv(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=local,
settlement=settlement,
forward=forward,
)
if isinstance(npv, dict):
return {k: v * fra_scalar for k, v in npv.items()}
else:
return npv * fra_scalar
def _set_pricing_mid(
self,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> None:
# the test for an unpriced IRS is that its fixed rate is not set.
if isinstance(self.kwargs.leg1["fixed_rate"], NoInput):
# set a fixed rate for the purpose of generic methods NPV will be zero.
mid_market_rate = self.rate(
curves=curves,
solver=solver,
settlement=settlement,
forward=forward,
)
self.leg1.fixed_rate = _dual_float(mid_market_rate)
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
metric_ = _drb(self.kwargs.meta["metric"], metric).lower()
leg2_npv: DualTypes = self.leg2.local_npv(
rate_curve=_get_curve("leg2_rate_curve", True, True, *c),
disc_curve=_get_curve("leg2_disc_curve", False, True, *c),
settlement=settlement,
forward=forward,
)
rate = (
self.leg1.spread(
target_npv=-leg2_npv,
rate_curve=NoInput(0),
disc_curve=_get_curve("disc_curve", False, True, *c),
index_curve=NoInput(0),
settlement=settlement,
forward=forward,
)
/ 100
)
if metric_ == "rate":
return rate
else:
raise ValueError("`metric` must be in {'rate'}.")
def analytic_delta(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
leg: int = 1,
) -> DualTypes | dict[str, DualTypes]:
c = _parse_curves(self, curves, solver)
fra_scalar = self._fra_rate_scalar(
leg2_rate_curve=_get_curve("leg2_rate_curve", True, True, *c)
)
a_delta = super().analytic_delta(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=local,
settlement=settlement,
forward=forward,
leg=leg,
)
if isinstance(a_delta, dict):
return {k: v * fra_scalar for k, v in a_delta.items()}
else:
return a_delta * fra_scalar
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
df = self._local_analytic_rate_fixings_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
)
c = _parse_curves(self, curves, solver)
return df * self._fra_rate_scalar(
leg2_rate_curve=_get_curve("leg2_rate_curve", True, True, *c)
)
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
df = super()._cashflows_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
c = _parse_curves(self, curves, solver)
scalar = self._try_fra_rate_scalar(
leg2_rate_curve=_get_curve("leg2_rate_curve", True, True, *c)
)
headers = [
defaults.headers["cashflow"],
defaults.headers["npv"],
defaults.headers["npv_fx"],
]
for header in headers:
if scalar.is_err:
df[header] = None
else:
df[header] = df[header] * _dual_float(scalar.unwrap())
return df
================================================
FILE: python/rateslib/instruments/fx_forward.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.data.fixings import _fx_index_set_cross, _get_fx_index
from rateslib.enums.generics import NoInput, _drb
from rateslib.fx import FXForwards, FXRates, forward_fx
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.kwargs import _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_get_fx_maybe_from_solver,
_parse_curves,
_Vol,
)
from rateslib.legs import CustomLeg
from rateslib.periods import Cashflow
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CurvesT_,
DataFrame,
DualTypes,
DualTypes_,
FXForwards_,
Sequence,
Solver_,
VolT_,
_BaseLeg,
datetime,
datetime_,
str_,
)
class FXForward(_BaseInstrument):
"""
A dated *FX exchange* composing two
:class:`~rateslib.legs.CustomLeg`
of individual :class:`~rateslib.periods.Cashflow` of different currencies.
.. rubric:: Examples
A sold EURUSD *FX forward* at 1.165 expressed in $10mm.
.. ipython:: python
:suppress:
from datetime import datetime as dt
from rateslib.instruments import FXForward
.. ipython:: python
fxfwd = FXForward(
settlement=dt(2022, 2, 24),
pair="eurusd",
leg2_notional=10e6,
fx_rate=1.165
)
fxfwd.cashflows()
.. rubric:: Pricing
An *FX Forward* requires a *disc curve* and a *leg2 disc curve* to discount the cashflows
of the respective currencies (typically with the same collateral definition).
The following input formats are allowed:
.. code-block:: python
curves = [disc_curve, leg2_disc_curve] # two curves are applied in the given order
curves = [None, disc_curve, None, leg2_disc_curve] # four curves applied to each leg
curves = {"disc_curve": disc_curve, "leg2_disc_curve": leg2_disc_curve} # dict form is explicit
.. role:: red
.. role:: green
Parameters
----------
settlement : datetime, :red:`required`
The date of the currency exchange.
pair: FXIndex, str, :red:`required`
The currency pair of the exchange, e.g. "eurusd", using 3-digit iso codes.
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
To define the notional of the trade in units of LHS pair use ``notional``.
leg2_notional : float, Dual, Dual2, Variable, :green:`optional (negatively inherited from leg1)`
To define the notional of the trade in units of RHS pair use ``leg2_notional``.
Only one of ``notional`` or ``leg2_notional`` can be specified.
fx_rate : float, :green:`optional`
The FX rate of ``pair`` defining the transaction price. If not given, set at pricing.
curves : Curve, LineCurve, str or list of such, :green:`optional`
For *FXExchange* only discounting curves are required in each currency and not rate
forecasting curves.
The signature should be: `[None, eur_curve, None, usd_curve]` for a "eurusd" pair.
""" # noqa: E501
_rate_scalar = 1.0
@property
def leg1(self) -> CustomLeg:
"""The :class:`~rateslib.legs.CustomLeg` of the *Instrument*."""
return self._leg1
@property
def leg2(self) -> CustomLeg:
"""The :class:`~rateslib.legs.CustomLeg` of the *Instrument*."""
return self._leg2
@property
def legs(self) -> Sequence[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
An FXExchange requires 2 curves; a disc_curve and leg2_disc_curve.
When given as 2 elements the first is treated as the rate curve and the 2nd as disc curve.
"""
if isinstance(curves, NoInput):
return _Curves()
elif isinstance(curves, dict):
return _Curves(
disc_curve=curves.get("disc_curve", NoInput(0)),
leg2_disc_curve=_drb(
curves.get("disc_curve", NoInput(0)),
curves.get("leg2_disc_curve", NoInput(0)),
),
)
elif isinstance(curves, list | tuple):
if len(curves) == 2:
return _Curves(
disc_curve=curves[0],
leg2_disc_curve=curves[1],
)
elif len(curves) == 4:
return _Curves(
disc_curve=curves[1],
leg2_disc_curve=curves[3],
)
else:
raise ValueError(
f"{type(self).__name__} requires 2 curve types. Got {len(curves)}."
)
elif isinstance(curves, _Curves):
return curves
else: # `curves` is just a single input which is copied across all curves
raise ValueError(f"{type(self).__name__} requires 2 curve types. Got 1.")
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
def __init__(
self,
settlement: datetime,
pair: str,
fx_rate: DualTypes_ = NoInput(0),
notional: DualTypes_ = NoInput(0),
leg2_notional: DualTypes_ = NoInput(0),
curves: CurvesT_ = NoInput(0),
):
# FXForwards are physically settled so do not allow WMR cross methodology to impact
# forecast rates for FXFixings.
pair_ = _fx_index_set_cross(_get_fx_index(pair), allow_cross=False)
if isinstance(notional, NoInput) and isinstance(leg2_notional, NoInput):
notional = defaults.notional
elif not isinstance(notional, NoInput) and not isinstance(leg2_notional, NoInput):
raise ValueError("Only one of `notional` and `leg2_notional` can be given.")
user_args = dict(
settlement=settlement,
currency=pair_.pair[:3],
leg2_currency=pair_.pair[3:6],
notional=notional,
leg2_notional=leg2_notional,
curves=self._parse_curves(curves),
)
instrument_args = dict(
leg2_settlement=NoInput.inherit,
pair=NoInput(0),
leg2_pair=NoInput(0),
fx_fixings=NoInput(0),
leg2_fx_fixings=NoInput(0),
vol=_Vol(),
) # these are hard coded arguments specific to this instrument
default_args = dict(
notional=defaults.notional,
)
self._kwargs = _KWArgs(
spec=NoInput(0),
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=["curves", "vol"],
)
# allocate arguments to correct legs for non-deliverability
if isinstance(notional, NoInput):
# both notionals cannot be NoInput so leg2_notional is assumed given
self.kwargs.leg1["notional"] = -1.0 * self.kwargs.leg2["notional"]
self.kwargs.leg1["pair"] = pair_
self.kwargs.leg1["fx_fixings"] = fx_rate
else: # notional set on leg1
self.kwargs.leg2["notional"] = -1.0 * self.kwargs.leg1["notional"]
self.kwargs.leg2["pair"] = pair_
self.kwargs.leg2["fx_fixings"] = fx_rate
self._leg1 = CustomLeg(
periods=[
Cashflow(
currency=self.kwargs.leg1["currency"],
notional=-1.0 * self.kwargs.leg1["notional"],
payment=self.kwargs.leg1["settlement"],
pair=self.kwargs.leg1["pair"],
fx_fixings=self.kwargs.leg1["fx_fixings"],
),
]
)
self._leg2 = CustomLeg(
periods=[
Cashflow(
currency=self.kwargs.leg2["currency"],
notional=-1.0 * self.kwargs.leg2["notional"],
payment=self.kwargs.leg2["settlement"],
pair=self.kwargs.leg2["pair"],
fx_fixings=self.kwargs.leg2["fx_fixings"],
)
]
)
self._legs = [self._leg1, self._leg2]
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return super()._cashflows_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
fx_ = _get_fx_maybe_from_solver(solver=solver, fx=fx)
if isinstance(fx_, FXForwards | FXRates):
imm_fx: DualTypes = fx_.rate(self.kwargs.leg2["pair"])
elif isinstance(fx_, NoInput):
raise ValueError(
"`fx` must be supplied to price FXExchange object.\n"
"Note: it can be attached to, and then fetched from, a Solver.",
)
else:
# this is a mypy error since FXForwards is a case above
imm_fx = fx_ # type: ignore[assignment]
_: DualTypes = forward_fx(
date=self.kwargs.leg1["settlement"],
curve_domestic=_get_curve("disc_curve", False, False, *c),
curve_foreign=_get_curve("leg2_disc_curve", False, False, *c),
fx_rate=imm_fx,
)
return _
================================================
FILE: python/rateslib/instruments/fx_options/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from rateslib.instruments.fx_options.brokerfly import FXBrokerFly
from rateslib.instruments.fx_options.call_put import FXCall, FXPut, _BaseFXOption
from rateslib.instruments.fx_options.risk_reversal import FXRiskReversal, _BaseFXOptionStrat
from rateslib.instruments.fx_options.straddle import FXStraddle
from rateslib.instruments.fx_options.strangle import FXStrangle
from rateslib.instruments.fx_options.vol_value import FXVolValue
__all__ = [
"FXCall",
"FXPut",
"FXRiskReversal",
"FXStraddle",
"FXStrangle",
"FXBrokerFly",
"FXVolValue",
"_BaseFXOption",
"_BaseFXOptionStrat",
]
================================================
FILE: python/rateslib/instruments/fx_options/brokerfly.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.fx_options.risk_reversal import _BaseFXOptionStrat
from rateslib.instruments.fx_options.straddle import FXStraddle
from rateslib.instruments.fx_options.strangle import FXStrangle
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
CalInput,
CurvesT_,
DualTypes,
DualTypes_,
FXForwards_,
Sequence,
Solver_,
VolStrat_,
VolT_,
bool_,
datetime,
datetime_,
int_,
str_,
)
class FXBrokerFly(_BaseFXOptionStrat):
"""
An *FX BrokerFly* :class:`~rateslib.instruments._BaseFXOptionStrat`.
A *BrokerFly* is composed of a :class:`~rateslib.instruments.FXStrangle`
and a :class:`~rateslib.instruments.FXStraddle`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import FXBrokerFly, Curve, FXForwards, FXDeltaVolSmile, FXRates, dt
.. ipython:: python
fxbf = FXBrokerFly(
expiry="3m",
strike=[["-10d", "10d"], "atm_delta"],
eval_date=dt(2020, 1, 1),
spec="eurusd_call",
notional=[1000000.0, None], # <- straddle notional is derived from vega neutral
)
fxbf.cashflows()
.. rubric:: Pricing
The pricing mirrors that for an :class:`~rateslib.instruments.FXCall`.
All options use the same ``curves``. Allowable inputs are:
.. code-block:: python
curves = [rate_curve, disc_curve] # two curves are applied in the given order
curves = {"rate_curve": rate_curve, "disc_curve": disc_curve} # dict form is explicit
Any *FXOption* also requires an :class:`~rateslib.fx.FXForwards` as input to the ``fx``
argument.
A ``vol`` argument must be provided to each *Instrument*. This can either be a single
value universally used for all, or an individual item as part of a sequence. Allowed
inputs are:
.. code-block:: python
vol = 12.0 | vol_obj # a single item universally applied
vol = [[13.1, 13.4], 12.0] # values for Strangle and Straddle respectively
*BrokerFlys* inherit the peculiarities of an :class:`~rateslib.instruments.FXStrangle`.
If the notional is not set on the *FXStraddle* then a calculation will be performed to derive a
notional that yields a vega neutral strategy.
The following pricing ``metric`` are available, with examples:
.. ipython:: python
eur = Curve({dt(2020, 1, 1): 1.0, dt(2021, 1, 1): 0.98})
usd = Curve({dt(2020, 1, 1): 1.0, dt(2021, 1, 1): 0.96})
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.10}, settlement=dt(2020, 1, 3)),
fx_curves={"eureur": eur, "eurusd": eur, "usdusd": usd},
)
fxvs = FXDeltaVolSmile(
nodes={0.25: 11.0, 0.5: 9.8, 0.75: 10.7},
expiry=dt(2020, 4, 1),
eval_date=dt(2020, 1, 1),
delta_type="forward",
)
- **'single_vol'**: this is the *'single_vol'* price of the *FXStrangle* minus the *'single_vol'*
price of the *FXStraddle*. **'vol'** is an alias for single vol and returns the same value.
.. ipython:: python
fxbf.rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="single_vol")
fxbf.rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="vol")
- **'premium'**: the summed cash premium amount, of both options, applicable to the 'payment'
date. If *FXStrangle* strikes are given as delta percentages then they are first determined
using the *'single_vol'*.
.. ipython:: python
fxbf.rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="premium")
- **'pips_or_%'**: if the premium currency is LHS of ``pair`` this is a % of notional, whilst if
the premium currency is RHS this gives a number of pips of the FX rate. Summed over both
options. For *FXStrangle* strikes set with delta percentages these are first determined using the
'single_vol'.
.. ipython:: python
fxbf.rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="pips_or_%")
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define **fx option** and generalised **settlement** parameters.
expiry: datetime, str, :red:`required`
The expiry of the option. If given in string tenor format, e.g. "1M" requires an
``eval_date``. See **Notes**.
strike: 2-tuple of float, Variable, str, :red:`required`
The strikes of the *FXStrangle* and the *FXStraddle* in order.
pair: str, :red:`required`
The currency pair for the FX rate which settles the option, in 3-digit codes, e.g. "eurusd".
May be included as part of ``spec``.
notional: 2-tuple of float or None, :green:`optional (set by 'defaults')`
The notional amount of each option strategy expressed in units of LHS of ``pair``.
If the straddle notional is given as None then it will be determined from the strangle
notional under a vega neutral approach.
eval_date: datetime, :green:`optional`
Only required if ``expiry`` is given as string tenor.
Should be entered as today (also called horizon) and **not** spot. Spot is derived
from ``delivery_lag`` and ``calendar``.
modifier : str, :green:`optional (set by 'defaults')`
The modification rule, in {"F", "MF", "P", "MP"} for date evaluation.
eom: bool, :green:`optional (set by 'defaults')`
Whether to use end-of-month rolls when expiry is given as a month or year tenor.
calendar : calendar or str, :green:`optional`
The holiday calendar object to use. If str, looks up named calendar from
static data.
delivery_lag: int, :green:`optional (set by 'defaults')`
The number of business days after expiry that the physical settlement of the FX
exchange occurs.
payment_lag: int or datetime, :green:`optional (set by 'defaults')`
The number of business days after expiry to pay premium. If a *datetime* is given this will
set the premium date explicitly.
premium_ccy: str, :green:`optional (set as RHS of 'pair')`
The currency in which the premium is paid. Can *only* be one of the two currencies
in `pair`.
delta_type: FXDeltaMethod, str, :green:`optional (set by 'defaults')`
When deriving strike from delta use the equation associated with *'spot'* or *'forward'*
delta. If premium currency is LHS of ``pair`` then this will produce
**premium adjusted** delta values. If the `premium_ccy` is RHS of ``pair`` then delta values
are **unadjusted**.
.. note::
The following define additional **rate** parameters.
premium: 2-tuple of 2-tuple float, :green:`optional`
The amount paid for each option in each strategy in order. If not given assumes unpriced
*Options* and sets this as mid-market premium during pricing.
option_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of each option's :class:`~rateslib.data.fixings.FXFixing`. If a scalar, is used
directly. If a string identifier, links to the central ``fixings`` object and data loader.
.. note::
The following are **meta parameters**.
metric : str, :green:`optional (set as "pips_or_%")`
The pricing metric returned by the ``rate`` method. See **Pricing**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
vol: str, Smile, Surface, float, Dual, Dual2, Variable, Sequence
Pricing objects passed directly to the *Instrument's* methods' ``vol`` argument. See
**Pricing**.
spec : str, optional
An identifier to pre-populate many field with conventional values. See
:ref:`here` for more info and available values.
Notes
-----
Buying a *Straddle* equates to buying a :class:`~rateslib.instruments.FXPut`
and buying a :class:`~rateslib.instruments.FXCall` with the same strike. The ``notional`` of
each are the same, and should be entered as a single value.
When supplying ``strike`` as a string delta the strike will be determined at price time from
the provided volatility.
This class is an alias constructor for an
:class:`~rateslib.instruments._FXOptionStrat` where the number
of options and their definitions and nominals have been specifically overloaded for
convenience.
""" # noqa: E501
_rate_scalar = 100.0
def __init__(
self,
expiry: datetime | str,
strike: tuple[tuple[DualTypes | str, DualTypes | str], DualTypes | str],
pair: str_ = NoInput(0),
*,
notional: tuple[DualTypes_, DualTypes_] | NoInput = NoInput(0),
eval_date: datetime | NoInput = NoInput(0),
calendar: CalInput = NoInput(0),
modifier: str_ = NoInput(0),
eom: bool_ = NoInput(0),
delivery_lag: int_ = NoInput(0),
premium: tuple[tuple[DualTypes_, DualTypes_], tuple[DualTypes_, DualTypes_]] = (
(NoInput(0), NoInput(0)),
(NoInput(0), NoInput(0)),
),
premium_ccy: str_ = NoInput(0),
payment_lag: str | datetime_ = NoInput(0),
option_fixings: DualTypes_ = NoInput(0),
delta_type: str_ = NoInput(0),
metric: str_ = NoInput(0),
curves: CurvesT_ = NoInput(0),
vol: VolT_ = NoInput(0),
spec: str_ = NoInput(0),
) -> None:
vol_ = self._parse_vol(vol)
if isinstance(notional, NoInput):
notional_: tuple[DualTypes_, DualTypes_] = (defaults.notional, NoInput(0))
elif isinstance(notional, tuple | list):
notional_ = notional
notional_[1] = NoInput(0) if notional_[1] is None else notional_[1] # type: ignore[index]
else:
raise ValueError("FXBrokerFly `notional` must be a 2 element sequence if given.")
strategies = [
FXStrangle(
pair=pair,
expiry=expiry,
delivery_lag=delivery_lag,
payment_lag=payment_lag,
calendar=calendar,
modifier=modifier,
eom=eom,
eval_date=eval_date,
strike=strike[0],
notional=notional_[0],
option_fixings=option_fixings[0]
if isinstance(option_fixings, tuple | list)
else option_fixings,
delta_type=delta_type,
premium=premium[0],
premium_ccy=premium_ccy,
curves=curves,
vol=vol_[0], # type: ignore[arg-type]
metric=NoInput(0),
spec=spec,
),
FXStraddle(
pair=pair,
expiry=expiry,
delivery_lag=delivery_lag,
payment_lag=payment_lag,
calendar=calendar,
modifier=modifier,
eom=eom,
eval_date=eval_date,
strike=strike[1],
notional=notional_[1],
option_fixings=option_fixings[1]
if isinstance(option_fixings, tuple | list)
else option_fixings,
delta_type=delta_type,
premium=premium[1],
premium_ccy=premium_ccy,
curves=curves,
vol=vol_[1], # type: ignore[arg-type]
metric=NoInput(0),
spec=spec,
),
]
super().__init__(
options=strategies,
rate_weight=[1.0, 1.0],
rate_weight_vol=[1.0, -1.0],
metric=_drb("single_vol", metric),
curves=curves,
vol=vol_,
)
self.kwargs.leg1["notional"] = notional_
@property
def instruments(self) -> tuple[FXStrangle, FXStraddle]:
"""A tuple containing the :class:`~rateslib.instruments.FXStrangle` and
:class:`~rateslib.instruments.FXStraddle` of the *Fly*."""
return self.kwargs.meta["instruments"] # type: ignore[no-any-return]
@classmethod
def _parse_vol(cls, vol: VolStrat_) -> tuple[VolStrat_, VolStrat_]: # type: ignore[override]
if not isinstance(vol, list | tuple):
vol = (vol, vol)
return (FXStrangle._parse_vol(vol[0]), FXStrangle._parse_vol(vol[1]))
def _maybe_set_vega_neutral_notional(
self,
curves: CurvesT_,
solver: Solver_,
fx: FXForwards_,
vol: tuple[VolStrat_, VolStrat_],
metric: str_,
) -> None:
"""
Calculate the vega of the strangle and then set the notional on the straddle
to yield a vega neutral strategy.
Notional is set as a fixed quantity, collapsing any AD sensitivities in accordance
with the general principle for determining risk sensitivities of unpriced instruments.
This is only applied if ``metric`` is a cash based quantity, {"pips_or_%", "premium"}
"""
if isinstance(self.kwargs.leg1["notional"][1], NoInput) and metric in [
"pips_or_%",
"premium",
]:
self.instruments[0]._rate(
curves,
solver,
fx,
base=NoInput(0),
vol=vol[0],
metric="single_vol",
record_greeks=True,
forward=NoInput(0),
settlement=NoInput(0),
)
self._greeks["straddle"] = self.instruments[1].analytic_greeks(
curves,
solver,
fx,
vol=vol[1],
)
strangle_vega = self._greeks["strangle"]["market_vol"]["FXPut"]["vega"]
strangle_vega += self._greeks["strangle"]["market_vol"]["FXCall"]["vega"]
straddle_vega = self._greeks["straddle"]["vega"]
scalar = strangle_vega / straddle_vega
self.instruments[1].kwargs.leg1["notional"] = _dual_float(
self.instruments[0].kwargs.leg1["notional"] * -scalar,
)
self.instruments[1]._set_notionals(self.instruments[1].kwargs.leg1["notional"])
# BrokerFly -> Strangle -> FXPut -> FXPutPeriod
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolStrat_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
# Get curves and vol
vol_ = tuple(
[
_drb(d, b)
for (d, b) in zip(self.kwargs.meta["vol"], self._parse_vol(vol), strict=True)
]
)
_curves = self._parse_curves(curves)
metric_ = _drb(self.kwargs.meta["metric"], metric).lower()
self._maybe_set_vega_neutral_notional(_curves, solver, fx, vol_, metric_)
if metric_ == "pips_or_%":
straddle_scalar = (
self.instruments[1].instruments[0]._option.settlement_params.notional
/ self.instruments[0].instruments[0]._option.settlement_params.notional
)
weights: Sequence[DualTypes] = [1.0, straddle_scalar]
elif metric_ == "premium":
weights = self.kwargs.meta["rate_weight"]
else:
weights = self.kwargs.meta["rate_weight_vol"]
_: DualTypes = 0.0
for option_strat, vol__, weight in zip(self.instruments, vol_, weights, strict=False):
_ += (
option_strat.rate(
curves=_curves,
solver=solver,
fx=fx,
base=base,
vol=vol__,
metric=metric_,
forward=forward,
settlement=settlement,
)
* weight
)
return _
def analytic_greeks(
self,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolStrat_ = NoInput(0),
) -> dict[str, Any]:
# implicitly call set_pricing_mid for unpriced parameters
self.rate(curves=curves, solver=solver, fx=fx, base=NoInput(0), vol=vol, metric="pips_or_%")
vol_ = self._parse_vol(vol)
# TODO: this meth can be optimised because it calculates greeks at multiple times in frames
g_grks = self.instruments[0].analytic_greeks(curves, solver, fx, vol_[0])
d_grks = self.instruments[1].analytic_greeks(curves, solver, fx, vol_[1])
sclr = abs(
self.instruments[1].instruments[0]._option.settlement_params.notional
/ self.instruments[0].instruments[0]._option.settlement_params.notional,
)
_unit_attrs = ["delta", "gamma", "vega", "vomma", "vanna", "_kega", "_kappa", "__bs76"]
_: dict[str, Any] = {}
for attr in _unit_attrs:
_[attr] = g_grks[attr] - sclr * d_grks[attr]
_notional_attrs = [
f"delta_{self.kwargs.leg1['pair'].pair[:3]}",
f"gamma_{self.kwargs.leg1['pair'].pair[:3]}_1%",
f"vega_{self.kwargs.leg1['pair'].pair[3:]}",
]
for attr in _notional_attrs:
_[attr] = g_grks[attr] - d_grks[attr]
_.update(
{
"__class": "_FXOptionStrat",
"__strategies": {"FXStrangle": g_grks, "FXStraddle": d_grks},
"__delta_type": g_grks["__delta_type"],
"__notional": self.kwargs.leg1["notional"],
},
)
return _
def _plot_payoff(
self,
window: tuple[float, float] | NoInput = NoInput(0), # noqa: A002
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolStrat_ = NoInput(0),
) -> tuple[Any, Any]:
vol_ = self._parse_vol(vol)
self._maybe_set_vega_neutral_notional(curves, solver, fx, vol_, metric="pips_or_%")
return super()._plot_payoff(window, curves, solver, fx, vol_)
================================================
FILE: python/rateslib/instruments/fx_options/call_put.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from abc import ABCMeta
from dataclasses import dataclass
from datetime import datetime
from typing import TYPE_CHECKING, Any
from pandas import DataFrame
from rateslib import defaults
from rateslib.curves._parsers import _validate_obj_not_no_input
from rateslib.data.fixings import _fx_index_set_cross, _get_fx_index
from rateslib.default import PlotOutput, plot
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import FXOptionMetric, _get_fx_delta_type
from rateslib.instruments.protocols import _BaseInstrument, _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_get_fx_forwards_maybe_from_solver,
_get_fx_vol,
_parse_curves,
_parse_vol,
_Vol,
)
from rateslib.legs import CustomLeg
from rateslib.periods import Cashflow, FXCallPeriod, FXPutPeriod
from rateslib.periods.utils import _validate_fx_as_forwards
from rateslib.scheduling.frequency import _get_fx_expiry_and_delivery_and_payment
from rateslib.volatility import FXDeltaVolSmile, FXDeltaVolSurface, FXSabrSmile, FXSabrSurface
from rateslib.volatility.ir import _BaseIRCube, _BaseIRSmile
if TYPE_CHECKING:
from typing import NoReturn # pragma: no cover
import numpy as np # pragma: no cover
from rateslib.local_types import ( # pragma: no cover
FX_,
Any,
CalInput,
CurvesT_,
DualTypes,
DualTypes_,
FXForwards,
FXForwards_,
FXIndex,
FXVol_,
Sequence,
Solver_,
VolT_,
_BaseCurve,
_BaseCurve_,
_BaseFXOptionPeriod,
_BaseLeg,
_FXVolOption_,
bool_,
datetime_,
float_,
int_,
str_,
)
@dataclass
class _PricingMetrics:
"""None elements are used as flags to indicate an element is not yet set."""
vol: _FXVolOption_ | None
k: DualTypes | None
delta_index: DualTypes | None
spot: datetime
t_e: DualTypes | None
f_d: DualTypes
class _BaseFXOption(_BaseInstrument, metaclass=ABCMeta):
"""
Abstract base class for implementing *FXOptions*.
See :class:`~rateslib.instruments.FXCall` and :class:`~rateslib.instruments.FXPut`.
"""
_rate_scalar: float = 1.0
_pricing: _PricingMetrics
@property
def leg1(self) -> CustomLeg:
"""The :class:`~rateslib.legs.CustomLeg` of the *Instrument* containing the
:class:`~rateslib.periods.FXOptionPeriod`."""
return self._leg1
@property
def leg2(self) -> CustomLeg:
"""The :class:`~rateslib.legs.CustomLeg` of the *Instrument* containing the
premium :class:`~rateslib.periods.Cashflow`."""
return self._leg2
@property
def legs(self) -> Sequence[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
@property
def _option(self) -> _BaseFXOptionPeriod:
return self.leg1.periods[0] # type: ignore[return-value]
@property
def _premium(self) -> Cashflow:
return self.leg2.periods[0] # type: ignore[return-value]
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
An FXOption has two curve requirements:
The *rate curve* is the curve for the LHS of ``pair`` which is the curve typically
used to convert between spot and forward delta types. However, if the premium currency is
in the LHS side currency this cure will also be used as a discount curve for that
payment.
The *disc curve* is the curve for the RHS side of ``pair``.
"""
if isinstance(curves, NoInput):
return _Curves()
if isinstance(curves, dict):
rate_curve = curves.get("rate_curve", NoInput(0))
disc_curve = curves.get("disc_curve", NoInput(0))
if self._premium.settlement_params.currency == self.kwargs.leg1["pair"][:3]:
leg2_disc_curve = rate_curve
else:
leg2_disc_curve = disc_curve
return _Curves(
rate_curve=rate_curve,
disc_curve=disc_curve,
leg2_disc_curve=leg2_disc_curve,
)
elif isinstance(curves, list | tuple) and len(curves) == 2:
rate_curve = curves[0] # type: ignore[assignment]
disc_curve = curves[1] # type: ignore[assignment]
if self.kwargs.leg2["premium_ccy"] == self.kwargs.leg1["pair"].pair[:3]:
leg2_disc_curve = rate_curve
else:
leg2_disc_curve = disc_curve
return _Curves(
rate_curve=rate_curve,
disc_curve=disc_curve,
leg2_disc_curve=leg2_disc_curve,
)
elif isinstance(curves, _Curves):
return curves
else:
raise ValueError(f"{type(self).__name__} requires 2 curve types.")
@classmethod
def _parse_vol(cls, vol: VolT_) -> _Vol:
"""
FXoptions requires only a single FXVolObj or a scalar.
"""
if isinstance(vol, _Vol):
return vol
elif isinstance(vol, _BaseIRSmile | _BaseIRCube):
raise TypeError("`vol` cannot be an IR type vol object and must be FX type vol object.")
else:
return _Vol(fx_vol=vol)
def __init__(
self,
expiry: datetime | str,
strike: DualTypes | str,
pair: FXIndex | str_ = NoInput(0),
*,
notional: DualTypes_ = NoInput(0),
eval_date: datetime | NoInput = NoInput(0),
calendar: CalInput = NoInput(0),
modifier: str_ = NoInput(0),
eom: bool_ = NoInput(0),
delivery_lag: int_ = NoInput(0),
premium: DualTypes_ = NoInput(0),
premium_ccy: str_ = NoInput(0),
payment_lag: str | datetime_ = NoInput(0),
option_fixings: DualTypes_ = NoInput(0),
delta_type: str_ = NoInput(0),
metric: str_ = NoInput(0),
curves: CurvesT_ = NoInput(0),
vol: VolT_ = NoInput(0),
spec: str_ = NoInput(0),
call: bool = True,
):
user_args = dict(
pair=pair,
expiry=expiry,
notional=notional,
strike=strike,
calendar=calendar,
eom=eom,
modifier=modifier,
delta_type=delta_type,
option_fixings=option_fixings,
delivery_lag=delivery_lag,
leg2_payment_lag=payment_lag,
leg2_premium=premium,
leg2_premium_ccy=premium_ccy,
metric=metric,
curves=curves,
vol=self._parse_vol(vol),
)
# instrument_args: dict[str, Any] = dict()
default_args = dict(
delta_type=defaults.fx_delta_type,
notional=defaults.notional,
modifier=defaults.modifier,
metric="pips_or_%",
delivery_lag=defaults.fx_delivery_lag,
leg2_payment_lag=defaults.payment_lag,
eom=defaults.eom_fx,
)
self._kwargs = _KWArgs(
user_args=user_args,
default_args=default_args,
spec=spec,
meta_args=["curves", "vol", "metric"],
)
# This configuration here assumes that the options are physically settled, so do not
# allow WMR cross methodology to impact forecast rates for FXFixings.
self.kwargs.leg1["pair"] = _fx_index_set_cross(
_get_fx_index(self.kwargs.leg1["pair"]),
allow_cross=False,
)
# apply the parse knowing the premium currency
self._kwargs.leg2["premium_ccy"] = _drb(
self.kwargs.leg1["pair"].pair[3:], self.kwargs.leg2["premium_ccy"]
)
self._kwargs.meta["curves"] = self._parse_curves(self._kwargs.meta["curves"])
# determine the `expiry` and `delivery` as datetimes if derived from other combinations
(self.kwargs.leg1["expiry"], self.kwargs.leg1["delivery"], self.kwargs.leg2["payment"]) = (
_get_fx_expiry_and_delivery_and_payment(
eval_date=eval_date,
expiry=self.kwargs.leg1["expiry"],
delivery_lag=self.kwargs.leg1["delivery_lag"],
calendar=self.kwargs.leg1["calendar"],
modifier=self.kwargs.leg1["modifier"],
eom=self.kwargs.leg1["eom"],
payment_lag=self.kwargs.leg2["payment_lag"],
)
)
if self.kwargs.leg2["premium_ccy"] not in [
self.kwargs.leg1["pair"].pair[:3],
self.kwargs.leg1["pair"].pair[3:],
]:
raise ValueError(
f"`premium_ccy`: '{self.kwargs.leg2['premium_ccy']}' must be one of option "
f"currency pair: '{self.kwargs.leg1['pair'].pair}'.",
)
elif self.kwargs.leg2["premium_ccy"] == self.kwargs.leg1["pair"].pair[3:]:
self.kwargs.meta["metric_period"] = "pips"
self.kwargs.meta["delta_method"] = _get_fx_delta_type(self.kwargs.leg1["delta_type"])
else:
self.kwargs.meta["metric_period"] = "percent"
self.kwargs.meta["delta_method"] = _get_fx_delta_type(
self.kwargs.leg1["delta_type"] + "_pa"
)
self._validate_strike_and_premiums()
self._leg1 = CustomLeg(
[
FXCallPeriod( # type: ignore[abstract]
pair=self.kwargs.leg1["pair"],
expiry=self.kwargs.leg1["expiry"],
delivery=self.kwargs.leg1["delivery"],
strike=(
NoInput(0)
if isinstance(self.kwargs.leg1["strike"], str)
else self.kwargs.leg1["strike"]
),
notional=self.kwargs.leg1["notional"],
option_fixings=self.kwargs.leg1["option_fixings"],
delta_type=self.kwargs.meta["delta_method"],
metric=self.kwargs.meta["metric_period"],
)
if call
else FXPutPeriod( # type: ignore[abstract]
pair=self.kwargs.leg1["pair"],
expiry=self.kwargs.leg1["expiry"],
delivery=self.kwargs.leg1["delivery"],
strike=(
NoInput(0)
if isinstance(self.kwargs.leg1["strike"], str)
else self.kwargs.leg1["strike"]
),
notional=self.kwargs.leg1["notional"],
option_fixings=self.kwargs.leg1["option_fixings"],
delta_type=self.kwargs.meta["delta_method"],
metric=self.kwargs.meta["metric_period"],
)
]
)
self._leg2 = CustomLeg(
[
Cashflow(
notional=_drb(0.0, self.kwargs.leg2["premium"]),
payment=self.kwargs.leg2["payment"],
currency=self.kwargs.leg2["premium_ccy"],
),
]
)
self._legs = [self._leg1, self._leg2]
def __repr__(self) -> str:
return f""
def _validate_strike_and_premiums(self) -> None:
if isinstance(self.kwargs.leg1["strike"], str) and not isinstance(
self.kwargs.leg2["premium"], NoInput
):
raise ValueError(
"FXOption with string delta as `strike` cannot be initialised with a known "
"`premium`.\n"
"Either set `strike` as a defined numeric value, or remove the `premium`.",
)
def _set_strike_and_vol(
self,
rate_curve: _BaseCurve_,
disc_curve: _BaseCurve_,
fx: FX_,
vol: _FXVolOption_,
) -> None:
"""
Set the strike, if necessary, and determine pricing metrics from the volatility objects.
The strike for the *OptionPeriod* is either; string or numeric.
If it is string, then a numeric strike must be determined with an associated vol.
If it is numeric then the volatility must be determined for the given strike.
Pricing elements are captured and cached so they can be used later by subsequent methods.
"""
fx_ = _validate_fx_as_forwards(fx)
_pricing = _PricingMetrics(
vol=None,
k=None,
delta_index=None,
spot=fx_.pairs_settlement[self.kwargs.leg1["pair"].pair],
t_e=None,
f_d=fx_.rate(self.kwargs.leg1["pair"], self.kwargs.leg1["delivery"]),
)
if isinstance(vol, FXDeltaVolSmile | FXDeltaVolSurface | FXSabrSmile | FXSabrSurface):
eval_date = vol.meta.eval_date
else:
_ = _validate_obj_not_no_input(disc_curve, "disc_curve")
eval_date = _.nodes.initial
_pricing.vol = vol # Not a vol model so set directly
_pricing.t_e = self._option.fx_option_params.time_to_expiry(eval_date)
self._update_pricing_for_strike(
strike=self.kwargs.leg1["strike"],
fx=fx_,
pricing=_pricing,
vol=vol,
rate_curve=rate_curve,
)
# _PricingMetrics.k is completely specified
assert _pricing.k is not None # noqa: S101
# Review section in book regarding Hyper-parameters and Solver interaction
self._option.fx_option_params.strike = _pricing.k
self._pricing = _pricing
# self._option_periods[0].strike = _dual_float(self._pricing.k)
def _update_pricing_for_strike(
self,
strike: str | DualTypes,
fx: FXForwards,
pricing: _PricingMetrics,
vol: _FXVolOption_,
rate_curve: _BaseCurve_,
) -> None:
"""Update the _PricingMetrics object to populate values."""
if not isinstance(strike, str):
# then strike is a numeric quantity
pricing.k = strike
else:
# then strike is a string which must be converted to a numeric value
strike = strike.lower()
if strike == "atm_forward":
pricing.k = fx.rate(self.kwargs.leg1["pair"], self.kwargs.leg1["delivery"])
elif strike == "atm_spot":
pricing.k = fx.rate(self.kwargs.leg1["pair"], pricing.spot)
elif strike == "atm_delta":
rc = _validate_obj_not_no_input(rate_curve, "rate_curve")
pricing.delta_index, pricing.vol, pricing.k = (
self._option._index_vol_and_strike_from_atm(
delta_type=self._option.fx_option_params.delta_type,
vol=_validate_obj_not_no_input(vol, "vol"), # type: ignore[arg-type]
w_deli=rc[self.kwargs.leg1["delivery"]],
w_spot=rc[pricing.spot],
f=fx if isinstance(vol, FXSabrSurface) else pricing.f_d,
t_e=pricing.t_e, # type: ignore[arg-type]
)
)
return None
elif strike[-1] == "d": # representing a delta percentage
rc = _validate_obj_not_no_input(rate_curve, "rate_curve")
pricing.delta_index, pricing.vol, pricing.k = (
self._option._index_vol_and_strike_from_delta(
delta=float(strike[:-1]) / 100.0,
delta_type=self.kwargs.meta["delta_method"],
vol=_validate_obj_not_no_input(vol, "vol"), # type: ignore[arg-type]
w_deli=rc[self.kwargs.leg1["delivery"]],
w_spot=rc[pricing.spot],
f=fx if isinstance(vol, FXSabrSurface) else pricing.f_d,
t_e=pricing.t_e, # type: ignore[arg-type]
)
)
return None
if pricing.vol is None:
# vol is only None if vol_ is a VolObj so can be safely type ignored.
# a numeric vol has already been set on the 'pricing' object.
# then an explicit strike is set so determine the vol from strike, set and return.
rc = _validate_obj_not_no_input(rate_curve, "rate_curve")
pricing.delta_index, pricing.vol, _ = vol.get_from_strike( # type: ignore[union-attr]
k=pricing.k, # type: ignore[arg-type]
f=pricing.f_d if not isinstance(vol, FXSabrSurface) else fx, # type: ignore[arg-type]
expiry=self.kwargs.leg1["expiry"],
z_w=rc[self.kwargs.leg1["delivery"]] / rc[pricing.spot],
)
return None
def _set_premium(
self,
rate_curve: _BaseCurve_,
disc_curve: _BaseCurve_,
fx: FXForwards_,
pricing: _PricingMetrics,
) -> None:
"""
Set an unspecified premium on the Option to be equal to the mid-market premium.
"""
if isinstance(self.kwargs.leg2["premium"], NoInput):
# then set the CashFlow to mid-market
disc_curve_: _BaseCurve = _validate_obj_not_no_input(disc_curve, "disc curve")
rate_curve_: _BaseCurve = _validate_obj_not_no_input(rate_curve, "rate curve")
try:
npv: DualTypes = self._option.npv( # type: ignore[assignment]
rate_curve=rate_curve_,
disc_curve=disc_curve_,
fx=fx,
fx_vol=pricing.vol, # type: ignore[arg-type]
local=False,
forward=self.kwargs.leg2["payment"],
base=self.kwargs.leg2["premium_ccy"],
)
except AttributeError:
raise ValueError(
"`premium` has not been configured for the specified FXOption.\nThis is "
"normally determined at mid-market from the given `curves` and `vol` but "
"in this case these values do not provide a valid calculation. "
"If not required, initialise the "
"FXOption with a `premium` of 0.0, and this will be avoided.",
)
self._premium.settlement_params._notional = _dual_float(npv)
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
rate_curve = _get_curve("rate_curve", False, True, *c)
disc_curve = _get_curve("disc_curve", False, True, *c)
v = _parse_vol(self, vol, solver, False)
fx_vol = _get_fx_vol(True, True, *v)
fx_ = _get_fx_forwards_maybe_from_solver(solver=solver, fx=fx)
self._set_strike_and_vol(rate_curve=rate_curve, disc_curve=disc_curve, fx=fx_, vol=fx_vol)
# Premium is not required for rate and also sets as float
# Review section: "Hyper-parameters and Solver interaction" before enabling.
# self._set_premium(curves, fx)
metric = _drb(self.kwargs.meta["metric"], metric)
if metric in ["vol", "single_vol"]:
return _validate_obj_not_no_input(self._pricing.vol, "vol") # type: ignore[return-value]
_: DualTypes = self._option.rate(
rate_curve=_validate_obj_not_no_input(rate_curve, "curve"),
disc_curve=_validate_obj_not_no_input(disc_curve, "curve"),
fx=fx_,
fx_vol=self._pricing.vol, # type: ignore[arg-type]
forward=self.kwargs.leg2["payment"],
)
if metric == "premium":
if self._option.fx_option_params.metric == FXOptionMetric.Pips:
# is expressed in RHS currency
_ *= self._option.settlement_params.notional / 10000
else: # == "percent"
# is expressed in LHS currency
_ *= self._option.settlement_params.notional / 100
return _
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
c = _parse_curves(self, curves, solver)
rate_curve = _get_curve("rate_curve", False, False, *c)
disc_curve = _get_curve("disc_curve", False, False, *c)
leg2_disc_curve = _get_curve("leg2_disc_curve", False, False, *c)
v = _parse_vol(self, vol, solver, False)
fx_vol = _get_fx_vol(True, True, *v)
fx_ = _get_fx_forwards_maybe_from_solver(solver=solver, fx=fx)
self._set_strike_and_vol(rate_curve=rate_curve, disc_curve=disc_curve, fx=fx_, vol=fx_vol)
self._set_premium(
rate_curve=rate_curve, disc_curve=disc_curve, fx=fx_, pricing=self._pricing
)
if not local:
base_ = _drb(self.legs[0].settlement_params.currency, base)
else:
base_ = base
opt_npv = self._option.npv(
rate_curve=rate_curve,
disc_curve=disc_curve,
fx=fx_,
base=base_,
local=local,
fx_vol=self._pricing.vol, # type: ignore[arg-type]
settlement=settlement,
forward=forward,
)
prem_npv = self._premium.npv(
disc_curve=leg2_disc_curve,
fx=fx_,
base=base_,
local=local,
settlement=settlement,
forward=forward,
)
if local:
return {k: opt_npv.get(k, 0) + prem_npv.get(k, 0) for k in set(opt_npv) | set(prem_npv)} # type:ignore[union-attr, arg-type]
else:
return opt_npv + prem_npv # type: ignore[operator]
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
c = _parse_curves(self, curves, solver)
v = _parse_vol(self, vol, solver, False)
try:
rate_curve = _get_curve("rate_curve", False, True, *c)
disc_curve = _get_curve("disc_curve", False, True, *c)
fx_vol = _get_fx_vol(True, True, *v)
fx_ = _get_fx_forwards_maybe_from_solver(solver=solver, fx=fx)
self._set_strike_and_vol(
rate_curve=rate_curve, disc_curve=disc_curve, fx=fx_, vol=fx_vol
)
self._set_premium(
rate_curve=rate_curve, disc_curve=disc_curve, fx=fx_, pricing=self._pricing
)
except Exception: # noqa: S110
pass # `cashflows` proceed without pricing determined values
return self._cashflows_from_legs(
curves=curves,
solver=solver,
fx=fx,
base=base,
settlement=settlement,
forward=forward,
vol=vol,
)
def analytic_greeks(
self,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: FXVol_ = NoInput(0),
) -> dict[str, Any]:
"""
Return various pricing metrics of the *FX Option*.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import Curve, FXCall, dt, FXForwards, FXRates, FXDeltaVolSmile
.. ipython:: python
eur = Curve({dt(2020, 1, 1): 1.0, dt(2021, 1, 1): 0.98})
usd = Curve({dt(2020, 1, 1): 1.0, dt(2021, 1, 1): 0.96})
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.10}, settlement=dt(2020, 1, 3)),
fx_curves={"eureur": eur, "eurusd": eur, "usdusd": usd},
)
fxvs = FXDeltaVolSmile(
nodes={0.25: 11.0, 0.5: 9.8, 0.75: 10.7},
delta_type="forward",
eval_date=dt(2020, 1, 1),
expiry=dt(2020, 4, 1)
)
fxc = FXCall(
expiry="3m",
strike=1.10,
eval_date=dt(2020, 1, 1),
spec="eurusd_call",
)
fxc.analytic_greeks(fx=fxf, curves=[eur, usd], vol=fxvs)
Parameters
----------
curves: _Curves, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
solver: Solver, :green:`optional`
A :class:`~rateslib.solver.Solver` object containing *Curve*, *Smile*, *Surface*, or
*Cube* mappings for pricing.
fx: FXForwards, :green:`optional`
The :class:`~rateslib.fx.FXForwards` object used for forecasting FX rates, if necessary.
vol: _Vol, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
Returns
-------
dict
"""
return self._analytic_greeks_set_metrics(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
set_metrics=True,
)
def _analytic_greeks_set_metrics(
self,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: FXVol_ = NoInput(0),
set_metrics: bool_ = True,
) -> dict[str, Any]:
"""
Return various pricing metrics of the *FX Option*.
Returns
-------
float, Dual, Dual2
"""
c = _parse_curves(self, curves, solver)
rate_curve = _get_curve("rate_curve", False, False, *c)
disc_curve = _get_curve("disc_curve", False, False, *c)
v = _parse_vol(self, vol, solver, False)
fx_vol = _get_fx_vol(True, True, *v)
fx_ = _get_fx_forwards_maybe_from_solver(solver=solver, fx=fx)
if set_metrics:
self._set_strike_and_vol(
rate_curve=rate_curve, disc_curve=disc_curve, fx=fx_, vol=fx_vol
)
# self._set_premium(curves, fx)
return self._option.analytic_greeks(
rate_curve=rate_curve,
disc_curve=disc_curve,
fx=_validate_fx_as_forwards(fx_),
fx_vol=fx_vol,
premium=NoInput(0),
premium_payment=self.kwargs.leg2["payment"],
)
def _analytic_greeks_reduced(
self,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
base: str_ = NoInput(0),
vol: FXVol_ = NoInput(0),
set_metrics: bool_ = True,
) -> dict[str, Any]:
"""
Return various pricing metrics of the *FX Option*.
"""
c = _parse_curves(self, curves, solver)
rate_curve = _get_curve("rate_curve", False, False, *c)
disc_curve = _get_curve("disc_curve", False, False, *c)
v = _parse_vol(self, vol, solver, False)
fx_vol = _get_fx_vol(True, True, *v)
fx_ = _get_fx_forwards_maybe_from_solver(solver=solver, fx=fx)
if set_metrics:
self._set_strike_and_vol(
rate_curve=rate_curve, disc_curve=disc_curve, fx=fx_, vol=fx_vol
)
# self._set_premium(curves, fx)
return self._option._base_analytic_greeks(
rate_curve=rate_curve,
disc_curve=disc_curve,
fx=_validate_fx_as_forwards(fx_),
fx_vol=self._pricing.vol, # type: ignore[arg-type] # vol is set and != None
premium=NoInput(0),
_reduced=True,
) # none of the reduced greeks need a VolObj - faster to reuse from _pricing.vol
def analytic_delta(self, *args: Any, leg: int = 1, **kwargs: Any) -> NoReturn:
"""Not implemented for Option types.
Use :meth:`~rateslib.instruments._BaseFXOption.analytic_greeks`.
"""
raise NotImplementedError("For Option types use `analytic_greeks`.")
def _plot_payoff(
self,
window: tuple[float, float] | NoInput = NoInput(0),
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: FXVol_ = NoInput(0),
) -> tuple[
np.ndarray[tuple[int], np.dtype[np.float64]], np.ndarray[tuple[int], np.dtype[np.float64]]
]:
"""
Mechanics to determine (x,y) coordinates for payoff at expiry plot.
"""
c = _parse_curves(self, curves, solver)
rate_curve = _get_curve("rate_curve", False, False, *c)
disc_curve = _get_curve("disc_curve", False, False, *c)
v = _parse_vol(self, vol, solver, False)
fx_vol = _get_fx_vol(True, True, *v)
fx_ = _get_fx_forwards_maybe_from_solver(solver=solver, fx=fx)
self._set_strike_and_vol(rate_curve=rate_curve, disc_curve=disc_curve, fx=fx_, vol=fx_vol)
# self._set_premium(curves, fx)
x, y = self._option._payoff_at_expiry(window)
return x, y
def plot_payoff(
self,
range: tuple[float, float] | NoInput = NoInput(0), # noqa: A002
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
base: str_ = NoInput(0),
vol: float_ = NoInput(0),
) -> PlotOutput:
"""
Return a plot of the payoff at expiry, indexed by the *FXFixing* value.
Parameters
----------
range: list of float, :green:`optional`
A range of values for the *FXFixing* value at expiry to use as the x-axis.
curves: _Curves, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
solver: Solver, :green:`optional`
A :class:`~rateslib.solver.Solver` object containing *Curve*, *Smile*, *Surface*, or
*Cube* mappings for pricing.
fx: FXForwards, :green:`optional`
The :class:`~rateslib.fx.FXForwards` object used for forecasting FX rates, if necessary.
vol: _Vol, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
Returns
-------
(Figure, Axes, list[Lines2D])
"""
x, y = self._plot_payoff(window=range, curves=curves, solver=solver, fx=fx, vol=vol)
return plot([x], [y]) # type: ignore
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return DataFrame()
def spread(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
"""
Not implemented for Option types. Use :meth:`~rateslib.instruments._BaseFXOption.rate`.
"""
raise NotImplementedError(f"`spread` is not implemented for type: {type(self).__name__}")
class FXCall(_BaseFXOption):
"""
An *FX Call* option.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import dt, FXCall, FXForwards, FXRates, FXDeltaVolSmile, Curve
.. ipython:: python
fxc = FXCall(
expiry="3m",
strike=1.10,
eval_date=dt(2020, 1, 1),
spec="eurusd_call",
)
fxc.cashflows()
.. rubric:: Pricing
An *FXOption* requires two discount curves; a curve to discount the cashflow of the LHS
currency of ``pair``. This is labelled as the *rate curve* and is used to derive the
difference between spot and forward deltas. The curve labelled as *disc curve* discounts
cashflows of the RHS of ``pair``. For the premium, depending upon whether it is paid in LHS
or RHS currency the appropriate curve from *Leg1* will be used and labelled as
*leg2 disc curve*. Allowable inputs are:
.. code-block:: python
curves = [rate_curve, disc_curve] # two curves are applied in the given order
curves = {"rate_curve": rate_curve, "disc_curve": disc_curve} # dict form is explicit
An *FXOption* also requires an :class:`~rateslib.fx.FXForwards` as input to the ``fx``
argument, and an *FXVolatility* object or numeric value for the ``vol`` argument. Allowed
inputs are:
.. code-block:: python
vol = 12.0 # a specific calendar-day annualized %-volatility until expiry
vol = vol_obj # an explicit volatility object, e.g. FXDeltaVolSurface
The following pricing ``metric`` are available, with examples:
.. ipython:: python
eur = Curve({dt(2020, 1, 1): 1.0, dt(2021, 1, 1): 0.98})
usd = Curve({dt(2020, 1, 1): 1.0, dt(2021, 1, 1): 0.96})
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.10}, settlement=dt(2020, 1, 3)),
fx_curves={"eureur": eur, "eurusd": eur, "usdusd": usd},
)
fxvs = FXDeltaVolSmile(
nodes={0.25: 11.0, 0.5: 9.8, 0.75: 10.7},
expiry=dt(2020, 4, 1),
eval_date=dt(2020, 1, 1),
delta_type="forward",
)
- **'vol'**: the implied volatility value of the option from a volatility object.
.. ipython:: python
fxc.rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="vol")
- **'premium'**: the cash premium amount applicable to the 'payment' date, expressed in the
premium currency.
.. ipython:: python
fxc.rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="premium")
- **'pips_or_%'**: if the premium currency is LHS of ``pair`` this is a % of notional, whilst if
the premium currency is RHS this gives a number of pips of the FX rate.
.. ipython:: python
fxc.rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="pips_or_%")
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define **fx option** and generalised **settlement** parameters.
expiry: datetime, str, :red:`required`
The expiry of the option. If given in string tenor format, e.g. "1M" requires an
``eval_date``. See **Notes**.
strike: float, Variable, str, :red:`required`
The strike value of the option.
If str, there are four possibilities; {"atm_forward", "atm_spot", "atm_delta", "%d"}.
Call % deltas can be given, as "25d".
pair: str, :red:`required`
The currency pair for the FX rate which settles the option, in 3-digit codes, e.g. "eurusd".
May be included as part of ``spec``.
notional: float, :green:`optional (set by 'defaults')`
The notional amount expressed in units of LHS of ``pair``.
eval_date: datetime, :green:`optional`
Only required if ``expiry`` is given as string tenor.
Should be entered as today (also called horizon) and **not** spot. Spot is derived
from ``delivery_lag`` and ``calendar``.
modifier : str, :green:`optional (set by 'defaults')`
The modification rule, in {"F", "MF", "P", "MP"} for date evaluation.
eom: bool, :green:`optional (set by 'defaults')`
Whether to use end-of-month rolls when expiry is given as a month or year tenor.
calendar : calendar or str, :green:`optional`
The holiday calendar object to use. If str, looks up named calendar from
static data.
delivery_lag: int, :green:`optional (set by 'defaults')`
The number of business days after expiry that the physical settlement of the FX
exchange occurs.
payment_lag: int or datetime, :green:`optional (set by 'defaults')`
The number of business days after expiry to pay premium. If a *datetime* is given this will
set the premium date explicitly.
premium_ccy: str, :green:`optional (set as RHS of 'pair')`
The currency in which the premium is paid. Can *only* be one of the two currencies
in `pair`.
delta_type: FXDeltaMethod, str, :green:`optional (set by 'defaults')`
When deriving strike from delta use the equation associated with *'spot'* or *'forward'*
delta. If premium currency is LHS of ``pair`` then this will produce
**premium adjusted** delta values. If the `premium_ccy` is RHS of ``pair`` then delta values
are **unadjusted**.
.. note::
The following define additional **rate** parameters.
premium: float, :green:`optional`
The amount paid for the option. If not given assumes an unpriced *Option* and sets this as
mid-market premium during pricing.
option_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of the option :class:`~rateslib.data.fixings.FXFixing`. If a scalar, is used
directly. If a string identifier, links to the central ``fixings`` object and data loader.
.. note::
The following are **meta parameters**.
metric : str, :green:`optional (set as "pips_or_%")`
The pricing metric returned by the ``rate`` method. See **Pricing**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
vol: str, Smile, Surface, float, Dual, Dual2, Variable
Pricing objects passed directly to the *Instrument's* methods' ``vol`` argument. See
**Pricing**.
spec : str, optional
An identifier to pre-populate many field with conventional values. See
:ref:`here` for more info and available values.
Notes
------
Date calculations for *FXOption* products are very specific. See *'Expiry and Delivery Rules'*
in *FX Option Pricing* by I. Clark. *Rateslib* uses calendars with associated settlement
calendars and the recognised market convention rules to derive dates.
.. ipython:: python
:suppress:
from rateslib import dt
from rateslib.instruments import FXCall
.. ipython:: python
fxc = FXCall(
pair="eursek",
expiry="2M",
eval_date=dt(2024, 6, 19), # <- Wednesday
strike=11.0,
modifier="mf",
calendar="tgt,stk|fed",
delivery_lag=2,
payment_lag=2,
)
fxc.kwargs.leg1["delivery"] # <- '2M' out of spot: Monday 24 Jun 2024: FX delivery
fxc.kwargs.leg1["expiry"] # <- '2b' before 'delivery': Option expiry
fxc.kwargs.leg2["payment"] # <- '2b' after 'expiry': Premium payment date
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, call=True, **kwargs)
class FXPut(_BaseFXOption):
"""
An *FX Put* option.
For parameters and examples see :class:`~rateslib.instruments.FXCall`.
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, call=False, **kwargs)
================================================
FILE: python/rateslib/instruments/fx_options/risk_reversal.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from pandas import DataFrame
from rateslib import defaults
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.fx_options.call_put import FXCall, FXPut, _BaseFXOption
from rateslib.instruments.protocols import _KWArgs
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
CalInput,
CurvesT_,
DualTypes,
DualTypes_,
FXForwards_,
Sequence,
Solver_,
VolStrat_,
VolT_,
_Vol,
bool_,
datetime,
datetime_,
int_,
str_,
)
class _BaseFXOptionStrat(_BaseFXOption):
"""
A custom option strategy composed of a list of :class:`~rateslib.instruments._BaseFXOption`,
or other :class:`~rateslib.instruments._BaseFXOptionStrat` objects, of the same
currency ``pair``.
Parameters
----------
options: list
The *FXOptions* or *FXOptionStrats* which make up the strategy.
rate_weight: list
The multiplier for the *'pips_or_%'* metric that sums the options to a final *rate*.
E.g. A *RiskReversal* uses [-1.0, 1.0] for a sale and a purchase.
E.g. A *Straddle* uses [1.0, 1.0] for summing two premium purchases.
rate_weight_vol: list
The multiplier for the *'vol'* metric that sums the options to a final *rate*.
E.g. A *RiskReversal* uses [-1.0, 1.0] to obtain the vol difference between two options.
E.g. A *Straddle* uses [0.5, 0.5] to obtain the volatility at the strike of each option.
"""
_greeks: dict[str, Any] = {}
_strat_elements: tuple[_BaseFXOption | _BaseFXOptionStrat, ...]
@property
def kwargs(self) -> _KWArgs:
"""The :class:`~rateslib.instruments.protocols._KWArgs` of the *Instrument*."""
return self._kwargs
def __init__(
self,
options: Sequence[_BaseFXOption | _BaseFXOptionStrat],
rate_weight: list[float],
rate_weight_vol: list[float],
metric: str_ = NoInput(0),
curves: CurvesT_ = NoInput(0),
vol: VolStrat_ = NoInput(0),
):
self._n = len(options)
if self._n != len(rate_weight) or self._n != len(rate_weight_vol):
raise ValueError(
"`rate_weight` and `rate_weight_vol` must have same length as `options`.",
)
self._kwargs = _KWArgs(
spec=NoInput(0),
user_args=dict(
rate_weight=rate_weight,
rate_weight_vol=rate_weight_vol,
instruments=tuple(options),
metric=metric,
pair=options[0].kwargs.leg1["pair"],
curves=NoInput(0),
vol=vol,
),
default_args=dict(
metric="vol",
),
meta_args=["metric", "vol", "curves", "instruments", "rate_weight", "rate_weight_vol"],
)
self.kwargs.leg2["premium_ccy"] = self.instruments[0].kwargs.leg2["premium_ccy"]
self.kwargs.meta["curves"] = self._parse_curves(curves)
# @property
# def _vol_agg(self) -> FXVolStrat_:
# """Aggregate the `vol` metric on contained options into a container"""
#
# def vol_attr(obj: FXOption | FXOptionStrat) -> FXVolStrat_:
# if isinstance(obj, FXOption):
# return obj.vol
# else:
# return obj._vol_agg
#
# return [vol_attr(obj) for obj in self._strat_elements]
#
# def _parse_vol_sequence(self, vol: FXVolStrat_) -> ListFXVol_:
# """
# This function exists to determine a recursive list
#
# This function must exist to parse an input sequence of given vol values for each
# *Instrument* in the strategy to a list that will be applied sequentially to value
# each of those *Instruments*.
#
# If a sub-sequence, e.g BrokerFly is a strategy of strategies then this function will
# be repeatedly called within each strategy.
# """
# ret: ListFXVol_ = []
# if isinstance(
# vol,
# str
# | float
# | Dual
# | Dual2
# | Variable
# | FXDeltaVolSurface
# | FXDeltaVolSmile
# | FXSabrSmile
# | FXSabrSurface
# | NoInput,
# ):
# for obj in self.periods:
# if isinstance(obj, FXOptionStrat):
# ret.append(obj._parse_vol_sequence(vol))
# else:
# ret.append(vol)
#
# elif isinstance(vol, Sequence):
# if len(vol) != len(self.periods):
# raise ValueError(
# "`vol` as sequence must have same length as its contained "
# f"strategy elements: {len(self.periods)}"
# )
# else:
# for obj, vol_ in zip(self.periods, vol, strict=True):
# if isinstance(obj, FXOptionStrat):
# ret.append(obj._parse_vol_sequence(vol_))
# else:
# assert isinstance(vol_, str) or not isinstance(vol_, Sequence)
# ret.append(vol_)
# return ret
#
# def _get_fxvol_maybe_from_solver_recursive(
# self, vol: FXVolStrat_, solver: Solver_
# ) -> ListFXVol_:
# """
# Function must parse a ``vol`` input in combination with ``vol_agg`` attribute to yield
# a Sequence of vols applied to the various levels of associated *Options* or *OptionStrats*
# """
# vol_ = self._parse_vol_sequence(vol) # vol_ is properly nested for one vol per option
# ret: ListFXVol_ = []
# for obj, vol__ in zip(self.periods, vol_, strict=False):
# if isinstance(obj, FXOptionStrat):
# ret.append(obj._get_fxvol_maybe_from_solver_recursive(vol__, solver))
# else:
# assert isinstance(vol__, str) or not isinstance(vol__, Sequence) # noqa: S101
# ret.append(
# _get_fxvol_maybe_from_solver(vol_attr=obj.vol, vol=vol__, solver=solver)
# )
# return ret
@classmethod
def _parse_vol(cls, vol: VolStrat_) -> tuple[_Vol, _Vol]: # type: ignore[override]
raise NotImplementedError(f"{type(cls).__name__} must implement `_parse_vol`.")
@property
def instruments(self) -> tuple[_BaseFXOption | _BaseFXOptionStrat, ...]:
return self.kwargs.meta["instruments"] # type: ignore[no-any-return]
def __repr__(self) -> str:
return f""
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolStrat_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
vol_: VolStrat_ = self._parse_vol(vol)
metric_: str = _drb(self.kwargs.meta["metric"], metric)
map_ = {
"pips_or_%": self.kwargs.meta["rate_weight"],
"vol": self.kwargs.meta["rate_weight_vol"],
"premium": [1.0] * len(self.instruments),
"single_vol": self.kwargs.meta["rate_weight_vol"],
}
weights = map_[metric_]
_: DualTypes = 0.0
for option, vol__, weight in zip(self.instruments, vol_, weights, strict=True): # type: ignore[misc, arg-type]
_ += (
option.rate(
curves=curves,
solver=solver,
fx=fx,
base=base,
vol=vol__, # type: ignore[arg-type]
metric=metric_,
settlement=settlement,
forward=forward,
)
* weight
)
return _
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolStrat_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
vol_ = self._parse_vol(vol)
results = [
option.npv(
curves=curves,
solver=solver,
fx=fx,
base=base,
local=local,
vol=vol__,
forward=forward,
settlement=settlement,
)
for (option, vol__) in zip(self.instruments, vol_, strict=True)
]
if local:
df = DataFrame(results).fillna(0.0)
df_sum = df.sum()
_: DualTypes | dict[str, DualTypes] = df_sum.to_dict() # type: ignore[assignment]
else:
_ = sum(results) # type: ignore[arg-type]
return _
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolStrat_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return self._cashflows_from_instruments(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
base=base,
)
def _plot_payoff(
self,
window: tuple[float, float] | NoInput = NoInput(0),
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolStrat_ = NoInput(0),
) -> tuple[Any, Any]:
vol_: VolStrat_ = self._parse_vol(vol)
y = None
for inst, vol__ in zip(self.instruments, vol_, strict=True): # type: ignore[misc, arg-type]
x, y_ = inst._plot_payoff(
window=window,
curves=curves,
solver=solver,
fx=fx,
vol=vol__, # type: ignore[arg-type]
)
if y is None:
y = y_
else:
y += y_
return x, y
def analytic_greeks(
self,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolStrat_ = NoInput(0),
) -> dict[str, Any]:
# implicitly call set_pricing_mid for unpriced parameters
# this is important for Strategies whose options are
# dependent upon each other, e.g. Strangle. (RR and Straddle do not have
# interdependent options)
self.rate(curves=curves, solver=solver, fx=fx, vol=vol)
vol_: VolStrat_ = self._parse_vol(vol=vol)
gks = []
for inst, vol_i in zip(self.instruments, vol_, strict=True): # type: ignore[misc, arg-type]
if isinstance(inst, _BaseFXOptionStrat):
gks.append(
inst.analytic_greeks(
curves=curves,
solver=solver,
fx=fx,
vol=vol_i,
)
)
else: # option is FXOption
# by calling on the OptionPeriod directly the strike is maintained from rate call.
gks.append(
inst._analytic_greeks_set_metrics(
curves=curves,
solver=solver,
fx=fx,
vol=vol_i, # type: ignore[arg-type]
set_metrics=False, # already done in the rate call above
)
)
_unit_attrs = ["delta", "gamma", "vega", "vomma", "vanna", "_kega", "_kappa", "__bs76"]
_: dict[str, Any] = {}
for attr in _unit_attrs:
_[attr] = sum(gk[attr] * self.kwargs.meta["rate_weight"][i] for i, gk in enumerate(gks))
_notional_attrs = [
f"delta_{self.kwargs.leg1['pair'].pair[:3]}",
f"gamma_{self.kwargs.leg1['pair'].pair[:3]}_1%",
f"vega_{self.kwargs.leg1['pair'].pair[3:]}",
]
for attr in _notional_attrs:
_[attr] = sum(gk[attr] * self.kwargs.meta["rate_weight"][i] for i, gk in enumerate(gks))
_.update(
{
"__class": "FXOptionStrat",
"__options": gks,
"__delta_type": gks[0]["__delta_type"],
"__notional": self.kwargs.leg1["notional"],
},
)
return _
class FXRiskReversal(_BaseFXOptionStrat):
"""
An *FX Risk Reversal* :class:`~rateslib.instruments._BaseFXOptionStrat`.
A *RiskReversal* is composed of a lower strike :class:`~rateslib.instruments.FXPut`
and a higher strike :class:`~rateslib.instruments.FXCall`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import FXRiskReversal, Curve, FXForwards, FXRates, FXDeltaVolSmile, dt
.. ipython:: python
fxrr = FXRiskReversal(
expiry="3m",
strike=["-25d", "25d"],
eval_date=dt(2020, 1, 1),
spec="eurusd_call",
notional=1000000,
)
fxrr.cashflows()
.. rubric:: Pricing
The pricing mirrors that for an :class:`~rateslib.instruments.FXCall`. All options use the
same ``curves``.
Allowable inputs are:
.. code-block:: python
curves = [rate_curve, disc_curve] # two curves are applied in the given order
curves = {"rate_curve": rate_curve, "disc_curve": disc_curve} # dict form is explicit
Any *FXOption* also requires an :class:`~rateslib.fx.FXForwards` as input to the ``fx``
argument.
A ``vol`` argument must be provided to each *Instrument*. This can either be a single
value universally used for all, or an individual item as part of a sequence. Allowed
inputs are:
.. code-block:: python
vol = 12.0 | vol_obj # a single item universally applied
vol = [12.0, 13.0] # values for the Put and Call respectively
The following pricing ``metric`` are available, with examples:
.. ipython:: python
eur = Curve({dt(2020, 1, 1): 1.0, dt(2021, 1, 1): 0.98})
usd = Curve({dt(2020, 1, 1): 1.0, dt(2021, 1, 1): 0.96})
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.10}, settlement=dt(2020, 1, 3)),
fx_curves={"eureur": eur, "eurusd": eur, "usdusd": usd},
)
fxvs = FXDeltaVolSmile(
nodes={0.25: 11.0, 0.5: 9.8, 0.75: 10.7},
expiry=dt(2020, 4, 1),
eval_date=dt(2020, 1, 1),
delta_type="forward",
)
- **'vol'**: the implied volatility value of the *FXCall* minus the volatility of the *FXPut*.
**'single_vol'** is also an alias for this.
.. ipython:: python
fxrr.rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="vol")
fxrr.instruments[0].rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="vol")
fxrr.instruments[1].rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="vol")
- **'premium'**: the summed cash premium amount, of both options, applicable to the 'payment'
date.
.. ipython:: python
fxrr.rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="premium")
fxrr.instruments[0].rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="premium")
fxrr.instruments[1].rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="premium")
- **'pips_or_%'**: if the premium currency is LHS of ``pair`` this is a % of notional, whilst if
the premium currency is RHS this gives a number of pips of the FX rate. Summed over both
options.
.. ipython:: python
fxrr.rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="pips_or_%")
fxrr.instruments[0].rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="pips_or_%")
fxrr.instruments[1].rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="pips_or_%")
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define **fx option** and generalised **settlement** parameters.
expiry: datetime, str, :red:`required`
The expiry of the option. If given in string tenor format, e.g. "1M" requires an
``eval_date``. See **Notes**.
strike: 2-tuple of float, Variable, str, :red:`required`
The strike of the put and the call in order.
pair: str, :red:`required`
The currency pair for the FX rate which settles the option, in 3-digit codes, e.g. "eurusd".
May be included as part of ``spec``.
notional: float, :green:`optional (set by 'defaults')`
The notional amount of each option expressed in units of LHS of ``pair``.
eval_date: datetime, :green:`optional`
Only required if ``expiry`` is given as string tenor.
Should be entered as today (also called horizon) and **not** spot. Spot is derived
from ``delivery_lag`` and ``calendar``.
modifier : str, :green:`optional (set by 'defaults')`
The modification rule, in {"F", "MF", "P", "MP"} for date evaluation.
eom: bool, :green:`optional (set by 'defaults')`
Whether to use end-of-month rolls when expiry is given as a month or year tenor.
calendar : calendar or str, :green:`optional`
The holiday calendar object to use. If str, looks up named calendar from
static data.
delivery_lag: int, :green:`optional (set by 'defaults')`
The number of business days after expiry that the physical settlement of the FX
exchange occurs.
payment_lag: int or datetime, :green:`optional (set by 'defaults')`
The number of business days after expiry to pay premium. If a *datetime* is given this will
set the premium date explicitly.
premium_ccy: str, :green:`optional (set as RHS of 'pair')`
The currency in which the premium is paid. Can *only* be one of the two currencies
in `pair`.
delta_type: FXDeltaMethod, str, :green:`optional (set by 'defaults')`
When deriving strike from delta use the equation associated with *'spot'* or *'forward'*
delta. If premium currency is LHS of ``pair`` then this will produce
**premium adjusted** delta values. If the `premium_ccy` is RHS of ``pair`` then delta values
are **unadjusted**.
.. note::
The following define additional **rate** parameters.
premium: 2-tuple of float, :green:`optional`
The amount paid for the put and call in order. If not given assumes unpriced
*Options* and sets this as mid-market premium during pricing.
option_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of each option's :class:`~rateslib.data.fixings.FXFixing`. If a scalar, is used
directly. If a string identifier, links to the central ``fixings`` object and data loader.
.. note::
The following are **meta parameters**.
metric : str, :green:`optional (set as "pips_or_%")`
The pricing metric returned by the ``rate`` method. See **Pricing**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
vol: str, Smile, Surface, float, Dual, Dual2, Variable, Sequence
Pricing objects passed directly to the *Instrument's* methods' ``vol`` argument. See
**Pricing**.
spec : str, optional
An identifier to pre-populate many field with conventional values. See
:ref:`here` for more info and available values.
"""
_rate_scalar = 100.0
def __init__(
self,
expiry: datetime | str,
strike: tuple[DualTypes | str, DualTypes | str],
pair: str_ = NoInput(0),
*,
notional: DualTypes_ = NoInput(0),
eval_date: datetime | NoInput = NoInput(0),
calendar: CalInput = NoInput(0),
modifier: str_ = NoInput(0),
eom: bool_ = NoInput(0),
delivery_lag: int_ = NoInput(0),
premium: tuple[DualTypes_, DualTypes_] = (NoInput(0), NoInput(0)),
premium_ccy: str_ = NoInput(0),
payment_lag: str | datetime_ = NoInput(0),
option_fixings: DualTypes_ = NoInput(0),
delta_type: str_ = NoInput(0),
metric: str_ = NoInput(0),
curves: CurvesT_ = NoInput(0),
vol: VolT_ = NoInput(0),
spec: str_ = NoInput(0),
) -> None:
self._n = 2
vol_ = self._parse_vol(vol)
notional_ = _drb(defaults.notional, notional)
options = [
FXPut(
pair=pair,
expiry=expiry,
delivery_lag=delivery_lag,
payment_lag=payment_lag,
calendar=calendar,
modifier=modifier,
eom=eom,
eval_date=eval_date,
strike=strike[0],
notional=-notional_,
option_fixings=option_fixings[0]
if isinstance(option_fixings, tuple | list)
else option_fixings,
delta_type=delta_type,
premium=premium[0],
premium_ccy=premium_ccy,
curves=curves,
vol=vol_[0],
metric=NoInput(0),
spec=spec,
),
FXCall(
pair=pair,
expiry=expiry,
delivery_lag=delivery_lag,
payment_lag=payment_lag,
calendar=calendar,
modifier=modifier,
eom=eom,
eval_date=eval_date,
strike=strike[1],
notional=notional_,
option_fixings=option_fixings[1]
if isinstance(option_fixings, tuple | list)
else option_fixings,
delta_type=delta_type,
premium=premium[1],
premium_ccy=premium_ccy,
curves=curves,
vol=vol_[1],
metric=NoInput(0),
spec=spec,
),
]
super().__init__(
options=options,
rate_weight=[-1.0, 1.0],
rate_weight_vol=[-1.0, 1.0],
metric=metric,
curves=curves,
vol=vol_,
)
self.kwargs.leg1["notional"] = notional_
@classmethod
def _parse_vol(cls, vol: VolStrat_) -> tuple[_Vol, _Vol]: # type: ignore[override]
if not isinstance(vol, list | tuple):
vol = (vol,) * 2
return FXPut._parse_vol(vol[0]), FXCall._parse_vol(vol[1])
================================================
FILE: python/rateslib/instruments/fx_options/straddle.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.fx_options.call_put import FXCall, FXPut
from rateslib.instruments.fx_options.risk_reversal import _BaseFXOptionStrat
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CalInput,
CurvesT_,
DualTypes,
DualTypes_,
VolStrat_,
VolT_,
_Vol,
bool_,
datetime,
datetime_,
int_,
str_,
)
class FXStraddle(_BaseFXOptionStrat):
"""
An *FX Straddle* :class:`~rateslib.instruments._BaseFXOptionStrat`.
A *Straddle* is composed of a :class:`~rateslib.instruments.FXPut`
and :class:`~rateslib.instruments.FXCall` with the same strike.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import FXStraddle, FXForwards, FXRates, FXDeltaVolSmile, Curve, dt
.. ipython:: python
fxs = FXStraddle(
expiry="3m",
strike=1.10, # <- "atm_delta" is also a common input
eval_date=dt(2020, 1, 1),
spec="eurusd_call",
notional=1000000,
)
fxs.cashflows()
.. rubric:: Pricing
The pricing mirrors that for an :class:`~rateslib.instruments.FXCall`. All options use the
same ``curves``. Allowable inputs are:
.. code-block:: python
curves = [rate_curve, disc_curve] # two curves are applied in the given order
curves = {"rate_curve": rate_curve, "disc_curve": disc_curve} # dict form is explicit
Any *FXOption* also requires an :class:`~rateslib.fx.FXForwards` as input to the ``fx``
argument.
A ``vol`` argument must be provided to each *Instrument*. This can either be a single
value universally used for all, or an individual item as part of a sequence. Allowed
inputs are:
.. code-block:: python
vol = 12.0 | vol_obj # a single item universally applied
vol = [12.0, 12.0] # values for the Put and Call respectively
The following pricing ``metric`` are available, with examples:
.. ipython:: python
eur = Curve({dt(2020, 1, 1): 1.0, dt(2021, 1, 1): 0.98})
usd = Curve({dt(2020, 1, 1): 1.0, dt(2021, 1, 1): 0.96})
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.10}, settlement=dt(2020, 1, 3)),
fx_curves={"eureur": eur, "eurusd": eur, "usdusd": usd},
)
fxvs = FXDeltaVolSmile(
nodes={0.25: 11.0, 0.5: 9.8, 0.75: 10.7},
expiry=dt(2020, 4, 1),
eval_date=dt(2020, 1, 1),
delta_type="forward",
)
- **'vol'**: the implied volatility value of the straddle from a volatility object.
**'single_vol'** is also an alias for this, since both options assume the same volatility.
.. ipython:: python
fxs.rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="vol")
- **'premium'**: the summed cash premium amount, of both options, applicable to the 'payment'
date.
.. ipython:: python
fxs.rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="premium")
fxs.instruments[0].rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="premium")
fxs.instruments[1].rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="premium")
- **'pips_or_%'**: if the premium currency is LHS of ``pair`` this is a % of notional, whilst if
the premium currency is RHS this gives a number of pips of the FX rate. Summed over both
options.
.. ipython:: python
fxs.rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="pips_or_%")
fxs.instruments[0].rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="pips_or_%")
fxs.instruments[1].rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="pips_or_%")
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define **fx option** and generalised **settlement** parameters.
expiry: datetime, str, :red:`required`
The expiry of the option. If given in string tenor format, e.g. "1M" requires an
``eval_date``. See **Notes**.
strike: float, Variable, str, :red:`required`
The strike of the put and the call.
pair: str, :red:`required`
The currency pair for the FX rate which settles the option, in 3-digit codes, e.g. "eurusd".
May be included as part of ``spec``.
notional: float, :green:`optional (set by 'defaults')`
The notional amount of each option expressed in units of LHS of ``pair``.
eval_date: datetime, :green:`optional`
Only required if ``expiry`` is given as string tenor.
Should be entered as today (also called horizon) and **not** spot. Spot is derived
from ``delivery_lag`` and ``calendar``.
modifier : str, :green:`optional (set by 'defaults')`
The modification rule, in {"F", "MF", "P", "MP"} for date evaluation.
eom: bool, :green:`optional (set by 'defaults')`
Whether to use end-of-month rolls when expiry is given as a month or year tenor.
calendar : calendar or str, :green:`optional`
The holiday calendar object to use. If str, looks up named calendar from
static data.
delivery_lag: int, :green:`optional (set by 'defaults')`
The number of business days after expiry that the physical settlement of the FX
exchange occurs.
payment_lag: int or datetime, :green:`optional (set by 'defaults')`
The number of business days after expiry to pay premium. If a *datetime* is given this will
set the premium date explicitly.
premium_ccy: str, :green:`optional (set as RHS of 'pair')`
The currency in which the premium is paid. Can *only* be one of the two currencies
in `pair`.
delta_type: FXDeltaMethod, str, :green:`optional (set by 'defaults')`
When deriving strike from delta use the equation associated with *'spot'* or *'forward'*
delta. If premium currency is LHS of ``pair`` then this will produce
**premium adjusted** delta values. If the `premium_ccy` is RHS of ``pair`` then delta values
are **unadjusted**.
.. note::
The following define additional **rate** parameters.
premium: 2-tuple of float, :green:`optional`
The amount paid for the put and call in order. If not given assumes unpriced
*Options* and sets this as mid-market premium during pricing.
option_fixings: 2-tuple of float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of each option's :class:`~rateslib.data.fixings.FXFixing`. If a scalar, is used
directly. If a string identifier, links to the central ``fixings`` object and data loader.
.. note::
The following are **meta parameters**.
metric : str, :green:`optional (set as "pips_or_%")`
The pricing metric returned by the ``rate`` method. See **Pricing**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
vol: str, Smile, Surface, float, Dual, Dual2, Variable, Sequence
Pricing objects passed directly to the *Instrument's* methods' ``vol`` argument. See
**Pricing**.
spec : str, optional
An identifier to pre-populate many field with conventional values. See
:ref:`here` for more info and available values.
"""
_rate_scalar = 100.0
def __init__(
self,
expiry: datetime | str,
strike: DualTypes | str,
pair: str_ = NoInput(0),
*,
notional: DualTypes_ = NoInput(0),
eval_date: datetime | NoInput = NoInput(0),
calendar: CalInput = NoInput(0),
modifier: str_ = NoInput(0),
eom: bool_ = NoInput(0),
delivery_lag: int_ = NoInput(0),
premium: tuple[DualTypes_, DualTypes_] = (NoInput(0), NoInput(0)),
premium_ccy: str_ = NoInput(0),
payment_lag: str | datetime_ = NoInput(0),
option_fixings: DualTypes_ = NoInput(0),
delta_type: str_ = NoInput(0),
metric: str_ = NoInput(0),
curves: CurvesT_ = NoInput(0),
vol: VolT_ = NoInput(0),
spec: str_ = NoInput(0),
) -> None:
vol_ = self._parse_vol(vol)
notional_ = _drb(defaults.notional, notional)
options = [
FXPut(
pair=pair,
expiry=expiry,
delivery_lag=delivery_lag,
payment_lag=payment_lag,
calendar=calendar,
modifier=modifier,
eom=eom,
eval_date=eval_date,
strike=strike,
notional=notional_,
option_fixings=option_fixings[0]
if isinstance(option_fixings, tuple | list)
else option_fixings,
delta_type=delta_type,
premium=premium[0],
premium_ccy=premium_ccy,
curves=curves,
vol=vol_[0],
metric=NoInput(0),
spec=spec,
),
FXCall(
pair=pair,
expiry=expiry,
delivery_lag=delivery_lag,
payment_lag=payment_lag,
calendar=calendar,
modifier=modifier,
eom=eom,
eval_date=eval_date,
strike=strike,
notional=notional_,
option_fixings=option_fixings[1]
if isinstance(option_fixings, tuple | list)
else option_fixings,
delta_type=delta_type,
premium=premium[1],
premium_ccy=premium_ccy,
curves=curves,
vol=vol_[1],
metric=NoInput(0),
spec=spec,
),
]
super().__init__(
options=options,
rate_weight=[1.0, 1.0],
rate_weight_vol=[0.5, 0.5],
metric=metric,
curves=curves,
vol=vol_,
)
self.kwargs.leg1["notional"] = notional_
self.kwargs.leg2["premium_ccy"] = self.instruments[0].kwargs.leg2["premium_ccy"]
@classmethod
def _parse_vol(cls, vol: VolStrat_) -> tuple[_Vol, _Vol]: # type: ignore[override]
if not isinstance(vol, list | tuple):
vol = (vol,) * 2
return FXPut._parse_vol(vol[0]), FXCall._parse_vol(vol[1])
def _set_notionals(self, notional: DualTypes) -> None:
"""
Set the notionals on each option period. Mainly used by Brokerfly for vega neutral
strangle and straddle.
"""
for option in self.instruments:
option.kwargs.leg1["notional"] = notional
option._option.settlement_params._notional = notional
================================================
FILE: python/rateslib/instruments/fx_options/strangle.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.dual import dual_log, newton_1dim
from rateslib.dual.utils import _set_ad_order_objects
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import FXDeltaMethod
from rateslib.instruments.fx_options.call_put import FXCall, FXPut
from rateslib.instruments.fx_options.risk_reversal import _BaseFXOptionStrat
from rateslib.instruments.protocols.pricing import (
_get_curve,
_get_fx_forwards_maybe_from_solver,
_get_fx_vol,
_parse_curves,
_parse_vol,
_Vol,
)
from rateslib.periods.utils import _validate_fx_as_forwards
from rateslib.splines import evaluate
from rateslib.volatility import FXDeltaVolSmile, FXDeltaVolSurface, FXSabrSmile, FXSabrSurface
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
CalInput,
CurvesT_,
DualTypes,
DualTypes_,
FXForwards,
FXForwards_,
Solver,
Solver_,
VolStrat_,
VolT_,
_BaseFXOptionPeriod,
_FXVolOption,
_Vol,
bool_,
datetime,
datetime_,
int_,
str_,
)
class FXStrangle(_BaseFXOptionStrat):
"""
An *FX Strangle* :class:`~rateslib.instruments._BaseFXOptionStrat`.
A *Strangle* is composed of a lower strike :class:`~rateslib.instruments.FXPut`
and a higher strike :class:`~rateslib.instruments.FXCall`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import FXStrangle, Curve, FXForwards, FXRates, FXDeltaVolSmile, dt
.. ipython:: python
fxs = FXStrangle(
expiry="3m",
strike=["-10d", "10d"],
eval_date=dt(2020, 1, 1),
spec="eurusd_call",
notional=1000000,
)
fxs.cashflows()
.. rubric:: Pricing
The pricing mirrors that for an :class:`~rateslib.instruments.FXCall`. All options use the
same ``curves``.
Allowable inputs are:
.. code-block:: python
curves = [rate_curve, disc_curve] # two curves are applied in the given order
curves = {"rate_curve": rate_curve, "disc_curve": disc_curve} # dict form is explicit
Any *FXOption* also requires an :class:`~rateslib.fx.FXForwards` as input to the ``fx``
argument.
A ``vol`` argument must be provided to each *Instrument*. This can either be a single
value universally used for all, or an individual item as part of a sequence. Allowed
inputs are:
.. code-block:: python
vol = 12.0 | vol_obj # a single item universally applied
vol = [12.0, 12.0] # values for the Put and Call respectively
*FXStrangles* have peculiar market conventions. If the strikes are given as delta percentages
then numeric values will first be derived using the *'single_vol'* approach. Any *'premium'*
or *'pips_or_%'* values can then be calculated using those strikes and this volatility.
The following pricing ``metric`` are available, with examples:
.. ipython:: python
eur = Curve({dt(2020, 1, 1): 1.0, dt(2021, 1, 1): 0.98})
usd = Curve({dt(2020, 1, 1): 1.0, dt(2021, 1, 1): 0.96})
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.10}, settlement=dt(2020, 1, 3)),
fx_curves={"eureur": eur, "eurusd": eur, "usdusd": usd},
)
fxvs = FXDeltaVolSmile(
nodes={0.25: 11.0, 0.5: 9.8, 0.75: 10.7},
expiry=dt(2020, 4, 1),
eval_date=dt(2020, 1, 1),
delta_type="forward",
)
- **'single_vol'**: the singular volatility value that when applied to each option separately
yields a summed premium amount equal to the summed premium when each option is valued with
the appropriate volatility from an object (with the strikes determined by the single vol).
**'vol'** is an alias for single vol and returns the same value.
.. ipython:: python
fxs.rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="single_vol")
fxs.rate(vol=[12.163490, 12.163490], curves=[eur, usd], fx=fxf, metric="premium")
This requires an iterative calculation for which the tolerance is set to 1e-6 with a
maximum allowed number of iterations of 10.
- **'premium'**: the summed cash premium amount, of both options, applicable to the 'payment'
date. If strikes are given as delta percentages then they are first determined using the
*'single_vol'*.
.. ipython:: python
fxs.rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="premium")
- **'pips_or_%'**: if the premium currency is LHS of ``pair`` this is a % of notional, whilst if
the premium currency is RHS this gives a number of pips of the FX rate. Summed over both
options. For strikes set with delta percentages these are first determined using the
'single_vol'.
.. ipython:: python
fxs.rate(vol=fxvs, curves=[eur, usd], fx=fxf, metric="pips_or_%")
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define **fx option** and generalised **settlement** parameters.
expiry: datetime, str, :red:`required`
The expiry of the option. If given in string tenor format, e.g. "1M" requires an
``eval_date``. See **Notes**.
strike: 2-tuple of float, Variable, str, :red:`required`
The strikes of the put and the call in order.
pair: str, :red:`required`
The currency pair for the FX rate which settles the option, in 3-digit codes, e.g. "eurusd".
May be included as part of ``spec``.
notional: float, :green:`optional (set by 'defaults')`
The notional amount of each option expressed in units of LHS of ``pair``.
eval_date: datetime, :green:`optional`
Only required if ``expiry`` is given as string tenor.
Should be entered as today (also called horizon) and **not** spot. Spot is derived
from ``delivery_lag`` and ``calendar``.
modifier : str, :green:`optional (set by 'defaults')`
The modification rule, in {"F", "MF", "P", "MP"} for date evaluation.
eom: bool, :green:`optional (set by 'defaults')`
Whether to use end-of-month rolls when expiry is given as a month or year tenor.
calendar : calendar or str, :green:`optional`
The holiday calendar object to use. If str, looks up named calendar from
static data.
delivery_lag: int, :green:`optional (set by 'defaults')`
The number of business days after expiry that the physical settlement of the FX
exchange occurs.
payment_lag: int or datetime, :green:`optional (set by 'defaults')`
The number of business days after expiry to pay premium. If a *datetime* is given this will
set the premium date explicitly.
premium_ccy: str, :green:`optional (set as RHS of 'pair')`
The currency in which the premium is paid. Can *only* be one of the two currencies
in `pair`.
delta_type: FXDeltaMethod, str, :green:`optional (set by 'defaults')`
When deriving strike from delta use the equation associated with *'spot'* or *'forward'*
delta. If premium currency is LHS of ``pair`` then this will produce
**premium adjusted** delta values. If the `premium_ccy` is RHS of ``pair`` then delta values
are **unadjusted**.
.. note::
The following define additional **rate** parameters.
premium: 2-tuple of float, :green:`optional`
The amount paid for the put and call in order. If not given assumes unpriced
*Options* and sets this as mid-market premium during pricing.
option_fixings: 2-tuple of float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of each option's :class:`~rateslib.data.fixings.FXFixing`. If a scalar, is used
directly. If a string identifier, links to the central ``fixings`` object and data loader.
.. note::
The following are **meta parameters**.
metric : str, :green:`optional (set as "single_vol")`
The pricing metric returned by the ``rate`` method. See **Pricing**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
vol: str, Smile, Surface, float, Dual, Dual2, Variable, Sequence
Pricing objects passed directly to the *Instrument's* methods' ``vol`` argument. See
**Pricing**.
spec : str, optional
An identifier to pre-populate many field with conventional values. See
:ref:`here` for more info and available values.
"""
_rate_scalar = 100.0
def __init__(
self,
expiry: datetime | str,
strike: tuple[DualTypes | str, DualTypes | str],
pair: str_ = NoInput(0),
*,
notional: DualTypes_ = NoInput(0),
eval_date: datetime | NoInput = NoInput(0),
calendar: CalInput = NoInput(0),
modifier: str_ = NoInput(0),
eom: bool_ = NoInput(0),
delivery_lag: int_ = NoInput(0),
premium: tuple[DualTypes_, DualTypes_] = (NoInput(0), NoInput(0)),
premium_ccy: str_ = NoInput(0),
payment_lag: str | datetime_ = NoInput(0),
option_fixings: DualTypes_ = NoInput(0),
delta_type: str_ = NoInput(0),
metric: str_ = NoInput(0),
curves: CurvesT_ = NoInput(0),
vol: VolT_ = NoInput(0),
spec: str_ = NoInput(0),
) -> None:
vol_ = self._parse_vol(vol)
notional_ = _drb(defaults.notional, notional)
options = [
FXPut(
pair=pair,
expiry=expiry,
delivery_lag=delivery_lag,
payment_lag=payment_lag,
calendar=calendar,
modifier=modifier,
eom=eom,
eval_date=eval_date,
strike=strike[0],
notional=notional_,
option_fixings=option_fixings[0]
if isinstance(option_fixings, tuple | list)
else option_fixings,
delta_type=delta_type,
premium=premium[0],
premium_ccy=premium_ccy,
curves=curves,
vol=vol_[0],
metric=NoInput(0),
spec=spec,
),
FXCall(
pair=pair,
expiry=expiry,
delivery_lag=delivery_lag,
payment_lag=payment_lag,
calendar=calendar,
modifier=modifier,
eom=eom,
eval_date=eval_date,
strike=strike[1],
notional=notional_,
option_fixings=option_fixings[1]
if isinstance(option_fixings, tuple | list)
else option_fixings,
delta_type=delta_type,
premium=premium[1],
premium_ccy=premium_ccy,
curves=curves,
vol=vol_[1],
metric=NoInput(0),
spec=spec,
),
]
super().__init__(
options=options,
rate_weight=[1.0, 1.0],
rate_weight_vol=[0.5, 0.5],
metric=_drb("single_vol", metric),
curves=curves,
vol=vol_,
)
self.kwargs.leg1["notional"] = notional_
self.kwargs.meta["fixed_delta"] = [
isinstance(strike[0], str)
and strike[0][-1].lower() == "d"
and strike[0].lower() != "atm_forward",
isinstance(strike[1], str)
and strike[1][-1].lower() == "d"
and strike[1].lower() != "atm_forward",
]
self.kwargs.leg1["delivery"] = self.instruments[0].kwargs.leg1["delivery"]
self.kwargs.leg1["delta_type"] = self.instruments[0].kwargs.leg1["delta_type"]
self.kwargs.leg1["expiry"] = self.instruments[0].kwargs.leg1["expiry"]
@classmethod
def _parse_vol(cls, vol: VolStrat_) -> tuple[_Vol, _Vol]: # type: ignore[override]
if not isinstance(vol, list | tuple):
vol = (vol,) * 2
return FXPut._parse_vol(vol[0]), FXCall._parse_vol(vol[1])
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolStrat_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
return self._rate(
curves=curves,
solver=solver,
fx=fx,
base=base,
vol=vol,
metric=metric,
forward=forward,
settlement=settlement,
)
def _rate(
self,
curves: CurvesT_,
solver: Solver_,
fx: FXForwards_,
base: str_,
vol: VolStrat_,
metric: str_,
settlement: datetime_,
forward: datetime_,
record_greeks: bool = False,
) -> DualTypes:
metric_: str = _drb(self.kwargs.meta["metric"], metric).lower()
if metric_ != "single_vol" and not any(self.kwargs.meta["fixed_delta"]):
# the strikes are explicitly defined and independent across options.
# can evaluate separately, therefore the default method will suffice.
return super().rate(
curves=curves, solver=solver, fx=fx, base=base, vol=vol, metric=metric_
)
else:
# must perform single vol evaluation to determine mkt convention strikes
single_vol = self._rate_single_vol(
curves=curves, solver=solver, fx=fx, base=base, vol=vol, record_greeks=record_greeks
)
if metric_ == "single_vol":
return single_vol
elif metric_ in ["premium", "pips_or_%"]:
# return the premiums using the single_vol as the volatility
return super().rate(
curves=curves, solver=solver, fx=fx, vol=single_vol, metric=metric_
)
elif metric_ == "vol":
# this will return the same value as the single_vol, since the `vol` is
# directly specified
# return super().rate(
# curves=curves, solver=solver, fx=fx, vol=single_vol, metric=metric_
# )
return single_vol
else:
raise ValueError(
f"Metric {metric_} must be in {{'single_vol', 'premium', 'pips_or_%', 'vol'}}."
)
def _rate_single_vol(
self,
curves: CurvesT_,
solver: Solver_,
fx: FXForwards_,
base: str_,
vol: VolStrat_,
record_greeks: bool,
) -> DualTypes:
"""
Solve the single vol rate metric for a strangle using iterative market convergence routine.
"""
c = _parse_curves(self, curves, solver)
rate_curve = _get_curve("rate_curve", False, False, *c)
disc_curve = _get_curve("disc_curve", False, False, *c)
v: tuple[tuple[_Vol, _Vol], tuple[_Vol, _Vol], Solver] = _parse_vol( # type: ignore[assignment]
self, vol, solver, True
)
fxf = _validate_fx_as_forwards(_get_fx_forwards_maybe_from_solver(solver=solver, fx=fx))
vol_0 = _get_fx_vol(True, False, v[0][0], v[1][0], solver)
vol_1 = _get_fx_vol(True, False, v[0][1], v[1][1], solver)
# Get initial data from objects in their native AD order
spot: datetime = fxf.pairs_settlement[self.kwargs.leg1["pair"].pair]
w_spot: DualTypes = rate_curve[spot]
w_deli: DualTypes = rate_curve[self.kwargs.leg1["delivery"]]
f_d: DualTypes = fxf.rate(self.kwargs.leg1["pair"], self.kwargs.leg1["delivery"])
f_t: DualTypes = fxf.rate(self.kwargs.leg1["pair"], spot)
z_w_0 = (
1.0
if self.kwargs.leg1["delta_type"]
in [FXDeltaMethod.ForwardPremiumAdjusted, FXDeltaMethod.Forward]
else w_deli / w_spot
)
f_0 = (
f_d
if self.kwargs.leg1["delta_type"]
in [FXDeltaMethod.ForwardPremiumAdjusted, FXDeltaMethod.Forward]
else f_t
)
eta1 = None
fzw1zw0: DualTypes = 0.0
if isinstance(
vol_0, FXDeltaVolSurface | FXDeltaVolSmile
): # multiple Vol objects cannot be used, will derive conventions from the first one found.
eta1 = (
-0.5
if vol_0.meta.delta_type
in [FXDeltaMethod.ForwardPremiumAdjusted, FXDeltaMethod.SpotPremiumAdjusted]
else 0.5
)
z_w_1 = (
1.0
if vol_0.meta.delta_type
in [FXDeltaMethod.ForwardPremiumAdjusted, FXDeltaMethod.Forward]
else w_deli / w_spot
)
fzw1zw0 = f_0 * z_w_1 / z_w_0
# Determine the initial guess for Newton type iterations
_ad = _set_ad_order_objects([0] * 5, [vol_0, vol_1, rate_curve, disc_curve, fxf])
gks: list[dict[str, Any]] = [
self.instruments[0]._analytic_greeks_reduced(
curves=[rate_curve, disc_curve],
solver=NoInput(0),
fx=fxf,
base=base,
vol=vol_0,
),
self.instruments[1]._analytic_greeks_reduced(
curves=[rate_curve, disc_curve],
solver=NoInput(0),
fx=fxf,
base=base,
vol=vol_1,
),
]
g0: DualTypes = gks[0]["__vol"] * gks[0]["vega"] + gks[1]["__vol"] * gks[1]["vega"]
g0 /= gks[0]["vega"] + gks[1]["vega"]
put_op_period: _BaseFXOptionPeriod = self.instruments[0]._option
call_op_period: _BaseFXOptionPeriod = self.instruments[1]._option
def root1d(
tgt_vol: DualTypes, fzw1zw0: DualTypes, as_float: bool
) -> tuple[DualTypes, DualTypes]:
if not as_float:
# reset objects to their original order and perform final iterations
_set_ad_order_objects(_ad, [vol_0, vol_1, rate_curve, disc_curve, fxf])
# Determine the greeks of the options with the current tgt_vol iterate
gks = [
self.instruments[0]._analytic_greeks_reduced(
curves=[rate_curve, disc_curve],
solver=NoInput(0),
fx=fxf,
base=base,
vol=tgt_vol * 100.0,
),
self.instruments[1]._analytic_greeks_reduced(
curves=[rate_curve, disc_curve],
solver=NoInput(0),
fx=fxf,
base=base,
vol=tgt_vol * 100.0,
),
]
# Also determine the greeks of these options measured with the market smile vol.
# (note the strikes have been set by previous call, call OptionPeriods direct
# to avoid re-determination)
s_gks = [
put_op_period._base_analytic_greeks(
rate_curve=rate_curve,
disc_curve=disc_curve,
fx=fxf,
fx_vol=vol_0,
_reduced=True,
),
call_op_period._base_analytic_greeks(
rate_curve=rate_curve,
disc_curve=disc_curve,
fx=fxf,
fx_vol=vol_1,
_reduced=True,
),
]
# The value of the root function is derived from the 4 previous calculated prices
f0 = s_gks[0]["__bs76"] + s_gks[1]["__bs76"] - gks[0]["__bs76"] - gks[1]["__bs76"]
dc1_dvol1_0 = _d_c_hat_d_sigma_hat(gks[0], self.kwargs.meta["fixed_delta"][0])
dcmkt_dvol1_0 = _d_c_mkt_d_sigma_hat(
gks[0],
s_gks[0],
self.kwargs.leg1["expiry"],
vol_0,
eta1,
self.kwargs.meta["fixed_delta"][0],
fzw1zw0,
fxf,
)
dc1_dvol1_1 = _d_c_hat_d_sigma_hat(gks[1], self.kwargs.meta["fixed_delta"][1])
dcmkt_dvol1_1 = _d_c_mkt_d_sigma_hat(
gks[1],
s_gks[1],
self.kwargs.leg1["expiry"],
vol_1,
eta1,
self.kwargs.meta["fixed_delta"][1],
fzw1zw0,
fxf,
)
f1 = dcmkt_dvol1_0 + dcmkt_dvol1_1 - dc1_dvol1_0 - dc1_dvol1_1
return f0, f1
root_solver = newton_1dim(
root1d,
g0,
args=(fzw1zw0,),
pre_args=(True,), # solve `as_float` in iterations
final_args=(False,), # capture AD in final iterations
raise_on_fail=True,
max_iter=10,
func_tol=1e-6,
)
tgt_vol: DualTypes = root_solver["g"] * 100.0
if record_greeks: # this needs to be explicitly called since it degrades performance
self._greeks["strangle"] = {
"single_vol": {
"FXPut": self.instruments[0].analytic_greeks(curves, solver, fxf, tgt_vol),
"FXCall": self.instruments[1].analytic_greeks(curves, solver, fxf, tgt_vol),
},
"market_vol": {
"FXPut": put_op_period.analytic_greeks(rate_curve, disc_curve, fxf, vol_0),
"FXCall": call_op_period.analytic_greeks(rate_curve, disc_curve, fxf, vol_1),
},
}
return tgt_vol
# Calculations related to Strange:single_vol
def _d_c_hat_d_sigma_hat(
g: dict[str, Any], # greeks
fixed_delta: bool,
) -> DualTypes:
"""
Return the total derivative of option priced with single vol with respect to single
vol.
Parameters
----------
g: dict
The dict of greeks for the given option period measured against the tgt, single vol.
fixed_delta: bool
Whether the given FXOption is defined by fixed delta or an explicit strike.
Returns
-------
DualTypes
"""
if not fixed_delta:
# kega is 0.0
return g["vega"] # type: ignore[no-any-return]
else:
return g["_kappa"] * g["_kega"] + g["vega"] # type: ignore[no-any-return]
def _d_c_mkt_d_sigma_hat(
g: dict[str, Any], # greeks
sg: dict[str, Any], # smile_greeks
expiry: datetime,
vol: _FXVolOption,
eta1: float | None,
fixed_delta: bool,
fzw1zw0: DualTypes | None,
fxf: FXForwards,
) -> DualTypes:
"""
Return the total derivative of option priced with mkt vol with respect to single
vol.
Parameters
----------
g: dict
The dict of greeks for the given option period measured against the tgt, single vol.
sg: dict
The dict of greeks for the given option period measured against the smile.
expiry: datetime
The expiry of the Option.
vol: VolObj
The smile object.
eta1: float | None
The delta type of the Smile if available
fixed_delta: bool
Whether the option is defined by fixed delta or an explicit strike.
fxf: FXForwards,
Used by SabrSurface to forecast multiple forward rates for cross-sectional smiles before
interpolation.
Returns
-------
DualTypes
"""
if not fixed_delta:
return 0.0 # kega is zero and the mkt vol has no sensitivity to vol_hat.
else:
if isinstance(vol, FXDeltaVolSurface | FXDeltaVolSmile):
if isinstance(vol, FXDeltaVolSurface):
vol = vol.get_smile(expiry)
dvol_ddeltaidx = evaluate(vol.nodes.spline.spline, sg["_delta_index"], 1) * 0.01
ddeltaidx_dvol1 = sg["gamma"] * fzw1zw0
if eta1 < 0: # type: ignore[operator]
# premium adjusted vol smile
ddeltaidx_dvol1 += sg["_delta_index"]
ddeltaidx_dvol1 *= g["_kega"] / sg["__strike"]
_ = dual_log(sg["__strike"] / sg["__forward"]) / sg["__vol"]
_ += eta1 * sg["__vol"] * sg["__sqrt_t"] ** 2
_ *= dvol_ddeltaidx * sg["gamma"] * fzw1zw0
ddeltaidx_dvol1 /= 1 + _
dvol_dvol1: DualTypes = dvol_ddeltaidx * ddeltaidx_dvol1
elif isinstance(vol, FXSabrSmile):
dvol_dk = vol._d_sabr_d_k_or_f(
k=sg["__strike"],
f=sg["__forward"],
expiry=expiry,
as_float=False,
derivative=1,
)[1]
dvol_dvol1 = dvol_dk * g["_kega"]
elif isinstance(vol, FXSabrSurface):
dvol_dk = vol._d_sabr_d_k_or_f(
k=sg["__strike"],
f=fxf,
expiry=expiry,
as_float=False,
derivative=1,
)[1]
dvol_dvol1 = dvol_dk * g["_kega"]
else:
dvol_dvol1 = 0.0
return sg["_kappa"] * g["_kega"] + sg["vega"] * dvol_dvol1 # type: ignore[no-any-return]
================================================
FILE: python/rateslib/instruments/fx_options/vol_value.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, NoReturn
from rateslib import defaults
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.kwargs import _KWArgs
from rateslib.instruments.protocols.pricing import (
_get_fx_forwards_maybe_from_solver,
_get_fx_vol,
_parse_vol,
_Vol,
)
from rateslib.periods.utils import _validate_fx_as_forwards
from rateslib.volatility import FXDeltaVolSmile, FXDeltaVolSurface, FXSabrSmile, FXSabrSurface
from rateslib.volatility.ir import _BaseIRCube, _BaseIRSmile
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
CurvesT_,
DualTypes,
FXForwards_,
Solver_,
VolT_,
datetime_,
str_,
)
class FXVolValue(_BaseInstrument):
"""
A pseudo *Instrument* used to calibrate an *FX Vol Object* within a
:class:`~rateslib.solver.Solver`.
.. rubric:: Examples
Examples
--------
The below :class:`~rateslib.volatility.FXDeltaVolSmile` is solved directly
from calibrating volatility values.
.. ipython:: python
:suppress:
from rateslib.volatility import FXDeltaVolSmile
from rateslib.instruments import FXVolValue
from rateslib.solver import Solver
.. ipython:: python
smile = FXDeltaVolSmile(
nodes={0.3: 10.0, 0.7: 10.0},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
id="VolSmile",
)
instruments = [
FXVolValue(0.4, vol="VolSmile"),
FXVolValue(0.6, vol=smile)
]
solver = Solver(curves=[smile], instruments=instruments, s=[8.9, 7.8])
smile[0.3]
smile[0.4]
smile[0.6]
smile[0.7]
.. rubric:: Pricing
An *FX Vol Value* requires, and will calibrate, just one *FX Vol Object*.
Allowable inputs are:
.. code-block:: python
vol = fx_vol_obj | [fx_vol_obj] # a single object is detected
vol = {"fx_vol": fx_vol_obj} # dict form is explicit
Currently the only available ``metric`` is *'vol'* which returns the specific volatility value
for the index value, i.e. a delta-index for a *DeltaVol* type object, or a strike for a
*SABR* type object.
.. role:: red
.. role:: green
Parameters
----------
index_value : float, Dual, Dual2, :red:`required`
The value of some index to the *FXVolSmile* or *FXVolSurface*.
expiry: datetime, :green:`optional`
The expiry at which to evaluate. This will only be used with *Surfaces*, not *Smiles*.
metric: str, :green:`optional (set as 'vol')`
The default metric to return from the ``rate`` method.
vol: str, FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, :green:`optional`
The associated object from which to determine the ``rate``.
"""
_rate_scalar = 1.0
def __init__(
self,
index_value: DualTypes,
expiry: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
vol: VolT_ = NoInput(0),
):
user_args = dict(
expiry=expiry,
index_value=index_value,
vol=self._parse_vol(vol),
metric=metric,
)
default_args = dict(convention=defaults.convention, metric="vol", curves=NoInput(0))
self._kwargs = _KWArgs(
spec=NoInput(0),
user_args=user_args,
default_args=default_args,
meta_args=["curves", "metric", "vol"],
)
def _parse_vol(self, vol: VolT_) -> _Vol:
if isinstance(vol, _Vol):
return vol
elif isinstance(vol, _BaseIRSmile | _BaseIRCube):
raise TypeError(
f"`vol` must be suitable object for FX vol pricing. Got {type(vol).__name__}"
)
else:
return _Vol(fx_vol=vol)
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
"""
Return a value derived from a *Curve*.
Parameters
----------
curves : Curve, LineCurve, str or list of such
Uses only one *Curve*, the one given or the first in the list.
solver : Solver, optional
The numerical :class:`~rateslib.solver.Solver` that constructs
``Curves`` from calibrating instruments.
fx : float, FXRates, FXForwards, optional
Not used.
base : str, optional
Not used.
vol: float, Dual, Dual2, FXDeltaVolSmile or FXDeltaVolSurface
The volatility used in calculation.
metric: str in {"curve_value", "index_value", "cc_zero_rate"}, optional
Configures which type of value to return from the applicable *Curve*.
Returns
-------
float, Dual, Dual2, Variable
"""
v = _parse_vol(self, vol, solver, False)
metric_ = _drb(self.kwargs.meta["metric"], metric).lower()
if metric_ == "vol":
vol_ = _get_fx_vol(False, False, *v)
if isinstance(vol_, FXDeltaVolSmile | FXDeltaVolSurface):
# Must initialise with an ``expiry`` if a Surface is used
return vol_._get_index(
delta_index=self.kwargs.leg1["index_value"], expiry=self.kwargs.leg1["expiry"]
)
elif isinstance(vol_, FXSabrSmile):
fx_ = _validate_fx_as_forwards(
_get_fx_forwards_maybe_from_solver(solver=solver, fx=fx)
)
# if Sabr VolObj is not initialised with a `pair` this will create an error
pair: str = vol_.meta.pair # type: ignore[assignment]
return vol_.get_from_strike(
k=self.kwargs.leg1["index_value"],
f=fx_.rate(pair=pair, settlement=vol_.meta.delivery),
expiry=self.kwargs.leg1["expiry"],
)[1]
elif isinstance(vol_, FXSabrSurface):
fx_ = _validate_fx_as_forwards(
_get_fx_forwards_maybe_from_solver(solver=solver, fx=fx)
)
# if Sabr VolObj is not initialised with a `pair` this will create an error
return vol_.get_from_strike(
k=self.kwargs.leg1["index_value"],
f=fx_,
expiry=self.kwargs.leg1["expiry"],
)[1]
else:
raise RuntimeError(
"FX Vol type is unmapped. Please report this issue."
) # pragma: no cover
raise ValueError("`metric` must be in {'vol'}.")
def npv(self, *args: Any, **kwargs: Any) -> NoReturn:
raise NotImplementedError("`VolValue` instrument has no concept of NPV.")
def cashflows(self, *args: Any, **kwargs: Any) -> NoReturn:
raise NotImplementedError("`VolValue` instrument has no concept of cashflows.")
def analytic_delta(self, *args: Any, **kwargs: Any) -> NoReturn:
raise NotImplementedError("`VolValue` instrument has no concept of analytic delta.")
================================================
FILE: python/rateslib/instruments/fx_swap.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from datetime import datetime
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.data.fixings import _fx_index_set_cross, _get_fx_index
from rateslib.enums.generics import NoInput
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.kwargs import _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_get_fx_forwards_maybe_from_solver,
_parse_curves,
_Vol,
)
from rateslib.legs import CustomLeg
from rateslib.periods import Cashflow
from rateslib.scheduling import Schedule
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
CalInput,
CurvesT_,
DataFrame,
DualTypes,
DualTypes_,
FXForwards_,
FXIndex,
FXIndex_,
LegFixings,
RollDay,
Sequence,
Solver_,
VolT_,
_BaseLeg,
bool_,
datetime_,
str_,
)
class FXSwap(_BaseInstrument):
"""
An *FX swap* composing two
:class:`~rateslib.legs.CustomLeg`
of individual :class:`~rateslib.periods.Cashflow` of different currencies.
.. rubric:: Examples
.. ipython:: python
:suppress:
from datetime import datetime as dt
from rateslib.instruments.fx_swap import FXSwap
from rateslib import Curve, FXRates, FXForwards
Paying a 3M EURUSD *FX Swap* expressed in USD notional at 56.5 swap points.
.. ipython:: python
fxs = FXSwap(
effective=dt(2022, 1, 19),
termination="3m",
calendar="tgt|fed",
pair="eurusd",
leg2_notional=-10e6,
split_notional=-10.25e6,
fx_rate=1.15,
points=56.5,
)
fxs.cashflows()
.. rubric:: Pricing
An *FX Swap* requires a *disc curve* and a *leg2 disc curve* to discount the cashflows
of the respective currencies (typically with the same collateral definition).
The following input formats are allowed:
.. code-block:: python
curves = [disc_curve, leg2_disc_curve] # two curves are applied in the given order
curves = [None, disc_curve, None, leg2_disc_curve] # four curves applied to each leg
curves = {"disc_curve": disc_curve, "leg2_disc_curve": leg2_disc_curve} # dict form is explicit
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **scheduling** parameters.
effective : datetime, :red:`required`
The settlement date of the first currency pair.
termination : datetime, str, :red:`required`
The settlement of the second currency pair. If given as string requires additional
scheduling arguments to derive from ``effective``.
roll : RollDay, int in [1, 31], str in {"eom", "imm", "som"}, :green:`optional`
If ``termination`` is str tenor, the roll day for its determination.
eom : bool, :green:`optional`
If ``termination`` is str tenor, the end-of-month preference if ``roll`` is not specified.
modifier : Adjuster, str in {"NONE", "F", "MF", "P", "MP"}, :green:`optional (set by 'defaults')`
If ``termination`` is str tenor, the adjustment to apply to its determination.
calendar : calendar, str, :green:`optional (set as 'all')`
If ``termination`` is str tenor, the calendar to apply to its determination.
.. note::
The following define generalised **settlement** parameters.
pair : FXIndex, str, :red:`required`
The FX pair of the *Instrument* (6-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
To define the notional of the trade in units of LHS pair use ``notional``.
leg2_notional : float, Dual, Dual2, Variable, :green:`optional (negatively inherited from leg1)`
To define the notional of the trade in units of RHS pair use ``leg2_notional``.
Only one of ``notional`` or ``leg2_notional`` can be specified.
split_notional: float, Variable, :green:`optional`
If the second cashflow has a rate adjusted notional to mitigate spot FX risk this is
entered as this argument. If not given the *FX Swap* is assumed not to have split notional.
Expressed in the same units as that given for either ``notional`` or ``leg2_notional``.
.. note::
The following are **rate parameters**. Both must be given simultaneously or not
at all.
fx_rate : float, Dual, Dual2, Variable, :green:`optional`
The ``fx_rate`` with direction according to ``pair`` to define the missing notional.
points : float, Dual, Dual2, Variable, :green:`optional`
The swap points valued (in 10,000ths) to add to ``fx_rate`` to arrive at the
FX rate at maturity of the swap.
.. note::
The following are **meta parameters**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
spec: str, :green:`optional`
A collective group of parameters. See
:ref:`default argument specifications `.
Notes
-----
An *FXSwap* is constructed from two *Legs* where one is non-deliverable. A fully
specified *Instrument* is one whose non-deliverable *fx fixings* are set at initialisation
via ``points`` and either ``fx_fixings`` or ``leg2_fx_fixings``. If these are not given then
these values will be forecast :class:`~rateslib.data.fixings.FXFixing`, which will likely
impact risk sensitivity calculations. This is best observed in the following example where
two similar *FXSwaps* are created, but their risks (as demonstrated by the Dual gradients)
are different.
.. ipython:: python
eurusd = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.95})
usdusd = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.94})
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.15}, settlement=dt(2000, 1, 3)),
fx_curves={"usdusd": usdusd, "eureur": eurusd, "eurusd": eurusd},
)
fxs1 = FXSwap(
dt(2000, 1, 10),
dt(2000, 4, 10),
pair="eurusd",
notional=1e6,
fx_rate=1.1502327721341274, # <- mid-market value inserted as float
points=30.303287307187343 # <- mid-market value inserted as float
)
fxs2 = FXSwap(
dt(2000, 1, 10),
dt(2000, 4, 10),
pair="eurusd",
notional=1e6,
)
fxs1.npv(curves=[eurusd, usdusd], fx=fxf)
fxs2.npv(curves=[eurusd, usdusd], fx=fxf)
""" # noqa: E501
_rate_scalar = 1.0
@property
def leg1(self) -> CustomLeg:
"""The :class:`~rateslib.legs.CustomLeg` of the *Instrument*."""
return self._leg1
@property
def leg2(self) -> CustomLeg:
"""The :class:`~rateslib.legs.CustomLeg` of the *Instrument*."""
return self._leg2
@property
def legs(self) -> Sequence[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
An FXSwap requires a disc curve and a leg2 disc curve
"""
if isinstance(curves, NoInput):
return _Curves()
elif isinstance(curves, dict):
return _Curves(
disc_curve=curves.get("disc_curve", NoInput(0)),
leg2_disc_curve=curves.get("leg2_disc_curve", NoInput(0)),
)
elif isinstance(curves, list | tuple):
if len(curves) == 2:
return _Curves(
disc_curve=curves[0],
leg2_disc_curve=curves[1],
)
elif len(curves) == 4:
return _Curves(
disc_curve=curves[1],
leg2_disc_curve=curves[3],
)
else:
raise ValueError(
f"{type(self).__name__} requires 2 curve types. Got {len(curves)}."
)
elif isinstance(curves, _Curves):
return curves
else: # `curves` is just a single input which is copied across all curves
raise ValueError(f"{type(self).__name__} requires 2 curve types. Got 1.")
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
def __init__(
self,
# scheduling
effective: datetime,
termination: datetime | str,
pair: FXIndex | str,
*,
roll: int | RollDay | str_ = NoInput(0),
eom: bool_ = NoInput(0),
modifier: str_ = NoInput(0),
calendar: CalInput = NoInput(0),
# settlement
notional: DualTypes_ = NoInput(0),
leg2_notional: DualTypes_ = NoInput(0),
split_notional: DualTypes_ = NoInput(0),
# rate
fx_rate: DualTypes_ = NoInput(0),
points: DualTypes_ = NoInput(0),
# meta
curves: CurvesT_ = NoInput(0),
spec: str_ = NoInput(0),
):
(
fx_index_,
notional_,
leg2_notional_,
fx_fixings_,
leg2_fx_fixings_,
pair_,
leg2_pair_,
fx_rate_,
points_,
) = _validated_fxswap_input_combinations(
pair=pair,
notional=notional,
leg2_notional=leg2_notional,
split_notional=split_notional,
fx_rate=fx_rate,
points=points,
spec=spec,
)
del pair, notional, leg2_notional, split_notional, fx_rate, points
schedule = Schedule(
effective=effective,
termination=termination,
frequency="Z",
roll=roll,
eom=eom,
modifier=modifier,
calendar=calendar,
)
user_args = dict(
effective=schedule.aschedule[0],
termination=schedule.aschedule[1],
leg2_effective=schedule.aschedule[0],
leg2_termination=schedule.aschedule[1],
notional=notional_,
leg2_notional=leg2_notional_,
fx_fixings=fx_fixings_,
leg2_fx_fixings=leg2_fx_fixings_,
points=points_,
curves=self._parse_curves(curves),
fx_rate=fx_rate_,
pair=pair_,
leg2_pair=leg2_pair_,
)
instrument_args = dict( # these are hard coded arguments specific to this instrument
currency=fx_index_.pair[:3],
leg2_currency=fx_index_.pair[3:6],
vol=_Vol(),
)
default_args: dict[str, Any] = dict()
self._kwargs = _KWArgs(
spec=spec,
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=[
"curves",
"points",
"fx_rate",
"vol",
],
)
self._leg1 = CustomLeg(
periods=[
Cashflow(
currency=self.kwargs.leg1["currency"],
notional=self.kwargs.leg1["notional"][0],
payment=self.kwargs.leg1["effective"],
pair=self.kwargs.leg1["pair"],
fx_fixings=self.kwargs.leg1["fx_fixings"][0],
),
Cashflow(
currency=self.kwargs.leg1["currency"],
notional=self.kwargs.leg1["notional"][1],
payment=self.kwargs.leg1["termination"],
pair=self.kwargs.leg1["pair"],
fx_fixings=self.kwargs.leg1["fx_fixings"][1],
),
]
)
self._leg2 = CustomLeg(
periods=[
Cashflow(
currency=self.kwargs.leg2["currency"],
notional=self.kwargs.leg2["notional"][0],
payment=self.kwargs.leg2["effective"],
pair=self.kwargs.leg2["pair"],
fx_fixings=self.kwargs.leg2["fx_fixings"][0],
),
Cashflow(
currency=self.kwargs.leg2["currency"],
notional=self.kwargs.leg2["notional"][1],
payment=self.kwargs.leg2["termination"],
pair=self.kwargs.leg2["pair"],
fx_fixings=self.kwargs.leg2["fx_fixings"][1],
),
]
)
self._legs = [self._leg1, self._leg2]
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return super()._cashflows_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
if isinstance(self.kwargs.leg1["pair"], NoInput):
# then non-deliverability and fx_fixing are on leg2
return self._rate_on_leg(
core_leg="leg1", nd_leg="leg2", curves=curves, fx=fx, solver=solver
)
else:
# then non-deliverability and fx_fixing are on leg1
return self._rate_on_leg(
core_leg="leg2", nd_leg="leg1", curves=curves, fx=fx, solver=solver
)
def _rate_on_leg(
self,
core_leg: str,
nd_leg: str,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
fx_ = _get_fx_forwards_maybe_from_solver(solver=solver, fx=fx)
core_curve = "" if core_leg == "leg1" else "leg2_"
nd_curve = "" if nd_leg == "leg1" else "leg2_"
core_leg_: CustomLeg = getattr(self, core_leg)
nd_leg_: CustomLeg = getattr(self, nd_leg)
# then non-deliverability and fx_fixing are on leg2
disc_curve = _get_curve(f"{core_curve}disc_curve", False, False, *c)
core_npv: DualTypes = core_leg_.npv( # type: ignore[assignment]
disc_curve=disc_curve,
base=self.leg2.settlement_params.currency,
fx=fx_,
local=False,
)
nd_disc_curve = _get_curve(f"{nd_curve}disc_curve", False, False, *c)
nd_cf1_npv = self.leg2.periods[0].local_npv(disc_curve=nd_disc_curve, fx=fx_)
net_zero_cf = (core_npv + nd_cf1_npv) / nd_disc_curve[
nd_leg_.periods[1].settlement_params.payment
]
required_fx = net_zero_cf / nd_leg_.periods[1].settlement_params.notional
original_fx = nd_leg_.periods[0].non_deliverable_params.fx_fixing.value_or_forecast(fx=fx_) # type: ignore[attr-defined]
_: DualTypes = (required_fx - original_fx) * 10000.0
return _
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
return super().npv(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=local,
settlement=settlement,
forward=forward,
)
def _validated_fxswap_input_combinations(
pair: FXIndex | str_,
notional: DualTypes_,
leg2_notional: DualTypes_,
split_notional: DualTypes_,
fx_rate: DualTypes_,
points: DualTypes_,
spec: str_,
) -> tuple[
FXIndex,
list[DualTypes],
list[DualTypes],
LegFixings,
LegFixings,
FXIndex_,
FXIndex_,
DualTypes_,
DualTypes_,
]:
"""Method to handle arg parsing for 2 or 3 currency NDF instruments with default value
setting and erroring raising.
Returns
-------
(currency, pair, leg2_pair, notional, leg2_notional, fx_rate)
"""
kw = _KWArgs(
user_args=dict(
pair=pair,
notional=notional,
leg2_notional=leg2_notional,
split_notional=split_notional,
fx_rate=fx_rate,
points=points,
),
default_args=dict(),
spec=spec,
meta_args=["pair", "fx_rate", "split_notional", "points"],
)
# FXSwaps are physically settled so do not allow WMR cross methodology to impact
# forecast rates for FXFixings.
fx_index_ = _fx_index_set_cross(_get_fx_index(kw.meta["pair"]), allow_cross=False)
if isinstance(kw.leg1["notional"], NoInput) and isinstance(kw.leg2["notional"], NoInput):
# set a default
kw.leg1["notional"] = defaults.notional
match (
not isinstance(kw.leg1["notional"], NoInput),
not isinstance(kw.leg2["notional"], NoInput),
not isinstance(kw.meta["split_notional"], NoInput),
):
case (True, True, _):
raise ValueError(
"The notional of an FXSwap can only be given on one Leg. Got two notionals.\n"
"Use one notional and the `fx_rate` of `pair` to establish the implied "
"transactional opposite notional."
)
case (False, True, False):
# then leg2 notional is given
kw.leg2["notional"] = [kw.leg2["notional"], -1.0 * kw.leg2["notional"]]
kw.leg1["notional"] = [-1.0 * v for v in kw.leg2["notional"]]
kw.leg1["pair"], kw.leg2["pair"] = fx_index_, NoInput(0)
case (False, True, True):
# then leg2 notional as a split
if kw.meta["split_notional"] * kw.leg2["notional"] < 0:
raise ValueError(
"A notional and the `split_notional` cannot be given with different signs."
)
kw.leg2["notional"] = [kw.leg2["notional"], -1.0 * kw.meta["split_notional"]]
kw.leg1["notional"] = [-1.0 * v for v in kw.leg2["notional"]]
kw.leg1["pair"], kw.leg2["pair"] = fx_index_, NoInput(0)
case (True, False, False):
# then leg1 notional is given
kw.leg1["notional"] = [kw.leg1["notional"], -1.0 * kw.leg1["notional"]]
kw.leg2["notional"] = [-1.0 * v for v in kw.leg1["notional"]]
kw.leg1["pair"], kw.leg2["pair"] = NoInput(0), fx_index_
case (True, False, True):
kw.leg1["notional"] = [kw.leg1["notional"], -1.0 * kw.meta["split_notional"]]
kw.leg2["notional"] = [-1.0 * v for v in kw.leg1["notional"]]
kw.leg1["pair"], kw.leg2["pair"] = NoInput(0), fx_index_
if (not isinstance(kw.meta["fx_rate"], NoInput) and isinstance(kw.meta["points"], NoInput)) or (
isinstance(kw.meta["fx_rate"], NoInput) and not isinstance(kw.meta["points"], NoInput)
):
raise ValueError(
"For an FXSwap transaction both `fx_rate` and `points` must be given.\n"
"Providing only one component is not allowed, please provide the missing element.\n"
f"Got for `fx_rate`: {kw.meta['fx_rate']}\n"
f"Got for `points`: {kw.meta['points']}\n"
)
elif not isinstance(kw.meta["fx_rate"], NoInput) and not isinstance(kw.meta["points"], NoInput):
if not isinstance(kw.leg1["pair"], NoInput):
kw.leg1["fx_fixings"] = [
kw.meta["fx_rate"],
kw.meta["fx_rate"] + kw.meta["points"] / 10000.0,
]
kw.leg2["fx_fixings"] = [NoInput(0), NoInput(0)]
else:
kw.leg1["fx_fixings"] = [NoInput(0), NoInput(0)]
kw.leg2["fx_fixings"] = [
kw.meta["fx_rate"],
kw.meta["fx_rate"] + kw.meta["points"] / 10000.0,
]
else:
kw.leg1["fx_fixings"] = [NoInput(0), NoInput(0)]
kw.leg2["fx_fixings"] = [NoInput(0), NoInput(0)]
return (
fx_index_,
kw.leg1["notional"],
kw.leg2["notional"],
kw.leg1["fx_fixings"],
kw.leg2["fx_fixings"],
kw.leg1["pair"],
kw.leg2["pair"],
kw.meta["fx_rate"],
kw.meta["points"],
)
================================================
FILE: python/rateslib/instruments/iirs.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.kwargs import _convert_to_schedule_kwargs, _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_parse_curves,
_Vol,
)
from rateslib.legs import FixedLeg, FloatLeg
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CalInput,
CurvesT_,
DataFrame,
DualTypes,
DualTypes_,
FloatRateSeries,
Frequency,
FXForwards_,
IndexMethod,
LegFixings,
RollDay,
Solver_,
VolT_,
_BaseLeg,
bool_,
datetime,
datetime_,
float_,
int_,
str_,
)
class IIRS(_BaseInstrument):
"""
An *indexed interest rate swap (IIRS)* composing a :class:`~rateslib.legs.FixedLeg`
and a :class:`~rateslib.legs.FloatLeg`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.instruments import IIRS
from rateslib import fixings
from datetime import datetime as dt
from pandas import Series
.. ipython:: python
fixings.add("CPI_UK", Series(index=[dt(1999, 10, 1), dt(1999, 11, 1)], data=[110.0, 112.0]))
iirs = IIRS(
effective=dt(2000, 1, 1),
termination="2y",
frequency="A",
leg2_frequency="S",
index_fixings="CPI_UK",
index_lag=3,
fixed_rate=2.0,
)
iirs.cashflows()
.. ipython:: python
:suppress:
fixings.pop("CPI_UK")
.. rubric:: Pricing
An *IIRS* requires a *disc curve* on both legs (which should be the same *Curve*), an
*index curve* for index forecasting on the *FixedLeg*, and a
*leg2 rate curve* to forecast rates on the *FloatLeg*. The following input formats are
allowed:
.. code-block:: python
curves = [index_curve, disc_curve, leg2_rate_curve] # three curves are applied in order
curves = [index_curve, disc_curve, leg2_rate_curve, disc_curve] # four curves applied to each leg
curves = { # dict form is explicit
"leg2_rate_curve": leg2_rate_curve,
"disc_curve": disc_curve,
"index_curve": index_curve,
}
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **scheduling** parameters.
effective : datetime, :red:`required`
The unadjusted effective date. If given as adjusted, unadjusted alternatives may be
inferred.
termination : datetime, str, :red:`required`
The unadjusted termination date. If given as adjusted, unadjusted alternatives may be
inferred. If given as string tenor will be calculated from ``effective``.
frequency : Frequency, str, :red:`required`
The frequency of the schedule.
If given as string will derive a :class:`~rateslib.scheduling.Frequency` aligning with:
monthly ("M"), quarterly ("Q"), semi-annually ("S"), annually("A") or zero-coupon ("Z"), or
a set number of calendar or business days ("_D", "_B"), weeks ("_W"), months ("_M") or
years ("_Y").
Where required, the :class:`~rateslib.scheduling.RollDay` is derived as per ``roll``
and business day calendar as per ``calendar``.
stub : StubInference, str in {"ShortFront", "LongFront", "ShortBack", "LongBack"}, :green:`optional`
The stub type used if stub inference is required. If given as string will derive a
:class:`~rateslib.scheduling.StubInference`.
front_stub : datetime, :green:`optional`
The unadjusted date for the start stub period. If given as adjusted, unadjusted
alternatives may be inferred.
back_stub : datetime, :green:`optional`
The unadjusted date for the back stub period. If given as adjusted, unadjusted
alternatives may be inferred.
See notes for combining ``stub``, ``front_stub`` and ``back_stub``
and any automatic stub inference.
roll : RollDay, int in [1, 31], str in {"eom", "imm", "som"}, :green:`optional`
The roll day of the schedule. If not given or not available in ``frequency`` will be
inferred for monthly frequency variants.
eom : bool, :green:`optional`
Use an end of month preference rather than regular rolls for ``roll`` inference. Set by
default. Not required if ``roll`` is defined.
modifier : Adjuster, str in {"NONE", "F", "MF", "P", "MP"}, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` used for adjusting unadjusted schedule dates
into adjusted dates. If given as string must define simple date rolling rules.
calendar : calendar, str, :green:`optional`
The business day calendar object to use. If string will call
:meth:`~rateslib.scheduling.get_calendar`.
payment_lag: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
a payment date. If given as integer will define the number of business days to
lag payments by.
payment_lag_exchange: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional payment date. If given as integer will define the number of business days to
lag payments by.
ex_div: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional dates, which may be used, for example by fixings schedules. If given as integer
will define the number of business days to lag dates by.
convention: str, :green:`optional (set by 'defaults')`
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
leg2_effective : datetime, :green:`optional (inherited from leg1)`
leg2_termination : datetime, str, :green:`optional (inherited from leg1)`
leg2_frequency : Frequency, str, :green:`optional (inherited from leg1)`
leg2_stub : StubInference, str, :green:`optional (inherited from leg1)`
leg2_front_stub : datetime, :green:`optional (inherited from leg1)`
leg2_back_stub : datetime, :green:`optional (inherited from leg1)`
leg2_roll : RollDay, int, str, :green:`optional (inherited from leg1)`
leg2_eom : bool, :green:`optional (inherited from leg1)`
leg2_modifier : Adjuster, str, :green:`optional (inherited from leg1)`
leg2_calendar : calendar, str, :green:`optional (inherited from leg1)`
leg2_payment_lag: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_payment_lag_exchange: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_ex_div: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_convention: str, :green:`optional (inherited from leg1)`
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the *Instrument* (3-digit code).
notional_exchange: bool, :green:`optional (set as False)`
Whether to include a final notional exchange on both legs, which affects the PV since
the *FixedLeg* has an *indexed* cashflow.
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The initial leg notional, defined in units of *reference currency*.
amortization: float, Dual, Dual2, Variable, str, Amortization, :green:`optional (set as zero)`
Set a non-constant notional per *Period*. If a scalar value, adjusts the ``notional`` of
each successive period by that same value. Should have
sign equal to that of notional if the notional is to reduce towards zero.
leg2_notional : float, Dual, Dual2, Variable, :green:`optional (negatively inherited from leg1)`
leg2_amortization : float, Dual, Dual2, Variable, str, Amortization, :green:`optional (negatively inherited from leg1)`
.. note::
The following are **rate parameters**.
fixed_rate : float or None
The fixed rate applied to the :class:`~rateslib.legs.FixedLeg`. If `None`
will be set to mid-market when curves are provided.
leg2_fixing_method: FloatFixingMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.FloatFixingMethod` describing the determination
of the floating rate for each period.
leg2_fixing_frequency: Frequency, str, :green:`optional (set by 'frequency' or '1B')`
The :class:`~rateslib.scheduling.Frequency` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given is assumed to match the
frequency of the schedule for an IBOR type ``fixing_method`` or '1B' if RFR type.
leg2_fixing_series: FloatRateSeries, str, :green:`optional (implied by other parameters)`
The :class:`~rateslib.data.fixings.FloatRateSeries` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given inherits attributes given
such as the ``calendar``, ``convention``, ``fixing_method`` etc.
leg2_float_spread: float, Dual, Dual2, Variable, :green:`optional (set as 0.0)`
The amount (in bps) added to the rate in each period rate determination.
leg2_spread_compound_method: SpreadCompoundMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.SpreadCompoundMethod` used in the calculation
of the period rate when combining a ``float_spread``. Used **only** with RFR type
``fixing_method``.
leg2_rate_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
See :ref:`Fixings `.
The value of the rate fixing. If a scalar, is used directly. If a string identifier, links
to the central ``fixings`` object and data loader.
.. note::
The following parameters define **indexation**.
index_method : IndexMethod, str, :green:`optional (set by 'defaults')`
The interpolation method, or otherwise, to determine index values from reference dates.
index_lag: int, :green:`optional (set by 'defaults')`
The indexation lag, in months, applied to the determination of index values.
index_base: float, Dual, Dual2, Variable, :green:`optional`
The specific value applied as the base index value for all *Periods*.
If not given and ``index_fixings`` is a string fixings identifier that will be
used to determine the base index value.
index_fixings: float, Dual, Dual2, Variable, Series, str, 2-tuple or list, :green:`optional`
The index value for the reference date.
Best practice is to supply this value as string identifier relating to the global
``fixings`` object.
.. note::
The following are **meta parameters**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
spec: str, :green:`optional`
A collective group of parameters. See
:ref:`default argument specifications `.
""" # noqa: E501
_rate_scalar = 1.0
@property
def fixed_rate(self) -> DualTypes_:
return self.leg1.fixed_rate
@fixed_rate.setter
def fixed_rate(self, value: DualTypes_) -> None:
self.kwargs.leg1["fixed_rate"] = value
self.leg1.fixed_rate = value
@property
def leg2_float_spread(self) -> DualTypes_:
return self.leg2.float_spread
@leg2_float_spread.setter
def leg2_float_spread(self, value: DualTypes) -> None:
self.kwargs.leg2["float_spread"] = value
self.leg2.float_spread = value
@property
def leg1(self) -> FixedLeg:
"""The :class:`~rateslib.legs.FixedLeg` of the *Instrument*."""
return self._leg1
@property
def leg2(self) -> FloatLeg:
"""The :class:`~rateslib.legs.FloatLeg` of the *Instrument*."""
return self._leg2
@property
def legs(self) -> list[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
def __init__(
self,
effective: datetime_ = NoInput(0),
termination: datetime | str_ = NoInput(0),
frequency: Frequency | str_ = NoInput(0),
*,
stub: str_ = NoInput(0),
front_stub: datetime_ = NoInput(0),
back_stub: datetime_ = NoInput(0),
roll: int | RollDay | str_ = NoInput(0),
eom: bool_ = NoInput(0),
modifier: str_ = NoInput(0),
calendar: CalInput = NoInput(0),
payment_lag: int_ = NoInput(0),
payment_lag_exchange: int_ = NoInput(0),
ex_div: int_ = NoInput(0),
convention: str_ = NoInput(0),
leg2_effective: datetime_ = NoInput(1),
leg2_termination: datetime | str_ = NoInput(1),
leg2_frequency: Frequency | str_ = NoInput(1),
leg2_stub: str_ = NoInput(1),
leg2_front_stub: datetime_ = NoInput(1),
leg2_back_stub: datetime_ = NoInput(1),
leg2_roll: int | RollDay | str_ = NoInput(1),
leg2_eom: bool_ = NoInput(1),
leg2_modifier: str_ = NoInput(1),
leg2_calendar: CalInput = NoInput(1),
leg2_payment_lag: int_ = NoInput(1),
leg2_payment_lag_exchange: int_ = NoInput(1),
leg2_convention: str_ = NoInput(1),
leg2_ex_div: int_ = NoInput(1),
# settlement params
currency: str_ = NoInput(0),
notional_exchange: bool = False,
notional: float_ = NoInput(0),
amortization: float_ = NoInput(0),
leg2_notional: float_ = NoInput(-1),
leg2_amortization: float_ = NoInput(-1),
# index params
index_base: DualTypes_ = NoInput(0),
index_lag: int_ = NoInput(0),
index_method: IndexMethod | str_ = NoInput(0),
index_fixings: LegFixings = NoInput(0),
# rate params
fixed_rate: DualTypes_ = NoInput(0),
leg2_float_spread: DualTypes_ = NoInput(0),
leg2_spread_compound_method: str_ = NoInput(0),
leg2_rate_fixings: LegFixings = NoInput(0),
leg2_fixing_method: str_ = NoInput(0),
leg2_fixing_frequency: Frequency | str_ = NoInput(0),
leg2_fixing_series: FloatRateSeries | str_ = NoInput(0),
# meta params
curves: CurvesT_ = NoInput(0),
spec: str_ = NoInput(0),
) -> None:
user_args = dict(
effective=effective,
termination=termination,
frequency=frequency,
fixed_rate=fixed_rate,
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings,
stub=stub,
front_stub=front_stub,
back_stub=back_stub,
roll=roll,
eom=eom,
modifier=modifier,
calendar=calendar,
payment_lag=payment_lag,
payment_lag_exchange=payment_lag_exchange,
ex_div=ex_div,
notional=notional,
currency=currency,
amortization=amortization,
convention=convention,
leg2_float_spread=leg2_float_spread,
leg2_spread_compound_method=leg2_spread_compound_method,
leg2_rate_fixings=leg2_rate_fixings,
leg2_fixing_method=leg2_fixing_method,
leg2_fixing_series=leg2_fixing_series,
leg2_fixing_frequency=leg2_fixing_frequency,
leg2_effective=leg2_effective,
leg2_termination=leg2_termination,
leg2_frequency=leg2_frequency,
leg2_stub=leg2_stub,
leg2_front_stub=leg2_front_stub,
leg2_back_stub=leg2_back_stub,
leg2_roll=leg2_roll,
leg2_eom=leg2_eom,
leg2_modifier=leg2_modifier,
leg2_calendar=leg2_calendar,
leg2_payment_lag=leg2_payment_lag,
leg2_payment_lag_exchange=leg2_payment_lag_exchange,
leg2_ex_div=leg2_ex_div,
leg2_notional=leg2_notional,
leg2_amortization=leg2_amortization,
leg2_convention=leg2_convention,
curves=self._parse_curves(curves),
final_exchange=notional_exchange,
leg2_final_exchange=notional_exchange,
)
instrument_args = dict( # these are hard coded arguments specific to this instrument
leg2_currency=NoInput(1),
initial_exchange=False,
leg2_initial_exchange=False,
vol=_Vol(),
)
default_args = dict(
notional=defaults.notional,
payment_lag=defaults.payment_lag_specific[type(self).__name__],
payment_lag_exchange=defaults.payment_lag_exchange,
index_lag=defaults.index_lag,
index_method=defaults.index_method,
)
self._kwargs = _KWArgs(
spec=spec,
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=["curves", "vol"],
)
self._leg1 = FixedLeg(**_convert_to_schedule_kwargs(self.kwargs.leg1, 1))
self._leg2 = FloatLeg(**_convert_to_schedule_kwargs(self.kwargs.leg2, 1))
self._legs = [self._leg1, self._leg2]
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
leg2_npv: DualTypes = self.leg2.local_npv(
rate_curve=_get_curve("leg2_rate_curve", True, True, *c),
disc_curve=_get_curve("leg2_disc_curve", False, True, *c),
index_curve=NoInput(0),
settlement=settlement,
forward=forward,
)
# self.leg1.fixed_rate = 0.0
# leg1_npv: DualTypes = self.leg1.local_npv(
# rate_curve=NoInput(0),
# disc_curve=_get_maybe_curve_maybe_from_solver(
# self.kwargs.meta["curves"], _curves, "disc_curve", solver
# ),
# index_curve=_get_maybe_curve_maybe_from_solver(
# self.kwargs.meta["curves"], _curves, "index_curve", solver
# ),
# settlement=settlement,
# forward=forward,
# )
# self.leg1.fixed_rate = self.kwargs.leg1["fixed_rate"]
return (
self.leg1.spread(
target_npv=-leg2_npv, # - leg1_npv,
rate_curve=NoInput(0),
disc_curve=_get_curve("disc_curve", False, True, *c),
index_curve=_get_curve("index_curve", False, True, *c),
settlement=settlement,
forward=forward,
)
/ 100
)
def spread(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
leg1_npv: DualTypes = self.leg1.local_npv(
rate_curve=NoInput(0),
disc_curve=_get_curve("disc_curve", False, True, *c),
index_curve=_get_curve("index_curve", False, True, *c),
settlement=settlement,
forward=forward,
)
return self.leg2.spread(
target_npv=-leg1_npv,
rate_curve=_get_curve("leg2_rate_curve", True, True, *c),
disc_curve=_get_curve("leg2_disc_curve", False, True, *c),
index_curve=NoInput(0),
settlement=settlement,
forward=forward,
)
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
self._set_pricing_mid(
curves=curves,
solver=solver,
settlement=settlement,
forward=forward,
)
return super().npv(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=local,
settlement=settlement,
forward=forward,
)
def _set_pricing_mid(
self,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> None:
# the test for an unpriced IIRS is that its fixed rate is not set.
if isinstance(self.kwargs.leg1["fixed_rate"], NoInput):
# set a fixed rate for the purpose of generic methods NPV will be zero.
mid_market_rate = self.rate(
curves=curves,
solver=solver,
settlement=settlement,
forward=forward,
)
self.leg1.fixed_rate = _dual_float(mid_market_rate)
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
An IIRS has three curve requirements: an index_curve, a leg2_rate_curve and a
disc_curve used by both legs.
"""
if isinstance(curves, NoInput):
return _Curves()
elif isinstance(curves, dict):
return _Curves(
disc_curve=curves.get("disc_curve", NoInput(0)),
index_curve=curves.get("index_curve", NoInput(0)),
leg2_rate_curve=_drb(
curves.get("rate_curve", NoInput(0)),
curves.get("leg2_rate_curve", NoInput(0)),
),
leg2_disc_curve=_drb(
curves.get("disc_curve", NoInput(0)),
curves.get("leg2_disc_curve", NoInput(0)),
),
)
elif isinstance(curves, list | tuple):
if len(curves) == 3:
return _Curves(
disc_curve=curves[1],
index_curve=curves[0],
leg2_rate_curve=curves[2],
leg2_disc_curve=curves[1],
)
elif len(curves) == 4:
return _Curves(
disc_curve=curves[1],
index_curve=curves[0],
leg2_rate_curve=curves[2],
leg2_disc_curve=curves[3],
)
else:
raise ValueError(
f"{type(self).__name__} requires 3 curve types. Got {len(curves)}."
)
elif isinstance(curves, _Curves):
return curves
else: # `curves` is just a single input which is copied across all curves
raise ValueError(f"{type(self).__name__} requires 3 curve types. Got 1.")
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return super()._cashflows_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return self._local_analytic_rate_fixings_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
)
================================================
FILE: python/rateslib/instruments/ir_options/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from rateslib.instruments.ir_options.call_put import IRSCall, IRSPut, _BaseIRSOption
from rateslib.instruments.ir_options.risk_reversal import IRSRiskReversal
from rateslib.instruments.ir_options.straddle import IRSStraddle, _BaseIRSOptionStrat
from rateslib.instruments.ir_options.strangle import IRSStrangle
from rateslib.instruments.ir_options.vol_value import IRVolValue
__all__ = [
"IRSCall",
"IRSPut",
"IRSStraddle",
"IRSStrangle",
"IRSRiskReversal",
"IRVolValue",
"_BaseIRSOption",
"_BaseIRSOptionStrat",
]
================================================
FILE: python/rateslib/instruments/ir_options/call_put.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from abc import ABCMeta
from datetime import datetime
from typing import TYPE_CHECKING, NoReturn
from rateslib import defaults
from rateslib.curves._parsers import _validate_obj_not_no_input
from rateslib.default import plot
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import (
IROptionMetric,
SwaptionSettlementMethod,
_get_ir_option_metric,
)
from rateslib.instruments.irs import IRS
from rateslib.instruments.protocols import _BaseInstrument, _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_maybe_get_ir_vol_maybe_from_solver,
_parse_curves,
_Vol,
)
from rateslib.legs import CustomLeg
from rateslib.periods import Cashflow, IRSCallPeriod, IRSPutPeriod
from rateslib.periods.utils import (
_get_ir_vol_value_and_forward_maybe_from_obj,
)
from rateslib.volatility.fx import FXVolObj
from rateslib.volatility.ir import _BaseIRSmile
from rateslib.volatility.ir.utils import _get_ir_expiry_and_payment
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
Arr1dF64,
CurveOption_,
CurvesT_,
DataFrame,
DualTypes,
DualTypes_,
FXForwards_,
IRSSeries,
PlotOutput,
Sequence,
Solver_,
VolT_,
_BaseCurve_,
_BaseIRSOptionPeriod,
_BaseLeg,
_IRVolOption_,
_IRVolPricingParams,
bool_,
datetime_,
float_,
str_,
)
class _BaseIRSOption(_BaseInstrument, metaclass=ABCMeta):
"""
Abstract base class for implementing *IR Swaptions*.
See :class:`~rateslib.instruments.IRSCall` and
:class:`~rateslib.instruments.IRSPut`.
"""
_pricing: _IRVolPricingParams
def analytic_greeks(
self,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
) -> dict[str, Any]:
"""
Return various pricing metrics of the *FX Option*.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import Curve, FXCall, dt, FXForwards, FXRates, FXDeltaVolSmile
.. ipython:: python
eur = Curve({dt(2020, 1, 1): 1.0, dt(2021, 1, 1): 0.98})
usd = Curve({dt(2020, 1, 1): 1.0, dt(2021, 1, 1): 0.96})
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.10}, settlement=dt(2020, 1, 3)),
fx_curves={"eureur": eur, "eurusd": eur, "usdusd": usd},
)
fxvs = FXDeltaVolSmile(
nodes={0.25: 11.0, 0.5: 9.8, 0.75: 10.7},
delta_type="forward",
eval_date=dt(2020, 1, 1),
expiry=dt(2020, 4, 1)
)
fxc = FXCall(
expiry="3m",
strike=1.10,
eval_date=dt(2020, 1, 1),
spec="eurusd_call",
)
fxc.analytic_greeks(fx=fxf, curves=[eur, usd], vol=fxvs)
Parameters
----------
curves: _Curves, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
solver: Solver, :green:`optional`
A :class:`~rateslib.solver.Solver` object containing *Curve*, *Smile*, *Surface*, or
*Cube* mappings for pricing.
fx: FXForwards, :green:`optional`
The :class:`~rateslib.fx.FXForwards` object used for forecasting FX rates, if necessary.
vol: _Vol, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
Returns
-------
dict
"""
return self._analytic_greeks_set_metrics(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
set_metrics=True,
)
def _analytic_greeks_set_metrics(
self,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
set_metrics: bool_ = True,
) -> dict[str, Any]:
"""
Return various pricing metrics of the *FX Option*.
Returns
-------
float, Dual, Dual2
"""
c = _parse_curves(self, curves, solver)
rate_curve = _get_curve("rate_curve", True, False, *c)
disc_curve = _get_curve("disc_curve", False, False, *c)
index_curve = _get_curve("index_curve", False, False, *c)
_vol = self._parse_vol(vol)
ir_vol = _maybe_get_ir_vol_maybe_from_solver(
vol=_vol, vol_meta=self.kwargs.meta["vol"], solver=solver
)
if set_metrics:
self._set_strike_and_vol(
rate_curve=rate_curve, disc_curve=disc_curve, index_curve=index_curve, vol=ir_vol
)
# self._set_premium(curves, fx)
return self._option.analytic_greeks(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
ir_vol=ir_vol,
premium=NoInput(0),
premium_payment=NoInput(0),
)
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
raise NotImplementedError(
"`local_analytic_rate_fixings` is not implemented for `_BaseIRSOption` types."
)
def spread(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
raise NotImplementedError("`spread` is not implemented for `_BaseIRSOption` types.")
@property
def _rate_scalar(self) -> float: # type: ignore[override]
if type(self.kwargs.meta["metric"]) in [
IROptionMetric.BlackVolShift,
IROptionMetric.NormalVol,
]:
return 100.0
else:
return 1.0
@property
def leg1(self) -> CustomLeg:
"""The :class:`~rateslib.legs.CustomLeg` of the *Instrument* containing the
:class:`~rateslib.periods.IROptionPeriod`."""
return self._leg1
@property
def leg2(self) -> CustomLeg:
"""The :class:`~rateslib.legs.CustomLeg` of the *Instrument* containing the
premium :class:`~rateslib.periods.Cashflow`."""
return self._leg2
@property
def legs(self) -> Sequence[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
@property
def _option(self) -> _BaseIRSOptionPeriod:
return self.leg1.periods[0] # type: ignore[return-value]
@property
def _irs(self) -> IRS:
return self._option.ir_option_params.option_fixing.irs
@property
def _premium(self) -> Cashflow:
return self.leg2.periods[0] # type: ignore[return-value]
@classmethod
def _parse_curves(cls, curves: CurvesT_) -> _Curves:
"""
A Swaption has 3 curve requirements. See **Pricing**.
"""
if isinstance(curves, NoInput):
return _Curves()
elif isinstance(curves, list | tuple):
if len(curves) == 1:
return _Curves(
rate_curve=curves[0],
index_curve=curves[0],
disc_curve=curves[0],
leg2_disc_curve=curves[0],
)
elif len(curves) == 2:
return _Curves(
rate_curve=curves[0],
disc_curve=curves[1],
index_curve=curves[1],
leg2_disc_curve=curves[1],
)
elif len(curves) == 3:
return _Curves(
rate_curve=curves[0],
disc_curve=curves[1],
index_curve=curves[2],
leg2_disc_curve=curves[1],
)
else:
raise ValueError(
f"{type(cls).__name__} requires only 2 curve types. Got {len(curves)}."
)
elif isinstance(curves, dict):
return _Curves(
rate_curve=curves.get("rate_curve", NoInput(0)),
disc_curve=curves.get("disc_curve", NoInput(0)),
index_curve=curves.get("index_curve", NoInput(0)),
leg2_disc_curve=_drb(
curves.get("disc_curve", NoInput(0)),
curves.get("leg2_disc_curve", NoInput(0)),
),
)
elif isinstance(curves, _Curves):
return curves
else: # `curves` is just a single input which is copied across all curves
return _Curves(
rate_curve=curves, # type: ignore[arg-type]
disc_curve=curves, # type: ignore[arg-type]
index_curve=curves, # type: ignore[arg-type]
leg2_disc_curve=curves, # type: ignore[arg-type]
)
@classmethod
def _parse_vol(cls, vol: VolT_) -> _Vol:
"""
IR options requires only a single IRVolObj or a scalar.
"""
if isinstance(vol, _Vol):
return vol
elif isinstance(vol, FXVolObj):
raise TypeError("`vol` cannot be an FX type vol object and must be IR type vol object.")
else:
return _Vol(ir_vol=vol)
def __init__(
self,
expiry: datetime | str,
tenor: datetime | str,
strike: DualTypes | str,
irs_series: IRSSeries | str,
*,
notional: DualTypes_ = NoInput(0),
eval_date: datetime | NoInput = NoInput(0),
premium: DualTypes_ = NoInput(0),
payment_lag: str | datetime_ = NoInput(0),
option_fixings: DualTypes_ = NoInput(0),
settlement_method: SwaptionSettlementMethod | str_ = NoInput(0),
metric: IROptionMetric | str_ = NoInput(0),
curves: CurvesT_ = NoInput(0),
vol: VolT_ = NoInput(0),
spec: str_ = NoInput(0),
call: bool = True,
):
user_args = dict(
tenor=tenor,
expiry=expiry,
notional=notional,
strike=strike,
irs_series=irs_series,
option_fixings=option_fixings,
settlement_method=settlement_method,
leg2_payment_lag=payment_lag,
leg2_premium=premium,
metric=metric,
curves=self._parse_curves(curves),
vol=self._parse_vol(vol),
)
# instrument_args: dict[str, Any] = dict()
default_args = dict(
notional=defaults.notional,
metric=defaults.ir_option_metric,
settlement_method=defaults.ir_option_settlement,
)
self._kwargs = _KWArgs(
user_args=user_args,
default_args=default_args,
spec=spec,
meta_args=["curves", "vol", "metric"],
)
# determine the `expiry` and `delivery` as datetimes if derived from other combinations
(self.kwargs.leg1["expiry"], self.kwargs.leg2["payment"]) = _get_ir_expiry_and_payment(
eval_date=eval_date,
expiry=self.kwargs.leg1["expiry"],
irs_series=self.kwargs.leg1["irs_series"],
payment_lag=self.kwargs.leg2["payment_lag"],
)
# sanitise
self.kwargs.meta["metric"] = _get_ir_option_metric(self.kwargs.meta["metric"])
self._leg1 = CustomLeg(
[
IRSCallPeriod( # type: ignore[abstract]
expiry=self.kwargs.leg1["expiry"],
tenor=self.kwargs.leg1["tenor"],
irs_series=self.kwargs.leg1["irs_series"],
strike=NoInput(0)
if isinstance(self.kwargs.leg1["strike"], str)
else self.kwargs.leg1["strike"],
notional=self.kwargs.leg1["notional"],
option_fixings=self.kwargs.leg1["option_fixings"],
metric=self.kwargs.meta["metric"],
settlement_method=self.kwargs.leg1["settlement_method"],
)
if call
else IRSPutPeriod( # type: ignore[abstract]
expiry=self.kwargs.leg1["expiry"],
tenor=self.kwargs.leg1["tenor"],
irs_series=self.kwargs.leg1["irs_series"],
strike=NoInput(0)
if isinstance(self.kwargs.leg1["strike"], str)
else self.kwargs.leg1["strike"],
notional=self.kwargs.leg1["notional"],
option_fixings=self.kwargs.leg1["option_fixings"],
metric=self.kwargs.meta["metric"],
settlement_method=self.kwargs.leg1["settlement_method"],
)
]
)
self._leg2 = CustomLeg(
[
Cashflow(
notional=_drb(0.0, self.kwargs.leg2["premium"]),
payment=self.kwargs.leg2["payment"],
currency=self._leg1.settlement_params.currency,
),
]
)
self._legs = [self._leg1, self._leg2]
def __repr__(self) -> str:
return f""
def _set_strike_and_vol(
self,
rate_curve: CurveOption_,
disc_curve: _BaseCurve_,
index_curve: _BaseCurve_,
vol: _IRVolOption_,
) -> None:
"""
Set the strike, if necessary, and determine pricing metrics from the volatility objects.
The strike for the *OptionPeriod* is either; string or numeric.
If it is string, then a numeric strike must be determined with an associated vol.
If it is numeric then the volatility must be determined for the given strike.
Pricing elements are captured and cached so they can be used later by subsequent methods.
"""
if isinstance(vol, _BaseIRSmile): # TODO _BaseIRCube
eval_date = vol.meta.eval_date
else:
_ = _validate_obj_not_no_input(disc_curve, "disc_curve")
eval_date = _.nodes.initial
_pricing = _get_ir_vol_value_and_forward_maybe_from_obj(
rate_curve=rate_curve,
index_curve=index_curve,
strike=self.kwargs.leg1["strike"],
ir_vol=vol,
irs=self._irs,
tenor=self._option.ir_option_params.option_fixing.termination,
expiry=self._option.ir_option_params.expiry,
t_e=self._option.ir_option_params.time_to_expiry(eval_date),
)
# Review section in book regarding Hyper-parameters and Solver interaction
self._option.ir_option_params.strike = _pricing.k
self._pricing = _pricing
# self._option_periods[0].strike = _dual_float(self._pricing.k)
def _set_premium(
self,
rate_curve: CurveOption_,
disc_curve: _BaseCurve_,
index_curve: _BaseCurve_,
pricing: _IRVolPricingParams,
) -> None:
"""
Set an unspecified premium on the Option to be equal to the mid-market premium.
"""
if isinstance(self.kwargs.leg2["premium"], NoInput):
# then set the CashFlow to mid-market
npv: DualTypes = self._option.npv( # type: ignore[assignment]
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
ir_vol=pricing,
local=False,
forward=self.kwargs.leg2["payment"],
)
self._premium.settlement_params._notional = _dual_float(npv)
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: IROptionMetric | str_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
rate_curve = _get_curve("rate_curve", True, False, *c)
disc_curve = _get_curve("disc_curve", False, False, *c)
index_curve = _get_curve("index_curve", False, False, *c)
_vol = self._parse_vol(vol)
del vol
ir_vol = _maybe_get_ir_vol_maybe_from_solver(
vol=_vol, vol_meta=self.kwargs.meta["vol"], solver=solver
)
self._set_strike_and_vol(
rate_curve=rate_curve, disc_curve=disc_curve, index_curve=index_curve, vol=ir_vol
)
# Premium is not required for rate and also sets as float
# Review section: "Hyper-parameters and Solver interaction" before enabling.
# self._set_premium(curves, fx)
metric_ = _get_ir_option_metric(_drb(self.kwargs.meta["metric"], metric))
del metric
value = self._option.rate(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
ir_vol=self._pricing,
metric=metric_,
)
if (
metric_ in [IROptionMetric.Premium(), IROptionMetric.PercentNotional()]
and self.leg2.settlement_params.payment != self.leg1.settlement_params.payment
):
return (
value
* disc_curve[self.leg2.settlement_params.payment]
/ disc_curve[self.leg1.settlement_params.payment]
)
else:
return value
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
c = _parse_curves(self, curves, solver)
rate_curve = _get_curve("rate_curve", True, True, *c)
disc_curve = _get_curve("disc_curve", False, True, *c)
index_curve = _get_curve("index_curve", False, True, *c)
_vol = self._parse_vol(vol)
del vol
ir_vol = _maybe_get_ir_vol_maybe_from_solver(
vol=_vol, vol_meta=self.kwargs.meta["vol"], solver=solver
)
self._set_strike_and_vol(
rate_curve=rate_curve, disc_curve=disc_curve, index_curve=index_curve, vol=ir_vol
)
self._set_premium(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
pricing=self._pricing,
)
if not local:
base_ = _drb(self.legs[0].settlement_params.currency, base)
else:
base_ = base
opt_npv = self._option.npv(
rate_curve=rate_curve, # _validate_obj_not_no_input(rate_curve, "rate curve"),
disc_curve=disc_curve,
index_curve=index_curve,
fx=fx,
base=base_,
local=local,
ir_vol=self._pricing,
settlement=settlement,
forward=forward,
)
prem_npv = self._premium.npv(
disc_curve=_get_curve("leg2_disc_curve", False, True, *c),
fx=fx,
base=base_,
local=local,
settlement=settlement,
forward=forward,
)
if local:
return {k: opt_npv.get(k, 0) + prem_npv.get(k, 0) for k in set(opt_npv) | set(prem_npv)} # type:ignore[union-attr, arg-type]
else:
return opt_npv + prem_npv # type: ignore[operator]
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
c = _parse_curves(self, curves, solver)
rate_curve = _get_curve("rate_curve", True, True, *c)
disc_curve = _get_curve("disc_curve", False, True, *c)
index_curve = _get_curve("index_curve", False, True, *c)
_vol = self._parse_vol(vol)
del vol
try:
ir_vol = _maybe_get_ir_vol_maybe_from_solver(
vol=_vol, vol_meta=self.kwargs.meta["vol"], solver=solver
)
self._set_strike_and_vol(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
vol=ir_vol,
)
self._set_premium(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
pricing=self._pricing,
)
except Exception: # noqa: S110
pass # `cashflows` proceed without pricing determined values
return self._cashflows_from_legs(
curves=c[0],
solver=solver,
fx=fx,
base=base,
settlement=settlement,
forward=forward,
vol=_vol,
)
def analytic_delta(self, *args: Any, leg: int = 1, **kwargs: Any) -> NoReturn:
"""Not implemented for Option types.
Use :meth:`~rateslib.instruments._BaseFXOption.analytic_greeks`.
"""
raise NotImplementedError("For Option types use `analytic_greeks`.")
def _plot_payoff(
self,
window: tuple[float, float] | NoInput = NoInput(0),
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
) -> tuple[Arr1dF64, Arr1dF64]:
"""
Mechanics to determine (x,y) coordinates for payoff at expiry plot.
"""
c = _parse_curves(self, curves, solver)
rate_curve = _get_curve("rate_curve", True, True, *c)
disc_curve = _get_curve("disc_curve", False, False, *c)
index_curve = _get_curve("index_curve", False, False, *c)
_vol = self._parse_vol(vol)
del vol
ir_vol = _maybe_get_ir_vol_maybe_from_solver(
vol=_vol, vol_meta=self.kwargs.meta["vol"], solver=solver
)
self._set_strike_and_vol(
rate_curve=rate_curve, disc_curve=disc_curve, index_curve=index_curve, vol=ir_vol
)
# self._set_premium(curves, fx)
x, y = self._option._payoff_at_expiry(window)
return x, y
def plot_payoff(
self,
range: tuple[float, float] | NoInput = NoInput(0), # noqa: A002
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
base: str_ = NoInput(0),
vol: float_ = NoInput(0),
) -> PlotOutput:
"""
Return a plot of the payoff at expiry, indexed by the *FXFixing* value.
Parameters
----------
range: list of float, :green:`optional`
A range of values for the *FXFixing* value at expiry to use as the x-axis.
curves: _Curves, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
solver: Solver, :green:`optional`
A :class:`~rateslib.solver.Solver` object containing *Curve*, *Smile*, *Surface*, or
*Cube* mappings for pricing.
fx: FXForwards, :green:`optional`
The :class:`~rateslib.fx.FXForwards` object used for forecasting FX rates, if necessary.
vol: _Vol, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
Returns
-------
(Figure, Axes, list[Lines2D])
"""
x, y = self._plot_payoff(window=range, curves=curves, solver=solver, fx=fx, vol=vol)
return plot([x], [y]) # type: ignore
#
# def local_analytic_rate_fixings(
# self,
# *,
# curves: CurvesT_ = NoInput(0),
# solver: Solver_ = NoInput(0),
# fx: FXForwards_ = NoInput(0),
# vol: VolT_ = NoInput(0),
# settlement: datetime_ = NoInput(0),
# forward: datetime_ = NoInput(0),
# ) -> DataFrame:
# return DataFrame()
#
# def spread(
# self,
# *,
# curves: CurvesT_ = NoInput(0),
# solver: Solver_ = NoInput(0),
# fx: FXForwards_ = NoInput(0),
# vol: VolT_ = NoInput(0),
# base: str_ = NoInput(0),
# settlement: datetime_ = NoInput(0),
# forward: datetime_ = NoInput(0),
# ) -> DualTypes:
# """
# Not implemented for Option types. Use :meth:`~rateslib.instruments._BaseFXOption.rate`.
# """
# raise NotImplementedError(f"`spread` is not implemented for type: {type(self).__name__}")
class IRSCall(_BaseIRSOption):
"""
An *IR Payer Swaption*.
.. warning::
*Swaptions* and *IR Volatility* are in Beta status introduced in v2.7.0
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import dt, Curve, IRSCall
.. ipython:: python
iro = IRSCall(
expiry=dt(2027, 2, 16),
tenor="6m",
strike=3.02,
notional=100e6,
irs_series="usd_irs",
premium=10000.0,
)
# iro.cashflows()
.. rubric:: Pricing
A *Swaption* requires from one to three *Curves*;
- a ``rate_curve`` used to forecast the rates on the :class:`~rateslib.legs.FloatLeg` of the
underlying :class:`~rateslib.instruments.IRS`.
- a ``disc_curve`` used to discount the value of the *Swaption* and the premium under the
terms of its bilateral collateral agreement.
- an ``index_curve`` used as the price alignment index rate for the discounting of the
underlying :class:`~rateslib.instruments.IRS`. This does not necessarily need to equal the
``disc_curve``.
Allowable inputs are:
.. code-block:: python
curves = rate_curve | [rate_curve] # one curve is used as all curves
curves = [rate_curve, disc_curve] # two curves are applied in the given order, index_curve is set equal to disc_curve
curves = [rate_curve, disc_curve, index_curve] # three curves applied in the given order
curves = {
"rate_curve": rate_curve,
"disc_curve": disc_curve
"index_curve": index_curve
} # dict form is explicit
A *Swaption* also requires an *IRVolatility* object or numeric value for the ``vol`` argument.
If a numeric value is given it is assumed to be a Black (log-normal) volatility without shift.
Allowed inputs are:
.. code-block:: python
vol = 12.0 # a specific Black (log-normal) calendar-day annualized vol until expiry
vol = vol_obj # an explicit volatility object, e.g. IRSabrSmile
The following pricing ``metric`` are available, with examples:
.. ipython:: python
curve = Curve(
nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.941024343401225}, calendar="nyc"
)
- **"BlackVolShift(_)"**:
The *rate* method will make the necessary conversions between the different volatility
representations.
.. ipython:: python
iro.rate(curves=[curve], vol=25.16, metric="BlackVolShift_0")
iro.rate(curves=[curve], vol=25.16, metric="BlackVolShift_100")
iro.rate(curves=[curve], vol=25.16, metric="BlackVolShift_200")
iro.rate(curves=[curve], vol=25.16, metric="BlackVolShift_300")
- **"NormalVol"**: the equivalent number of basis point volatility used in the Bachelier
formula:
.. ipython:: python
iro.rate(curves=[curve], vol=25.16, metric="NormalVol")
- **"Premium"**: the cash premium amount applicable to the 'payment' date, expressed in the
premium currency.
.. ipython:: python
iro.rate(curves=[curve], vol=25.16, metric="Premium")
- **"PercentNotional"**: the cash premium amount expressed as a percentage of the
notional.
.. ipython:: python
iro.rate(curves=[curve], vol=25.16, metric="PercentNotional")
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define **ir option** and generalised **settlement** parameters.
expiry: datetime, str, :red:`required`
The expiry of the option. If given in string tenor format, e.g. "1M" requires an
``eval_date``. See **Notes**.
tenor: datetime, str, :red:`required`
The parameter defining the maturity of the underlying :class:`~rateslib.instruments.IRS`.
irs_series: IRSSeries, str, :red:`required`
The standard conventions applied to the underlying :class:`~rateslib.instruments.IRS`.
strike: float, Variable, str, :red:`required`
The strike value of the option.
If str, there are two possibilities; {"atm", "{}bps"}. "atm" will produce a strike equal
to the mid-market *IRS* rate, whilst "20bps" or "-50bps" will yield a strike that number
of basis points different to the mid-market rate.
notional: float, :green:`optional (set by 'defaults')`
The notional amount expressed in units of ``currency`` fo the ``irs_series``.
eval_date: datetime, :green:`optional`
Only required if ``expiry`` is given as string tenor.
Should be entered as today (also called horizon) and **not** spot.
payment_lag: int or datetime, :green:`optional (set as IRS effective)`
The number of business days after expiry to pay premium. If a *datetime* is given this will
set the premium date explicitly.
settlement_method: SwaptionSettlementMethod, str, :green:`optional (set by 'default')`
The method for deriving the settlement cashflow or underlying value.
.. note::
The following define additional **rate** parameters.
premium: float, :green:`optional`
The amount paid for the option. If not given assumes an unpriced *Option* and sets this as
mid-market premium during pricing.
option_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of the option :class:`~rateslib.data.fixings.IRSFixing`. If a scalar, is used
directly. If a string identifier, links to the central ``fixings`` object and data loader.
.. note::
The following are **meta parameters**.
metric: IROptionMetric, str, :green:`optional` (set by 'default')`
The metric used by default in the
:meth:`~rateslib.instruments._BaseIRSOption.rate` method. See **Pricing**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
vol: str, Smile, Surface, float, Dual, Dual2, Variable
Pricing objects passed directly to the *Instrument's* methods' ``vol`` argument. See
**Pricing**.
spec : str, optional
An identifier to pre-populate many field with conventional values. See
:ref:`here` for more info and available values.
""" # noqa: E501
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, call=True, **kwargs)
class IRSPut(_BaseIRSOption):
"""
An *IR Receiver Swaption*.
For parameters and examples see :class:`~rateslib.instruments.IRSCall`.
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, call=False, **kwargs)
================================================
FILE: python/rateslib/instruments/ir_options/risk_reversal.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import IROptionMetric
from rateslib.instruments.ir_options.call_put import IRSCall, IRSPut
from rateslib.instruments.ir_options.straddle import _BaseIRSOptionStrat
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CurvesT_,
DualTypes,
DualTypes_,
IRSSeries,
SwaptionSettlementMethod,
VolStrat_,
VolT_,
_Vol,
datetime,
datetime_,
str_,
)
class IRSRiskReversal(_BaseIRSOptionStrat):
"""
An *IR Risk Reversal* :class:`~rateslib.instruments._BaseIRSOptionStrat`.
.. warning::
*Swaptions* and *IR Volatility* are in Beta status introduced in v2.7.0
A *Risk Reversal* is composed of a lower strike :class:`~rateslib.instruments.IRSPut`
and a higher strike :class:`~rateslib.instruments.IRSCall` with the same expiry and tenor.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import IRSRiskReversal, Curve, dt
.. ipython:: python
irstr = IRSRiskReversal(
eval_date=dt(2020, 1, 1),
expiry="3m",
tenor="1Y",
strike=("-20bps", "+20bps"),
irs_series="usd_irs",
notional=1000000,
)
irstr.cashflows()
.. rubric:: Pricing
The pricing mirrors that for an :class:`~rateslib.instruments.IRSCall`. All options use the
same ``curves``. Allowable inputs are:
.. code-block:: python
curves = rate_curve | [rate_curve] # one curve is used as all curves
curves = [rate_curve, disc_curve] # two curves are applied in the given order, index_curve is set equal to disc_curve
curves = [rate_curve, disc_curve, index_curve] # three curves applied in the given order
curves = {
"rate_curve": rate_curve,
"disc_curve": disc_curve
"index_curve": index_curve
} # dict form is explicit
A ``vol`` argument must be provided to each *Instrument*. This can either be a single
value universally used for all, or an individual item as part of a sequence. Allowed
inputs are:
.. code-block:: python
vol = 12.0 | vol_obj # a single item universally applied
vol = [12.0, 12.0] # values for the Put and Call respectively
The following pricing ``metric`` are available, with examples:
TODO
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define **ir option** and generalised **settlement** parameters.
expiry: datetime, str, :red:`required`
The expiry of the option. If given in string tenor format, e.g. "1M" requires an
``eval_date``. See **Notes**.
tenor: datetime, str, :red:`required`
The parameter defining the maturity of the underlying :class:`~rateslib.instruments.IRS`.
irs_series: IRSSeries, str, :red:`required`
The standard conventions applied to the underlying :class:`~rateslib.instruments.IRS`.
strike: 2-tuple of float, Variable, str, :red:`required`
The strike values of each option.
If str, there are two possibilities; {"atm", "{}bps"}. "atm" will produce a strike equal
to the mid-market *IRS* rate, whilst "20bps" or "-50bps" will yield a strike that number
of basis points different to the mid-market rate.
notional: float, :green:`optional (set by 'defaults')`
The notional amount expressed in units of ``currency`` of the ``irs_series``.
Applies to the higher strike *Call*, the *Put* assumes the negated notional.
eval_date: datetime, :green:`optional`
Only required if ``expiry`` is given as string tenor.
Should be entered as today (also called horizon) and **not** spot.
payment_lag: int or datetime, :green:`optional (set as IRS effective)`
The number of business days after expiry to pay premium. If a *datetime* is given this will
set the premium date explicitly.
settlement_method: SwaptionSettlementMethod, str, :green:`optional (set by 'default')`
The method for deriving the settlement cashflow or underlying value.
.. note::
The following define additional **rate** parameters.
premium: 2-tuple of float, :green:`optional`
The amount paid for the put and call in order. If not given assumes unpriced
*Options* and sets this as mid-market premium during pricing.
option_fixings: 2-tuple of float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of each option's :class:`~rateslib.data.fixings.FXFixing`. If a scalar, is used
directly. If a string identifier, links to the central ``fixings`` object and data loader.
.. note::
The following are **meta parameters**.
metric : str, :green:`optional (set as "pips_or_%")`
The pricing metric returned by the ``rate`` method. See **Pricing**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
vol: str, Smile, Surface, float, Dual, Dual2, Variable, Sequence
Pricing objects passed directly to the *Instrument's* methods' ``vol`` argument. See
**Pricing**.
spec : str, optional
An identifier to pre-populate many field with conventional values. See
:ref:`here` for more info and available values.
""" # noqa: E501
_rate_scalar = 100.0
def __init__(
self,
expiry: datetime | str,
tenor: datetime | str,
strike: tuple[DualTypes | str, DualTypes | str],
irs_series: IRSSeries | str,
*,
notional: DualTypes_ = NoInput(0),
eval_date: datetime | NoInput = NoInput(0),
premium: tuple[DualTypes_, DualTypes_] = (NoInput(0), NoInput(0)),
payment_lag: str | datetime_ = NoInput(0),
option_fixings: DualTypes_ = NoInput(0),
settlement_method: SwaptionSettlementMethod | str_ = NoInput(0),
metric: IROptionMetric | str_ = NoInput(0),
curves: CurvesT_ = NoInput(0),
vol: VolT_ = NoInput(0),
spec: str_ = NoInput(0),
) -> None:
vol_ = self._parse_vol(vol)
notional_ = _drb(defaults.notional, notional)
options = [
IRSPut(
irs_series=irs_series,
expiry=expiry,
payment_lag=payment_lag,
eval_date=eval_date,
tenor=tenor,
strike=strike[0],
notional=-notional_,
option_fixings=option_fixings[0]
if isinstance(option_fixings, tuple | list)
else option_fixings,
settlement_method=settlement_method,
premium=premium[0],
curves=curves,
vol=vol_[0],
metric=NoInput(0),
spec=spec,
),
IRSCall(
irs_series=irs_series,
expiry=expiry,
payment_lag=payment_lag,
eval_date=eval_date,
tenor=tenor,
strike=strike[1],
notional=notional_,
option_fixings=option_fixings[1]
if isinstance(option_fixings, tuple | list)
else option_fixings,
settlement_method=settlement_method,
premium=premium[1],
curves=curves,
vol=vol_[1],
metric=NoInput(0),
spec=spec,
),
]
super().__init__(
options=options,
rate_weight=[-1.0, 1.0],
rate_weight_vol=[-1.0, 1.0],
metric=metric,
curves=curves,
vol=vol_,
)
self.kwargs.leg1["notional"] = notional_
@classmethod
def _parse_vol(cls, vol: VolStrat_) -> tuple[_Vol, _Vol]: # type: ignore[override]
if not isinstance(vol, list | tuple):
vol = (vol,) * 2
return IRSPut._parse_vol(vol[0]), IRSCall._parse_vol(vol[1])
def _set_notionals(self, notional: DualTypes) -> None:
"""
Set the notionals on each option period. Mainly used by Brokerfly for vega neutral
strangle and straddle.
"""
for option in self.instruments:
option.kwargs.leg1["notional"] = notional
option._option.settlement_params._notional = notional
================================================
FILE: python/rateslib/instruments/ir_options/straddle.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from pandas import DataFrame
from rateslib import defaults
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import IROptionMetric, _get_ir_option_metric
from rateslib.instruments.ir_options.call_put import IRSCall, IRSPut, _BaseIRSOption
from rateslib.instruments.protocols import _KWArgs
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
CurvesT_,
DualTypes,
DualTypes_,
FXForwards_,
IRSSeries,
Sequence,
Solver_,
SwaptionSettlementMethod,
VolStrat_,
VolT_,
_Vol,
datetime,
datetime_,
str_,
)
class _BaseIRSOptionStrat(_BaseIRSOption):
"""
A custom option strategy composed of a list of :class:`~rateslib.instruments._BaseIRSOption`,
or other :class:`~rateslib.instruments._BaseIRSOptionStrat` objects, of the same
:class:`~rateslib.data.fixings.IRSSeries`.
.. warning::
*Swaptions* and *IR Volatility* are in Beta status introduced in v2.7.0
Parameters
----------
options: list
The *IROptions* or *IROptionStrats* which make up the strategy.
rate_weight: list
The multiplier for non-vol type metrics that sums the options to a final *rate*.
E.g. A *RiskReversal* uses [-1.0, 1.0] for a sale and a purchase.
E.g. A *Straddle* uses [1.0, 1.0] for summing two premium purchases.
rate_weight_vol: list
The multiplier for the *'vol'* metric that sums the options to a final *rate*.
E.g. A *RiskReversal* uses [-1.0, 1.0] to obtain the vol difference between two options.
E.g. A *Straddle* uses [0.5, 0.5] to obtain the volatility at the strike of each option.
"""
_greeks: dict[str, Any] = {}
_strat_elements: tuple[_BaseIRSOption | _BaseIRSOptionStrat, ...]
@property
def kwargs(self) -> _KWArgs:
"""The :class:`~rateslib.instruments.protocols._KWArgs` of the *Instrument*."""
return self._kwargs
def __init__(
self,
options: Sequence[_BaseIRSOption | _BaseIRSOptionStrat],
rate_weight: list[float],
rate_weight_vol: list[float],
metric: IROptionMetric | str_ = NoInput(0),
curves: CurvesT_ = NoInput(0),
vol: VolStrat_ = NoInput(0),
):
self._n = len(options)
if self._n != len(rate_weight) or self._n != len(rate_weight_vol):
raise ValueError(
"`rate_weight` and `rate_weight_vol` must have same length as `options`.",
)
self._kwargs = _KWArgs(
spec=NoInput(0),
user_args=dict(
rate_weight=rate_weight,
rate_weight_vol=rate_weight_vol,
instruments=tuple(options),
metric=metric,
irs_series=options[0].kwargs.leg1["irs_series"],
curves=NoInput(0),
vol=vol,
),
default_args=dict(
metric=defaults.ir_option_metric,
),
meta_args=["metric", "vol", "curves", "instruments", "rate_weight", "rate_weight_vol"],
)
self.kwargs.meta["curves"] = self._parse_curves(curves)
@classmethod
def _parse_vol(cls, vol: VolStrat_) -> VolStrat_: # type: ignore[override]
raise NotImplementedError(f"{type(cls).__name__} must implement `_parse_vol`.")
@property
def instruments(self) -> tuple[_BaseIRSOption | _BaseIRSOptionStrat, ...]:
return self.kwargs.meta["instruments"] # type: ignore[no-any-return]
def __repr__(self) -> str:
return f""
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolStrat_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: IROptionMetric | str_ = NoInput(0),
) -> DualTypes:
vol_: VolStrat_ = self._parse_vol(vol)
metric_: IROptionMetric = _get_ir_option_metric(_drb(self.kwargs.meta["metric"], metric))
match type(metric_):
case IROptionMetric.NormalVol | IROptionMetric.BlackVolShift:
weights = self.kwargs.meta["rate_weight_vol"]
case IROptionMetric.Premium | IROptionMetric.PercentNotional:
weights = self.kwargs.meta["rate_weight"]
_: DualTypes = 0.0
for option, vol__, weight in zip(self.instruments, vol_, weights, strict=True): # type: ignore[misc, arg-type]
_ += (
option.rate(
curves=curves,
solver=solver,
fx=fx,
base=base,
vol=vol__, # type: ignore[arg-type]
metric=metric_,
settlement=settlement,
forward=forward,
)
* weight
)
return _
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolStrat_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
vol_ = self._parse_vol(vol)
results = [
option.npv(
curves=curves,
solver=solver,
fx=fx,
base=base,
local=local,
vol=vol__, # type: ignore[arg-type]
forward=forward,
settlement=settlement,
)
for (option, vol__) in zip(self.instruments, vol_, strict=True) # type: ignore[arg-type]
]
if local:
df = DataFrame(results).fillna(0.0)
df_sum = df.sum()
_: DualTypes | dict[str, DualTypes] = df_sum.to_dict() # type: ignore[assignment]
else:
_ = sum(results) # type: ignore[arg-type]
return _
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolStrat_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return self._cashflows_from_instruments(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
base=base,
)
def _plot_payoff(
self,
window: tuple[float, float] | NoInput = NoInput(0),
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolStrat_ = NoInput(0),
) -> tuple[Any, Any]:
vol_ = self._parse_vol(vol)
y = None
for inst, vol__ in zip(self.instruments, vol_, strict=True): # type: ignore[arg-type]
x, y_ = inst._plot_payoff(
window=window,
curves=curves,
solver=solver,
fx=fx,
vol=vol__, # type: ignore[arg-type]
)
if y is None:
y = y_
else:
y += y_
return x, y
def analytic_greeks(
self,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolStrat_ = NoInput(0),
) -> dict[str, Any]:
# implicitly call set_pricing_mid for unpriced parameters
# this may be important for Strategies whose options are
# dependent upon each other, (RR and Straddle do not have interdependent options)
self.rate(curves=curves, solver=solver, fx=fx, vol=vol)
vol_: VolStrat_ = self._parse_vol(vol=vol)
gks = []
for inst, vol_i in zip(self.instruments, vol_, strict=True): # type: ignore[misc, arg-type]
if isinstance(inst, _BaseIRSOptionStrat):
gks.append(
inst.analytic_greeks(
curves=curves,
solver=solver,
fx=fx,
vol=vol_i,
)
)
else: # option is _BaseIRSOption
gks.append(
inst._analytic_greeks_set_metrics(
curves=curves,
solver=solver,
fx=fx,
vol=vol_i, # type: ignore[arg-type]
set_metrics=False, # already done in the rate call above
)
)
_unit_attrs = ["delta", "gamma", "vega", "vomma", "vanna", "__bs76", "__bachelier"]
_: dict[str, Any] = {}
for attr in _unit_attrs:
tally = 0.0
for i, gk in enumerate(gks):
if attr not in gk:
continue
tally += gk[attr] * self.kwargs.meta["rate_weight"][i]
_[attr] = tally
_notional_attrs = [
f"delta_{self.settlement_params.currency}",
f"gamma_{self.settlement_params.currency}",
f"vega_{self.settlement_params.currency}",
]
for attr in _notional_attrs:
_[attr] = sum(gk[attr] * self.kwargs.meta["rate_weight"][i] for i, gk in enumerate(gks))
_.update(
{
"__class": "IROptionStrat",
"__options": gks,
"__notional": self.kwargs.leg1["notional"],
},
)
return _
class IRSStraddle(_BaseIRSOptionStrat):
"""
An *IR Straddle* :class:`~rateslib.instruments._BaseIRSOptionStrat`.
.. warning::
*Swaptions* and *IR Volatility* are in Beta status introduced in v2.7.0
A *Straddle* is composed of a :class:`~rateslib.instruments.IRSPut`
and :class:`~rateslib.instruments.IRSCall` with the same strike, expiry and tenor.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import IRSStraddle, Curve, dt
.. ipython:: python
irstr = IRSStraddle(
eval_date=dt(2020, 1, 1),
expiry="3m",
tenor="1Y",
strike="atm",
irs_series="usd_irs",
notional=1000000,
)
irstr.cashflows()
.. rubric:: Pricing
The pricing mirrors that for an :class:`~rateslib.instruments.IRSCall`. All options use the
same ``curves``. Allowable inputs are:
.. code-block:: python
curves = rate_curve | [rate_curve] # one curve is used as all curves
curves = [rate_curve, disc_curve] # two curves are applied in the given order, index_curve is set equal to disc_curve
curves = [rate_curve, disc_curve, index_curve] # three curves applied in the given order
curves = {
"rate_curve": rate_curve,
"disc_curve": disc_curve
"index_curve": index_curve
} # dict form is explicit
A ``vol`` argument must be provided to each *Instrument*. This can either be a single
value universally used for all, or an individual item as part of a sequence. Allowed
inputs are:
.. code-block:: python
vol = 12.0 | vol_obj # a single item universally applied
vol = [12.0, 12.0] # values for the Put and Call respectively
The following pricing ``metric`` are available, with examples:
TODO
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define **ir option** and generalised **settlement** parameters.
expiry: datetime, str, :red:`required`
The expiry of the option. If given in string tenor format, e.g. "1M" requires an
``eval_date``. See **Notes**.
tenor: datetime, str, :red:`required`
The parameter defining the maturity of the underlying :class:`~rateslib.instruments.IRS`.
irs_series: IRSSeries, str, :red:`required`
The standard conventions applied to the underlying :class:`~rateslib.instruments.IRS`.
strike: float, Variable, str, :red:`required`
The strike value of the option.
If str, there are two possibilities; {"atm", "{}bps"}. "atm" will produce a strike equal
to the mid-market *IRS* rate, whilst "20bps" or "-50bps" will yield a strike that number
of basis points different to the mid-market rate.
notional: float, :green:`optional (set by 'defaults')`
The notional amount expressed in units of ``currency`` fo the ``irs_series``.
eval_date: datetime, :green:`optional`
Only required if ``expiry`` is given as string tenor.
Should be entered as today (also called horizon) and **not** spot.
payment_lag: int or datetime, :green:`optional (set as IRS effective)`
The number of business days after expiry to pay premium. If a *datetime* is given this will
set the premium date explicitly.
settlement_method: SwaptionSettlementMethod, str, :green:`optional (set by 'default')`
The method for deriving the settlement cashflow or underlying value.
.. note::
The following define additional **rate** parameters.
premium: 2-tuple of float, :green:`optional`
The amount paid for the put and call in order. If not given assumes unpriced
*Options* and sets this as mid-market premium during pricing.
option_fixings: 2-tuple of float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of each option's :class:`~rateslib.data.fixings.FXFixing`. If a scalar, is used
directly. If a string identifier, links to the central ``fixings`` object and data loader.
.. note::
The following are **meta parameters**.
metric : str, :green:`optional (set as "pips_or_%")`
The pricing metric returned by the ``rate`` method. See **Pricing**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
vol: str, Smile, Surface, float, Dual, Dual2, Variable, Sequence
Pricing objects passed directly to the *Instrument's* methods' ``vol`` argument. See
**Pricing**.
spec : str, optional
An identifier to pre-populate many field with conventional values. See
:ref:`here` for more info and available values.
""" # noqa: E501
_rate_scalar = 100.0
def __init__(
self,
expiry: datetime | str,
tenor: datetime | str,
strike: DualTypes | str,
irs_series: IRSSeries | str,
*,
notional: DualTypes_ = NoInput(0),
eval_date: datetime | NoInput = NoInput(0),
premium: tuple[DualTypes_, DualTypes_] = (NoInput(0), NoInput(0)),
payment_lag: str | datetime_ = NoInput(0),
option_fixings: DualTypes_ = NoInput(0),
settlement_method: SwaptionSettlementMethod | str_ = NoInput(0),
metric: IROptionMetric | str_ = NoInput(0),
curves: CurvesT_ = NoInput(0),
vol: VolT_ = NoInput(0),
spec: str_ = NoInput(0),
) -> None:
vol_ = self._parse_vol(vol)
notional_ = _drb(defaults.notional, notional)
options = [
IRSPut(
irs_series=irs_series,
expiry=expiry,
payment_lag=payment_lag,
eval_date=eval_date,
tenor=tenor,
strike=strike,
notional=notional_,
option_fixings=option_fixings[0]
if isinstance(option_fixings, tuple | list)
else option_fixings,
settlement_method=settlement_method,
premium=premium[0],
curves=curves,
vol=vol_[0],
metric=NoInput(0),
spec=spec,
),
IRSCall(
irs_series=irs_series,
expiry=expiry,
payment_lag=payment_lag,
eval_date=eval_date,
tenor=tenor,
strike=strike,
notional=notional_,
option_fixings=option_fixings[1]
if isinstance(option_fixings, tuple | list)
else option_fixings,
settlement_method=settlement_method,
premium=premium[1],
curves=curves,
vol=vol_[1],
metric=NoInput(0),
spec=spec,
),
]
super().__init__(
options=options,
rate_weight=[1.0, 1.0],
rate_weight_vol=[0.5, 0.5],
metric=metric,
curves=curves,
vol=vol_,
)
self.kwargs.leg1["notional"] = notional_
@classmethod
def _parse_vol(cls, vol: VolStrat_) -> tuple[_Vol, _Vol]: # type: ignore[override]
if not isinstance(vol, list | tuple):
vol = (vol,) * 2
return IRSPut._parse_vol(vol[0]), IRSCall._parse_vol(vol[1])
def _set_notionals(self, notional: DualTypes) -> None:
"""
Set the notionals on each option period. Mainly used by Brokerfly for vega neutral
strangle and straddle.
"""
for option in self.instruments:
option.kwargs.leg1["notional"] = notional
option._option.settlement_params._notional = notional
================================================
FILE: python/rateslib/instruments/ir_options/strangle.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import IROptionMetric
from rateslib.instruments.ir_options.call_put import IRSCall, IRSPut
from rateslib.instruments.ir_options.straddle import _BaseIRSOptionStrat
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CurvesT_,
DualTypes,
DualTypes_,
IRSSeries,
SwaptionSettlementMethod,
VolStrat_,
VolT_,
_Vol,
datetime,
datetime_,
str_,
)
class IRSStrangle(_BaseIRSOptionStrat):
"""
An *IR Strangle* :class:`~rateslib.instruments._BaseIRSOptionStrat`.
.. warning::
*Swaptions* and *IR Volatility* are in Beta status introduced in v2.7.0
A *Strangle* is composed of a lower strike :class:`~rateslib.instruments.IRSPut`
and a higher strike :class:`~rateslib.instruments.IRSCall` with the same expiry and tenor.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import IRSStrangle, Curve, dt
.. ipython:: python
irstr = IRSStrangle(
eval_date=dt(2020, 1, 1),
expiry="3m",
tenor="1Y",
strike=("-20bps", "+20bps"),
irs_series="usd_irs",
notional=1000000,
)
irstr.cashflows()
.. rubric:: Pricing
The pricing mirrors that for an :class:`~rateslib.instruments.IRSCall`. All options use the
same ``curves``. Allowable inputs are:
.. code-block:: python
curves = rate_curve | [rate_curve] # one curve is used as all curves
curves = [rate_curve, disc_curve] # two curves are applied in the given order, index_curve is set equal to disc_curve
curves = [rate_curve, disc_curve, index_curve] # three curves applied in the given order
curves = {
"rate_curve": rate_curve,
"disc_curve": disc_curve
"index_curve": index_curve
} # dict form is explicit
A ``vol`` argument must be provided to each *Instrument*. This can either be a single
value universally used for all, or an individual item as part of a sequence. Allowed
inputs are:
.. code-block:: python
vol = 12.0 | vol_obj # a single item universally applied
vol = [12.0, 12.0] # values for the Put and Call respectively
The following pricing ``metric`` are available, with examples:
TODO
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define **ir option** and generalised **settlement** parameters.
expiry: datetime, str, :red:`required`
The expiry of the option. If given in string tenor format, e.g. "1M" requires an
``eval_date``. See **Notes**.
tenor: datetime, str, :red:`required`
The parameter defining the maturity of the underlying :class:`~rateslib.instruments.IRS`.
irs_series: IRSSeries, str, :red:`required`
The standard conventions applied to the underlying :class:`~rateslib.instruments.IRS`.
strike: 2-tuple of float, Variable, str, :red:`required`
The strike values of each option.
If str, there are two possibilities; {"atm", "{}bps"}. "atm" will produce a strike equal
to the mid-market *IRS* rate, whilst "20bps" or "-50bps" will yield a strike that number
of basis points different to the mid-market rate.
notional: float, :green:`optional (set by 'defaults')`
The notional amount expressed in units of ``currency`` fo the ``irs_series``.
eval_date: datetime, :green:`optional`
Only required if ``expiry`` is given as string tenor.
Should be entered as today (also called horizon) and **not** spot.
payment_lag: int or datetime, :green:`optional (set as IRS effective)`
The number of business days after expiry to pay premium. If a *datetime* is given this will
set the premium date explicitly.
settlement_method: SwaptionSettlementMethod, str, :green:`optional (set by 'default')`
The method for deriving the settlement cashflow or underlying value.
.. note::
The following define additional **rate** parameters.
premium: 2-tuple of float, :green:`optional`
The amount paid for the put and call in order. If not given assumes unpriced
*Options* and sets this as mid-market premium during pricing.
option_fixings: 2-tuple of float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of each option's :class:`~rateslib.data.fixings.FXFixing`. If a scalar, is used
directly. If a string identifier, links to the central ``fixings`` object and data loader.
.. note::
The following are **meta parameters**.
metric : str, :green:`optional (set as "pips_or_%")`
The pricing metric returned by the ``rate`` method. See **Pricing**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
vol: str, Smile, Surface, float, Dual, Dual2, Variable, Sequence
Pricing objects passed directly to the *Instrument's* methods' ``vol`` argument. See
**Pricing**.
spec : str, optional
An identifier to pre-populate many field with conventional values. See
:ref:`here` for more info and available values.
""" # noqa: E501
_rate_scalar = 100.0
def __init__(
self,
expiry: datetime | str,
tenor: datetime | str,
strike: tuple[DualTypes | str, DualTypes | str],
irs_series: IRSSeries | str,
*,
notional: DualTypes_ = NoInput(0),
eval_date: datetime | NoInput = NoInput(0),
premium: tuple[DualTypes_, DualTypes_] = (NoInput(0), NoInput(0)),
payment_lag: str | datetime_ = NoInput(0),
option_fixings: DualTypes_ = NoInput(0),
settlement_method: SwaptionSettlementMethod | str_ = NoInput(0),
metric: IROptionMetric | str_ = NoInput(0),
curves: CurvesT_ = NoInput(0),
vol: VolT_ = NoInput(0),
spec: str_ = NoInput(0),
) -> None:
vol_ = self._parse_vol(vol)
notional_ = _drb(defaults.notional, notional)
options = [
IRSPut(
irs_series=irs_series,
expiry=expiry,
payment_lag=payment_lag,
eval_date=eval_date,
tenor=tenor,
strike=strike[0],
notional=notional_,
option_fixings=option_fixings[0]
if isinstance(option_fixings, tuple | list)
else option_fixings,
settlement_method=settlement_method,
premium=premium[0],
curves=curves,
vol=vol_[0],
metric=NoInput(0),
spec=spec,
),
IRSCall(
irs_series=irs_series,
expiry=expiry,
payment_lag=payment_lag,
eval_date=eval_date,
tenor=tenor,
strike=strike[1],
notional=notional_,
option_fixings=option_fixings[1]
if isinstance(option_fixings, tuple | list)
else option_fixings,
settlement_method=settlement_method,
premium=premium[1],
curves=curves,
vol=vol_[1],
metric=NoInput(0),
spec=spec,
),
]
super().__init__(
options=options,
rate_weight=[1.0, 1.0],
rate_weight_vol=[0.5, 0.5],
metric=metric,
curves=curves,
vol=vol_,
)
self.kwargs.leg1["notional"] = notional_
@classmethod
def _parse_vol(cls, vol: VolStrat_) -> tuple[_Vol, _Vol]: # type: ignore[override]
if not isinstance(vol, list | tuple):
vol = (vol,) * 2
return IRSPut._parse_vol(vol[0]), IRSCall._parse_vol(vol[1])
def _set_notionals(self, notional: DualTypes) -> None:
"""
Set the notionals on each option period. Mainly used by Brokerfly for vega neutral
strangle and straddle.
"""
for option in self.instruments:
option.kwargs.leg1["notional"] = notional
option._option.settlement_params._notional = notional
================================================
FILE: python/rateslib/instruments/ir_options/vol_value.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from functools import cached_property
from typing import TYPE_CHECKING, NoReturn
from rateslib import defaults
from rateslib.data.fixings import _get_irs_series
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import OptionPricingModel, OptionType, _get_ir_option_metric
from rateslib.instruments.ir_options.call_put import _BaseIRSOption
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.kwargs import _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_maybe_get_ir_vol_maybe_from_solver,
_parse_curves,
_Vol,
)
from rateslib.periods.parameters import _IROptionParams
from rateslib.periods.utils import (
_get_ir_vol_value_and_forward_maybe_from_obj,
)
from rateslib.rs import IROptionMetric
from rateslib.scheduling import add_tenor
from rateslib.volatility.ir import IRSabrCube, IRSabrSmile
from rateslib.volatility.utils import _OptionModelBachelier, _OptionModelBlack76
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
CurvesT_,
DualTypes,
FXForwards_,
IRSSeries,
Solver_,
VolT_,
datetime,
datetime_,
str_,
)
class IRVolValue(_BaseInstrument):
"""
A pseudo *Instrument* used to calibrate an *IR Vol Object* within a
:class:`~rateslib.solver.Solver`.
.. rubric:: Examples
Examples
--------
The below :class:`~rateslib.volatility.FXDeltaVolSmile` is solved directly
from calibrating volatility values.
.. ipython:: python
:suppress:
from rateslib.volatility import IRSabrSmile
from rateslib.instruments import IRVolValue
from rateslib.solver import Solver
..
.. ipython:: python
smile = IRSabrSmile(
nodes={"alpha": 0.20, "beta": 0.5, "rho": 0.05, "nu": 0.60},
eval_date=dt(2026, 2, 12),
tenor="1y",
expiry=dt(2027, 2, 12),
irs_series="usd_irs",
id="VolSmile",
)
instruments = [
IRVolValue(2.5, vol="VolSmile"),
IRVolValue(3.5, vol=smile)
]
solver = Solver(curves=[smile], instruments=instruments, s=[8.9, 7.8])
smile[2.1]
smile[2.5]
smile[3.5]
smile[3.9]
.. rubric:: Pricing
An *IR Vol Value* requires, and will calibrate, just one *IR Vol Object*.
Allowable inputs are:
.. code-block:: python
vol = ir_vol_obj | [ir_vol_obj] # a single object is detected
vol = {"ir_vol": ir_vol_obj} # dict form is explicit
The ``curves`` must match the pricing for an :class:`~rateslib.instruments.IRS`, since the
atm-rate is determined directly from an *IRS* instance.
The available ``metric`` are:
- **'normal_vol'**: which returns a normal volatility in bps suitable for the Bachelier pricing
formula.
- **'black_vol_shift_{}'**: same as above but allowing an explicit shift.
- **'alpha', 'beta', 'rho', 'nu'**: returns the SABR parameters explicitly for a SABR based
pricing object.
.. role:: red
.. role:: green
Parameters
----------
expiry: datetime, str, :red:`required`
The expiry of the option. If given in string tenor format, e.g. "1M" requires an
``eval_date``. See **Notes**.
tenor: datetime, str, :red:`required`
The parameter defining the maturity of the underlying :class:`~rateslib.instruments.IRS`.
strike: float, Variable, str, :red:`required`
The strike value used as the index value to the pricing model.
If str, there are two possibilities; {"atm", "{}bps"}. "atm" will produce a strike equal
to the mid-market *IRS* rate, whilst "20bps" or "-50bps" will yield a strike that number
of basis points different to the mid-market rate.
irs_series: IRSSeries, str, :red:`required`
The standard conventions applied to the underlying :class:`~rateslib.instruments.IRS`.
eval_date: datetime, :green:`optional`
If expiry is given as string tenor, use eval date to determine the date.
metric: str, IROptionMetric, :green:`optional (set as 'normal_vol')`
The default metric to return from the ``rate`` method.
vol: str, IRVolObj, :green:`optional`
The associated object from which to determine the ``rate``.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
"""
@property
def rate_scalar(self) -> float:
metric_ = self.kwargs.meta["metric"].lower()
match metric_:
case "alpha" | "beta" | "rho" | "nu":
return 1.0
case "normal_vol":
return 100.0
case _ if "black_vol_shift_" in metric_:
return 100.0
case _:
raise NotImplementedError(
"The provided metric for IRVolValue is not rate scalar mapped."
)
_rate_scalar = 1.0
def __init__(
self,
expiry: datetime | str,
tenor: datetime | str,
strike: DualTypes | str,
irs_series: IRSSeries | str,
*,
eval_date: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
vol: VolT_ = NoInput(0),
curves: CurvesT_ = NoInput(0),
):
user_args = dict(
tenor=tenor,
expiry=expiry,
strike=strike,
irs_series=irs_series,
vol=self._parse_vol(vol),
metric=metric,
curves=self._parse_curves(curves),
)
default_args = dict(convention=defaults.convention, metric="normal_vol", curves=NoInput(0))
self._kwargs = _KWArgs(
spec=NoInput(0),
user_args=user_args,
default_args=default_args,
meta_args=["curves", "metric", "vol", "curves"],
)
if isinstance(self.kwargs.leg1["expiry"], str):
if isinstance(eval_date, NoInput):
raise ValueError("`tenor` as string requires an `eval_date` to quantify.")
series_ = _get_irs_series(self.kwargs.leg1["irs_series"])
self.kwargs.leg1["expiry"] = add_tenor(
start=eval_date,
tenor=self.kwargs.leg1["expiry"],
modifier=series_.modifier,
calendar=series_.calendar,
)
@cached_property
def _ir_option_params(self) -> _IROptionParams:
return _IROptionParams(
_expiry=self.kwargs.leg1["expiry"],
_tenor=self.kwargs.leg1["tenor"],
_irs_series=_get_irs_series(self.kwargs.leg1["irs_series"]),
_strike=self.kwargs.leg1["strike"],
# unused parameters
_direction=OptionType.Put,
_metric=defaults.ir_option_metric,
_option_fixings=NoInput(0),
_settlement_method=defaults.ir_option_settlement,
)
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
ir_vol = _maybe_get_ir_vol_maybe_from_solver(
vol=self._parse_vol(vol), vol_meta=self.kwargs.meta["vol"], solver=solver
)
metric_ = _drb(self.kwargs.meta["metric"], metric).lower()
del metric
if metric_ in ["alpha", "beta", "rho", "nu"]:
if isinstance(ir_vol, IRSabrSmile):
return getattr(ir_vol.nodes, metric_) # type: ignore[no-any-return]
elif isinstance(ir_vol, IRSabrCube):
smile: IRSabrSmile = ir_vol.get_smile( # type: ignore[assignment]
expiry=self.kwargs.leg1["expiry"],
tenor=self._ir_option_params.option_fixing.termination,
)
return getattr(smile.nodes, metric_) # type: ignore[no-any-return]
else:
raise ValueError(
"A SABR parameter `metric` can only be obtained from a SABR type vol pricing "
"object."
)
c = _parse_curves(self, curves, solver)
rate_curve = _get_curve("rate_curve", True, True, *c)
# disc_curve: _BaseCurve = _fetch_pricing_curve("disc_curve", False, False, *c)
index_curve = _get_curve("index_curve", False, False, *c)
metric__ = _get_ir_option_metric(metric_)
del metric_
if not hasattr(ir_vol, "get_from_strike"):
raise TypeError("`vol` for IRVolValue must be of type _BaseIRSmile or _BaseIRCube.")
params = _get_ir_vol_value_and_forward_maybe_from_obj(
rate_curve=rate_curve,
index_curve=index_curve,
strike=self.kwargs.leg1["strike"],
ir_vol=ir_vol,
irs=self._ir_option_params.option_fixing.irs,
tenor=self._ir_option_params.option_fixing.termination,
expiry=self._ir_option_params.expiry,
t_e=ir_vol.meta._t_expiry(self._ir_option_params.expiry), # type: ignore[union-attr]
)
match type(metric__):
case IROptionMetric.Premium | IROptionMetric.PercentNotional:
raise ValueError(
"`metric` cannot be a cash or monetary quantity for this Instrument type"
)
case IROptionMetric.NormalVol:
if params.pricing_model == OptionPricingModel.Bachelier:
return params.vol
else:
return _OptionModelBlack76.convert_to_bachelier(
f=params.f, k=params.k, shift=params.shift, t_e=params.t_e, vol=params.vol
)
case IROptionMetric.BlackVolShift:
required_shift = metric__.shift()
if params.pricing_model == OptionPricingModel.Bachelier:
return _OptionModelBachelier.convert_to_black76(
f=params.f, k=params.k, shift=required_shift, t_e=params.t_e, vol=params.vol
)
else:
return _OptionModelBlack76.convert_to_new_shift(
f=params.f,
k=params.k,
old_shift=params.shift,
target_shift=required_shift,
t_e=params.t_e,
vol=params.vol,
)
case _:
raise RuntimeError( # pragma: no cover
"Unexpected error: unmapped IROptionMetric branch - please report."
)
def _parse_curves(self, curves: CurvesT_) -> _Curves:
return _BaseIRSOption._parse_curves(curves)
def _parse_vol(self, vol: VolT_) -> _Vol:
return _BaseIRSOption._parse_vol(vol)
def npv(self, *args: Any, **kwargs: Any) -> NoReturn:
raise NotImplementedError(
"`VolValue` instrument has no concept of NPV."
) # pragma: no cover
def cashflows(self, *args: Any, **kwargs: Any) -> NoReturn:
raise NotImplementedError(
"`VolValue` instrument has no concept of cashflows."
) # pragma: no cover
def analytic_delta(self, *args: Any, **kwargs: Any) -> NoReturn:
raise NotImplementedError(
"`VolValue` instrument has no concept of analytic delta."
) # pragma: no cover
================================================
FILE: python/rateslib/instruments/irs.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import LegMtm
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.kwargs import _convert_to_schedule_kwargs, _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_get_fx_forwards_maybe_from_solver,
_parse_curves,
_Vol,
)
from rateslib.legs import FixedLeg, FloatLeg
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Adjuster,
CalInput,
Convention,
CurvesT_,
DataFrame,
DualTypes,
DualTypes_,
FloatRateSeries,
Frequency,
FXForwards_,
LegFixings,
RollDay,
Solver_,
VolT_,
_BaseLeg,
bool_,
datetime,
datetime_,
float_,
int_,
str_,
)
class IRS(_BaseInstrument):
"""
An *interest rate swap (IRS)* composing a :class:`~rateslib.legs.FixedLeg`
and a :class:`~rateslib.legs.FloatLeg`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.instruments import IRS
from rateslib.data.fixings import FXIndex
from datetime import datetime as dt
from rateslib import fixings
from pandas import Series
.. ipython:: python
irs = IRS(
effective=dt(2000, 1, 1),
termination="2y",
spec="usd_irs",
fixed_rate=2.0,
)
irs.cashflows()
.. rubric:: Pricing
An *IRS* requires a *disc curve* on both legs (which should be the same *Curve*) and a
*leg2 rate curve* to forecast rates on the *FloatLeg*. The following input formats are
allowed:
.. code-block:: python
curves = curve | [curve] # a single curve is repeated for all required curves
curves = [rate_curve, disc_curve] # two curves are applied in the given order
curves = [None, disc_curve, rate_curve, disc_curve] # four curves applied to each leg
curves = {"leg2_rate_curve": rate_curve, "disc_curve": disc_curve} # dict form is explicit
``metric`` is unused by *IRS* and is always fixed '*rate*'.
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **scheduling** parameters.
effective : datetime, :red:`required`
The unadjusted effective date. If given as adjusted, unadjusted alternatives may be
inferred.
termination : datetime, str, :red:`required`
The unadjusted termination date. If given as adjusted, unadjusted alternatives may be
inferred. If given as string tenor will be calculated from ``effective``.
frequency : Frequency, str, :red:`required`
The frequency of the schedule.
If given as string will derive a :class:`~rateslib.scheduling.Frequency` aligning with:
monthly ("M"), quarterly ("Q"), semi-annually ("S"), annually("A") or zero-coupon ("Z"), or
a set number of calendar or business days ("_D", "_B"), weeks ("_W"), months ("_M") or
years ("_Y").
Where required, the :class:`~rateslib.scheduling.RollDay` is derived as per ``roll``
and business day calendar as per ``calendar``.
stub : StubInference, str in {"ShortFront", "LongFront", "ShortBack", "LongBack"}, :green:`optional`
The stub type used if stub inference is required. If given as string will derive a
:class:`~rateslib.scheduling.StubInference`.
front_stub : datetime, :green:`optional`
The unadjusted date for the start stub period. If given as adjusted, unadjusted
alternatives may be inferred.
back_stub : datetime, :green:`optional`
The unadjusted date for the back stub period. If given as adjusted, unadjusted
alternatives may be inferred.
See notes for combining ``stub``, ``front_stub`` and ``back_stub``
and any automatic stub inference.
roll : RollDay, int in [1, 31], str in {"eom", "imm", "som"}, :green:`optional`
The roll day of the schedule. If not given or not available in ``frequency`` will be
inferred for monthly frequency variants.
eom : bool, :green:`optional`
Use an end of month preference rather than regular rolls for ``roll`` inference. Set by
default. Not required if ``roll`` is defined.
modifier : Adjuster, str in {"NONE", "F", "MF", "P", "MP"}, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` used for adjusting unadjusted schedule dates
into adjusted dates. If given as string must define simple date rolling rules.
calendar : calendar, str, :green:`optional`
The business day calendar object to use. If string will call
:meth:`~rateslib.scheduling.get_calendar`.
payment_lag: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
a payment date. If given as integer will define the number of business days to
lag payments by.
payment_lag_exchange: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional payment date. If given as integer will define the number of business days to
lag payments by.
ex_div: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional dates, which may be used, for example by fixings schedules. If given as integer
will define the number of business days to lag dates by.
convention: Convention, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.scheduling.Convention` applied to calculations of period accrual
dates. See :meth:`~rateslib.scheduling.dcf`.
leg2_effective : datetime, :green:`optional (inherited from leg1)`
leg2_termination : datetime, str, :green:`optional (inherited from leg1)`
leg2_frequency : Frequency, str, :green:`optional (inherited from leg1)`
leg2_stub : StubInference, str, :green:`optional (inherited from leg1)`
leg2_front_stub : datetime, :green:`optional (inherited from leg1)`
leg2_back_stub : datetime, :green:`optional (inherited from leg1)`
leg2_roll : RollDay, int, str, :green:`optional (inherited from leg1)`
leg2_eom : bool, :green:`optional (inherited from leg1)`
leg2_modifier : Adjuster, str, :green:`optional (inherited from leg1)`
leg2_calendar : calendar, str, :green:`optional (inherited from leg1)`
leg2_payment_lag: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_payment_lag_exchange: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_ex_div: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_convention: Convention, str, :green:`optional (inherited from leg1)`
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the *Instrument* (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The initial leg notional, defined in units of *reference currency*.
amortization: float, Dual, Dual2, Variable, str, Amortization, :green:`optional (set as zero)`
Set a non-constant notional per *Period*. If a scalar value, adjusts the ``notional`` of
each successive period by that same value. Should have
sign equal to that of notional if the notional is to reduce towards zero.
leg2_notional : float, Dual, Dual2, Variable, :green:`optional (negatively inherited from leg1)`
leg2_amortization : float, Dual, Dual2, Variable, str, Amortization, :green:`optional (negatively inherited from leg1)`
.. note::
The following are **rate parameters**.
fixed_rate : float or None
The fixed rate applied to the :class:`~rateslib.legs.FixedLeg`. If `None`
will be set to mid-market when curves are provided.
leg2_fixing_method: FloatFixingMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.FloatFixingMethod` describing the determination
of the floating rate for each period.
leg2_fixing_frequency: Frequency, str, :green:`optional (set by 'frequency' or '1B')`
The :class:`~rateslib.scheduling.Frequency` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given is assumed to match the
frequency of the schedule for an IBOR type ``fixing_method`` or '1B' if RFR type.
leg2_fixing_series: FloatRateSeries, str, :green:`optional (implied by other parameters)`
The :class:`~rateslib.data.fixings.FloatRateSeries` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given inherits attributes given
such as the ``calendar``, ``convention``, ``fixing_method`` etc.
leg2_float_spread: float, Dual, Dual2, Variable, :green:`optional (set as 0.0)`
The amount (in bps) added to the rate in each period rate determination.
leg2_spread_compound_method: SpreadCompoundMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.SpreadCompoundMethod` used in the calculation
of the period rate when combining a ``float_spread``. Used **only** with RFR type
``fixing_method``.
leg2_rate_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
See :ref:`Fixings `.
The value of the rate fixing. If a scalar, is used directly. If a string identifier, links
to the central ``fixings`` object and data loader.
leg2_zero_periods: bool, :green:`optional (set as False)`
Used to define whether to use a multi-period IBOR classification. See
:class:`~rateslib.legs.FloatLeg` for examples.
.. note::
The following define **non-deliverability** parameters. If the swap is
directly deliverable do not use these parameters. Review the **notes** section
non-deliverability.
pair: FXIndex, str, :green:`optional`
The currency pair for :class:`~rateslib.data.fixings.FXFixing` that determines *Period*
settlement. The *reference currency* is implied from ``pair``. Must include ``currency``.
fx_fixings: float, Dual, Dual2, Variable, Series, str, 2-tuple or list, :green:`optional`
The value of the :class:`~rateslib.data.fixings.FXFixing` for each *Period* according
to non-deliverability.
leg2_fx_fixings: float, Dual, Dual2, Variable, Series, str, 2-tuple or list, :green:`optional`
The value of the :class:`~rateslib.data.fixings.FXFixing` for each *Period* on *Leg2*
according to non-deliverability.
.. note::
The following are **meta parameters**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
spec: str, :green:`optional`
A collective group of parameters. See
:ref:`default argument specifications `.
Notes
-----
**Non-Deliverable IRS (NDIRS)**
An *NDIRS* can be constructed by using the ``pair`` argument. The ``currency`` defines the
*settlement currency*, whilst the *reference currency* is derived from ``pair`` and the
``notional`` is expressed *reference currency* units.
The ``fx_fixings`` argument is typically used to provide an FX fixing series from which to
extract non-deliverable :class:`~rateslib.data.fixings.FXFixing` data. The ``leg2_fx_fixings``
inherits from the former and is likely to always be omitted, unless the fixings are provided
as a list (against best practice) and the schedules do not align.
For **pricing**, whilst a traditional *IRS* can be priced with just one *Curve*, e.g. "sofr"
for a conventional USD IRS, an ND-IRS will always require 2 different curves: a *leg2 rate
curve* for forecasting rates in the non-deliverable reference currency, and a *disc curve* for
discounting cashflows in the settlement currency.
The following is an example of a THB ND-IRS settled in USD with notional of 10mm THB.
.. ipython:: python
fixings.add("WMR_10AM_TYO_USDTHB", Series(index=[dt(2000, 6, 30), dt(2001, 1, 2)], data=[35.25, 37.0]))
irs = IRS(
effective=dt(2000, 1, 1),
termination="2y",
frequency="S",
currency="usd", # <- USD set as the settlement currency
pair=FXIndex("usdthb", "fed", 1, "fed", -1), # <- THB inferred as the reference currency
fx_fixings="WMR_10AM_TYO",
fixed_rate=2.0,
# all other arguments set as normal IRS
)
irs.cashflows()
.. ipython:: python
:suppress:
fixings.pop("WMR_10AM_TYO_USDTHB")
Further information is available in the documentation for a :class:`~rateslib.legs.FixedLeg`.
""" # noqa: E501
_rate_scalar = 1.0
@property
def fixed_rate(self) -> DualTypes_:
"""The fixed rate parameter of the composited
:class:`~rateslib.legs.FixedLeg`."""
return self.leg1.fixed_rate
@fixed_rate.setter
def fixed_rate(self, value: DualTypes_) -> None:
self.kwargs.leg1["fixed_rate"] = value
self.leg1.fixed_rate = value
@property
def leg2_float_spread(self) -> DualTypes_:
"""The float spread parameter of the composited
:class:`~rateslib.legs.FloatLeg`."""
return self.leg2.float_spread
@leg2_float_spread.setter
def leg2_float_spread(self, value: DualTypes) -> None:
self.kwargs.leg2["float_spread"] = value
self.leg2.float_spread = value
@property
def leg1(self) -> FixedLeg:
"""The :class:`~rateslib.legs.FixedLeg` of the *Instrument*."""
return self._leg1
@property
def leg2(self) -> FloatLeg:
"""The :class:`~rateslib.legs.FloatLeg` of the *Instrument*."""
return self._leg2
@property
def legs(self) -> list[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
def __init__(
self,
# scheduling
effective: datetime_ = NoInput(0),
termination: datetime | str_ = NoInput(0),
frequency: Frequency | str_ = NoInput(0),
*,
stub: str_ = NoInput(0),
front_stub: datetime_ = NoInput(0),
back_stub: datetime_ = NoInput(0),
roll: int | RollDay | str_ = NoInput(0),
eom: bool_ = NoInput(0),
modifier: Adjuster | str_ = NoInput(0),
calendar: CalInput = NoInput(0),
payment_lag: Adjuster | str | int_ = NoInput(0),
payment_lag_exchange: Adjuster | str | int_ = NoInput(0),
ex_div: Adjuster | str | int_ = NoInput(0),
convention: Convention | str_ = NoInput(0),
leg2_effective: datetime_ = NoInput(1),
leg2_termination: datetime | str_ = NoInput(1),
leg2_frequency: Frequency | str_ = NoInput(1),
leg2_stub: str_ = NoInput(1),
leg2_front_stub: datetime_ = NoInput(1),
leg2_back_stub: datetime_ = NoInput(1),
leg2_roll: int | RollDay | str_ = NoInput(1),
leg2_eom: bool_ = NoInput(1),
leg2_modifier: Adjuster | str_ = NoInput(1),
leg2_calendar: CalInput = NoInput(1),
leg2_payment_lag: Adjuster | str | int_ = NoInput(1),
leg2_payment_lag_exchange: Adjuster | str | int_ = NoInput(1),
leg2_ex_div: Adjuster | str | int_ = NoInput(1),
leg2_convention: Convention | str_ = NoInput(1),
# settlement parameters
currency: str_ = NoInput(0),
notional: float_ = NoInput(0),
amortization: float_ = NoInput(0),
leg2_notional: float_ = NoInput(-1),
leg2_amortization: float_ = NoInput(-1),
# non-deliverability
pair: str_ = NoInput(0),
fx_fixings: LegFixings = NoInput(0),
leg2_fx_fixings: LegFixings = NoInput(1),
# rate parameters
fixed_rate: DualTypes_ = NoInput(0),
leg2_float_spread: DualTypes_ = NoInput(0),
leg2_spread_compound_method: str_ = NoInput(0),
leg2_rate_fixings: LegFixings = NoInput(0),
leg2_fixing_method: str_ = NoInput(0),
leg2_fixing_frequency: Frequency | str_ = NoInput(0),
leg2_fixing_series: FloatRateSeries | str_ = NoInput(0),
leg2_zero_periods: bool_ = NoInput(0),
# meta parameters
curves: CurvesT_ = NoInput(0),
spec: str_ = NoInput(0),
) -> None:
user_args = dict(
# scheduling
effective=effective,
leg2_effective=leg2_effective,
termination=termination,
leg2_termination=leg2_termination,
frequency=frequency,
leg2_frequency=leg2_frequency,
stub=stub,
leg2_stub=leg2_stub,
front_stub=front_stub,
leg2_front_stub=leg2_front_stub,
back_stub=back_stub,
leg2_back_stub=leg2_back_stub,
roll=roll,
leg2_roll=leg2_roll,
eom=eom,
leg2_eom=leg2_eom,
modifier=modifier,
leg2_modifier=leg2_modifier,
calendar=calendar,
leg2_calendar=leg2_calendar,
payment_lag=payment_lag,
leg2_payment_lag=leg2_payment_lag,
payment_lag_exchange=payment_lag_exchange,
leg2_payment_lag_exchange=leg2_payment_lag_exchange,
ex_div=ex_div,
leg2_ex_div=leg2_ex_div,
convention=convention,
leg2_convention=leg2_convention,
# settlement
currency=currency,
notional=notional,
leg2_notional=leg2_notional,
amortization=amortization,
leg2_amortization=leg2_amortization,
# non-deliverability
pair=pair,
fx_fixings=fx_fixings,
leg2_fx_fixings=leg2_fx_fixings,
# rate
fixed_rate=fixed_rate,
leg2_float_spread=leg2_float_spread,
leg2_spread_compound_method=leg2_spread_compound_method,
leg2_rate_fixings=leg2_rate_fixings,
leg2_fixing_method=leg2_fixing_method,
leg2_fixing_series=leg2_fixing_series,
leg2_fixing_frequency=leg2_fixing_frequency,
leg2_zero_periods=leg2_zero_periods,
# meta
curves=self._parse_curves(curves),
)
instrument_args = dict( # these are hard coded arguments specific to this instrument
leg2_currency=NoInput(1),
leg2_pair=NoInput(1),
initial_exchange=False,
final_exchange=False,
leg2_initial_exchange=False,
leg2_final_exchange=False,
mtm=LegMtm.Payment,
leg2_mtm=LegMtm.Payment,
vol=_Vol(),
)
default_args = dict(
notional=defaults.notional,
payment_lag=defaults.payment_lag_specific[type(self).__name__],
payment_lag_exchange=defaults.payment_lag_exchange,
)
self._kwargs = _KWArgs(
spec=spec,
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=["curves", "vol"],
)
self._leg1 = FixedLeg(**_convert_to_schedule_kwargs(self.kwargs.leg1, 1))
self._leg2 = FloatLeg(**_convert_to_schedule_kwargs(self.kwargs.leg2, 1))
self._legs = [self.leg1, self.leg2]
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
leg2_rate_curve = _get_curve("leg2_rate_curve", True, True, *c)
leg2_disc_curve = _get_curve("leg2_disc_curve", False, True, *c)
disc_curve = _get_curve("disc_curve", False, True, *c)
fx_ = _get_fx_forwards_maybe_from_solver(solver, fx)
leg2_npv: DualTypes = self.leg2.local_npv(
rate_curve=leg2_rate_curve,
disc_curve=leg2_disc_curve,
index_curve=NoInput(0),
fx=fx_,
settlement=settlement,
forward=forward,
)
return (
self.leg1.spread(
target_npv=-leg2_npv,
rate_curve=NoInput(0),
disc_curve=disc_curve,
fx=fx_,
index_curve=NoInput(0),
settlement=settlement,
forward=forward,
)
/ 100
)
def spread(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
leg2_rate_curve = _get_curve("leg2_rate_curve", True, True, *c)
disc_curve = _get_curve("disc_curve", False, True, *c)
fx_ = _get_fx_forwards_maybe_from_solver(solver, fx)
leg1_npv: DualTypes = self.leg1.local_npv(
rate_curve=NoInput(0),
disc_curve=disc_curve,
index_curve=NoInput(0),
fx=fx_,
settlement=settlement,
forward=forward,
)
return self.leg2.spread(
target_npv=-leg1_npv,
rate_curve=leg2_rate_curve,
fx=fx_,
disc_curve=disc_curve,
index_curve=NoInput(0),
settlement=settlement,
forward=forward,
)
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
self._set_pricing_mid(
curves=curves,
solver=solver,
fx=fx,
settlement=settlement,
forward=forward,
)
return super().npv(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=local,
settlement=settlement,
forward=forward,
)
def _set_pricing_mid(
self,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> None:
# the test for an unpriced IRS is that its fixed rate is not set.
if isinstance(self.kwargs.leg1["fixed_rate"], NoInput):
# set a fixed rate for the purpose of generic methods NPV will be zero.
mid_market_rate = self.rate(
curves=curves,
solver=solver,
settlement=settlement,
forward=forward,
fx=fx,
)
self.leg1.fixed_rate = _dual_float(mid_market_rate)
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
@classmethod
def _parse_curves(cls, curves: CurvesT_) -> _Curves:
"""
An IRS has two curve requirements: a leg2_rate_curve and a disc_curve used by both legs.
When given as only 1 element this curve is applied to all of the those components
When given as 2 elements the first is treated as the rate curve and the 2nd as disc curve.
"""
if isinstance(curves, NoInput):
return _Curves()
elif isinstance(curves, list | tuple):
if len(curves) == 2:
return _Curves(
leg2_rate_curve=curves[0],
disc_curve=curves[1],
leg2_disc_curve=curves[1],
)
elif len(curves) == 1:
return _Curves(
leg2_rate_curve=curves[0],
disc_curve=curves[0],
leg2_disc_curve=curves[0],
)
elif len(curves) == 4:
return _Curves(
rate_curve=curves[0],
disc_curve=curves[1],
leg2_rate_curve=curves[2],
leg2_disc_curve=curves[3],
)
else:
raise ValueError(
f"{type(cls).__name__} requires only 2 curve types. Got {len(curves)}."
)
elif isinstance(curves, dict):
return _Curves(
rate_curve=curves.get("rate_curve", NoInput(0)),
disc_curve=curves.get("disc_curve", NoInput(0)),
leg2_rate_curve=_drb(
curves.get("rate_curve", NoInput(0)),
curves.get("leg2_rate_curve", NoInput(0)),
),
leg2_disc_curve=_drb(
curves.get("disc_curve", NoInput(0)),
curves.get("leg2_disc_curve", NoInput(0)),
),
)
elif isinstance(curves, _Curves):
return curves
else: # `curves` is just a single input which is copied across all curves
return _Curves(
leg2_rate_curve=curves, # type: ignore[arg-type]
disc_curve=curves, # type: ignore[arg-type]
leg2_disc_curve=curves, # type: ignore[arg-type]
)
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return super()._cashflows_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return self._local_analytic_rate_fixings_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
)
================================================
FILE: python/rateslib/instruments/loan.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.dual import ift_1dim
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import LegMtm
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.kwargs import _convert_to_schedule_kwargs, _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_parse_curves,
_Vol,
)
from rateslib.legs import FixedLeg, FloatLeg
from rateslib.scheduling import Frequency
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Adjuster,
CalInput,
Convention,
CurvesT_,
DataFrame,
DualTypes,
DualTypes_,
FixingsRates_,
FloatRateSeries,
FXForwards_,
IndexMethod,
LegFixings,
LegIndexBase,
RollDay,
Solver_,
VolT_,
_BaseLeg,
bool_,
datetime,
datetime_,
float_,
int_,
str_,
)
class Loan(_BaseInstrument):
"""
A *loan obligation* composing either a :class:`~rateslib.legs.FixedLeg` or a
:class:`~rateslib.legs.FloatLeg`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.instruments import Loan
from datetime import datetime as dt
.. ipython:: python
loan = Loan(dt(2022, 1, 4), "3m", "Q", notional=10e6, fixed_rate=10.0, calendar="nyc")
loan.cashflows()
.. rubric:: Pricing
A *Loan* with a fixed rate requires one *disc curve* for discounting.
A *Loan* with a floating rate may require an additional *rate curve* for forecasting if
rate fixings have not been published.
A *Loan* that is indexed may require an additional *index curve*.
.. code-block:: python
curves = curve | [curve] # a single curve is repeated for all required curves
curves = [rate_curve, disc_curve] # two curves given in the specified order
curves = [rate_curve, disc_curve, index_curve] # three curves given in the specified order
curves = { # dict form is explicit
"rate_curve": rate_curve,
"disc_curve": disc_curve,
"index_curve": index_curve,
}
The *rate* method is not generally implemented for a *Loan*. However, for flexibility,
one ``metric`` that is available:
- *'npv'*: returns the result of the :meth:`~rateslib.instruments.Fee.npv` method.
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **scheduling** parameters.
effective : datetime, :red:`required`
The unadjusted effective date. If given as adjusted, unadjusted alternatives may be
inferred.
termination : datetime, str, :red:`required`
The unadjusted termination date. If given as adjusted, unadjusted alternatives may be
inferred. If given as string tenor will be calculated from ``effective``.
frequency : Frequency, str, :red:`required`
The frequency of the schedule.
If given as string will derive a :class:`~rateslib.scheduling.Frequency` aligning with:
monthly ("M"), quarterly ("Q"), semi-annually ("S"), annually("A") or zero-coupon ("Z"), or
a set number of calendar or business days ("_D", "_B"), weeks ("_W"), months ("_M") or
years ("_Y").
Where required, the :class:`~rateslib.scheduling.RollDay` is derived as per ``roll``
and business day calendar as per ``calendar``.
stub : StubInference, str in {"ShortFront", "LongFront", "ShortBack", "LongBack"}, :green:`optional`
The stub type used if stub inference is required. If given as string will derive a
:class:`~rateslib.scheduling.StubInference`.
front_stub : datetime, :green:`optional`
The unadjusted date for the start stub period. If given as adjusted, unadjusted
alternatives may be inferred.
back_stub : datetime, :green:`optional`
The unadjusted date for the back stub period. If given as adjusted, unadjusted
alternatives may be inferred.
See notes for combining ``stub``, ``front_stub`` and ``back_stub``
and any automatic stub inference.
roll : RollDay, int in [1, 31], str in {"eom", "imm", "som"}, :green:`optional`
The roll day of the schedule. If not given or not available in ``frequency`` will be
inferred for monthly frequency variants.
eom : bool, :green:`optional`
Use an end of month preference rather than regular rolls for ``roll`` inference. Set by
default. Not required if ``roll`` is defined.
modifier : Adjuster, str in {"NONE", "F", "MF", "P", "MP"}, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` used for adjusting unadjusted schedule dates
into adjusted dates. If given as string must define simple date rolling rules.
calendar : calendar, str, :green:`optional`
The business day calendar object to use. If string will call
:meth:`~rateslib.scheduling.get_calendar`.
payment_lag: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
a payment date. If given as integer will define the number of business days to
lag payments by.
payment_lag_exchange: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional payment date. If given as integer will define the number of business days to
lag payments by.
ex_div: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional dates, which may be used, for example by fixings schedules. If given as integer
will define the number of business days to lag dates by.
convention: str, :green:`optional (set by 'defaults')`
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of leg1 (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set from 'leg2_notional' or 'defaults' )`
The initial leg1 notional, defined in units of the currency of the leg. Only one
of ``notional`` and ``leg2_notional`` can be given. The alternate leg notional is derived
via non-deliverability :class:`~rateslib.data.fixings.FXFixing`.
amortization: float, Dual, Dual2, Variable, str, Amortization, :green:`optional (set as zero)`
Set a non-constant notional per *Period*. If a scalar value, adjusts the ``notional`` of
each successive period by that same value. Should have
sign equal to that of notional if the notional is to reduce towards zero.
.. note::
The following are **rate parameters**.
fixed : bool, :green:`optional (set as True)`
Whether leg1 is a :class:`~rateslib.legs.FixedLeg` or a :class:`~rateslib.legs.FloatLeg`.
fixed_rate : float or None
The fixed rate applied to the :class:`~rateslib.legs.FixedLeg`. If `None`
will be set to mid-market when curves are provided.
fixing_method: FloatFixingMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.FloatFixingMethod` describing the determination
of the floating rate for each period.
fixing_frequency: Frequency, str, :green:`optional (set by 'frequency' or '1B')`
The :class:`~rateslib.scheduling.Frequency` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given is assumed to match the
frequency of the schedule for an IBOR type ``fixing_method`` or '1B' if RFR type.
fixing_series: FloatRateSeries, str, :green:`optional (implied by other parameters)`
The :class:`~rateslib.data.fixings.FloatRateSeries` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given inherits attributes given
such as the ``calendar``, ``convention``, ``fixing_method`` etc.
float_spread: float, Dual, Dual2, Variable, :green:`optional (set as 0.0)`
The amount (in bps) added to the rate in each period rate determination.
spread_compound_method: SpreadCompoundMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.SpreadCompoundMethod` used in the calculation
of the period rate when combining a ``float_spread``. Used **only** with RFR type
``fixing_method``.
rate_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
See :ref:`Fixings `.
The value of the rate fixing. If a scalar, is used directly. If a string identifier, links
to the central ``fixings`` object and data loader.
.. note::
The following define **non-deliverability** parameters. If the fee is
directly deliverable do not use these parameters.
pair: FXIndex, str, :green:`optional`
The currency pair for :class:`~rateslib.data.fixings.FXFixing` that determines *Period*
settlement. The *reference currency* is implied from ``pair``. Must include ``currency``.
mtm: bool, :green:`optional (set to False)`
If *True* use non-deliverability defined by payment date, else use non-deliverability
defined by a single fixing related to the effective date.
fx_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of the :class:`~rateslib.data.fixings.FXFixing` according
to non-deliverability.
.. note::
The following parameters define **indexation**. The *Period* will be considered
indexed if any of ``index_method``, ``index_lag``, ``index_base``, ``index_fixings``
are given.
index_method : IndexMethod, str, :green:`optional (set by 'defaults')`
The interpolation method, or otherwise, to determine index values from reference dates.
index_lag: int, :green:`optional (set by 'defaults')`
The indexation lag, in months, applied to the determination of index values.
index_base: float, Dual, Dual2, Variable, :green:`optional`
The specific value set of the base index value.
If not given and ``index_fixings`` is a str fixings identifier that will be
used to determine the base index value.
index_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The index value for the reference date.
If a scalar value this is used directly. If a string identifier will link to the
central ``fixings`` object and data loader. See :ref:`fixings `.
index_base_type: LegIndexBase, :green:`optional (set as 'initial')`
A parameter to define how the ``index_base_date`` is set on each period. See notes.
.. note::
The following are **meta parameters**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
spec: str, :green:`optional`
A collective group of parameters. See
:ref:`default argument specifications `.
metric: str, :green:`optional (set as 'leg1')`
Determines which calculation metric to return by default when using the
:meth:`~rateslib.instruments.Loan.rate` method.
Notes
-----
How does a :class:`~rateslib.instruments.Loan` compare with
a :class:`~rateslib.instruments.FixedRateBond` or :class:`~rateslib.instruments.FloatRateNote`?
All of these *Instruments* consist of a single *Leg* with interest payments.
However, a :class:`~rateslib.instruments.Loan` is modeled with its initial cashflow and final
cashflow, whilst the :class:`~rateslib.instruments.FixedRateBond` and
:class:`~rateslib.instruments.FloatRateNote` do not include their initial cashflow.
This is a conceptual choice. *Bonds* typically trade in the primary and secondary market and
therefore the initial cashflow, for the purchase of the security, is a transactional
quantity based or price or YTM. Due to this variation the initial cashflow is excluded
from a *Bonds* cashflow representation.
*Loans* are *Instruments* that are considered to be accounting entries,
so the initial cashflow is usually well defined between two counterparties, and is therefore
included.
**Indexation**
The loan payments can be based on some indexed quantity. The ``index_base_date``
for each payment will be set according to ``index_base_type``, and follows the
logic applied to a :class:`~rateslib.legs.FixedLeg`.
""" # noqa: E501
_rate_scalar = 1.0
@property
def leg1(self) -> FixedLeg | FloatLeg:
"""The :class:`~rateslib.legs.FixedLeg` or :class:`~rateslib.legs.FloatLeg`
of the *Instrument*."""
return self._leg1
@property
def legs(self) -> list[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs # type: ignore[return-value]
def __init__(
self,
# scheduling
effective: datetime_ = NoInput(0),
termination: datetime | str_ = NoInput(0),
frequency: Frequency | str_ = NoInput(0),
*,
stub: str_ = NoInput(0),
front_stub: datetime_ = NoInput(0),
back_stub: datetime_ = NoInput(0),
roll: int | RollDay | str_ = NoInput(0),
eom: bool_ = NoInput(0),
modifier: Adjuster | str_ = NoInput(0),
calendar: CalInput = NoInput(0),
payment_lag: Adjuster | str | int_ = NoInput(0),
payment_lag_exchange: Adjuster | str | int_ = NoInput(0),
ex_div: Adjuster | str | int_ = NoInput(0),
convention: Convention | str_ = NoInput(0),
# settlement parameters
currency: str_ = NoInput(0),
notional: float_ = NoInput(0),
amortization: float_ = NoInput(0),
# rate parameters
fixed: bool_ = NoInput(0),
fixed_rate: DualTypes_ = NoInput(0),
float_spread: DualTypes_ = NoInput(0),
spread_compound_method: str_ = NoInput(0),
rate_fixings: FixingsRates_ = NoInput(0),
fixing_method: str_ = NoInput(0),
fixing_frequency: Frequency | str_ = NoInput(0),
fixing_series: FloatRateSeries | str_ = NoInput(0),
# # non-deliverability
pair: str_ = NoInput(0),
fx_fixings: LegFixings = NoInput(0),
mtm: bool_ = NoInput(0),
# index params
index_base: DualTypes_ = NoInput(0),
index_lag: int_ = NoInput(0),
index_method: IndexMethod | str_ = NoInput(0),
index_fixings: LegFixings = NoInput(0),
index_base_type: LegIndexBase | str_ = NoInput(0),
# meta parameters
metric: str_ = NoInput(0),
curves: CurvesT_ = NoInput(0),
spec: str_ = NoInput(0),
) -> None:
user_args = dict(
# scheduling
effective=effective,
termination=termination,
frequency=frequency,
stub=stub,
front_stub=front_stub,
back_stub=back_stub,
roll=roll,
eom=eom,
modifier=modifier,
calendar=calendar,
payment_lag=payment_lag,
payment_lag_exchange=payment_lag_exchange,
ex_div=ex_div,
convention=convention,
# settlement
currency=currency,
notional=notional,
amortization=amortization,
# non-deliverability
pair=pair,
fx_fixings=fx_fixings,
mtm=mtm,
# indexation
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings,
index_base_type=index_base_type,
# rate
fixed_rate=fixed_rate,
float_spread=float_spread,
spread_compound_method=spread_compound_method,
rate_fixings=rate_fixings,
fixing_method=fixing_method,
fixing_frequency=fixing_frequency,
fixing_series=fixing_series,
# meta
fixed=fixed,
curves=self._parse_curves(curves),
metric=metric,
)
instrument_args = dict( # these are hard coded arguments specific to this instrument
initial_exchange=True,
final_exchange=True,
vol=_Vol(),
)
default_args = dict(
currency=defaults.base_currency,
payment_lag=defaults.payment_lag_specific[type(self).__name__],
payment_lag_exchange=defaults.payment_lag_exchange,
fixed=True,
mtm=False,
metric="leg1",
)
self._kwargs = _KWArgs(
spec=spec,
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=["curves", "metric", "fixed", "vol"],
)
# narrowing of fixed or floating
float_attrs = [
"float_spread",
"spread_compound_method",
"rate_fixings",
"fixing_method",
"fixing_frequency",
"fixing_series",
]
if self.kwargs.meta["fixed"]:
for item in float_attrs:
self.kwargs.leg1.pop(item)
else:
self.kwargs.leg1.pop("fixed_rate")
# setting non-deliverability
self.kwargs.leg1["mtm"] = LegMtm.Payment if self.kwargs.leg1["mtm"] else LegMtm.Initial
if self.kwargs.meta["fixed"]:
self._leg1: FixedLeg | FloatLeg = FixedLeg(
**_convert_to_schedule_kwargs(self.kwargs.leg1, 1)
)
else:
self._leg1 = FloatLeg(**_convert_to_schedule_kwargs(self.kwargs.leg1, 1))
self._legs = [self._leg1]
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
A FixedRate Loan only requires one curve for discounting.
A FloatRate Loan requires upto two, one for discounting and one for forecasting rates.
"""
if isinstance(curves, NoInput):
return _Curves()
if isinstance(curves, dict):
return _Curves(
rate_curve=curves.get("rate_curve", NoInput(0)),
disc_curve=curves.get("disc_curve", NoInput(0)),
index_curve=curves.get("index_curve", NoInput(0)),
)
elif isinstance(curves, list | tuple):
if len(curves) == 2:
return _Curves(
rate_curve=curves[0],
disc_curve=curves[1],
)
elif len(curves) == 1:
return _Curves(
rate_curve=curves[0],
disc_curve=curves[0],
index_curve=curves[0],
)
elif len(curves) == 3:
return _Curves(
rate_curve=curves[0],
disc_curve=curves[1],
index_curve=curves[2],
)
else:
raise ValueError(
f"{type(self).__name__} requires upto 3 curve types. Got {len(curves)}."
)
elif isinstance(curves, _Curves):
return curves
else: # `curves` is just a single input which is copied across all curves
return _Curves(
rate_curve=curves, # type: ignore[arg-type]
disc_curve=curves, # type: ignore[arg-type]
index_curve=curves, # type: ignore[arg-type]
)
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
metric_ = _drb(self.kwargs.meta["metric"], metric).lower()
if metric_ == "npv":
return self.npv( # type: ignore[return-value]
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
local=False,
)
c = _parse_curves(self, curves, solver)
disc_curve = _get_curve("disc_curve", False, False, *c)
settlement_ = _drb(disc_curve.nodes.initial, settlement)
period_index = self.leg1._period_index(settlement_)
tgt_notional = -self.leg1._regular_periods[period_index].settlement_params.notional
if metric_ == "fixed_rate":
raise NotImplementedError("metric 'float_rate' not implemented for Loan.")
if not isinstance(self.leg1, FixedLeg):
raise TypeError("Can only use 'fixed_rate' for FixedLeg Loan.")
fixed_rate_ = self.leg1.fixed_rate
def s(g):
self.leg1.fixed_rate = g
pv = self._npv_local_excluding_first_exchange(
curves=curves,
solver=solver,
settlement=settlement_,
forward=forward,
)
return pv
result = ift_1dim(
s=s,
s_tgt=tgt_notional,
h="ytm_quadratic",
ini_h_args=(-3.0, 2.0, 12.0),
func_tol=1e-5,
conv_tol=1e-6,
max_iter=20,
)
self.leg1.fixed_rate = fixed_rate_
return result["g"]
elif metric == "float_spread":
raise NotImplementedError("metric 'float_rate' not implemented for Loan.")
if not isinstance(self.leg1, FloatLeg):
raise TypeError("Can only use 'float_spread' for FloatLeg Loan.")
float_spread_ = self.leg1.float_spread
def s(g):
self.leg1.float_spread = g
pv = self._npv_local_excluding_first_exchange(
curves=curves,
solver=solver,
settlement=settlement_,
forward=forward,
)
return pv
result = ift_1dim(
s=s,
s_tgt=tgt_notional,
h="ytm_quadratic",
ini_h_args=(-300.0, 200.0, 1200.0),
func_tol=1e-5,
conv_tol=1e-6,
max_iter=20,
)
self.leg1.fixed_rate = float_spread_
return result["g"]
else:
raise ValueError("`metric`must be in {'npv', 'cashflow'}.")
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
return super().npv(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=local,
settlement=settlement,
forward=forward,
)
# def _npv_local_excluding_first_exchange(
# self,
# *,
# curves: CurvesT_ = NoInput(0),
# solver: Solver_ = NoInput(0),
# fx: FXForwards_ = NoInput(0),
# vol: VolT_ = NoInput(0),
# settlement: datetime_ = NoInput(0),
# forward: datetime_ = NoInput(0),
# ) -> DualTypes | dict[str, DualTypes]:
# c = _parse_curves(self, curves, solver)
# disc_curve = _get_curve("disc_curve", False, False, *c)
# first_npv = self.leg1.periods[0].npv(
# disc_curve=disc_curve, settlement=settlement, forward=forward
# )
# return (
# super().npv(
# curves=curves,
# solver=solver,
# fx=fx,
# vol=vol,
# settlement=settlement,
# forward=forward,
# )
# - first_npv
# )
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return super()._cashflows_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
def analytic_delta(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
leg: int = 1,
) -> DualTypes | dict[str, DualTypes]:
return super().analytic_delta(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=local,
settlement=settlement,
forward=forward,
leg=leg,
)
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return self._local_analytic_rate_fixings_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
)
================================================
FILE: python/rateslib/instruments/ndf.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from datetime import datetime
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.data.fixings import FXIndex, _get_fx_index
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.kwargs import _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_fx_forwards_maybe_from_solver,
_Vol,
)
from rateslib.legs import CustomLeg
from rateslib.periods import Cashflow
from rateslib.periods.utils import _validate_fx_as_forwards
from rateslib.scheduling.frequency import _get_fx_expiry_and_delivery_and_payment
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Adjuster,
CurvesT_,
DataFrame,
DualTypes,
DualTypes_,
FXForwards_,
FXIndex,
FXIndex_,
LegFixings,
PeriodFixings,
Sequence,
Solver_,
VolT_,
_BaseLeg,
bool_,
datetime_,
str_,
)
class NDF(_BaseInstrument):
"""
A *non-deliverable FX forward* (NDF), composing two
:class:`~rateslib.legs.CustomLeg`
of individual :class:`~rateslib.periods.Cashflow`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import fixings, NDF
from datetime import datetime as dt
from rateslib.data.fixings import FXIndex
.. ipython:: python
ndf = NDF(dt(2026, 1, 5), FXIndex("usdbrl", "fed", 2), fx_rate=5.5)
ndf.cashflows()
.. rubric:: Pricing
The methods of an *NDF* require an :class:`~rateslib.fx.FXForwards` object for ``fx`` .
They also require a *disc curve*, which is an appropriate curve to discount the
cashflows of the deliverable settlement currency. The following input
formats are allowed:
.. code-block:: python
curves = disc_curve | [disc_curve] # one curve
curves = [None, disc_curve, None, disc_curve] # four curves
curves = { # dict form is explicit
"disc_curve": disc_curve,
"leg2_disc_curve": disc_curve,
}
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following are **settlement parameters**.
settlement : datetime, str, :red:`required`
The date of settlement for the currency ``pair`` and payment date.
pair : FXIndex, str, :red:`required`
The :class:`~rateslib.data.fixings.FXIndex` containing the FX pair implying the
reference currencies and notional of *leg1* and *leg2* respectively.
currency : str, :green:`optional (set as LHS currency in pair)`
The physical *settlement currency* of each leg. If not a currency in ``pair`` then each
leg will be non-deliverable (3-digit code).
notional : float, :green:`optional`
The notional of *leg1* expressed in units of LHS currency of ``pair``. This can be
derived from ``fx_rate`` and ``leg2_notional``.
leg2_notional : float, :green:`optional`
The notional of *leg2* expressed in units of RHS currency of ``pair``. This can be
derived from ``fx_rate`` and ``notional``.
fx_rate : float, :green:`optional`
The transational FX rate of ``pair``. This can be derived from ``notional`` and
``leg2_notional``.
.. note::
The following are **scheduling parameters** required only if ``settlement`` given
as string tenor.
eval_date: datetime, :green:`optional`
Today's date from which spot and other dates may be determined.
modifier: Adjuster, str, :green:`optional`
The date adjuster for determining tenor dates under the convention for ``pair``.
eom: bool, :green:`optional`
Whether tenors under ``pair`` adopt EOM convention or not.
.. note::
The following are **FX fixing parameters** defining the settlement of the transaction.
fx_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of the :class:`~rateslib.data.fixings.FXFixing` for settlement of *leg1* if
that leg is non-deliverable. If a scalar is used directly.
If a string identifier will link to the central ``fixings`` object and data loader.
reversed: bool, :green:`optional (set as False)`
Only used by a 3-currency NDF. Standard direction of the pair is '*settlement:reference*',
unless ``reversed`` is *True*, in which case '*reference:settlement*' is used.
leg2_fx_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of the :class:`~rateslib.data.fixings.FXFixing` for settlement of *leg2* if
that leg is non-deliverable. If a scalar is used directly.
If a string identifier will link to the central ``fixings`` object and data loader.
leg2_reversed: bool, :green:`optional (set as False)`
Only used by a 3-currency NDF. Standard direction of the pair is '*settlement:reference*',
unless ``reversed`` is *True*, in which case '*reference:settlement*' is used.
.. note::
The following are **meta parameters**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
spec: str, :green:`optional`
A collective group of parameters. See
:ref:`default argument specifications `.
Notes
-----
*NDFs* in *rateslib* replicate an :class:`~rateslib.instruments.FXForward` whose cashflows
are paid out netted in a single *settlement currency*. Two types are allowed:
- A **two currency** *NDF* where one *Leg* is directly deliverable in its own currency and
the other *Leg* is non-deliverable.
- A **three currency** *NDF* when both *Legs* with cashflow currencies of ``pair`` are
non-deliverable into a third ``currency``.
.. ipython:: python
fixings.add("WMR_10AM_TY0_USDINR", Series(index=[dt(2026, 2, 16)], data=[92.5]))
fixings.add("WMR_10AM_TY0_USDSGD", Series(index=[dt(2026, 2, 16)], data=[1.290]))
.. tabs::
.. tab:: Two Currency NDF
The **required** parameters of a two currency NDF are as follows;
- A ``pair`` which defines the currency pair and implicitly determines the
*reference currency*. The *settlement currency* for both *Legs* is inferred as the
LHS, although this can be manually set by using the ``currency`` argument.
- A ``notional`` or ``leg2_notional``. Each notional should be expressed in the
*reference currency* for that *Leg*. If both are given that defines the
transactional ``fx_rate``. If an ``fx_rate`` is given that will imply the missing
notional.
- ``fx_fixings`` or ``leg2_fx_fixings``. FX fixings can only be added to the
non-deliverable *Leg*.
This example is a USDINR *NDF* in 500mm INR payment with an initially agreed FX rate of
USDINR 92.0
.. ipython:: python
ndf = NDF(
settlement=dt(2026, 2, 18),
currency="usd", # <- USD settlement currency
pair="usdinr", # <- INR reference currency implied
leg2_notional=500e6, # <- Leg2 is based on the reference currency (INR)
leg2_fx_fixings="WMR_10AM_TY0",
fx_rate=92.0, # <- Leg1 notional is implied as -5.43mm
)
ndf.cashflows()
.. tab:: Three Currency NDXCS
The **required** parameters of a three currency NDXCS are as follows;
- A ``currency`` which defines the *settlement currency* on both legs.
- A ``pair`` which defines the currency pair and implicitly determines
the *reference currency 1* and *reference currency 2*.
- A ``notional`` or ``leg2_notional``. Each notional should be expressed in the
*reference currency* for that *Leg*. If both are given that defines the
transactional ``fx_rate``. If an ``fx_rate`` is given that will imply the missing
notional.
- ``fx_fixings`` and ``leg2_fx_fixings``. Both legs are non-deliverable so FX fixings
may be provided to both *Leg*.
This example is a SGDINR *NDF* in 500mm INR payment with an initially agreed FX rate of
SGDINR 70.1
.. ipython:: python
ndf = NDF(
settlement=dt(2026, 2, 18),
currency="usd", # <- USD settlement currency
pair=FXIndex("SGDINR", "mum", 2), # <- SGD + INR reference currencies
leg2_notional=500e6, # <- INR notional
fx_rate=70.1, # <- Transaction rate of pair
fx_fixings="WMR_10AM_TY0", # <- Data series tag for FXFixings on Leg1
leg2_fx_fixings="WMR_10AM_TY0", # <- Data series tag for FXFixings on Leg2
)
ndf.cashflows()
.. ipython:: python
:suppress:
fixings.pop("WMR_10AM_TY0_USDINR")
fixings.pop("WMR_10AM_TY0_USDSGD")
"""
_rate_scalar = 1.0
@property
def leg1(self) -> CustomLeg:
"""The :class:`~rateslib.legs.CustomLeg` of the *Instrument*."""
return self._leg1
@property
def leg2(self) -> CustomLeg:
"""The :class:`~rateslib.legs.CustomLeg` of the *Instrument*."""
return self._leg2
@property
def legs(self) -> Sequence[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
An NDF requires 1 disc curve for the cashflows in the delivery currency.
"""
if isinstance(curves, NoInput):
return _Curves()
elif isinstance(curves, dict):
return _Curves(
disc_curve=curves.get("disc_curve", NoInput(0)),
leg2_disc_curve=_drb(
curves.get("disc_curve", NoInput(0)),
curves.get("leg2_disc_curve", NoInput(0)),
),
)
elif isinstance(curves, list | tuple):
if len(curves) == 1:
return _Curves(
disc_curve=curves[0],
leg2_disc_curve=curves[0],
)
elif len(curves) == 4:
return _Curves(
disc_curve=curves[1],
leg2_disc_curve=curves[3],
)
else:
raise ValueError(
f"{type(self).__name__} requires 1 curve types. Got {len(curves)}."
)
elif isinstance(curves, _Curves):
return curves
else: # `curves` is just a single input which is copied across all curves
return _Curves(
disc_curve=curves, # type: ignore[arg-type]
leg2_disc_curve=curves, # type: ignore[arg-type]
)
def __init__(
self,
settlement: datetime,
pair: FXIndex | str,
*,
# settlement and rate
currency: str_ = NoInput(0),
fx_rate: DualTypes_ = NoInput(0),
notional: DualTypes_ = NoInput(0),
leg2_notional: DualTypes_ = NoInput(0),
# scheduling
eval_date: datetime_ = NoInput(0),
modifier: Adjuster | str_ = NoInput(0),
eom: bool_ = NoInput(0),
# fx fixings
fx_fixings: PeriodFixings = NoInput(0),
leg2_fx_fixings: PeriodFixings = NoInput(0),
reversed: bool_ = NoInput(0), # noqa: A002
leg2_reversed: bool_ = NoInput(0),
# meta
curves: CurvesT_ = NoInput(0),
spec: str_ = NoInput(0),
):
(currency_, pair_, leg2_pair_, notional_, leg2_notional_, fx_rate_, fx_index_) = (
_validated_ndf_input_combinations(
currency=currency,
pair=pair,
notional=notional,
leg2_notional=leg2_notional,
fx_fixings=fx_fixings,
leg2_fx_fixings=leg2_fx_fixings,
fx_rate=fx_rate,
reversed=reversed,
leg2_reversed=leg2_reversed,
spec=spec,
)
)
del currency, pair, notional, leg2_notional, fx_rate
user_args = dict(
currency=currency_,
pair=pair_,
leg2_currency=currency_,
leg2_pair=leg2_pair_,
notional=notional_,
leg2_notional=leg2_notional_,
fx_rate=fx_rate_,
curves=self._parse_curves(curves),
eval_date=eval_date,
modifier=modifier,
eom=eom,
settlement=settlement,
fx_fixings=fx_fixings,
leg2_fx_fixings=leg2_fx_fixings,
)
instrument_args = dict( # these are hard coded arguments specific to this instrument
vol=_Vol(),
leg2_settlement=NoInput(1),
fx_index=fx_index_,
)
default_args = dict(
payment_lag=defaults.payment_lag_specific[type(self).__name__],
modifier=defaults.modifier,
eom=defaults.eom_fx,
)
self._kwargs = _KWArgs(
spec=spec,
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=[
"curves",
"eval_date",
"calendar",
"modifier",
"payment_lag",
"eom",
"vol",
"fx_rate",
"fx_index",
],
)
# post input determination for 'settlement'
if not isinstance(self.kwargs.leg1["settlement"], datetime):
_, settlement_, _ = _get_fx_expiry_and_delivery_and_payment(
eval_date=self.kwargs.meta["eval_date"],
expiry=self.kwargs.leg1["settlement"],
delivery_lag=self.kwargs.meta["fx_index"].settle,
calendar=self.kwargs.meta["fx_index"].calendar,
modifier=self.kwargs.meta["modifier"],
eom=self.kwargs.meta["eom"],
payment_lag=0,
)
self.kwargs.leg1["settlement"] = settlement_
self.kwargs.leg2["settlement"] = settlement_
# construct legs
self._leg1 = CustomLeg(
periods=[
Cashflow(
currency=self.kwargs.leg1["currency"],
notional=-1.0
* (
0.0
if isinstance(self.kwargs.leg1["notional"], NoInput)
else self.kwargs.leg1["notional"]
),
payment=self.kwargs.leg1["settlement"],
pair=self.kwargs.leg1["pair"],
fx_fixings=self.kwargs.leg1["fx_fixings"],
),
]
)
self._leg2 = CustomLeg(
periods=[
Cashflow(
currency=self.kwargs.leg2["currency"],
notional=-1.0
* (
0.0
if isinstance(self.kwargs.leg2["notional"], NoInput)
else self.kwargs.leg2["notional"]
),
payment=self.kwargs.leg2["settlement"],
pair=self.kwargs.leg2["pair"],
fx_fixings=self.kwargs.leg2["fx_fixings"],
)
]
)
self._legs = [self._leg1, self._leg2]
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return super()._cashflows_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
fx_ = _validate_fx_as_forwards(_get_fx_forwards_maybe_from_solver(solver=solver, fx=fx))
return fx_.rate(
pair=self.kwargs.meta["fx_index"].pair, settlement=self.kwargs.leg1["settlement"]
)
def _set_pricing_mid(
self,
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
) -> None:
if isinstance(self.kwargs.meta["fx_rate"], NoInput):
# determine the mid-market FX rate and set the notional of the appropriate leg
mid_market_rate = self.rate(fx=fx, solver=solver)
if isinstance(self.kwargs.leg2["notional"], NoInput):
self.leg2.periods[0].settlement_params._notional = _dual_float(
-self.leg1.periods[0].settlement_params.notional * mid_market_rate
)
elif isinstance(self.kwargs.leg1["notional"], NoInput):
self.leg1.periods[0].settlement_params._notional = _dual_float(
-self.leg2.periods[0].settlement_params.notional / mid_market_rate
)
else:
raise RuntimeError( # pragma: no cover
"The is no `notional` to determine. Please report this bug. Detailing the"
"initialisation of the NDF."
)
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
self._set_pricing_mid(
solver=solver,
fx=fx,
)
return super().npv(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=local,
settlement=settlement,
forward=forward,
)
def _validated_ndf_input_combinations(
currency: str_,
pair: FXIndex | str_,
notional: DualTypes_,
leg2_notional: DualTypes_,
fx_fixings: LegFixings,
leg2_fx_fixings: LegFixings,
fx_rate: DualTypes_,
reversed: bool_, # noqa: A002
leg2_reversed: bool_,
spec: str_,
) -> tuple[str, FXIndex_, FXIndex_, DualTypes_, DualTypes_, DualTypes_, FXIndex]:
"""Method to handle arg parsing for 2 or 3 currency NDF instruments with default value
setting and erroring raising.
Returns
-------
(currency, pair, leg2_pair, notional, leg2_notional, fx_rate)
"""
kw = _KWArgs(
user_args=dict(
currency=currency,
leg2_currency=NoInput(1),
pair=pair,
notional=notional,
leg2_notional=leg2_notional,
fx_fixings=fx_fixings,
leg2_fx_fixings=leg2_fx_fixings,
fx_rate=fx_rate,
reversed=reversed,
leg2_reversed=leg2_reversed,
),
default_args=dict(
reversed=False,
leg2_reversed=False,
),
spec=spec,
meta_args=["pair", "fx_rate"],
)
fx_index_ = _get_fx_index(kw.meta["pair"])
# set a default settlement `currency` if none is provided
if isinstance(kw.leg1["currency"], NoInput):
kw.leg1["currency"] = fx_index_.pair[:3]
kw.leg2["currency"] = fx_index_.pair[:3]
else:
kw.leg1["currency"] = kw.leg1["currency"].lower()
kw.leg2["currency"] = kw.leg2["currency"].lower()
if kw.leg1["currency"] not in fx_index_.pair:
# then the NDF is a 3-currency instrument
return _validated_3ccy_ndf_input_combinations(
currency=kw.leg1["currency"],
fx_index=fx_index_,
notional=kw.leg1["notional"],
leg2_notional=kw.leg2["notional"],
fx_rate=kw.meta["fx_rate"],
reversed=kw.leg1["reversed"],
leg2_reversed=kw.leg2["reversed"],
)
else:
return _validated_2ccy_ndf_input_combinations(
currency=kw.leg1["currency"],
fx_index=fx_index_,
notional=kw.leg1["notional"],
leg2_notional=kw.leg2["notional"],
fx_fixings=kw.leg1["fx_fixings"],
leg2_fx_fixings=kw.leg2["fx_fixings"],
fx_rate=kw.meta["fx_rate"],
)
def _validated_2ccy_ndf_input_combinations(
currency: str,
fx_index: FXIndex,
notional: DualTypes_,
leg2_notional: DualTypes_,
fx_fixings: LegFixings,
leg2_fx_fixings: LegFixings,
fx_rate: DualTypes_,
) -> tuple[str, FXIndex_, FXIndex_, DualTypes_, DualTypes_, DualTypes_, FXIndex]:
"""Method to handle arg parsing for 2 currency NDF instruments with default value
setting and erroring raising.
Notional:
if no notional is given then leg1 is set from 'defaults'
if both notionals are given then the fx_rate is inferred.
if one notional and the fx_rate is given then the alternative notional is inferred.
two notionals AND fx_rate imply possible triangulation failure and raise
notional can be given on any leg and the alternative notional is inferred from the `fx_rate`
Returns
-------
(currency, pair, leg2_pair, notional, leg2_notional, fx_rate)
"""
leg1_nd = fx_index.pair[3:] == currency
if leg1_nd:
pair_: FXIndex_ = fx_index
leg2_pair_: FXIndex_ = NoInput(0)
else:
pair_ = NoInput(0)
leg2_pair_ = fx_index
notional_, leg2_notional_, fx_rate_ = _notional_and_fx_rate_validation(
notional, leg2_notional, fx_rate
)
# parse the fixings input: should only be relevant for the single non-deliverable leg
if not leg1_nd and not isinstance(fx_fixings, NoInput):
raise ValueError(
f"Leg1 of NDF is directly deliverable (reference ccy '{fx_index.pair[:3]}' and "
f"settlement ccy '{currency}').\n"
"Do not supply `fx_fixings` for leg1, perhaps you meant `leg2_fx_fixings`?"
)
if leg1_nd and not isinstance(leg2_fx_fixings, NoInput):
raise ValueError(
f"Leg2 of NDF is directly deliverable (reference ccy '{fx_index.pair[3:]}' and "
f"settlement ccy '{currency}').\n"
"Do not supply `leg2_fx_fixings` for leg2, perhaps you meant `fx_fixings`?"
)
return (
currency,
pair_,
leg2_pair_,
notional_,
leg2_notional_,
fx_rate_,
fx_index,
)
def _validated_3ccy_ndf_input_combinations(
currency: str,
fx_index: FXIndex,
notional: DualTypes_,
leg2_notional: DualTypes_,
fx_rate: DualTypes_,
reversed: bool, # noqa: A002
leg2_reversed: bool,
) -> tuple[str, FXIndex_, FXIndex_, DualTypes_, DualTypes_, DualTypes_, FXIndex]:
"""Method to handle arg parsing for 3 currency NDF instruments with default value
setting and erroring raising.
Returns
-------
(currency, pair, leg2_pair, notional, leg2_notional, fx_rate)
"""
# both legs are non-deliverable
if reversed:
pair = f"{fx_index.pair[:3]}{currency}"
else:
pair = f"{currency}{fx_index.pair[:3]}"
if leg2_reversed:
leg2_pair = f"{fx_index.pair[3:]}{currency}"
else:
leg2_pair = f"{currency}{fx_index.pair[3:]}"
try:
pair_index: FXIndex = _get_fx_index(pair)
except ValueError:
# no index exists in STATIC, clone from fx_index
pair_index = FXIndex(pair=pair, calendar=fx_index.calendar, settle=fx_index.settle)
pair_index = FXIndex(
pair=pair_index.pair,
calendar=pair_index.calendar,
settle=pair_index.settle,
isda_mtm_calendar=fx_index.isda_mtm_calendar,
isda_mtm_settle=fx_index.isda_mtm_settle,
)
try:
leg2_pair_index: FXIndex = _get_fx_index(leg2_pair)
except ValueError:
# no index exists in STATIC, clone from fx_index
leg2_pair_index = FXIndex(pair=pair, calendar=fx_index.calendar, settle=fx_index.settle)
leg2_pair_index = FXIndex(
pair=leg2_pair_index.pair,
calendar=leg2_pair_index.calendar,
settle=leg2_pair_index.settle,
isda_mtm_calendar=fx_index.isda_mtm_calendar,
isda_mtm_settle=fx_index.isda_mtm_settle,
)
notional_, leg2_notional_, fx_rate_ = _notional_and_fx_rate_validation(
notional, leg2_notional, fx_rate
)
return (
currency,
pair_index,
leg2_pair_index,
notional_,
leg2_notional_,
fx_rate_,
fx_index,
)
def _notional_and_fx_rate_validation(
notional: DualTypes_,
leg2_notional: DualTypes_,
fx_rate: DualTypes_,
) -> tuple[DualTypes_, DualTypes_, DualTypes_]:
"""
method to parse the input arguments in their various combinations.
Notional:
if no notional is given then leg1 is set from 'defaults'
if both notionals are given then the fx_rate is inferred.
if one notional and the fx_rate is given then the alternative notional is inferred.
two notionals AND fx_rate imply possible triangulation failure and raise
notional can be given on any leg and the alternative notional is inferred from the `fx_rate`
"""
# set a default `notional` if no notional on any leg is given
if isinstance(notional, NoInput) and isinstance(leg2_notional, NoInput):
notional_: DualTypes_ = defaults.notional
leg2_notional_: DualTypes_ = leg2_notional
else:
notional_ = notional
leg2_notional_ = leg2_notional
del notional, leg2_notional
# parse fx_rate / notional / and / leg2_notional
if not isinstance(notional_, NoInput) and not isinstance(leg2_notional_, NoInput):
if not isinstance(fx_rate, NoInput):
raise ValueError(
"`notional`, `leg2_notional` and `fx_rate` cannot all be given simultaneously.\n"
"Provide, at most, two of these arguments for an NDF."
)
if notional_ * leg2_notional_ > 0:
raise ValueError(
"When providing `notional` and `leg2_notional` on an NDF, the two must be opposite "
"signs, indicating both a buy and a sell ."
)
else:
fx_rate_: DualTypes_ = -leg2_notional_ / notional_
elif isinstance(notional_, NoInput) and not isinstance(leg2_notional_, NoInput):
if isinstance(fx_rate, NoInput):
# then the NDF is unpriced and will requiring setting to mid-market at price time
fx_rate_ = NoInput(0)
else:
fx_rate_ = fx_rate
notional_ = -leg2_notional_ / fx_rate
elif not isinstance(notional_, NoInput) and isinstance(leg2_notional_, NoInput):
if isinstance(fx_rate, NoInput):
# then the NDF is unpriced and will requiring setting to mid-market at price time
fx_rate_ = NoInput(0)
else:
fx_rate_ = fx_rate
leg2_notional_ = -notional_ * fx_rate
else:
raise RuntimeError( # pragma: no cover
"This line should never be reached. "
"Report issue for NDF initialization providing input arguments."
)
return notional_, leg2_notional_, fx_rate_
================================================
FILE: python/rateslib/instruments/ndxcs.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import LegMtm
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.kwargs import _convert_to_schedule_kwargs, _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_get_fx_forwards_maybe_from_solver,
_parse_curves,
_Vol,
)
from rateslib.legs import FixedLeg, FloatLeg
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CalInput,
CurvesT_,
DataFrame,
DualTypes,
DualTypes_,
FloatRateSeries,
Frequency,
FXForwards_,
LegFixings,
RollDay,
Sequence,
Solver_,
VolT_,
_BaseLeg,
bool_,
datetime,
datetime_,
float_,
int_,
str_,
)
class NDXCS(_BaseInstrument):
"""
A *non-deliverable cross-currency swap (XCS)* composing either
:class:`~rateslib.legs.FixedLeg`
and/or :class:`~rateslib.legs.FloatLeg` in different currencies.
.. rubric:: Examples
An INR NDXCS vs SOFR (IRUSON5 Curncy)
.. ipython:: python
:suppress:
from rateslib.instruments import NDXCS
from datetime import datetime as dt
from rateslib import fixings
from pandas import Series
.. ipython:: python
fixings.add("WMR_10AM_TY0_USDINR", Series(index=[dt(2025, 1, 8), dt(2025, 7, 4)], data=[92.0, 92.5]))
ndxcs = NDXCS(
effective=dt(2025, 1, 8),
termination="1y",
frequency="S",
currency="usd",
pair="usdinr",
notional=5e6, # <- INR Leg
fixed=True,
fx_fixings="WMR_10AM_TY0",
leg2_fx_fixings=91.55, # <- USD Notional at execution
payment_lag=0,
)
ndxcs.cashflows()
.. rubric:: Pricing
The methods of a *NDXCS* require an :class:`~rateslib.fx.FXForwards` object for ``fx`` .
They also require a *disc curve* for discounting both legs in the *settlement currency*
and (if not *FixedLegs*) a *rate curve* and a *leg2 rate curve* for forecasting the floating
rates on either *Leg*. The following input formats are allowed:
.. code-block:: python
curves = [rate_curve, disc_curve, leg2_rate_curve, disc_curve] # four curves
curves = { # dict form is explicit
"rate_curve": rate_curve,
"disc_curve": disc_curve,
"leg2_rate_curve": leg2_rate_curve,
}
The available pricing ``metric`` are in *{'leg1', 'leg2'}* which will return a *float spread*
or a *fixed rate* on the specified leg, for the appropriate *Leg* type.
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **scheduling** parameters.
effective : datetime, :red:`required`
The unadjusted effective date. If given as adjusted, unadjusted alternatives may be
inferred.
termination : datetime, str, :red:`required`
The unadjusted termination date. If given as adjusted, unadjusted alternatives may be
inferred. If given as string tenor will be calculated from ``effective``.
frequency : Frequency, str, :red:`required`
The frequency of the schedule.
If given as string will derive a :class:`~rateslib.scheduling.Frequency` aligning with:
monthly ("M"), quarterly ("Q"), semi-annually ("S"), annually("A") or zero-coupon ("Z"), or
a set number of calendar or business days ("_D", "_B"), weeks ("_W"), months ("_M") or
years ("_Y").
Where required, the :class:`~rateslib.scheduling.RollDay` is derived as per ``roll``
and business day calendar as per ``calendar``.
stub : StubInference, str in {"ShortFront", "LongFront", "ShortBack", "LongBack"}, :green:`optional`
The stub type used if stub inference is required. If given as string will derive a
:class:`~rateslib.scheduling.StubInference`.
front_stub : datetime, :green:`optional`
The unadjusted date for the start stub period. If given as adjusted, unadjusted
alternatives may be inferred.
back_stub : datetime, :green:`optional`
The unadjusted date for the back stub period. If given as adjusted, unadjusted
alternatives may be inferred.
See notes for combining ``stub``, ``front_stub`` and ``back_stub``
and any automatic stub inference.
roll : RollDay, int in [1, 31], str in {"eom", "imm", "som"}, :green:`optional`
The roll day of the schedule. If not given or not available in ``frequency`` will be
inferred for monthly frequency variants.
eom : bool, :green:`optional`
Use an end of month preference rather than regular rolls for ``roll`` inference. Set by
default. Not required if ``roll`` is defined.
modifier : Adjuster, str in {"NONE", "F", "MF", "P", "MP"}, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` used for adjusting unadjusted schedule dates
into adjusted dates. If given as string must define simple date rolling rules.
calendar : calendar, str, :green:`optional`
The business day calendar object to use. If string will call
:meth:`~rateslib.scheduling.get_calendar`.
payment_lag: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
a payment date. If given as integer will define the number of business days to
lag payments by.
payment_lag_exchange: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional payment date. If given as integer will define the number of business days to
lag payments by.
ex_div: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional dates, which may be used, for example by fixings schedules. If given as integer
will define the number of business days to lag dates by.
convention: str, :green:`optional (set by 'defaults')`
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
leg2_effective : datetime, :green:`optional (inherited from leg1)`
leg2_termination : datetime, str, :green:`optional (inherited from leg1)`
leg2_frequency : Frequency, str, :green:`optional (inherited from leg1)`
leg2_stub : StubInference, str, :green:`optional (inherited from leg1)`
leg2_front_stub : datetime, :green:`optional (inherited from leg1)`
leg2_back_stub : datetime, :green:`optional (inherited from leg1)`
leg2_roll : RollDay, int, str, :green:`optional (inherited from leg1)`
leg2_eom : bool, :green:`optional (inherited from leg1)`
leg2_modifier : Adjuster, str, :green:`optional (inherited from leg1)`
leg2_calendar : calendar, str, :green:`optional (inherited from leg1)`
leg2_payment_lag: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_payment_lag_exchange: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_ex_div: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_convention: str, :green:`optional (inherited from leg1)`
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of leg1 (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set from 'leg2_notional' or 'defaults' )`
The initial leg1 notional, defined in units of the currency of the leg. Only one
of ``notional`` and ``leg2_notional`` can be given. The alternate leg notional is derived
via non-deliverability :class:`~rateslib.data.fixings.FXFixing`.
amortization: float, Dual, Dual2, Variable, str, Amortization, :green:`optional (set as zero)`
Set a non-constant notional per *Period*. If a scalar value, adjusts the ``notional`` of
each successive period by that same value. Should have
sign equal to that of notional if the notional is to reduce towards zero.
leg2_currency : str, :red:`required`
The currency of the leg2.
leg2_notional : float, Dual, Dual2, Variable, :green:`optional (negatively inherited from leg1)`
leg2_amortization : float, Dual, Dual2, Variable, str, Amortization, :green:`optional (negatively inherited from leg1)`
.. note::
The following are the **non-deliverability parameters**
pair: str, :red:`required (if 'leg2_pair' not given)`
The currency pair for :class:`~rateslib.data.fixings.FXFixing` that determines *Period*
settlement on *Leg1*. The *reference currency* is implied from ``pair``.
Must include ``currency``. Not required if this leg is not *non-deliverable*.
leg2_pair: str, :green:`optional`
The currency pair for :class:`~rateslib.data.fixings.FXFixing` that determines *Period*
settlement on *Leg2*. Not required if not a 3-currency NDXCS.
fx_fixings: float, Dual, Dual2, Variable, Series, str, 2-tuple or list, :green:`optional`
The value of the :class:`~rateslib.data.fixings.FXFixing` for each *Period* according
to non-deliverability. Not required if this leg is not *non-deliverable*.
leg2_fx_fixings: float, Dual, Dual2, Variable, Series, str, 2-tuple or list, :green:`optional`
The value of the :class:`~rateslib.data.fixings.FXFixing` for each *Period* on *Leg2*
according to non-deliverability. Not required if this leg is not *non-deliverable*.
.. note::
The following are **rate parameters**.
fixed : bool, :green:`optional (set as False)`
Whether leg1 is a :class:`~rateslib.legs.FixedLeg` or a :class:`~rateslib.legs.FloatLeg`.
fixed_rate : float or None
The fixed rate applied to the :class:`~rateslib.legs.FixedLeg`. If `None`
will be set to mid-market when curves are provided.
fixing_method: FloatFixingMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.FloatFixingMethod` describing the determination
of the floating rate for each period.
fixing_frequency: Frequency, str, :green:`optional (set by 'frequency' or '1B')`
The :class:`~rateslib.scheduling.Frequency` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given is assumed to match the
frequency of the schedule for an IBOR type ``fixing_method`` or '1B' if RFR type.
fixing_series: FloatRateSeries, str, :green:`optional (implied by other parameters)`
The :class:`~rateslib.data.fixings.FloatRateSeries` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given inherits attributes given
such as the ``calendar``, ``convention``, ``fixing_method`` etc.
float_spread: float, Dual, Dual2, Variable, :green:`optional (set as 0.0)`
The amount (in bps) added to the rate in each period rate determination.
spread_compound_method: SpreadCompoundMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.SpreadCompoundMethod` used in the calculation
of the period rate when combining a ``float_spread``. Used **only** with RFR type
``fixing_method``.
rate_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
See :ref:`Fixings `.
The value of the rate fixing. If a scalar, is used directly. If a string identifier, links
to the central ``fixings`` object and data loader.
leg2_fixed : bool, :green:`optional (set as False)`
leg2_fixed_rate : float or None
leg2_fixing_method: FloatFixingMethod, str, :green:`optional (set by 'defaults')`
leg2_fixing_frequency: Frequency, str, :green:`optional (set by 'frequency' or '1B')`
leg2_fixing_series: FloatRateSeries, str, :green:`optional (implied by other parameters)`
leg2_float_spread: float, Dual, Dual2, Variable, :green:`optional (set as 0.0)`
leg2_spread_compound_method: SpreadCompoundMethod, str, :green:`optional (set by 'defaults')`
leg2_rate_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
.. note::
The following are the cross-currency **non-deliverable** parameters. For
further details and examples see **Notes**.
fx_fixings: float, Dual, Dual2, Variable, Series, str, 2-tuple or list, :green:`optional`
The value of the :class:`~rateslib.data.fixings.FXFixing` for each *Period* according
to non-deliverability. This can only be provided if ``leg2_notional`` is given. The
currency pair is expressed in direction 'currency:leg2_currency'.
mtm: bool, :green:`optional (set to False)`
Define the *XCS* is mark-to-market on leg1. Only one leg can be mark-to-market.
leg2_fx_fixings:
This can only be provided if ``notional`` is given. The
currency pair is expressed in direction 'currency:leg2_currency'.
leg2_mtm: bool, :green:`optional (set to False)`
.. note::
The following are **meta parameters**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
spec: str, :green:`optional`
A collective group of parameters. See
:ref:`default argument specifications `.
metric: str, :green:`optional (set as 'leg1')`
Determines which calculation metric to return by default when using the
:meth:`~rateslib.instruments.XCS.rate` method.
Notes
-----
A non-deliverable *XCS* replicates a non-mtm cross-currency swap
whose cashflows are paid out only in one *settlement currency*. This type of swap
allows two configurations;
- A **two currency** *NDXCS* where one leg is based on a *reference currency* and
difference *settlement currency*, whilst the other leg is based purely on cashflows
generated in the *settlement currency*.
- A **three currency** *NDXCS* where one leg is based on *reference currency 1* with a
*settlement currency* and the other leg is based on *reference currency 2* but
also settling in *settlement currency*.
.. tabs::
.. tab:: Two Currency NDXCS
The **required** parameters of a two currency NDXCS are as follows;
- A ``currency`` which defines the *settlement currency* on both legs.
- A ``pair`` which defines the currency pair and implicitly determines the *reference currency*.
- A ``notional`` or ``leg2_notional``. The placement of the notional defines which *Leg* is the
one that is based on the *reference currency*. Any notional quantity must be given in units of
*reference currency*.
- ``fx_fixings`` and ``leg2_fx_fixings``. These are FX fixings that are used by both legs;
one leg will have a fixed rate of exchange for all periods (a single entry usually determined
when the transaction is agreed), the other leg with base its ND FX Fixings on some future
data series.
This example swaps a 500mm INR *FloatLeg* non-deliverable into USD into a USD *FloatLeg*
with an initially agreed FX rate of USDINR 92.0
.. ipython:: python
ndxcs = NDXCS(
effective=dt(2026, 1, 1),
termination="18M",
frequency="S",
currency="usd", # <- USD settlement currency
pair="usdinr", # <- INR reference currency implied
notional=500e6, # <- Leg1 is based on the reference currency
fx_fixings="WMR_10AM_TY0",
leg2_fx_fixings=92.0, # <- The USD Leg notional is implied as 5.43mm
)
ndxcs.cashflows()
The *Leg* based on the *reference currency* is a non-deliverable *Leg* with a ``mtm``
parameter set to *True*, whilst the other *Leg* is non-deliverable with ``mtm`` set to
*False* and is based on one single FX rate.
.. tab:: Three Currency NDXCS
The **required** parameters of a three currency NDXCS are as follows;
- A ``currency`` which defines the *settlement currency* on both legs.
- A ``pair`` which defines the currency pair and implicitly determines
the *reference currency 1*.
- A ``leg2_pair`` which defines the currency pair of *Leg2* and implicitly
determines the *reference currency 2*.
- A ``notional`` and ``leg2_notional``. These must be pre-determined at an appropriate
rate of exchange, usually this is agreed at transaction execution. These must be
expressed in *reference currency 1* units and *reference currency 2* units
respectively.
- ``fx_fixings`` and ``leg2_fx_fixings`` which determine the future rates of exchange
on both non-deliverable legs.
This example swaps a 500mm INR *FloatLeg* non-deliverable into USD into a CHF *FloatLeg*
non-deliverable into USD with an initial FX rate of CHFINR 125.0.
.. ipython:: python
fixings.add("WMR_10AM_TY0_USDCHF", Series(index=[dt(2025, 1, 6)], data=[0.9]))
ndxcs = NDXCS(
effective=dt(2026, 1, 1),
termination="18M",
frequency="S",
currency="usd", # <- USD settlement currency
pair="usdinr", # <- INR reference currency 1 implied
leg2_pair="usdchf", # <- CHF reference currency 2 implied
notional=500e6, # <- Leg1 is based on the reference currency 1
leg2_notional=500e6/125.0, # <- Leg2 entered directly in ref currency 2 units
fx_fixings="WMR_10AM_TY0", # <- Data series tag for FXFixings on Leg1
leg2_fx_fixings="WMR_10AM_TY0", # <- Data series tag for FXFixings on Leg2
)
ndxcs.cashflows()
Both *Legs* are non-deliverable with their ``mtm`` parameters set to *True*.
.. ipython:: python
:suppress:
fixings.pop("WMR_10AM_TY0_USDINR")
fixings.pop("WMR_10AM_TY0_USDCHF")
""" # noqa: E501
def _rate_scalar_calc(self) -> float:
if self.kwargs.meta["metric"] == "leg1":
return 1.0 if isinstance(self.leg1, FixedLeg) else 100.0
else:
return 1.0 if isinstance(self.leg2, FixedLeg) else 100.0
@property
def fixed_rate(self) -> DualTypes_:
"""The fixed rate parameter of the composited
:class:`~rateslib.legs.FixedLeg`."""
if isinstance(self.leg1, FixedLeg):
return self.leg1.fixed_rate
else:
raise AttributeError(f"Leg1 is of type: {type(self.leg1).__name__}")
@fixed_rate.setter
def fixed_rate(self, value: DualTypes_) -> None:
if isinstance(self.leg1, FixedLeg):
self.kwargs.leg1["fixed_rate"] = value
self.leg1.fixed_rate = value
else:
raise AttributeError(f"Leg1 is of type: {type(self.leg1).__name__}")
@property
def float_spread(self) -> DualTypes:
"""The float spread parameter of the composited
:class:`~rateslib.legs.FloatLeg`."""
if isinstance(self.leg1, FloatLeg):
return self.leg1.float_spread
else:
raise AttributeError(f"Leg1 is of type: {type(self.leg1).__name__}")
@float_spread.setter
def float_spread(self, value: DualTypes) -> None:
if isinstance(self.leg1, FloatLeg):
self.kwargs.leg1["float_spread"] = value
self.leg1.float_spread = value
else:
raise AttributeError(f"Leg1 is of type: {type(self.leg1).__name__}")
@property
def leg2_fixed_rate(self) -> DualTypes_:
"""The float spread parameter of the composited
:class:`~rateslib.legs.FloatLeg`."""
if isinstance(self.leg2, FixedLeg):
return self.leg2.fixed_rate
else:
raise AttributeError(f"Leg2 is of type: {type(self.leg2).__name__}")
@leg2_fixed_rate.setter
def leg2_fixed_rate(self, value: DualTypes_) -> None:
if isinstance(self.leg2, FixedLeg):
self.kwargs.leg2["fixed_rate"] = value
self.leg2.fixed_rate = value
else:
raise AttributeError(f"Leg2 is of type: {type(self.leg2).__name__}")
@property
def leg2_float_spread(self) -> DualTypes_:
"""The float spread parameter of the composited
:class:`~rateslib.legs.FloatLeg`."""
if isinstance(self.leg2, FloatLeg):
return self.leg2.float_spread
else:
raise AttributeError(f"Leg2 is of type: {type(self.leg2).__name__}")
@leg2_float_spread.setter
def leg2_float_spread(self, value: DualTypes) -> None:
if isinstance(self.leg2, FloatLeg):
self.kwargs.leg2["float_spread"] = value
self.leg2.float_spread = value
else:
raise AttributeError(f"Leg2 is of type: {type(self.leg2).__name__}")
@property
def leg1(self) -> FixedLeg | FloatLeg:
"""The first :class:`~rateslib.legs.FixedLeg` or
:class:`~rateslib.legs.FloatLeg` of the *Instrument*."""
return self._leg1
@property
def leg2(self) -> FixedLeg | FloatLeg:
"""The second :class:`~rateslib.legs.FixedLeg` or
:class:`~rateslib.legs.FloatLeg` of the *Instrument*."""
return self._leg2
@property
def legs(self) -> Sequence[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
def __init__(
self,
# scheduling
effective: datetime_ = NoInput(0),
termination: datetime | str_ = NoInput(0),
frequency: Frequency | str_ = NoInput(0),
*,
stub: str_ = NoInput(0),
front_stub: datetime_ = NoInput(0),
back_stub: datetime_ = NoInput(0),
roll: int | RollDay | str_ = NoInput(0),
eom: bool_ = NoInput(0),
modifier: str_ = NoInput(0),
calendar: CalInput = NoInput(0),
payment_lag: int_ = NoInput(0),
payment_lag_exchange: int_ = NoInput(0),
ex_div: int_ = NoInput(0),
convention: str_ = NoInput(0),
leg2_effective: datetime_ = NoInput(1),
leg2_termination: datetime | str_ = NoInput(1),
leg2_frequency: Frequency | str_ = NoInput(1),
leg2_stub: str_ = NoInput(1),
leg2_front_stub: datetime_ = NoInput(1),
leg2_back_stub: datetime_ = NoInput(1),
leg2_roll: int | RollDay | str_ = NoInput(1),
leg2_eom: bool_ = NoInput(1),
leg2_modifier: str_ = NoInput(1),
leg2_calendar: CalInput = NoInput(1),
leg2_payment_lag: int_ = NoInput(1),
leg2_payment_lag_exchange: int_ = NoInput(1),
leg2_ex_div: int_ = NoInput(1),
leg2_convention: str_ = NoInput(1),
# settlement parameters
currency: str_ = NoInput(0),
notional: float_ = NoInput(0),
amortization: float_ = NoInput(0),
leg2_notional: float_ = NoInput(0),
leg2_amortization: float_ = NoInput(0),
# nondeliverable params
pair: str_ = NoInput(0),
leg2_pair: str_ = NoInput(0),
fx_fixings: LegFixings = NoInput(0),
leg2_fx_fixings: LegFixings = NoInput(0),
# rate parameters
fixed: bool_ = NoInput(0),
fixed_rate: DualTypes_ = NoInput(0),
float_spread: DualTypes_ = NoInput(0),
spread_compound_method: str_ = NoInput(0),
rate_fixings: LegFixings = NoInput(0),
fixing_method: str_ = NoInput(0),
fixing_frequency: Frequency | str_ = NoInput(0),
fixing_series: FloatRateSeries | str_ = NoInput(0),
leg2_fixed: bool_ = NoInput(0),
leg2_mtm: bool_ = NoInput(0),
leg2_fixed_rate: DualTypes_ = NoInput(0),
leg2_float_spread: DualTypes_ = NoInput(0),
leg2_spread_compound_method: str_ = NoInput(0),
leg2_rate_fixings: LegFixings = NoInput(0),
leg2_fixing_method: str_ = NoInput(0),
leg2_fixing_frequency: Frequency | str_ = NoInput(0),
leg2_fixing_series: FloatRateSeries | str_ = NoInput(0),
# meta parameters
curves: CurvesT_ = NoInput(0),
spec: str_ = NoInput(0),
metric: str_ = NoInput(0),
) -> None:
user_args = dict(
# scheduling
effective=effective,
leg2_effective=leg2_effective,
termination=termination,
leg2_termination=leg2_termination,
frequency=frequency,
leg2_frequency=leg2_frequency,
stub=stub,
leg2_stub=leg2_stub,
front_stub=front_stub,
leg2_front_stub=leg2_front_stub,
back_stub=back_stub,
leg2_back_stub=leg2_back_stub,
roll=roll,
leg2_roll=leg2_roll,
eom=eom,
leg2_eom=leg2_eom,
modifier=modifier,
leg2_modifier=leg2_modifier,
calendar=calendar,
leg2_calendar=leg2_calendar,
payment_lag=payment_lag,
leg2_payment_lag=leg2_payment_lag,
payment_lag_exchange=payment_lag_exchange,
leg2_payment_lag_exchange=leg2_payment_lag_exchange,
ex_div=ex_div,
leg2_ex_div=leg2_ex_div,
convention=convention,
leg2_convention=leg2_convention,
# settlement
currency=currency,
notional=notional,
leg2_notional=leg2_notional,
amortization=amortization,
leg2_amortization=leg2_amortization,
# non-deliverability
pair=pair,
leg2_pair=leg2_pair,
fx_fixings=fx_fixings,
leg2_fx_fixings=leg2_fx_fixings,
# rate
fixed_rate=fixed_rate,
float_spread=float_spread,
spread_compound_method=spread_compound_method,
rate_fixings=rate_fixings,
fixing_method=fixing_method,
fixing_frequency=fixing_frequency,
fixing_series=fixing_series,
leg2_fixed_rate=leg2_fixed_rate,
leg2_float_spread=leg2_float_spread,
leg2_spread_compound_method=leg2_spread_compound_method,
leg2_rate_fixings=leg2_rate_fixings,
leg2_fixing_method=leg2_fixing_method,
leg2_fixing_frequency=leg2_fixing_frequency,
leg2_fixing_series=leg2_fixing_series,
# meta
fixed=fixed,
leg2_fixed=leg2_fixed,
curves=self._parse_curves(curves),
metric=metric,
)
instrument_args = dict( # these are hard coded arguments specific to this instrument
leg2_currency=NoInput(1),
initial_exchange=True,
final_exchange=True,
leg2_initial_exchange=True,
leg2_final_exchange=True,
vol=_Vol(),
)
default_args = dict(
payment_lag=defaults.payment_lag_specific[type(self).__name__],
payment_lag_exchange=defaults.payment_lag_exchange,
currency=defaults.base_currency,
fixed=False,
leg2_fixed=False,
metric="leg1",
)
self._kwargs = _KWArgs(
spec=spec,
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=["curves", "metric", "fixed", "leg2_fixed", "vol"],
)
# validation of currencies and pairs
if isinstance(self.kwargs.leg1["notional"], NoInput) and isinstance(
self.kwargs.leg2["notional"], NoInput
):
self.kwargs.leg1["notional"] = defaults.notional
(
self.kwargs.leg1["mtm"],
self.kwargs.leg2["mtm"],
self.kwargs.leg1["pair"],
self.kwargs.leg2["pair"],
) = self._init_args(
currency=self.kwargs.leg1["currency"].lower(),
pair=self.kwargs.leg1["pair"],
leg2_pair=self.kwargs.leg2["pair"],
notional=self.kwargs.leg1["notional"],
leg2_notional=self.kwargs.leg2["notional"],
)
# narrowing of fixed or floating
float_attrs = [
"float_spread",
"spread_compound_method",
"rate_fixings",
"fixing_method",
"fixing_frequency",
"fixing_series",
]
if self.kwargs.meta["fixed"]:
for item in float_attrs:
self.kwargs.leg1.pop(item)
else:
self.kwargs.leg1.pop("fixed_rate")
if self.kwargs.meta["leg2_fixed"]:
for item in float_attrs:
self.kwargs.leg2.pop(item)
else:
self.kwargs.leg2.pop("fixed_rate")
# populate non-deliverable leg, based on which leg notional is given
if isinstance(self.kwargs.leg1["notional"], NoInput):
self._kwargs.leg1["notional"] = -1.0 * self._kwargs.leg2["notional"]
self._kwargs.leg1["amortization"] = (
NoInput(0)
if isinstance(self._kwargs.leg2["amortization"], NoInput)
else -1.0 * self._kwargs.leg2["amortization"]
)
if isinstance(self.kwargs.leg2["notional"], NoInput):
self._kwargs.leg2["notional"] = -1.0 * self._kwargs.leg1["notional"]
self._kwargs.leg2["amortization"] = (
NoInput(0)
if isinstance(self._kwargs.leg1["amortization"], NoInput)
else -1.0 * self._kwargs.leg1["amortization"]
)
if self.kwargs.meta["fixed"]:
self._leg1: FixedLeg | FloatLeg = FixedLeg(
**_convert_to_schedule_kwargs(self.kwargs.leg1, 1)
)
else:
self._leg1 = FloatLeg(**_convert_to_schedule_kwargs(self.kwargs.leg1, 1))
if self.kwargs.meta["leg2_fixed"]:
self._leg2: FixedLeg | FloatLeg = FixedLeg(
**_convert_to_schedule_kwargs(self.kwargs.leg2, 1)
)
else:
self._leg2 = FloatLeg(**_convert_to_schedule_kwargs(self.kwargs.leg2, 1))
self._legs = [self.leg1, self.leg2]
self._rate_scalar = self._rate_scalar_calc()
def _init_args(
self,
currency: str,
pair: str_,
leg2_pair: str_,
notional: DualTypes_,
leg2_notional: DualTypes_,
) -> tuple[LegMtm, LegMtm, str, str]:
if isinstance(pair, NoInput):
raise ValueError("`pair` must be given when creating a NDXCS.")
else:
pair_: str = pair.lower()
if currency not in pair_:
raise ValueError(f"`pair` must contain {currency}.")
if isinstance(leg2_pair, str):
leg2_pair_: str = leg2_pair.lower()
if currency not in leg2_pair_:
raise ValueError(f"`leg2_pair` must contain {currency}.")
return NDXCS._init_three_currency(pair_, leg2_pair_, notional, leg2_notional)
else:
return NDXCS._init_two_currency(pair_, notional, leg2_notional)
@staticmethod
def _init_two_currency(
pair: str,
notional: DualTypes_,
leg2_notional: DualTypes_,
) -> tuple[LegMtm, LegMtm, str, str]:
if isinstance(notional, NoInput):
# then reference Leg is leg2
mtm, leg2_mtm = LegMtm.Initial, LegMtm.Payment
else:
if not isinstance(leg2_notional, NoInput):
raise ValueError(
"Only one of `notional` or `leg2_notional` can be given for a two-currency "
"NDXCS.\nIf you are trying to set either notional based on a transacted "
"FX rate, then:\n1) Set the notional in reference currency units on the "
"reference currency leg.\n2) Set the ``fx_fixing`` or ``leg2_fx_fixing`` value "
"as this scalar for the leg that is solely based on the settlement currency."
)
mtm, leg2_mtm = LegMtm.Payment, LegMtm.Initial
return mtm, leg2_mtm, pair, pair
@staticmethod
def _init_three_currency(
pair: str,
leg2_pair: str,
notional: DualTypes_,
leg2_notional: DualTypes_,
) -> tuple[LegMtm, LegMtm, str, str]:
if isinstance(notional, NoInput) or isinstance(leg2_notional, NoInput):
raise ValueError(
"A three-currency NDXCS requires both `notional` and `leg2_notional` to be given.\n"
"These should be given in their relevant reference currencies, according to the "
"initially agreed FX Rate between them."
)
return LegMtm.Payment, LegMtm.Payment, pair, leg2_pair
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
leg2_rate_curve = _get_curve("leg2_rate_curve", True, True, *c)
leg2_disc_curve = _get_curve("leg2_disc_curve", False, True, *c)
rate_curve = _get_curve("rate_curve", True, True, *c)
disc_curve = _get_curve("disc_curve", False, True, *c)
metric_ = _drb(self.kwargs.meta["metric"], metric)
fx_ = _get_fx_forwards_maybe_from_solver(fx=fx, solver=solver)
if metric_ == "leg1":
leg2_npv: DualTypes = self.leg2.npv( # type: ignore[assignment]
rate_curve=leg2_rate_curve,
disc_curve=leg2_disc_curve,
base=self.leg1.settlement_params.currency,
fx=fx_,
settlement=settlement,
forward=forward,
)
spread = self.leg1.spread(
target_npv=-leg2_npv,
rate_curve=rate_curve,
disc_curve=disc_curve,
settlement=settlement,
fx=fx_,
forward=forward,
)
if self.kwargs.meta["fixed"]:
return spread / 100.0
else:
return spread
elif metric_ == "leg2":
leg1_npv: DualTypes = self.leg1.npv( # type: ignore[assignment]
rate_curve=rate_curve,
disc_curve=disc_curve,
base=self.leg2.settlement_params.currency,
fx=fx_,
settlement=settlement,
forward=forward,
)
spread = self.leg2.spread(
target_npv=-leg1_npv,
rate_curve=leg2_rate_curve,
disc_curve=leg2_disc_curve,
settlement=settlement,
forward=forward,
fx=fx_,
)
if self.kwargs.meta["leg2_fixed"]:
return spread / 100.0
else:
return spread
else:
raise ValueError("`metric` must be in {'leg1', 'leg2'}")
def spread(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
return self.rate(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
metric=metric,
)
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
self._set_pricing_mid(
curves=curves,
solver=solver,
settlement=settlement,
forward=forward,
fx=fx,
)
return super().npv(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=local,
settlement=settlement,
forward=forward,
)
def _set_pricing_mid(
self,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> None:
# all float_spread are assumed to be equal to zero if not given.
# missing fixed rates will be priced and set if possible.
if isinstance(self.leg1, FixedLeg) and isinstance(self.kwargs.leg1["fixed_rate"], NoInput):
if isinstance(self.leg2, FixedLeg) and isinstance(
self.kwargs.leg2["fixed_rate"], NoInput
):
raise ValueError("At least one leg must have a defined `fixed_rate`.")
mid_price = self.rate(
curves=curves,
solver=solver,
fx=fx,
settlement=settlement,
forward=forward,
metric="leg1",
)
self.leg1.fixed_rate = _dual_float(mid_price)
elif isinstance(self.leg2, FixedLeg) and isinstance(
self.kwargs.leg2["fixed_rate"], NoInput
):
# leg1 cannot be fixed with NoInput - this branch is covered above
mid_price = self.rate(
curves=curves,
solver=solver,
fx=fx,
settlement=settlement,
forward=forward,
metric="leg2",
)
self.leg2.fixed_rate = _dual_float(mid_price)
elif (
isinstance(self.leg1, FloatLeg)
and isinstance(self.kwargs.leg1["float_spread"], NoInput)
and isinstance(self.leg2, FloatLeg)
and isinstance(self.kwargs.leg2["float_spread"], NoInput)
):
# then no FloatLeg pricing parameters are provided
mid_price = self.rate(
curves=curves,
solver=solver,
fx=fx,
settlement=settlement,
forward=forward,
)
if self.kwargs.meta["metric"].lower() == "leg1":
self.leg1.float_spread = _dual_float(mid_price)
else:
self.leg2.float_spread = _dual_float(mid_price)
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
A XCS requires 4 curves (mostly if float-float, otherwise it needs 2)
"""
if isinstance(curves, NoInput):
return _Curves()
elif isinstance(curves, dict):
return _Curves(
rate_curve=curves.get("rate_curve", NoInput(0)),
disc_curve=curves.get("disc_curve", NoInput(0)),
leg2_rate_curve=curves.get("leg2_rate_curve", NoInput(0)),
leg2_disc_curve=curves.get("leg2_disc_curve", NoInput(0)),
)
elif isinstance(curves, list | tuple):
if len(curves) == 4:
return _Curves(
rate_curve=NoInput(0) if curves[0] is None else curves[0],
disc_curve=curves[1],
leg2_rate_curve=NoInput(0) if curves[2] is None else curves[2],
leg2_disc_curve=curves[3],
)
else:
raise ValueError(
f"{type(self).__name__} requires 4 curve type input. Got {len(curves)}."
)
elif isinstance(curves, _Curves):
return curves
else:
raise ValueError(f"{type(self).__name__} requires 4 curve type input. Got 1.")
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return super()._cashflows_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return self._local_analytic_rate_fixings_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
)
================================================
FILE: python/rateslib/instruments/portfolio.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from collections.abc import Sequence
from typing import TYPE_CHECKING, NoReturn
from pandas import DataFrame
from rateslib import defaults
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.pricing import (
_get_fx_maybe_from_solver,
)
from rateslib.periods.utils import _maybe_fx_converted
if TYPE_CHECKING:
from rateslib.local_types import (
Any,
CurvesT_,
DualTypes,
FXForwards_,
Solver_,
VolT_,
datetime_,
str_,
)
def _instrument_npv(
instrument: _BaseInstrument, *args: Any, **kwargs: Any
) -> DualTypes | dict[str, DualTypes]: # pragma: no cover
# this function is captured by TestPortfolio pooling but is not registered as a parallel process
# used for parallel processing with Portfolio.npv
return instrument.npv(*args, **kwargs)
class Portfolio(_BaseInstrument):
"""
A collection of :class:`~rateslib.instruments.protocols._BaseInstrument`.
.. rubric:: Examples
The following initialises a *Portfolio* of *IRSs*.
.. ipython:: python
:suppress:
from rateslib.instruments import Portfolio, IRS
from datetime import datetime as dt
.. ipython:: python
pf = Portfolio(instruments=[
IRS(dt(2000, 1, 1), "1y", notional=10e3, spec="eur_irs", curves=["estr"]),
IRS(dt(2000, 1, 1), "2y", notional=10e3, spec="eur_irs", curves=["estr"]),
IRS(dt(2000, 1, 1), "3y", notional=10e3, spec="eur_irs", curves=["estr"]),
])
pf.cashflows()
.. rubric:: Pricing
Each :class:`~rateslib.instruments.protocols._BaseInstrument` should have
its own ``curves`` and ``vol`` objects set at its initialisation, according to the
documentation for that *Instrument*. For the pricing methods ``curves`` and ``vol`` objects,
these can be universally passed to each *Instrument* but in many cases that would be
technically impossible since each *Instrument* might require difference pricing objects, e.g.
if the *Instruments* have difference currencies. For a *Portfolio*
of three *IRS* in the same currency this would be possible, however.
Parameters
----------
instruments : list of _BaseInstrument
The collection of *Instruments*.
Notes
-----
A *Portfolio* is just a container for multiple
:class:`~rateslib.instruments.protocols._BaseInstrument`.
There is no concept of a :meth:`~rateslib.instruments.Portfolio.rate`.
"""
_instruments: Sequence[_BaseInstrument]
@property
def instruments(self) -> Sequence[_BaseInstrument]:
"""The *Instruments* contained within the *Portfolio*."""
return self._instruments
def __init__(self, instruments: Sequence[_BaseInstrument]) -> None:
if not isinstance(instruments, Sequence):
raise ValueError("`instruments` should be a list of Instruments.")
self._instruments = instruments
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
"""
Return the NPV of the *Portfolio* by summing individual *Instrument* NPVs.
"""
# if the pool is 1 do not do any parallel processing and return the single core func
if defaults.pool == 1:
local_npv: dict[str, DualTypes] = self._npv_single_core(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
else:
from functools import partial
from multiprocessing import Pool
func = partial(
_instrument_npv,
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=True,
forward=forward,
settlement=settlement,
)
p = Pool(defaults.pool)
results = p.map(func, self.instruments)
p.close()
# Aggregate results:
_ = DataFrame(results).fillna(0.0)
_ = _.sum()
local_npv = _.to_dict() # type: ignore[assignment]
# ret = {}
# for result in results:
# for ccy in result:
# if ccy in ret:
# ret[ccy] += result[ccy]
# else:
# ret[ccy] = result[ccy]
if not local:
single_value: DualTypes = 0.0
base_ = _drb(self.settlement_params.currency, base)
for k, v in local_npv.items():
single_value += _maybe_fx_converted(
value=v,
currency=k,
fx=_get_fx_maybe_from_solver(fx=fx, solver=solver),
base=base_,
forward=forward,
)
return single_value
else:
return local_npv
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return self._local_analytic_rate_fixings_from_instruments(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
)
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return self._cashflows_from_instruments(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
base=base,
)
def rate(self, *args: Any, **kwargs: Any) -> NoReturn:
raise NotImplementedError("`rate` is not defined for Portfolio.")
def analytic_delta(self, *args: Any, **kwargs: Any) -> NoReturn:
raise NotImplementedError("`analytic_delta` is not defined for Portfolio.")
================================================
FILE: python/rateslib/instruments/protocols/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from abc import ABCMeta
from typing import TYPE_CHECKING
from rateslib.instruments.protocols.analytic_delta import _WithAnalyticDelta
from rateslib.instruments.protocols.analytic_fixings import _WithAnalyticRateFixings
from rateslib.instruments.protocols.cashflows import _WithCashflows
from rateslib.instruments.protocols.fixings import _WithFixings
from rateslib.instruments.protocols.kwargs import _KWArgs
from rateslib.instruments.protocols.npv import _WithNPV
from rateslib.instruments.protocols.rate import _WithRate
from rateslib.instruments.protocols.sensitivities import _WithSensitivities
if TYPE_CHECKING:
pass
# from rateslib.typing import ()
class _BaseInstrument(
_WithSensitivities,
_WithNPV,
_WithRate,
_WithCashflows,
_WithFixings,
_WithAnalyticDelta,
_WithAnalyticRateFixings,
metaclass=ABCMeta,
):
"""Abstract base class used in the construction of *Instruments*."""
__all__ = [
"_KWArgs",
"_WithNPV",
"_WithRate",
"_WithCashflows",
"_WithFixings",
"_WithAnalyticDelta",
"_WithAnalyticRateFixings",
"_WithSensitivities",
"_BaseInstrument",
]
================================================
FILE: python/rateslib/instruments/protocols/analytic_delta.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from rateslib.enums.generics import NoInput
from rateslib.instruments.protocols.pricing import (
_get_curve,
_get_fx_forwards_maybe_from_solver,
_get_fx_vol,
_parse_curves,
_parse_vol,
_WithPricingObjs,
)
if TYPE_CHECKING:
from rateslib.local_types import (
CurvesT_,
DualTypes,
FXForwards_,
Solver_,
VolT_,
_KWArgs,
datetime_,
str_,
)
class _WithAnalyticDelta(_WithPricingObjs, Protocol):
"""
Protocol to determine the *analytic rate delta* of a particular *Leg* of an *Instrument*.
"""
@property
def kwargs(self) -> _KWArgs: ...
def analytic_delta(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
leg: int = 1,
) -> DualTypes | dict[str, DualTypes]:
"""
Calculate the analytic rate delta of a *Leg* of the *Instrument*.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import dt, Curve, IRS
.. ipython:: python
curve = Curve({dt(2000, 1, 1): 1.0, dt(2010, 1, 1): 0.75})
irs = IRS(dt(2000, 1, 1), "3Y", spec="usd_irs", fixed_rate=1.0, curves=[curve])
irs.analytic_delta()
irs.analytic_delta(local=True)
.. role:: red
.. role:: green
Parameters
----------
curves: _Curves, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
solver: Solver, :green:`optional`
A :class:`~rateslib.solver.Solver` object containing *Curve*, *Smile*, *Surface*, or
*Cube* mappings for pricing.
fx: FXForwards, :green:`optional`
The :class:`~rateslib.fx.FXForwards` object used for forecasting FX rates, if necessary.
vol: _Vol, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
base: str, :green:`optional (set to settlement currency)`
The currency to convert the *local settlement* NPV to.
local: bool, :green:`optional (set as False)`
An override flag to return a dict of NPV values indexed by string currency.
settlement: datetime, :green:`optional`
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, :green:`optional`
The future date to project the *PV* to using the ``disc_curve``.
leg: int, :green:`optional (set as 1)`
The *Leg* over which to calculate the analytic rate delta.
Returns
-------
float, Dual, Dual2, Variable or dict of such indexed by string currency.
"""
c = _parse_curves(self, curves, solver) # type: ignore[arg-type]
v = _parse_vol(self, vol, solver, False) # type: ignore[call-overload, misc]
prefix = "" if leg == 1 else "leg2_"
if hasattr(self, "legs"):
rate_curve = _get_curve(f"{prefix}rate_curve", True, True, *c)
disc_curve = _get_curve(f"{prefix}disc_curve", False, True, *c)
index_curve = _get_curve(f"{prefix}index_curve", False, True, *c)
value: DualTypes | dict[str, DualTypes] = self.legs[leg - 1].analytic_delta(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
fx_vol=_get_fx_vol(True, True, *v),
fx=_get_fx_forwards_maybe_from_solver(fx=fx, solver=solver),
base=base,
local=local,
settlement=settlement,
forward=forward,
)
else:
raise NotImplementedError("`analytic_delta` can only called on Leg based Instruments.")
return value
================================================
FILE: python/rateslib/instruments/protocols/analytic_fixings.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import warnings
from typing import TYPE_CHECKING, Protocol
from pandas import DataFrame, DatetimeIndex, concat
from rateslib.enums.generics import NoInput
from rateslib.instruments.protocols.pricing import (
_get_curve,
_get_fx_maybe_from_solver,
_get_fx_vol,
_parse_curves,
_parse_vol,
_WithPricingObjs,
)
if TYPE_CHECKING:
from rateslib.local_types import (
CurvesT_,
FXForwards_,
Solver_,
VolT_,
_KWArgs,
_Vol,
datetime_,
)
def _composit_fixings_table(df_result: DataFrame, df: DataFrame) -> DataFrame:
"""
Add a DataFrame to an existing fixings table by extending or adding to relevant columns.
Parameters
----------
df_result: The main DataFrame that will be updated
df: The incoming DataFrame with new data to merge
Returns
-------
DataFrame
"""
# reindex the result DataFrame
if df_result.empty:
return df
else:
df_result = df_result.reindex(index=df_result.index.union(df.index))
# # update existing columns with missing data from the new available data
# for c in [c for c in df.columns if c in df_result.columns and c[1] in ["dcf", "rates"]]:
# df_result[c] = df_result[c].combine_first(df[c])
# merge by addition existing values with missing filled to zero
m = [c for c in df.columns if c in df_result.columns]
if len(m) > 0:
df_result[m] = df_result[m].add(df[m], fill_value=0.0)
# append new columns without additional calculation
a = [c for c in df.columns if c not in df_result.columns]
if len(a) > 0:
df_result[a] = df[a]
# df_result.columns = MultiIndex.from_tuples(df_result.columns)
return df_result
class _WithAnalyticRateFixings(_WithPricingObjs, Protocol):
"""
Protocol to determine the *analytic rate fixings' sensitivity* of a particular *Instrument*.
"""
@property
def kwargs(self) -> _KWArgs: ...
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
"""
Calculate the sensitivity to rate fixings of the *Instrument*, expressed in local
settlement currency per basis point.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import dt, Curve, IRS
.. ipython:: python
curve1 = Curve({dt(2000, 1, 1): 1.0, dt(2010, 1, 1): 0.75}, id="Eur1mCurve")
curve3 = Curve({dt(2000, 1, 1): 1.0, dt(2010, 1, 1): 0.70}, id="Eur3mCurve")
irs = IRS(dt(2000, 1, 1), "20m", spec="eur_irs3", curves=[{"1m": curve1, "3m": curve3}, curve1])
irs.local_analytic_rate_fixings()
.. role:: red
.. role:: green
Parameters
----------
curves: _Curves, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
solver: Solver, :green:`optional`
A :class:`~rateslib.solver.Solver` object containing *Curve*, *Smile*, *Surface*, or
*Cube* mappings for pricing.
fx: FXForwards, :green:`optional`
The :class:`~rateslib.fx.FXForwards` object used for forecasting FX rates, if necessary.
vol: _Vol, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
settlement: datetime, :green:`optional`
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, :green:`optional`
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
DataFrame
Notes
-----
This analytic method will index the sensitivities with series identifier according to the
*Curve* id which has forecast the fixing.
""" # noqa: E501
raise NotImplementedError(
f"{type(self).__name__} must implement `local_analytic_rate_fixings`"
)
def _local_analytic_rate_fixings_from_legs(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
assert hasattr(self, "legs") # noqa: S101
# this is a generic implementation to handle 2 legs.
c = _parse_curves(self, curves, solver) # type: ignore[arg-type]
v = _parse_vol(self, vol, solver, False) # type: ignore[call-overload, misc]
fx_vol = _get_fx_vol(True, True, *v)
_vol_meta: _Vol = self.kwargs.meta["vol"]
_fx_maybe_from_solver = _get_fx_maybe_from_solver(fx=fx, solver=solver)
dfs: list[DataFrame] = []
for leg, names in zip(
self.legs,
[
("rate_curve", "disc_curve", "index_curve"),
("leg2_rate_curve", "leg2_disc_curve", "leg2_index_curve"),
],
strict=False,
):
rate_curve = _get_curve(names[0], True, True, *c)
disc_curve = _get_curve(names[1], False, True, *c)
index_curve = _get_curve(names[2], False, True, *c)
dfs.append(
leg.local_analytic_rate_fixings(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
fx=_fx_maybe_from_solver,
fx_vol=fx_vol,
settlement=settlement,
forward=forward,
)
)
with warnings.catch_warnings():
# TODO: pandas 2.1.0 has a FutureWarning for concatenating DataFrames with Null entries
warnings.filterwarnings("ignore", category=FutureWarning)
df = concat(dfs)
return df.sort_index()
def _local_analytic_rate_fixings_from_instruments(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
assert hasattr(self, "instruments") # noqa: S101
df_result = DataFrame(index=DatetimeIndex([], name="obs_dates"))
for inst in self.instruments:
try:
df = inst.local_analytic_rate_fixings(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
forward=forward,
settlement=settlement,
)
except AttributeError:
continue
df_result = _composit_fixings_table(df_result, df)
return df_result
================================================
FILE: python/rateslib/instruments/protocols/cashflows.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import warnings
from typing import TYPE_CHECKING, Protocol
from pandas import DataFrame, concat, isna
from rateslib import defaults
from rateslib.enums.generics import NoInput
from rateslib.instruments.protocols.kwargs import _KWArgs
from rateslib.instruments.protocols.pricing import (
_get_curve,
_get_fx_maybe_from_solver,
_get_fx_vol,
_maybe_get_ir_vol_maybe_from_solver,
_parse_curves,
_parse_vol,
_WithPricingObjs,
)
if TYPE_CHECKING:
from rateslib.local_types import (
Any,
CurvesT_,
FXForwards_,
Solver_,
VolT_,
datetime_,
str_,
)
class _WithCashflows(_WithPricingObjs, Protocol):
"""
Protocol to determine cashflows for any *Instrument* type.
"""
_kwargs: _KWArgs
@property
def kwargs(self) -> _KWArgs:
return self._kwargs
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
"""
Return aggregated cashflow data for the *Instrument*.
.. warning::
This method is a convenience method to provide a visual representation of all
associated calculation data. Calling this method to extract certain values
should be avoided. It is more efficient to source relevant parameters or calculations
from object attributes or other methods directly.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import dt, Curve, IRS
.. ipython:: python
irs = IRS(dt(2000, 1, 1), "3Y", spec="usd_irs", fixed_rate=1.0)
irs.cashflows()
Providing relevant pricing objects will ensure all data that can be calculated is returned.
.. ipython:: python
curve = Curve({dt(2000, 1, 1): 1.0, dt(2010, 1, 1): 0.75})
irs.cashflows(curves=[curve])
Parameters
----------
curves: _Curves, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
solver: Solver, :green:`optional`
A :class:`~rateslib.solver.Solver` object containing *Curve*, *Smile*, *Surface*, or
*Cube* mappings for pricing.
fx: FXForwards, :green:`optional`
The :class:`~rateslib.fx.FXForwards` object used for forecasting FX rates, if necessary.
vol: _Vol, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
base: str, :green:`optional (set to settlement currency)`
The currency to convert the *local settlement* NPV to.
settlement: datetime, :green:`optional`
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, :green:`optional`
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
DataFrame
"""
raise NotImplementedError(f"{type(self).__name__} must implement `cashflows`.")
def _cashflows_from_legs(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
"""
Return aggregated cashflow data for the *Period*.
.. warning::
This method is a convenience method to provide a visual representation of all
associated calculation data. Calling this method to extracting certain values
should be avoided. It is more efficent to source relevant parameters or calculations
from object attributes or other methods directly.
Returns
-------
dict of values
"""
# this is a generalist implementation of an NPV function for an instrument with 2 legs.
# most instruments may be likely to implement NPV directly to benefit from optimisations
# specific to that instrument
assert hasattr(self, "legs") # noqa: S101
c = _parse_curves(self, curves, solver) # type: ignore[arg-type]
v = _parse_vol(self, vol, solver, False) # type: ignore[call-overload, misc]
fx_vol = _get_fx_vol(True, True, *v)
_fx_maybe_from_solver = _get_fx_maybe_from_solver(fx=fx, solver=solver)
ir_vol = _maybe_get_ir_vol_maybe_from_solver(self.kwargs.meta["vol"], v[0], solver)
legs_df = [
self.legs[0].cashflows(
rate_curve=_get_curve("rate_curve", True, True, *c),
disc_curve=_get_curve("disc_curve", False, True, *c),
index_curve=_get_curve("index_curve", False, True, *c),
fx=_fx_maybe_from_solver,
fx_vol=fx_vol,
ir_vol=ir_vol,
settlement=settlement,
forward=forward,
base=base,
)
]
if len(self.legs) > 1:
legs_df.append(
self.legs[1].cashflows(
rate_curve=_get_curve("leg2_rate_curve", True, True, *c),
disc_curve=_get_curve("leg2_disc_curve", False, True, *c),
index_curve=_get_curve("leg2_index_curve", False, True, *c),
fx=_fx_maybe_from_solver,
fx_vol=fx_vol,
settlement=settlement,
forward=forward,
base=base,
)
)
# filter empty or all NaN
dfs_filtered = [_ for _ in legs_df if not (_.empty or isna(_).all(axis=None))]
with warnings.catch_warnings():
# TODO: pandas 2.1.0 has a FutureWarning for concatenating DataFrames with Null entries
warnings.filterwarnings("ignore", category=FutureWarning)
_: DataFrame = concat(
dfs_filtered, keys=[f"leg{i + 1}" for i in range(len(dfs_filtered))]
)
return _
def _cashflows_from_instruments(self, *args: Any, **kwargs: Any) -> DataFrame:
# this is a generalist implementation of an NPV function for an instrument with 2 legs.
# most instruments may be likely to implement NPV directly to benefit from optimisations
# specific to that instrument
assert hasattr(self, "instruments") # noqa: S101
with warnings.catch_warnings():
# TODO: pandas 2.1.0 has a FutureWarning for concatenating DataFrames with Null entries
warnings.filterwarnings("ignore", category=FutureWarning)
_: DataFrame = concat(
[_.cashflows(*args, **kwargs) for _ in self.instruments],
keys=[f"inst{i}" for i in range(len(self.instruments))],
)
return _
def cashflows_table(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
"""
Aggregate the values derived from a
:meth:`~rateslib.instruments.protocols._WithCashflows.cashflows`, grouped by date,
settlement currency and collateral.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import dt, Curve, IRS
.. ipython:: python
irs = IRS(dt(2000, 1, 1), "3Y", spec="usd_irs", fixed_rate=1.0)
curve = Curve({dt(2000, 1, 1): 1.0, dt(2010, 1, 1): 0.75})
irs.cashflows_table(curves=[curve])
Parameters
----------
curves: _Curves, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
solver: Solver, :green:`optional`
A :class:`~rateslib.solver.Solver` object containing *Curve*, *Smile*, *Surface*, or
*Cube* mappings for pricing.
fx: FXForwards, :green:`optional`
The :class:`~rateslib.fx.FXForwards` object used for forecasting FX rates, if necessary.
vol: _Vol, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
base: str, :green:`optional (set to settlement currency)`
The currency to convert the *local settlement* NPV to.
local: bool, :green:`optional (set as False)`
An override flag to return a dict of NPV values indexed by string currency.
settlement: datetime, :green:`optional`
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, :green:`optional`
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
DataFrame
"""
cashflows = self.cashflows(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
cashflows = cashflows[
[
defaults.headers["currency"],
defaults.headers["collateral"],
defaults.headers["payment"],
defaults.headers["cashflow"],
]
]
_: DataFrame = cashflows.groupby( # type: ignore[assignment]
[
defaults.headers["currency"],
defaults.headers["collateral"],
defaults.headers["payment"],
],
dropna=False,
)
_ = _.sum().unstack([0, 1]).droplevel(0, axis=1)
_.columns.names = ["local_ccy", "collateral_ccy"]
_.index.names = ["payment"]
_ = _.sort_index(ascending=True, axis=0).infer_objects().fillna(0.0)
return _
================================================
FILE: python/rateslib/instruments/protocols/fixings.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from pandas import DataFrame, Series
from rateslib.enums.generics import NoInput
from rateslib.periods.protocols.fixings import (
_replace_fixings_with_ad_variables,
_reset_fixings_data,
_structure_sensitivity_data,
)
if TYPE_CHECKING:
from rateslib.local_types import (
CurvesT_,
DualTypes,
FXForwards_,
Sequence,
Solver_,
VolT_,
datetime_,
int_,
str_,
)
class _WithFixings(Protocol):
"""
Protocol for determining fixing sensitivity for a *Period* with AD.
.. rubric:: Provided methods
.. autosummary::
~_WithFixings.reset_fixings
"""
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]: ...
def reset_fixings(self, state: int_ = NoInput(0)) -> None:
"""
Resets any fixings values of the *Instrument* derived using the given data state.
.. role:: green
Parameters
----------
state: int, :green:`optional`
The *state id* of the data series that set the fixing. Only fixings determined by this
data will be reset. If not given resets all fixings.
Returns
-------
None
"""
if hasattr(self, "legs"):
for leg in self.legs:
leg.reset_fixings(state)
elif hasattr(self, "instruments"):
for inst in self.instruments:
inst.reset_fixings(state)
def local_fixings(
self,
identifiers: Sequence[tuple[str, Series]],
scalars: Sequence[float] | NoInput = NoInput(0),
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
"""
Calculate the sensitivity to fixings of the *Instrument*, expressed in local
settlement currency.
.. role:: red
.. role:: green
Parameters
----------
identifiers: Sequence of tuple[str, Series], :red:`required`
These are the series string identifiers and the data values that will be used in each
Series to determine the sensitivity against.
scalars: Sequence of floats, :green:`optional (each set as 1.0)`
A sequence of scalars to multiply the sensitivities by for each on of the
``identifiers``.
curves: _Curves, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
solver: Solver, :green:`optional`
A :class:`~rateslib.solver.Solver` object containing *Curve*, *Smile*, *Surface*, or
*Cube* mappings for pricing.
fx: FXForwards, :green:`optional`
The :class:`~rateslib.fx.FXForwards` object used for forecasting FX rates, if necessary.
vol: _Vol, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
settlement: datetime, :green:`optional`
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, :green:`optional`
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
DataFrame
"""
original_data, index, state = _replace_fixings_with_ad_variables(identifiers)
# Extract sensitivity data
pv: dict[str, DualTypes] = self.npv( # type: ignore[assignment]
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
local=True,
)
df = _structure_sensitivity_data(pv, index, identifiers, scalars)
_reset_fixings_data(self, original_data, state, identifiers)
return df
================================================
FILE: python/rateslib/instruments/protocols/kwargs.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.enums.generics import NoInput
from rateslib.scheduling import Schedule
if TYPE_CHECKING:
from rateslib.local_types import (
Any,
str_,
)
def _get_args_from_spec(spec: str_) -> dict[str, Any]:
"""
Get ``spec`` args from ``defaults`` or empty dict.
"""
if isinstance(spec, NoInput):
return {}
return defaults.spec.get(spec.lower(), {})
def _update_not_noinput(base_kwargs: dict[str, Any], new_kwargs: dict[str, Any]) -> dict[str, Any]:
"""
Update the `base_kwargs` with `new_kwargs` (user values) unless those new values are NoInput.
"""
updaters = {
k: v for k, v in new_kwargs.items() if k not in base_kwargs or not isinstance(v, NoInput)
}
return {**base_kwargs, **updaters}
def _update_with_defaults(
base_kwargs: dict[str, Any], default_kwargs: dict[str, Any]
) -> dict[str, Any]:
"""
Update the `base_kwargs` with `default_kwargs` if the base_values are NoInput.blank.
"""
updaters = {
k: v
for k, v in default_kwargs.items()
if k in base_kwargs and base_kwargs[k] is NoInput.blank
}
return {**base_kwargs, **updaters}
def _inherit_or_negate(kwargs: dict[str, Any], ignore_blank: bool = False) -> dict[str, Any]:
"""Amend the values of leg2 kwargs if they are defaulted to inherit or negate from leg1."""
def _replace(k: str, v: Any) -> Any:
# either inherit or negate the value in leg2 from that in leg1
if "leg2_" in k:
if not isinstance(v, NoInput):
return v # do nothing if the attribute is an input
try:
leg1_v = kwargs[k[5:]]
except KeyError:
return v
if leg1_v is NoInput.blank:
if ignore_blank:
return v # this allows an inheritor or negator to be called a second time
else:
return NoInput(0)
if v is NoInput(-1):
if isinstance(leg1_v, list):
return [_ * -1.0 for _ in leg1_v]
elif isinstance(leg1_v, tuple):
return tuple([_ * -1.0 for _ in leg1_v])
else:
return leg1_v * -1.0
elif v is NoInput(1):
return leg1_v
return v # do nothing to leg1 attributes
return {k: _replace(k, v) for k, v in kwargs.items()}
def _convert_to_schedule_kwargs(kwargs: dict[str, Any], leg: int) -> dict[str, Any]:
_ = "" if leg == 1 else "leg2_"
ex_div = kwargs.pop(f"{_}ex_div", NoInput(0))
if isinstance(ex_div, int):
ex_div = -1 * ex_div # negate this input for business days backwards
kwargs[f"{_}schedule"] = Schedule(
effective=kwargs.pop(f"{_}effective", NoInput(0)),
termination=kwargs.pop(f"{_}termination", NoInput(0)),
frequency=kwargs.pop(f"{_}frequency", NoInput(0)),
stub=kwargs.pop(f"{_}stub", NoInput(0)),
front_stub=kwargs.pop(f"{_}front_stub", NoInput(0)),
back_stub=kwargs.pop(f"{_}back_stub", NoInput(0)),
roll=kwargs.pop(f"{_}roll", NoInput(0)),
eom=kwargs.pop(f"{_}eom", NoInput(0)),
modifier=kwargs.pop(f"{_}modifier", NoInput(0)),
calendar=kwargs.pop(f"{_}calendar", NoInput(0)),
payment_lag=kwargs.pop(f"{_}payment_lag", NoInput(0)),
payment_lag_exchange=kwargs.pop(f"{_}payment_lag_exchange", NoInput(0)),
extra_lag=ex_div,
)
return kwargs
class _KWArgs:
"""
Class to manage keyword argument population of *Leg* based *Instruments*.
This will first populate any provided ``spec`` arguments if given.
Second, the user input arguments that are specific values will overwrite these.
Thridly, system ``defaults`` wil be populated.
Finally, any remaining NoInput arguments of leg2 that are set to `inherit` or `negate` will
derive their values from leg1.
"""
@property
def leg1(self) -> dict[str, Any]:
"""Keyword arguments pass to construction of *Leg1*."""
return self._leg1_args
@property
def leg2(self) -> dict[str, Any]:
"""Keyword arguments pass to construction of *Leg2*."""
return self._leg2_args
@property
def meta(self) -> dict[str, Any]:
"""Meta keyword arguments associated with the *Instrument*."""
return self._meta_args
def __init__(
self,
user_args: dict[str, Any],
default_args: dict[str, Any] | None = None,
meta_args: list[str] | None = None,
spec: str_ = NoInput(0),
) -> None:
default_args_ = default_args or {}
meta_args_ = meta_args or []
kwargs = _get_args_from_spec(spec)
kwargs = _update_not_noinput(kwargs, user_args)
kwargs = _update_with_defaults(kwargs, default_args_)
kwargs = _inherit_or_negate(kwargs)
self._meta_args = {}
for k in meta_args_:
if k in kwargs:
self._meta_args[k] = kwargs.pop(k)
self._leg2_args = {k[5:]: v for k, v in kwargs.items() if "leg2_" in k}
self._leg1_args = {k: v for k, v in kwargs.items() if "leg2_" not in k}
def __eq__(self, other: Any) -> bool:
if not isinstance(other, _KWArgs):
return False
else:
# bools = [
# self.leg1.keys() == other.leg1.keys(),
# self.leg2.keys() == other.leg2.keys(),
# self.meta.keys() == other.meta.keys(),
# all(self.leg1[k] == other.leg1[k] for k in self.leg1.keys()),
# all(self.leg2[k] == other.leg2[k] for k in self.leg2.keys()),
# all(self.meta[k] == other.meta[k] for k in self.meta.keys()),
# ]
bools = [
self.leg1 == other.leg1,
self.leg2 == other.leg2,
self.meta == other.meta,
]
return all(bools)
================================================
FILE: python/rateslib/instruments/protocols/npv.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.protocols.kwargs import _KWArgs
from rateslib.instruments.protocols.pricing import (
_get_curve,
_get_fx_maybe_from_solver,
_get_fx_vol,
_parse_curves,
_parse_vol,
_WithPricingObjs,
)
from rateslib.periods.utils import _maybe_fx_converted
if TYPE_CHECKING:
from rateslib.local_types import (
CurvesT_,
DualTypes,
FXForwards_,
Solver_,
VolT_,
_SettlementParams,
datetime_,
str_,
)
class _WithNPV(_WithPricingObjs, Protocol):
"""
Protocol to establish value of any *Instrument* type.
"""
_kwargs: _KWArgs
@property
def settlement_params(self) -> _SettlementParams:
"""
The default :class:`~rateslib.periods.parameters._SettlementParams` of the *Instrument*.
This is used to define a ``base`` currency when one is not specified.
"""
if hasattr(self, "legs"):
return self.legs[0].settlement_params # type: ignore[no-any-return]
elif hasattr(self, "instruments"):
return self.instruments[0].settlement_params # type: ignore[no-any-return]
else:
raise NotImplementedError(
f"`settlement_params` not implemented for type {type(self).__name__}"
)
@property
def kwargs(self) -> _KWArgs:
"""The :class:`~rateslib.instruments.protocols._KWArgs` container for
the *Instrument*."""
return self._kwargs
def __repr__(self) -> str:
return f""
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
"""
Calculate the NPV of the *Instrument* converted to any other *base* accounting currency.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import dt, Curve, IRS
.. ipython:: python
curve = Curve({dt(2000, 1, 1): 1.0, dt(2010, 1, 1): 0.75})
irs = IRS(dt(2000, 1, 1), "3Y", spec="usd_irs", fixed_rate=1.0, curves=[curve])
irs.npv()
irs.npv(local=True)
.. role:: red
.. role:: green
Parameters
----------
curves: _Curves, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
solver: Solver, :green:`optional`
A :class:`~rateslib.solver.Solver` object containing *Curve*, *Smile*, *Surface*, or
*Cube* mappings for pricing.
fx: FXForwards, :green:`optional`
The :class:`~rateslib.fx.FXForwards` object used for forecasting FX rates, if necessary.
vol: _Vol, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
base: str, :green:`optional (set to settlement currency)`
The currency to convert the *local settlement* NPV to.
local: bool, :green:`optional (set as False)`
An override flag to return a dict of NPV values indexed by string currency.
settlement: datetime, :green:`optional`
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, :green:`optional`
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
float, Dual, Dual2, Variable or dict of such indexed by string currency.
Notes
-----
If ``base`` is not given then this function will return the value obtained from
determining the PV in local *settlement currency*.
If ``base`` is provided this then an :class:`~rateslib.fx.FXForwards` object may be
required to perform conversions. An :class:`~rateslib.fx.FXRates` object is also allowed
for this conversion although best practice does not recommend it due to possible
settlement date conflicts.
"""
# this is a generalist implementation of an NPV function for an instrument with 2 legs.
# most instruments may be likely to implement NPV directly to benefit from optimisations
# specific to that instrument
assert hasattr(self, "legs") # noqa: S101
c = _parse_curves(self, curves, solver) # type: ignore[arg-type]
v = _parse_vol(self, vol, solver, False) # type: ignore[call-overload, misc]
fx_vol = _get_fx_vol(True, True, *v)
_fx_maybe_from_solver = _get_fx_maybe_from_solver(fx=fx, solver=solver)
local_npv: dict[str, DualTypes] = {}
for leg, names in zip(
self.legs,
[
("rate_curve", "disc_curve", "index_curve"),
("leg2_rate_curve", "leg2_disc_curve", "leg2_index_curve"),
],
strict=False,
):
leg_local_npv = leg.local_npv(
rate_curve=_get_curve(names[0], True, True, *c),
disc_curve=_get_curve(names[1], False, True, *c),
index_curve=_get_curve(names[2], False, True, *c),
fx=_fx_maybe_from_solver,
fx_vol=fx_vol,
settlement=settlement,
forward=forward,
)
if leg.settlement_params.currency in local_npv:
local_npv[leg.settlement_params.currency] += leg_local_npv
else:
local_npv[leg.settlement_params.currency] = leg_local_npv
if not local:
single_value: DualTypes = 0.0
base_ = _drb(self.settlement_params.currency, base)
for k_, v_ in local_npv.items():
single_value += _maybe_fx_converted(
value=v_,
currency=k_,
fx=_fx_maybe_from_solver,
base=base_,
forward=forward,
)
return single_value
else:
return local_npv
def _npv_single_core(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> dict[str, DualTypes]:
"""
Private NPV summation function used with a single thread, over all `self.instruments`.
Returns a dict type: local = True
"""
assert hasattr(self, "instruments") # noqa: S101
local_npv: dict[str, DualTypes] = {}
for instrument in self.instruments:
inst_local_npv = instrument.npv(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=True,
settlement=settlement,
forward=forward,
)
for k, v in inst_local_npv.items():
if k in local_npv:
local_npv[k] += v
else:
local_npv[k] = v
return local_npv
================================================
FILE: python/rateslib/instruments/protocols/pricing.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import warnings
from typing import TYPE_CHECKING, Literal, Protocol, overload
from rateslib import defaults
from rateslib.curves import MultiCsaCurve, ProxyCurve
from rateslib.dual import Dual, Dual2, Variable
from rateslib.enums.generics import NoInput, _drb
from rateslib.volatility import _BaseIRCube, _BaseIRSmile
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
FX_,
Any,
CurvesT_,
DualTypes,
FXForwards_,
FXVol_,
IRVol_,
NoInput,
Solver,
Solver_,
VolStrat_,
VolT_,
_BaseCurve,
_BaseCurve_,
_BaseCurveOrDict,
_BaseCurveOrDict_,
_BaseCurveOrId,
_BaseCurveOrIdOrIdDict,
_BaseCurveOrIdOrIdDict_,
_BaseInstrument,
_FXVolObj,
_IRVolObj,
_IRVolOption_,
)
class _WithPricingObjs(Protocol):
"""
Protocol to determine individual *curves* and *vol* inputs for each *Instrument*.
This protocol contains two internal methods for parsing ``curves`` and ``vol`` inputs
according to individual *Instruments* for pricing methods, such as
:meth:`~rateslib.instruments.protocols._WithNpv.npv` and
:meth:`~rateslib.instruments.protocols._WithRate.rate`.
"""
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""Method is needed to map the `curves` argument input for any individual *Instrument* into
the more defined :class:`~rateslib.curves._parsers._Curves` structure.
"""
raise NotImplementedError(
f"{type(self).__name__} must implement `_parse_curves` of class `_WithPricingObjs`."
)
def _parse_vol(self, vol: VolT_) -> _Vol:
"""Method is needed to map the `vol` argument input for any individual *Instrument* into
the more defined :class:`~rateslib.curves._parsers._Vol` structure.
"""
raise NotImplementedError(
f"{type(self).__name__} must implement `_parse_vol` of class `_WithPricingObjs`."
)
class _Curves:
"""
Container for a pricing object providing a mapping for curves.
"""
def __init__(
self,
*,
rate_curve: _BaseCurveOrIdOrIdDict_ = NoInput(0),
disc_curve: _BaseCurveOrIdOrIdDict_ = NoInput(0),
index_curve: _BaseCurveOrIdOrIdDict_ = NoInput(0),
leg2_rate_curve: _BaseCurveOrIdOrIdDict_ = NoInput(0),
leg2_disc_curve: _BaseCurveOrIdOrIdDict_ = NoInput(0),
leg2_index_curve: _BaseCurveOrIdOrIdDict_ = NoInput(0),
):
self._rate_curve = rate_curve
self._disc_curve = disc_curve
self._index_curve = index_curve
self._leg2_rate_curve = leg2_rate_curve
self._leg2_disc_curve = leg2_disc_curve
self._leg2_index_curve = leg2_index_curve
def __eq__(self, other: Any) -> bool:
if not isinstance(other, _Curves):
return False
else:
bools = [
self.disc_curve == other.disc_curve,
self.index_curve == other.index_curve,
self.rate_curve == other.rate_curve,
self.leg2_rate_curve == other.leg2_rate_curve,
self.leg2_disc_curve == other.leg2_disc_curve,
self.leg2_index_curve == other.leg2_index_curve,
]
return all(bools)
@property
def rate_curve(self) -> _BaseCurveOrIdOrIdDict_:
"""The curve used for floating rate or hazard rate forecasting on leg1."""
return self._rate_curve
@property
def disc_curve(self) -> _BaseCurveOrIdOrIdDict_:
"""The curve used for discounting on leg1."""
return self._disc_curve
@property
def index_curve(self) -> _BaseCurveOrIdOrIdDict_:
"""The index curve used for forecasting index values on leg1."""
return self._index_curve
@property
def leg2_rate_curve(self) -> _BaseCurveOrIdOrIdDict_:
"""The curve used for floating rate or hazard rate forecasting on leg2."""
return self._leg2_rate_curve
@property
def leg2_disc_curve(self) -> _BaseCurveOrIdOrIdDict_:
"""The curve used for discounting on leg2."""
return self._leg2_disc_curve
@property
def leg2_index_curve(self) -> _BaseCurveOrIdOrIdDict_:
"""The index curve used for forecasting index values on leg2."""
return self._leg2_index_curve
class _Vol:
"""
Container for a pricing object providing a mapping for volatility.
"""
def __init__(
self,
*,
fx_vol: FXVol_ = NoInput(0),
ir_vol: IRVol_ = NoInput(0),
):
self._fx_vol = fx_vol
self._ir_vol = ir_vol
@property
def fx_vol(self) -> FXVol_:
"""The FX vol object used for modelling FX volatility."""
return self._fx_vol
@property
def ir_vol(self) -> IRVol_:
"""The IR vol object used for modelling IR volatility."""
return self._ir_vol
def __eq__(self, other: Any) -> bool:
if not isinstance(other, _Vol):
return False
else:
return self.fx_vol == other.fx_vol and self.ir_vol == other.ir_vol
def _parse_curves(
obj: _BaseInstrument, curves: CurvesT_, solver: Solver_
) -> tuple[_Curves, _Curves, Solver_]:
return (obj._parse_curves(curves), obj.kwargs.meta["curves"], solver)
@overload
def _parse_vol(
obj: _BaseInstrument,
vol: VolT_,
solver: Solver_,
sequence: Literal[False],
) -> tuple[_Vol, _Vol, Solver_]: ...
@overload
def _parse_vol(
obj: _BaseInstrument,
vol: VolStrat_,
solver: Solver_,
sequence: Literal[True],
) -> tuple[VolStrat_, VolStrat_, Solver_]: ...
def _parse_vol(
obj: _BaseInstrument,
vol: VolT_ | VolStrat_,
solver: Solver_,
sequence: bool,
) -> tuple[_Vol | VolStrat_, _Vol | VolStrat_, Solver_]:
return obj._parse_vol(vol), obj.kwargs.meta["vol"], solver # type: ignore[arg-type]
# Solver and Curve mapping
@overload
def _get_curve(
name: str,
allow_dict: Literal[False],
allow_no_input: Literal[True],
curves: _Curves,
curves_meta: _Curves,
solver: Solver_,
) -> _BaseCurve_: ...
@overload
def _get_curve(
name: str,
allow_dict: Literal[False],
allow_no_input: Literal[False],
curves: _Curves,
curves_meta: _Curves,
solver: Solver_,
) -> _BaseCurve: ...
@overload
def _get_curve(
name: str,
allow_dict: Literal[True],
allow_no_input: Literal[True],
curves: _Curves,
curves_meta: _Curves,
solver: Solver_,
) -> _BaseCurveOrDict_: ...
@overload
def _get_curve(
name: str,
allow_dict: Literal[True],
allow_no_input: Literal[False],
curves: _Curves,
curves_meta: _Curves,
solver: Solver_,
) -> _BaseCurveOrDict: ...
def _get_curve(
name: str,
allow_dict: bool,
allow_no_input: bool,
curves: _Curves,
curves_meta: _Curves,
solver: Solver_,
) -> _BaseCurveOrDict_:
curve: _BaseCurveOrIdOrIdDict_ = _drb(getattr(curves_meta, name), getattr(curves, name))
if isinstance(curve, NoInput) or curve is None:
if allow_no_input:
return NoInput(0)
else:
raise ValueError(f"`{name}` must be provided. Got NoInput.")
elif isinstance(solver, NoInput):
return _validate_base_curve_or_dict( # type: ignore[no-any-return, call-overload]
curve=curve, allow_dict=allow_dict, allow_no_input=allow_no_input
)
else:
return _get_curve_from_solver( # type: ignore[no-any-return, call-overload]
curve=curve,
solver=solver,
allow_dict=allow_dict,
)
@overload
def _validate_base_curve_or_dict(
curve: _BaseCurveOrIdOrIdDict,
allow_dict: Literal[True],
allow_no_input: Literal[True],
) -> _BaseCurveOrDict_: ...
@overload
def _validate_base_curve_or_dict(
curve: _BaseCurveOrIdOrIdDict,
allow_dict: Literal[True],
allow_no_input: Literal[False],
) -> _BaseCurveOrDict: ...
@overload
def _validate_base_curve_or_dict(
curve: _BaseCurveOrIdOrIdDict,
allow_dict: Literal[False],
allow_no_input: Literal[True],
) -> _BaseCurve_: ...
@overload
def _validate_base_curve_or_dict(
curve: _BaseCurveOrIdOrIdDict,
allow_dict: Literal[False],
allow_no_input: Literal[False],
) -> _BaseCurve: ...
def _validate_base_curve_or_dict(
curve: _BaseCurveOrIdOrIdDict,
allow_dict: bool,
allow_no_input: bool,
) -> _BaseCurveOrDict_:
"""
Validate that a curve input is an object and not a string id.
"""
if isinstance(curve, dict):
if not allow_dict:
raise ValueError("Cannot supply a dict type object as this `curve`.")
else:
return {
k: _validate_base_curve(v, allow_no_input=allow_no_input) # type: ignore[call-overload]
for k, v in curve.items()
}
else:
return _validate_base_curve(curve, allow_no_input=allow_no_input) # type: ignore[no-any-return, call-overload]
@overload
def _validate_base_curve(curve: _BaseCurveOrId, allow_no_input: Literal[False]) -> _BaseCurve: ...
@overload
def _validate_base_curve(curve: _BaseCurveOrId, allow_no_input: Literal[True]) -> _BaseCurve_: ...
def _validate_base_curve(curve: _BaseCurveOrId, allow_no_input: bool) -> _BaseCurve_:
if isinstance(curve, str):
if allow_no_input:
return NoInput(0)
else:
raise ValueError(
f"`curves` must contain _BaseCurve, not str, if `solver` not given. "
f"Got id: '{curve}'"
)
return curve
@overload
def _get_curve_from_solver(
curve: _BaseCurveOrIdOrIdDict, solver: Solver, allow_dict: Literal[True]
) -> _BaseCurveOrDict: ...
@overload
def _get_curve_from_solver(
curve: _BaseCurveOrIdOrIdDict, solver: Solver, allow_dict: Literal[False]
) -> _BaseCurve: ...
def _get_curve_from_solver(
curve: _BaseCurveOrIdOrIdDict, solver: Solver, allow_dict: bool
) -> _BaseCurveOrDict:
"""
Maps a "Curve | str | dict[str, Curve | str]" to a "Curve | dict[str, Curve]" via a Solver.
If curve input involves strings get objects directly from solver curves mapping.
This is the explicit variety which does not handle NoInput.
"""
if isinstance(curve, dict):
if not allow_dict:
raise ValueError("Cannot supply a dict type object as this `curve`.")
parsed_dict: dict[str, _BaseCurve] = {
k: _parse_curve_or_id_from_solver_(curve=v, solver=solver) for k, v in curve.items()
}
return parsed_dict
else:
return _parse_curve_or_id_from_solver_(curve, solver)
def _parse_curve_or_id_from_solver_(curve: _BaseCurveOrId, solver: Solver) -> _BaseCurve:
"""
Maps a "Curve | str" to a "Curve" via a Solver mapping.
If a Curve, runs a check against whether that Curve is associated with the given Solver,
and perform an action based on `defaults.curve_not_in_solver`
"""
if isinstance(curve, str):
return solver._get_pre_curve(curve)
elif type(curve) is ProxyCurve or type(curve) is MultiCsaCurve:
# TODO: (mid) consider also adding CompositeCurves as exceptions under the same rule
# Proxy curves and MultiCsaCurves can exist outside of Solvers but be constructed
# directly from an FXForwards object tied to a Solver using only a Solver's
# dependent curves and AD variables.
return curve # type: ignore[no-any-return] # mypy error
else:
try:
# it is a safeguard to load curves from solvers when a solver is
# provided and multiple curves might have the same id
__: _BaseCurve = solver._get_pre_curve(curve.id)
if id(__) != id(curve): # Python id() is a memory id, not a string label id.
raise ValueError(
"A curve has been supplied, as part of ``curves``, which has the same "
f"`id` ('{curve.id}'),\nas one of the curves available as part of the "
"Solver's collection but is not the same object.\n"
"This is ambiguous and cannot price.\n"
"Either refactor the arguments as follows:\n"
"1) remove the conflicting curve: [curves=[..], solver=] -> "
"[curves=None, solver=]\n"
"2) change the `id` of the supplied curve and ensure the rateslib.defaults "
"option 'curve_not_in_solver' is set to 'ignore'.\n"
" This will remove the ability to accurately price risk metrics.",
)
return __
except AttributeError:
raise AttributeError(
"`curve` has no attribute `id`, likely it not a valid object, got: "
f"{curve}.\nSince a solver is provided have you missed labelling the `curves` "
f"of the instrument or supplying `curves` directly?",
)
except KeyError:
if defaults.curve_not_in_solver == "ignore":
return curve
elif defaults.curve_not_in_solver == "warn":
warnings.warn("`curve` not found in `solver`.", UserWarning)
return curve
else:
raise ValueError("`curve` must be in `solver`.")
# Solver and FX Vol mapping
@overload
def _get_fx_vol(
allow_numeric: Literal[True],
allow_no_input: Literal[True],
vol: _Vol,
vol_meta: _Vol,
solver: Solver_,
) -> _FXVolObj | DualTypes | NoInput: ...
@overload
def _get_fx_vol(
allow_numeric: Literal[True],
allow_no_input: Literal[False],
vol: _Vol,
vol_meta: _Vol,
solver: Solver_,
) -> _FXVolObj | DualTypes: ...
@overload
def _get_fx_vol(
allow_numeric: Literal[False],
allow_no_input: Literal[True],
vol: _Vol,
vol_meta: _Vol,
solver: Solver_,
) -> _FXVolObj | NoInput: ...
@overload
def _get_fx_vol(
allow_numeric: Literal[False],
allow_no_input: Literal[False],
vol: _Vol,
vol_meta: _Vol,
solver: Solver_,
) -> _FXVolObj: ...
def _get_fx_vol(
allow_numeric: bool,
allow_no_input: bool,
vol: _Vol,
vol_meta: _Vol,
solver: Solver_,
) -> _FXVolObj | DualTypes | NoInput:
fx_vol_ = _drb(vol_meta.fx_vol, vol.fx_vol)
if isinstance(fx_vol_, NoInput) or fx_vol_ is None:
if allow_no_input:
return NoInput(0)
else:
raise ValueError("`fx_vol` must be provided. Got NoInput.")
elif isinstance(fx_vol_, float | Dual | Dual2 | Variable):
if allow_numeric:
return fx_vol_
else:
raise ValueError("`fx_vol` must be an object. Got numeric quantity.")
elif isinstance(solver, NoInput):
return _validate_base_fx_vol(fx_vol=fx_vol_, allow_no_input=allow_no_input) # type: ignore[no-any-return, call-overload]
else:
return _get_fx_vol_from_solver(fx_vol=fx_vol_, solver=solver)
@overload
def _validate_base_fx_vol(fx_vol: _FXVolObj | str, allow_no_input: Literal[False]) -> _FXVolObj: ...
@overload
def _validate_base_fx_vol(
fx_vol: _FXVolObj | str, allow_no_input: Literal[True]
) -> _FXVolObj | NoInput: ...
def _validate_base_fx_vol(fx_vol: _FXVolObj | str, allow_no_input: bool) -> _FXVolObj | NoInput:
if isinstance(fx_vol, str):
if allow_no_input:
return NoInput(0)
else:
raise ValueError(
f"`fx_vol` must contain FXVol object, not str, if `solver` not given. "
f"Got id: '{fx_vol}'"
)
return fx_vol
def _get_fx_vol_from_solver(fx_vol: _FXVolObj | str, solver: Solver) -> _FXVolObj:
if isinstance(fx_vol, str):
return solver._get_pre_fxvol(fx_vol)
try:
# it is a safeguard to load curves from solvers when a solver is
# provided and multiple curves might have the same id
__: _FXVolObj = solver._get_pre_fxvol(fx_vol.id)
if id(__) != id(fx_vol): # Python id() is a memory id, not a string label id.
raise ValueError(
"An FXVol object has been supplied, as part of ``vol``, which has the same "
f"`id` ('{fx_vol.id}'),\nas one of the curves available as part of the "
"Solver's collection but is not the same object.\n"
"This is ambiguous and cannot price.\n"
"Either refactor the arguments as follows:\n"
"1) remove the conflicting object: [vol=[..], solver=] -> "
"[vol=None, solver=]\n"
"2) change the `id` of the supplied FXVol object and ensure the rateslib.defaults "
"option 'curve_not_in_solver' is set to 'ignore'.\n"
" This will remove the ability to accurately price risk metrics.",
)
return __
except AttributeError:
raise AttributeError(
"FXVol object has no attribute `id`, likely it is not a valid object, got: "
f"{fx_vol}.\nSince a solver is provided have you missed labelling the `curves` "
f"of the instrument or supplying `curves` directly?",
)
except KeyError:
if defaults.curve_not_in_solver == "ignore":
return fx_vol
elif defaults.curve_not_in_solver == "warn":
warnings.warn("FXVol object not found in `solver`.", UserWarning)
return fx_vol
else:
raise ValueError("FXVol object must be in `solver`.")
# Solver and IR Vol mapping
def _maybe_get_ir_vol_maybe_from_solver(
vol_meta: _Vol,
vol: _Vol,
# name: str, = "fx_vol"
solver: Solver_,
) -> _IRVolOption_:
ir_vol_ = _drb(vol_meta.ir_vol, vol.ir_vol)
if isinstance(ir_vol_, NoInput | float | Dual | Dual2 | Variable):
return ir_vol_
elif isinstance(solver, NoInput):
return _validate_ir_vol_is_not_id(ir_vol=ir_vol_)
else:
return _get_ir_vol_from_solver(ir_vol=ir_vol_, solver=solver)
def _get_ir_vol_from_solver(
ir_vol: _BaseIRSmile | _BaseIRCube[Any] | str, solver: Solver
) -> _BaseIRSmile | _BaseIRCube[Any]:
if isinstance(ir_vol, str):
return solver._get_pre_irvol(ir_vol)
try:
# it is a safeguard to load curves from solvers when a solver is
# provided and multiple curves might have the same id
__: _IRVolObj = solver._get_pre_irvol(ir_vol.id)
if id(__) != id(ir_vol): # Python id() is a memory id, not a string label id.
raise ValueError(
"An IRVol object has been supplied, as part of ``vol``, which has the same "
f"`id` ('{ir_vol.id}'),\nas one of the curves available as part of the "
"Solver's collection but is not the same object.\n"
"This is ambiguous and cannot price.\n"
"Either refactor the arguments as follows:\n"
"1) remove the conflicting object: [vol=[..], solver=] -> "
"[vol=None, solver=]\n"
"2) change the `id` of the supplied IRVol object and ensure the rateslib.defaults "
"option 'curve_not_in_solver' is set to 'ignore'.\n"
" This will remove the ability to accurately price risk metrics.",
)
return __
except AttributeError:
raise AttributeError(
"IRVol object has no attribute `id`, likely it is not a valid object, got: "
f"{ir_vol}.\nSince a solver is provided have you missed labelling the `curves` "
f"of the instrument or supplying `curves` directly?",
)
except KeyError:
if defaults.curve_not_in_solver == "ignore":
return ir_vol
elif defaults.curve_not_in_solver == "warn":
warnings.warn("FXVol object not found in `solver`.", UserWarning)
return ir_vol
else:
raise ValueError("FXVol object must be in `solver`.")
def _validate_ir_vol_is_not_id(ir_vol: _IRVolObj | str) -> _IRVolObj:
if isinstance(ir_vol, str): # curve is a str ID
raise ValueError(
f"`vol` must contain IRVol object, not str, if `solver` not given. Got id: '{ir_vol}'"
)
return ir_vol
# FX and Solver mapping
def _get_fx_forwards_maybe_from_solver(solver: Solver_, fx: FXForwards_) -> FXForwards_:
if isinstance(fx, NoInput):
if isinstance(solver, NoInput):
fx_: FXForwards_ = NoInput(0)
else:
if isinstance(solver.fx, NoInput):
fx_ = NoInput(0)
else:
# TODO disallow `fx` on Solver as FXRates. Only allow FXForwards.
fx_ = solver._get_fx()
else:
fx_ = fx
if (
not isinstance(solver, NoInput)
and not isinstance(solver.fx, NoInput)
and id(fx) != id(solver.fx)
):
warnings.warn(
"Solver contains an `fx` attribute but an `fx` argument has been "
"supplied which will be used but is not the same. This can lead "
"to calculation inconsistencies, mathematically.",
UserWarning,
)
return fx_
def _get_fx_maybe_from_solver(solver: Solver_, fx: FXForwards_) -> FXForwards_:
if isinstance(fx, NoInput):
if isinstance(solver, NoInput):
fx_: FX_ = NoInput(0)
else:
if isinstance(solver.fx, NoInput):
fx_ = NoInput(0)
else:
fx_ = solver._get_fx() # will validate the state
else:
fx_ = fx
if (
not isinstance(solver, NoInput)
and not isinstance(solver.fx, NoInput)
and id(fx) != id(solver.fx)
):
warnings.warn(
"Solver contains an `fx` attribute but an `fx` argument has been "
"supplied which will be used but is not the same. This can lead "
"to calculation inconsistencies, mathematically.",
UserWarning,
)
return fx_ # type: ignore[return-value]
================================================
FILE: python/rateslib/instruments/protocols/rate.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from rateslib.enums.generics import NoInput
if TYPE_CHECKING:
from rateslib.local_types import (
CurvesT_,
DualTypes,
FXForwards_,
Solver_,
VolT_,
datetime_,
str_,
)
class _WithRate(Protocol):
"""
Protocol to establish a *rate* pricing metric of any *Instrument* type.
"""
_rate_scalar: float
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
# Overloaded rate docs are for: IndexFixedRateBond
"""
Calculate some pricing rate metric for the *Instrument*.
.. rubric:: Examples
The default metric for an :class:`~rateslib.instruments.irs.IRS` is its fixed *'rate'*.
.. ipython:: python
:suppress:
from rateslib import dt, Curve, IRS
.. ipython:: python
curve = Curve({dt(2000, 1, 1): 1.0, dt(2010, 1, 1): 0.75})
irs = IRS(dt(2000, 1, 1), "3Y", spec="usd_irs", curves=[curve], fixed_rate=2.0)
irs.rate() # <- `fixed_rate` on fixed leg to equate value with float leg
Parameters
----------
curves: _Curves, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
solver: Solver, :green:`optional`
A :class:`~rateslib.solver.Solver` object containing *Curve*, *Smile*, *Surface*, or
*Cube* mappings for pricing.
fx: FXForwards, :green:`optional`
The :class:`~rateslib.fx.FXForwards` object used for forecasting FX rates, if necessary.
vol: _Vol, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
base: str, :green:`optional (set to settlement currency)`
The currency to convert the *local settlement* NPV to.
local: bool, :green:`optional (set as False)`
An override flag to return a dict of NPV values indexed by string currency.
settlement: datetime, :green:`optional`
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, :green:`optional`
The future date to project the *PV* to using the ``disc_curve``.
metric: str, :green:`optional`
The specific calculation to perform and the value to return.
See **Pricing** on each *Instrument* for details of allowed inputs.
Returns
-------
float, Dual, Dual2, Variable
"""
raise NotImplementedError(f"`rate` must be implemented for type: {type(self).__name__}")
def spread(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
"""
Calculate some pricing spread metric for the *Instrument*.
This calculation may be an alias for :meth:`~rateslib.instruments.protocols._WithRate.rate`
with a specific `metric` and is designated at an *Instrument* level.
.. rubric:: Examples
The *'spread'* on an :class:`~rateslib.instruments.irs.IRS` is the float leg spread to
equate value with the fixed leg.
.. ipython:: python
:suppress:
from rateslib import dt, Curve, IRS
.. ipython:: python
curve = Curve({dt(2000, 1, 1): 1.0, dt(2010, 1, 1): 0.75})
irs = IRS(dt(2000, 1, 1), "3Y", spec="usd_irs", curves=[curve], fixed_rate=2.0)
irs.spread() # <- `spread` on float leg to equate value with fixed leg
Parameters
----------
curves: _Curves, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
solver: Solver, :green:`optional`
A :class:`~rateslib.solver.Solver` object containing *Curve*, *Smile*, *Surface*, or
*Cube* mappings for pricing.
fx: FXForwards, :green:`optional`
The :class:`~rateslib.fx.FXForwards` object used for forecasting FX rates, if necessary.
vol: _Vol, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
base: str, :green:`optional (set to settlement currency)`
The currency to convert the *local settlement* NPV to.
local: bool, :green:`optional (set as False)`
An override flag to return a dict of NPV values indexed by string currency.
settlement: datetime, :green:`optional`
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, :green:`optional`
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
float, Dual, Dual2, Variable
"""
raise NotImplementedError(f"`spread` is not implemented for type: {type(self).__name__}")
@property
def rate_scalar(self) -> float:
"""
A scaling quantity associated with the :class:`~rateslib.solver.Solver` risk calculations.
"""
return self._rate_scalar
================================================
FILE: python/rateslib/instruments/protocols/sensitivities.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from rateslib.enums.generics import NoInput
from rateslib.fx import FXForwards, FXRates
from rateslib.instruments.protocols.npv import _WithNPV
from rateslib.instruments.protocols.pricing import (
_get_fx_forwards_maybe_from_solver,
)
if TYPE_CHECKING:
from rateslib.local_types import (
CurvesT_,
DataFrame,
Dual,
Dual2,
FXForwards_,
NoInput,
Solver_,
VolT_,
datetime_,
str_,
)
class _WithSensitivities(_WithNPV, Protocol):
"""
Protocol to establish **delta** and **gamma** calculations using a
:class:`~rateslib.solver.Solver` of any *Instrument* type.
"""
def delta(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
"""
Calculate delta risk of an *Instrument* against the calibrating instruments in a
:class:`~rateslib.solver.Solver`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import IRS, Curve, Solver, dt
.. ipython:: python
curve = Curve({dt(2000, 1, 1): 1.0, dt(2002, 1, 1): 0.85, dt(2010, 1, 1): 0.75})
solver = Solver(
curves=[curve],
instruments=[
IRS(dt(2000, 1, 1), "2Y", spec="usd_irs", curves=[curve]),
IRS(dt(2000, 1, 1), "5Y", spec="usd_irs", curves=[curve]),
],
s=[2.0, 2.25],
instrument_labels=["2Y", "5Y"],
id="US_RATES"
)
irs = IRS(dt(2000, 1, 1), "3Y", spec="usd_irs", curves=[curve])
irs.delta(solver=solver)
Parameters
----------
curves: _Curves, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
solver: Solver, :red:`required`
A :class:`~rateslib.solver.Solver` object containing *Curve*, *Smile*, *Surface*, or
*Cube* mappings for pricing.
fx: FXForwards, :green:`optional`
The :class:`~rateslib.fx.FXForwards` object used for forecasting FX rates, if necessary.
vol: _Vol, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
base: str, :green:`optional (set to settlement currency)`
The currency to convert the *local settlement* NPV to.
settlement: datetime, :green:`optional`
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, :green:`optional`
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
DataFrame
Notes
-----
**Delta** measures the sensitivity of the *PV* to a change in any of the calibrating
instruments of the given :class:`~rateslib.solver.Solver`. Values are returned
according to the ``rate_scalar`` quantity at an *Instrument* level and according to the
``metric`` used to derive the :meth:`~rateslib.instruments.protocols._WithRate.rate`
method of each *Instrument*.
"""
if isinstance(solver, NoInput):
raise ValueError("`solver` is required for delta/gamma methods.")
npv: dict[str, Dual] = self.npv( # type: ignore[assignment]
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
forward=forward,
settlement=settlement,
local=True,
)
return solver.delta(
npv=npv, base=base, fx=_get_fx_forwards_maybe_from_solver(fx=fx, solver=solver)
)
def exo_delta(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
vars: list[str], # noqa: A002
vars_scalar: list[float] | NoInput = NoInput(0),
vars_labels: list[str] | NoInput = NoInput(0),
) -> DataFrame:
"""
Calculate delta risk of an *Instrument* against some exogenous user created *Variables*,
via a :class:`~rateslib.solver.Solver`.
See :ref:`What are exogenous variables? ` in the cookbook.
.. rubric:: Examples
This example calculates the risk of the fixed rate increasing by 1bp and the notional
increasing by 1mm. Mathematically this should be equivalent to the `npv` and the
`analytic delta` (although the calculation is based on AD and is completely independent
of the solver).
.. ipython:: python
:suppress:
from rateslib import IRS, Curve, Solver, dt, Variable
.. ipython:: python
curve = Curve({dt(2000, 1, 1): 1.0, dt(2002, 1, 1): 0.85, dt(2010, 1, 1): 0.75})
solver = Solver(
curves=[curve],
instruments=[
IRS(dt(2000, 1, 1), "2Y", spec="usd_irs", curves=[curve]),
IRS(dt(2000, 1, 1), "5Y", spec="usd_irs", curves=[curve]),
],
s=[2.0, 2.25],
instrument_labels=["2Y", "5Y"],
id="US_RATES"
)
irs = IRS(dt(2000, 1, 1), "3Y", spec="usd_irs", fixed_rate=Variable(3.0, ["R"]), notional=Variable(1e6, ["N"]), curves=[curve])
irs.exo_delta(solver=solver, vars=["R", "N"], vars_scalar=[1e-2, 1e6])
irs.analytic_delta()
irs.npv()
Parameters
----------
curves: _Curves, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
solver: Solver, :red:`required`
A :class:`~rateslib.solver.Solver` object containing *Curve*, *Smile*, *Surface*, or
*Cube* mappings for pricing.
fx: FXForwards, :green:`optional`
The :class:`~rateslib.fx.FXForwards` object used for forecasting FX rates, if necessary.
vol: _Vol, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
base: str, :green:`optional (set to settlement currency)`
The currency to convert the *local settlement* NPV to.
settlement: datetime, :green:`optional`
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, :green:`optional`
The future date to project the *PV* to using the ``disc_curve``.
vars : list[str], :red:`required`
The variable tags which to determine sensitivities for.
vars_scalar : list[float], :green:`optional`
Scaling factors for each variable, for example converting rates to basis point etc.
Defaults to ones.
vars_labels : list[str], :green:`optional`
Alternative names to relabel variables in DataFrames.
Returns
-------
DataFrame
""" # noqa: E501
if isinstance(solver, NoInput):
raise ValueError("`solver` is required for delta/gamma methods.")
npv: dict[str, Dual | Dual2] = self.npv( # type: ignore[assignment]
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
forward=forward,
settlement=settlement,
local=True,
)
return solver.exo_delta(
npv=npv,
vars=vars,
base=base,
fx=_get_fx_forwards_maybe_from_solver(fx=fx, solver=solver),
vars_scalar=vars_scalar,
vars_labels=vars_labels,
)
def gamma(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
"""
Calculate cross-gamma risk of an *Instrument* against the calibrating instruments of a
:class:`~rateslib.solver.Solver`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import IRS, Curve, Solver, dt
.. ipython:: python
curve = Curve({dt(2000, 1, 1): 1.0, dt(2002, 1, 1): 0.85, dt(2010, 1, 1): 0.75})
solver = Solver(
curves=[curve],
instruments=[
IRS(dt(2000, 1, 1), "2Y", spec="usd_irs", curves=[curve]),
IRS(dt(2000, 1, 1), "5Y", spec="usd_irs", curves=[curve]),
],
s=[2.0, 2.25],
instrument_labels=["2Y", "5Y"],
id="US_RATES"
)
irs = IRS(dt(2000, 1, 1), "3Y", spec="usd_irs", curves=[curve])
irs.gamma(solver=solver)
Parameters
----------
curves: _Curves, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
solver: Solver, :red:`required`
A :class:`~rateslib.solver.Solver` object containing *Curve*, *Smile*, *Surface*, or
*Cube* mappings for pricing.
fx: FXForwards, :green:`optional`
The :class:`~rateslib.fx.FXForwards` object used for forecasting FX rates, if necessary.
vol: _Vol, :green:`optional`
Pricing objects. See **Pricing** on each *Instrument* for details of allowed inputs.
base: str, :green:`optional (set to settlement currency)`
The currency to convert the *local settlement* NPV to.
settlement: datetime, :green:`optional`
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, :green:`optional`
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
DataFrame
Notes
-----
**Gamma** measures the second order cross-sensitivity of the *PV* to a change in any
of the calibrating instruments of the given :class:`~rateslib.solver.Solver`. Values are
returned according to the ``rate_scalar`` quantity at an *Instrument* level and according
to the ``metric`` used to derive the :meth:`~rateslib.instruments.protocols._WithRate.rate`
method of each *Instrument*.
"""
if isinstance(solver, NoInput):
raise ValueError("`solver` is required for delta/gamma methods.")
fx_ = _get_fx_forwards_maybe_from_solver(fx=fx, solver=solver)
# store original order
if id(solver.fx) != id(fx_) and isinstance(fx_, FXRates | FXForwards):
# then the fx_ object is available on solver but that is not being used.
_ad_fx = fx_._ad
fx_._set_ad_order(2)
_ad_svr = solver._ad
solver._set_ad_order(2)
npv: dict[str, Dual2] = self.npv( # type: ignore[assignment]
curves=curves,
solver=solver,
fx=fx_,
vol=vol,
base=NoInput(0), # local override
settlement=settlement,
forward=forward,
local=True,
)
grad_s_sT_P: DataFrame = solver.gamma(npv, base, fx_)
# reset original order
if id(solver.fx) != id(fx_) and isinstance(fx_, FXRates | FXForwards):
fx_._set_ad_order(_ad_fx)
solver._set_ad_order(_ad_svr)
return grad_s_sT_P
================================================
FILE: python/rateslib/instruments/protocols/utils.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, TypeVar
from rateslib.enums.generics import NoInput
if TYPE_CHECKING:
from rateslib.local_types import (
CurveOption_,
)
# def _get_fx_maybe_from_solver(
# fx: FX_,
# solver: Solver_,
# ) -> FX_:
# # Get the `fx` from Solver only if not directly provided and Solver exists.
# fx_: FXForwards_
# if isinstance(fx, NoInput):
# if not isinstance(solver, NoInput):
# fx_ = solver.fx
# else:
# fx_ = NoInput(0)
# else:
# fx_ = fx
# return fx_
T = TypeVar("T")
def _validate_obj_not_no_input(obj: T | NoInput, name: str) -> T:
if isinstance(obj, NoInput):
raise ValueError(f"`{name}` must be supplied. Got NoInput or None.")
return obj
def _maybe_set_ad_order(
curve: CurveOption_, order: int | dict[str, int | None] | None
) -> int | dict[str, int | None] | None:
"""method is used internally to set AD order and then later revert the curve to its original"""
if isinstance(curve, NoInput) or order is None:
return None # do nothing
else:
if isinstance(curve, dict):
# method will return a dict of orders if a dict of curves is provided as input
if isinstance(order, dict):
return {
k: _maybe_set_ad_order(v, order[k]) # type: ignore[misc]
for k, v in curve.items()
}
else:
return {
k: _maybe_set_ad_order(v, order) # type: ignore[misc]
for k, v in curve.items()
}
else:
try:
original_order = curve.ad
curve._set_ad_order(order) # type: ignore[arg-type]
except AttributeError:
# Curve has no method (possibly a custom curve and not a subclass of _BaseCurve)
return None
return original_order
# def _map_fx_vol_or_id_from_solver_(curve: CurveOrId, solver: Solver) -> _BaseCurve:
# """
# Maps a "FXVol | str" to a "Curve" via a Solver mapping.
#
# If a Curve, runs a check against whether that Curve is associated with the given Solver,
# and perform an action based on `defaults.curve_not_in_solver`
# """
# if isinstance(curve, str):
# return solver._get_pre_curve(curve)
# elif type(curve) is ProxyCurve or type(curve) is MultiCsaCurve:
# # TODO: (mid) consider also adding CompositeCurves as exceptions under the same rule
# # Proxy curves and MultiCsaCurves can exist outside of Solvers but be constructed
# # directly from an FXForwards object tied to a Solver using only a Solver's
# # dependent curves and AD variables.
# return curve
# else:
# try:
# # it is a safeguard to load curves from solvers when a solver is
# # provided and multiple curves might have the same id
# __: _BaseCurve = solver._get_pre_curve(curve.id)
# if id(__) != id(curve): # Python id() is a memory id, not a string label id.
# raise ValueError(
# "A curve has been supplied, as part of ``curves``, which has the same "
# f"`id` ('{curve.id}'),\nas one of the curves available as part of the "
# "Solver's collection but is not the same object.\n"
# "This is ambiguous and cannot price.\n"
# "Either refactor the arguments as follows:\n"
# "1) remove the conflicting curve: [curves=[..], solver=] -> "
# "[curves=None, solver=]\n"
# "2) change the `id` of the supplied curve and ensure the rateslib.defaults "
# "option 'curve_not_in_solver' is set to 'ignore'.\n"
# " This will remove the ability to accurately price risk metrics.",
# )
# return __
# except AttributeError:
# raise AttributeError(
# "`curve` has no attribute `id`, likely it not a valid object, got: "
# f"{curve}.\nSince a solver is provided have you missed labelling the `curves` "
# f"of the instrument or supplying `curves` directly?",
# )
# except KeyError:
# if defaults.curve_not_in_solver == "ignore":
# return curve
# elif defaults.curve_not_in_solver == "warn":
# warnings.warn("`curve` not found in `solver`.", UserWarning)
# return curve
# else:
# raise ValueError("`curve` must be in `solver`.")
================================================
FILE: python/rateslib/instruments/sbs.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.kwargs import _convert_to_schedule_kwargs, _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_parse_curves,
_Vol,
)
from rateslib.legs import FloatLeg
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CalInput,
CurvesT_,
DataFrame,
DualTypes,
DualTypes_,
FloatRateSeries,
Frequency,
FXForwards_,
LegFixings,
RollDay,
Sequence,
Solver_,
VolT_,
_BaseLeg,
bool_,
datetime,
datetime_,
float_,
int_,
str_,
)
class SBS(_BaseInstrument):
"""
A *single currency basis swap (SBS)* composing a :class:`~rateslib.legs.FloatLeg`
and a :class:`~rateslib.legs.FloatLeg`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.instruments import SBS
from datetime import datetime as dt
.. ipython:: python
sbs = SBS(
effective=dt(2000, 1, 1),
termination="1y",
spec="eur_sbs36",
float_spread=9.5,
)
sbs.cashflows()
.. rubric:: Pricing
An *SBS* requires a *disc curve* on both legs (which should be the same *Curve*) and a
*rate curve* and *leg2 rate curve* to forecast rates on each *FloatLeg*. The following input
formats are allowed:
.. code-block:: python
curves = [rate_curve, disc_curve, leg2_rate_curve] # three curves
curves = [rate_curve, disc_curve, leg2_rate_curve, disc_curve] # four curves
curves = { # dict form is explicit
"rate_curve": rate_curve,
"disc_curve": disc_curve,
"leg2_rate_curve": leg2_rate_curve,
}
The available pricing ``metric`` are in *{'leg1', 'leg2'}* which will return a *float spread*
on the specified leg. The default is to price the spread on *leg1*.
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **scheduling** parameters.
effective : datetime, :red:`required`
The unadjusted effective date. If given as adjusted, unadjusted alternatives may be
inferred.
termination : datetime, str, :red:`required`
The unadjusted termination date. If given as adjusted, unadjusted alternatives may be
inferred. If given as string tenor will be calculated from ``effective``.
frequency : Frequency, str, :red:`required`
The frequency of the schedule.
If given as string will derive a :class:`~rateslib.scheduling.Frequency` aligning with:
monthly ("M"), quarterly ("Q"), semi-annually ("S"), annually("A") or zero-coupon ("Z"), or
a set number of calendar or business days ("_D", "_B"), weeks ("_W"), months ("_M") or
years ("_Y").
Where required, the :class:`~rateslib.scheduling.RollDay` is derived as per ``roll``
and business day calendar as per ``calendar``.
stub : StubInference, str in {"ShortFront", "LongFront", "ShortBack", "LongBack"}, :green:`optional`
The stub type used if stub inference is required. If given as string will derive a
:class:`~rateslib.scheduling.StubInference`.
front_stub : datetime, :green:`optional`
The unadjusted date for the start stub period. If given as adjusted, unadjusted
alternatives may be inferred.
back_stub : datetime, :green:`optional`
The unadjusted date for the back stub period. If given as adjusted, unadjusted
alternatives may be inferred.
See notes for combining ``stub``, ``front_stub`` and ``back_stub``
and any automatic stub inference.
roll : RollDay, int in [1, 31], str in {"eom", "imm", "som"}, :green:`optional`
The roll day of the schedule. If not given or not available in ``frequency`` will be
inferred for monthly frequency variants.
eom : bool, :green:`optional`
Use an end of month preference rather than regular rolls for ``roll`` inference. Set by
default. Not required if ``roll`` is defined.
modifier : Adjuster, str in {"NONE", "F", "MF", "P", "MP"}, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` used for adjusting unadjusted schedule dates
into adjusted dates. If given as string must define simple date rolling rules.
calendar : calendar, str, :green:`optional`
The business day calendar object to use. If string will call
:meth:`~rateslib.scheduling.get_calendar`.
payment_lag: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
a payment date. If given as integer will define the number of business days to
lag payments by.
payment_lag_exchange: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional payment date. If given as integer will define the number of business days to
lag payments by.
ex_div: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional dates, which may be used, for example by fixings schedules. If given as integer
will define the number of business days to lag dates by.
convention: str, :green:`optional (set by 'defaults')`
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
leg2_effective : datetime, :green:`optional (inherited from leg1)`
leg2_termination : datetime, str, :green:`optional (inherited from leg1)`
leg2_frequency : Frequency, str, :green:`optional (inherited from leg1)`
leg2_stub : StubInference, str, :green:`optional (inherited from leg1)`
leg2_front_stub : datetime, :green:`optional (inherited from leg1)`
leg2_back_stub : datetime, :green:`optional (inherited from leg1)`
leg2_roll : RollDay, int, str, :green:`optional (inherited from leg1)`
leg2_eom : bool, :green:`optional (inherited from leg1)`
leg2_modifier : Adjuster, str, :green:`optional (inherited from leg1)`
leg2_calendar : calendar, str, :green:`optional (inherited from leg1)`
leg2_payment_lag: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_payment_lag_exchange: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_ex_div: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_convention: str, :green:`optional (inherited from leg1)`
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the *Instrument* (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The initial leg notional, defined in units of *reference currency*.
amortization: float, Dual, Dual2, Variable, str, Amortization, :green:`optional (set as zero)`
Set a non-constant notional per *Period*. If a scalar value, adjusts the ``notional`` of
each successive period by that same value. Should have
sign equal to that of notional if the notional is to reduce towards zero.
leg2_notional : float, Dual, Dual2, Variable, :green:`optional (negatively inherited from leg1)`
leg2_amortization : float, Dual, Dual2, Variable, str, Amortization, :green:`optional (negatively inherited from leg1)`
.. note::
The following are **rate parameters**.
fixing_method: FloatFixingMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.FloatFixingMethod` describing the determination
of the floating rate for each period.
fixing_frequency: Frequency, str, :green:`optional (set by 'frequency' or '1B')`
The :class:`~rateslib.scheduling.Frequency` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given is assumed to match the
frequency of the schedule for an IBOR type ``fixing_method`` or '1B' if RFR type.
fixing_series: FloatRateSeries, str, :green:`optional (implied by other parameters)`
The :class:`~rateslib.data.fixings.FloatRateSeries` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given inherits attributes given
such as the ``calendar``, ``convention``, ``fixing_method`` etc.
float_spread: float, Dual, Dual2, Variable, :green:`optional (set as 0.0)`
The amount (in bps) added to the rate in each period rate determination.
spread_compound_method: SpreadCompoundMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.SpreadCompoundMethod` used in the calculation
of the period rate when combining a ``float_spread``. Used **only** with RFR type
``fixing_method``.
rate_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
See :ref:`Fixings `.
The value of the rate fixing. If a scalar, is used directly. If a string identifier, links
to the central ``fixings`` object and data loader.
leg2_fixing_method: FloatFixingMethod, str, :green:`optional (set by 'defaults')`
leg2_fixing_frequency: Frequency, str, :green:`optional (set by 'frequency' or '1B')`
leg2_fixing_series: FloatRateSeries, str, :green:`optional (implied by other parameters)`
leg2_float_spread: float, Dual, Dual2, Variable, :green:`optional (set as 0.0)`
leg2_spread_compound_method: SpreadCompoundMethod, str, :green:`optional (set by 'defaults')`
leg2_rate_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
.. note::
The following are **meta parameters**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
metric : str, :green:`optional` (set by 'defaults')
The pricing metric returned by :meth:`~rateslib.instruments.SBS.rate`.
spec: str, :green:`optional`
A collective group of parameters. See
:ref:`default argument specifications `.
""" # noqa: E501
_rate_scalar = 100.0
@property
def float_spread(self) -> DualTypes_:
"""The float spread parameter of the composited
:class:`~rateslib.legs.FloatLeg`."""
return self.leg1.float_spread
@float_spread.setter
def float_spread(self, value: DualTypes) -> None:
self.kwargs.leg1["float_spread"] = value
self.leg1.float_spread = value
@property
def leg2_float_spread(self) -> DualTypes_:
"""The float spread parameter of the composited
:class:`~rateslib.legs.FloatLeg`."""
return self.leg2.float_spread
@leg2_float_spread.setter
def leg2_float_spread(self, value: DualTypes) -> None:
self.kwargs.leg2["float_spread"] = value
self.leg2.float_spread = value
@property
def leg1(self) -> FloatLeg:
"""The :class:`~rateslib.legs.FloatLeg` of the *Instrument*."""
return self._leg1
@property
def leg2(self) -> FloatLeg:
"""The second :class:`~rateslib.legs.FloatLeg` of the *Instrument*."""
return self._leg2
@property
def legs(self) -> Sequence[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
def __init__(
self,
# scheduling
effective: datetime_ = NoInput(0),
termination: datetime | str_ = NoInput(0),
frequency: Frequency | str_ = NoInput(0),
*,
stub: str_ = NoInput(0),
front_stub: datetime_ = NoInput(0),
back_stub: datetime_ = NoInput(0),
roll: int | RollDay | str_ = NoInput(0),
eom: bool_ = NoInput(0),
modifier: str_ = NoInput(0),
calendar: CalInput = NoInput(0),
payment_lag: int_ = NoInput(0),
payment_lag_exchange: int_ = NoInput(0),
ex_div: int_ = NoInput(0),
convention: str_ = NoInput(0),
leg2_effective: datetime_ = NoInput(1),
leg2_termination: datetime | str_ = NoInput(1),
leg2_frequency: Frequency | str_ = NoInput(1),
leg2_stub: str_ = NoInput(1),
leg2_front_stub: datetime_ = NoInput(1),
leg2_back_stub: datetime_ = NoInput(1),
leg2_roll: int | RollDay | str_ = NoInput(1),
leg2_eom: bool_ = NoInput(1),
leg2_modifier: str_ = NoInput(1),
leg2_calendar: CalInput = NoInput(1),
leg2_payment_lag: int_ = NoInput(1),
leg2_payment_lag_exchange: int_ = NoInput(1),
leg2_ex_div: int_ = NoInput(1),
leg2_convention: str_ = NoInput(1),
# settlement parameters
currency: str_ = NoInput(0),
notional: float_ = NoInput(0),
amortization: float_ = NoInput(0),
leg2_notional: float_ = NoInput(-1),
leg2_amortization: float_ = NoInput(-1),
# rate parameters
float_spread: DualTypes_ = NoInput(0),
spread_compound_method: str_ = NoInput(0),
rate_fixings: LegFixings = NoInput(0),
fixing_method: str_ = NoInput(0),
fixing_frequency: Frequency | str_ = NoInput(0),
fixing_series: FloatRateSeries | str_ = NoInput(0),
leg2_float_spread: DualTypes_ = NoInput(0),
leg2_spread_compound_method: str_ = NoInput(0),
leg2_rate_fixings: LegFixings = NoInput(0),
leg2_fixing_method: str_ = NoInput(0),
leg2_fixing_frequency: Frequency | str_ = NoInput(0),
leg2_fixing_series: FloatRateSeries | str_ = NoInput(0),
# meta parameters
curves: CurvesT_ = NoInput(0),
spec: str_ = NoInput(0),
metric: str_ = NoInput(0),
) -> None:
user_args = dict(
effective=effective,
termination=termination,
frequency=frequency,
float_spread=float_spread,
spread_compound_method=spread_compound_method,
rate_fixings=rate_fixings,
fixing_method=fixing_method,
stub=stub,
front_stub=front_stub,
back_stub=back_stub,
roll=roll,
eom=eom,
modifier=modifier,
calendar=calendar,
payment_lag=payment_lag,
payment_lag_exchange=payment_lag_exchange,
ex_div=ex_div,
notional=notional,
currency=currency,
amortization=amortization,
convention=convention,
fixing_frequency=fixing_frequency,
fixing_series=fixing_series,
leg2_fixing_frequency=leg2_fixing_frequency,
leg2_fixing_series=leg2_fixing_series,
leg2_float_spread=leg2_float_spread,
leg2_spread_compound_method=leg2_spread_compound_method,
leg2_rate_fixings=leg2_rate_fixings,
leg2_fixing_method=leg2_fixing_method,
leg2_effective=leg2_effective,
leg2_termination=leg2_termination,
leg2_frequency=leg2_frequency,
leg2_stub=leg2_stub,
leg2_front_stub=leg2_front_stub,
leg2_back_stub=leg2_back_stub,
leg2_roll=leg2_roll,
leg2_eom=leg2_eom,
leg2_modifier=leg2_modifier,
leg2_calendar=leg2_calendar,
leg2_payment_lag=leg2_payment_lag,
leg2_payment_lag_exchange=leg2_payment_lag_exchange,
leg2_ex_div=leg2_ex_div,
leg2_notional=leg2_notional,
leg2_amortization=leg2_amortization,
leg2_convention=leg2_convention,
curves=self._parse_curves(curves),
metric=metric,
)
instrument_args = dict( # these are hard coded arguments specific to this instrument
leg2_currency=NoInput(1),
initial_exchange=False,
final_exchange=False,
leg2_initial_exchange=False,
leg2_final_exchange=False,
vol=_Vol(),
)
default_args = dict(
notional=defaults.notional,
payment_lag=defaults.payment_lag_specific[type(self).__name__],
payment_lag_exchange=defaults.payment_lag_exchange,
metric=defaults.metric[type(self).__name__],
)
self._kwargs = _KWArgs(
spec=spec,
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=["curves", "metric", "vol"],
)
self._leg1 = FloatLeg(**_convert_to_schedule_kwargs(self.kwargs.leg1, 1))
self._leg2 = FloatLeg(**_convert_to_schedule_kwargs(self.kwargs.leg2, 1))
self._legs = [self._leg1, self._leg2]
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
rate_curve = _get_curve("rate_curve", True, True, *c)
leg2_rate_curve = _get_curve("leg2_rate_curve", True, True, *c)
disc_curve = _get_curve("disc_curve", False, True, *c)
leg2_disc_curve = _get_curve("leg2_disc_curve", False, True, *c)
metric_: str = _drb(self.kwargs.meta["metric"], metric)
if metric_.lower() == "leg1":
leg2_npv: DualTypes = self.leg2.local_npv(
rate_curve=leg2_rate_curve,
disc_curve=leg2_disc_curve,
index_curve=NoInput(0),
settlement=settlement,
forward=forward,
)
return self.leg1.spread(
target_npv=-leg2_npv,
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=NoInput(0),
settlement=settlement,
forward=forward,
)
else: # metric == "leg2"
leg1_npv: DualTypes = self.leg1.local_npv(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=NoInput(0),
settlement=settlement,
forward=forward,
)
return self.leg2.spread(
target_npv=-leg1_npv,
rate_curve=leg2_rate_curve,
disc_curve=leg2_disc_curve,
index_curve=NoInput(0),
settlement=settlement,
forward=forward,
)
def spread(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
return self.rate(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
metric=metric,
)
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
self._set_pricing_mid(
curves=curves,
solver=solver,
settlement=settlement,
forward=forward,
)
return super().npv(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=local,
settlement=settlement,
forward=forward,
)
def _set_pricing_mid(
self,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> None:
# the test for an unpriced IRS is that its fixed rate is not set.
if self.kwargs.meta["metric"].lower() == "leg1":
if isinstance(self.kwargs.leg1["float_spread"], NoInput):
# set a fixed rate for the purpose of generic methods NPV will be zero.
mid_market_rate = self.rate(
curves=curves,
solver=solver,
settlement=settlement,
forward=forward,
)
self.leg1.float_spread = _dual_float(mid_market_rate)
else: # metric == "leg2"
if isinstance(self.kwargs.leg2["float_spread"], NoInput):
# set a fixed rate for the purpose of generic methods NPV will be zero.
mid_market_rate = self.rate(
curves=curves,
solver=solver,
settlement=settlement,
forward=forward,
)
self.leg2.float_spread = _dual_float(mid_market_rate)
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
An SBS has three curve requirements:
- a rate_curve
- a disc_curve
- a leg2_rate_curve
When given as only 1 element this curve is applied to all of the those components
When given as 2 elements this will raise an Exception.
"""
if isinstance(curves, NoInput):
return _Curves()
if isinstance(curves, dict):
return _Curves(
rate_curve=curves.get("rate_curve", NoInput(0)),
disc_curve=curves.get("disc_curve", NoInput(0)),
leg2_rate_curve=_drb(
curves.get("rate_curve", NoInput(0)),
curves.get("leg2_rate_curve", NoInput(0)),
),
leg2_disc_curve=_drb(
curves.get("disc_curve", NoInput(0)),
curves.get("leg2_disc_curve", NoInput(0)),
),
)
elif isinstance(curves, list | tuple):
if len(curves) == 2 or len(curves) == 1 or len(curves) > 4:
raise TypeError(f"Number of `curves` for an SBS must be 3 or 4. Got {len(curves)}.")
elif len(curves) == 3:
return _Curves(
rate_curve=curves[0],
disc_curve=curves[1],
leg2_rate_curve=curves[2],
leg2_disc_curve=curves[1],
)
else: # == 4
return _Curves(
rate_curve=curves[0],
disc_curve=curves[1],
leg2_rate_curve=curves[2],
leg2_disc_curve=curves[3],
)
elif isinstance(curves, _Curves):
return curves
else: # `curves` is just a single input
raise TypeError("Number of `curves` for an SBS must be 3 or 4. Got 1.")
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return super()._cashflows_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return self._local_analytic_rate_fixings_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
)
================================================
FILE: python/rateslib/instruments/spread.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from collections.abc import Sequence
from typing import TYPE_CHECKING, NoReturn
from pandas import DataFrame
from rateslib.enums.generics import NoInput
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.pricing import (
_get_fx_maybe_from_solver,
)
from rateslib.periods.utils import _maybe_fx_converted
if TYPE_CHECKING:
from rateslib.local_types import (
Any,
CurvesT_,
DualTypes,
FXForwards_,
Solver_,
VolT_,
datetime_,
str_,
)
class Spread(_BaseInstrument):
"""
A *Spread* of :class:`~rateslib.instruments.protocols._BaseInstrument`.
.. rubric:: Examples
The following initialises a purchased bond asset swap *Instrument* whose *rate* is
the difference between the *IRS* rate and the *fixed rate bond* YTM.
.. ipython:: python
:suppress:
from rateslib.instruments import Spread, IRS, FixedRateBond
from datetime import datetime as dt
.. ipython:: python
irs = IRS(dt(2025, 12, 1), dt(2030, 12, 7), notional=1e6, spec="gbp_irs", curves=["uk_sonia"])
ukt = FixedRateBond(dt(2024, 12, 7), dt(2030, 12, 7), notional=-1e6, fixed_rate=4.75, spec="uk_gb", metric="ytm", curves=["uk_gb"])
asw = Spread(ukt, irs)
asw.cashflows()
.. rubric:: Pricing
Each :class:`~rateslib.instruments.protocols._BaseInstrument` should have
its own ``curves`` and ``vol`` objects set at its initialisation, according to the
documentation for that *Instrument*. For the pricing methods ``curves`` and ``vol`` objects,
these can be universally passed to each *Instrument* but in many cases that would be
technically impossible since each *Instrument* might require difference pricing objects.
In the above example a bond *Curve* and a swap *Curve* are required separately. For a *Spread*
of two *IRS* in the same currency this would be possible, however.
Parameters
----------
instrument1 : _BaseInstrument
The *Instrument* with the shortest maturity.
instrument2 : _BaseInstrument
The *Instrument* with the longest maturity.
Notes
-----
A *Spread* is just a container for two
:class:`~rateslib.instruments.protocols._BaseInstrument`, with an overload
for the :meth:`~rateslib.instruments.Spread.rate` method to calculate the
longer rate minus the shorter (whatever metric is in use for each *Instrument*), which allows
it to offer a lot of flexibility in *pseudo Instrument* creation.
""" # noqa: E501
_instruments: Sequence[_BaseInstrument]
_rate_scalar = 100.0
@property
def instruments(self) -> Sequence[_BaseInstrument]:
"""The *Instruments* contained within the *Portfolio*."""
return self._instruments
def __init__(
self,
instrument1: _BaseInstrument,
instrument2: _BaseInstrument,
) -> None:
self._instruments = [instrument1, instrument2]
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
"""
Return the NPV of the *Portfolio* by summing individual *Instrument* NPVs.
"""
local_npv = self._npv_single_core(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
)
if not local:
single_value: DualTypes = 0.0
for k, v in local_npv.items():
single_value += _maybe_fx_converted(
value=v,
currency=k,
fx=_get_fx_maybe_from_solver(fx=fx, solver=solver),
base=base,
forward=forward,
)
return single_value
else:
return local_npv
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return self._local_analytic_rate_fixings_from_instruments(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
)
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return self._cashflows_from_instruments(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
base=base,
)
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
rates: list[DualTypes] = []
for inst in self.instruments:
rates.append(
inst.rate(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
metric=metric,
)
)
return (rates[1] - rates[0]) * 100.0
def analytic_delta(self, *args: Any, **kwargs: Any) -> NoReturn:
raise NotImplementedError("`analytic_delta` is not defined for Portfolio.")
================================================
FILE: python/rateslib/instruments/stir_future.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.kwargs import _convert_to_schedule_kwargs, _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_get_fx_maybe_from_solver,
_parse_curves,
_Vol,
)
from rateslib.legs import FixedLeg, FloatLeg
from rateslib.periods.utils import (
_maybe_fx_converted,
_maybe_local,
)
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CalInput,
CurvesT_,
DataFrame,
DualTypes,
DualTypes_,
FixingsRates_,
FloatRateSeries,
Frequency,
FXForwards_,
RollDay,
Solver_,
VolT_,
_BaseLeg,
bool_,
datetime,
datetime_,
int_,
str_,
)
class STIRFuture(_BaseInstrument):
"""
A *short term interest rate (STIR) future* compositing a
:class:`~rateslib.legs.FixedLeg` and :class:`~rateslib.legs.FloatLeg`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.instruments import STIRFuture
from datetime import datetime as dt
.. ipython:: python
stir = STIRFuture(
effective=dt(2022, 3, 16),
termination=dt(2022, 6, 15),
spec="usd_stir",
price=99.50,
contracts=10,
)
stir.cashflows()
.. rubric:: Pricing
A *STIRFuture* requires a *disc curve* on both legs (which should be the same *Curve*) and a
*leg2 rate curve* to forecast rates on the *FloatLeg*. The following input formats are
allowed:
.. code-block:: python
curves = curve | [curve] # a single curve is repeated for all required curves
curves = [rate_curve, disc_curve] # two curves are applied in the given order
curves = [None, disc_curve, rate_curve, disc_curve] # four curves applied to each leg
curves = {"leg2_rate_curve": rate_curve, "disc_curve": disc_curve} # dict form is explicit
The available pricing ``metric`` are in *{'rate', 'price'}* which will return the future's
market price in the respective terms.
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **scheduling** parameters.
effective : datetime, :red:`required`
The unadjusted effective date. If given as adjusted, unadjusted alternatives may be
inferred.
termination : datetime, str, :red:`required`
The unadjusted termination date. If given as adjusted, unadjusted alternatives may be
inferred. If given as string tenor will be calculated from ``effective``.
frequency : Frequency, str, :red:`required`
The frequency of the schedule.
If given as string will derive a :class:`~rateslib.scheduling.Frequency` aligning with:
monthly ("M"), quarterly ("Q"), semi-annually ("S"), annually("A") or zero-coupon ("Z"), or
a set number of calendar or business days ("_D", "_B"), weeks ("_W"), months ("_M") or
years ("_Y").
Where required, the :class:`~rateslib.scheduling.RollDay` is derived as per ``roll``
and business day calendar as per ``calendar``.
roll : RollDay, int in [1, 31], str in {"eom", "imm", "som"}, :green:`optional`
The roll day of the schedule. If not given or not available in ``frequency`` will be
inferred for monthly frequency variants.
eom : bool, :green:`optional`
Use an end of month preference rather than regular rolls for ``roll`` inference. Set by
default. Not required if ``roll`` is defined.
modifier : Adjuster, str in {"NONE", "F", "MF", "P", "MP"}, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` used for adjusting unadjusted schedule dates
into adjusted dates. If given as string must define simple date rolling rules.
calendar : calendar, str, :green:`optional`
The business day calendar object to use. If string will call
:meth:`~rateslib.scheduling.get_calendar`.
payment_lag: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
a payment date. If given as integer will define the number of business days to
lag payments by.
ex_div: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional dates, which may be used, for example by fixings schedules. If given as integer
will define the number of business days to lag dates by.
convention: str, :green:`optional (set by 'defaults')`
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
.. note::
The following define generalised **settlement** parameters.
contracts : int
The number of traded contracts.
nominal : float
The nominal value of the contract. See **Notes**.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the *Instrument* (3-digit code).
.. note::
The following are **rate parameters**.
price : float
The traded price of the future. Defined as 100 minus the fixed rate.
leg2_fixing_method: FloatFixingMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.FloatFixingMethod` describing the determination
of the floating rate for each period.
leg2_fixing_frequency: Frequency, str, :green:`optional (set by 'frequency' or '1B')`
The :class:`~rateslib.scheduling.Frequency` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given is assumed to match the
frequency of the schedule for an IBOR type ``fixing_method`` or '1B' if RFR type.
leg2_fixing_series: FloatRateSeries, str, :green:`optional (implied by other parameters)`
The :class:`~rateslib.data.fixings.FloatRateSeries` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given inherits attributes given
such as the ``calendar``, ``convention``, ``fixing_method`` etc.
leg2_float_spread: float, Dual, Dual2, Variable, :green:`optional (set as 0.0)`
The amount (in bps) added to the rate in each period rate determination.
leg2_spread_compound_method: SpreadCompoundMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.SpreadCompoundMethod` used in the calculation
of the period rate when combining a ``float_spread``. Used **only** with RFR type
``fixing_method``.
leg2_rate_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
See :ref:`Fixings `.
The value of the rate fixing. If a scalar, is used directly. If a string identifier, links
to the central ``fixings`` object and data loader.
.. note::
The following are **meta parameters**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
metric : str, :green:`optional` (set by 'defaults')
The pricing metric returned by :meth:`~rateslib.instruments.STIRFuture.rate`.
spec: str, :green:`optional`
A collective group of parameters. See
:ref:`default argument specifications `.
Notes
-----
A *STIRFuture* is modelled as a single period *IRS* whose payment date is overloaded to always
result in immediate settlement, thus replicating the behaviour of traditional exchanges.
The immediate date is derived from the discount curve used during pricing.
The ``nominal`` for one contract should be set according to the ``convention`` so that the
correct amount of risk is allocated is to 1bp. For example, for a CME SOFR 3M future, setting
a convention of *ActActICMA* yields a DCF of 0.25 and therefore a ``nominal`` of 1mm USD
yields a 1bp sensitivity of 25 USD for any contract, as per the CME contract specification. The
``leg2_fixing_series`` argument allows full specification of the floating rate index
conventions.
"""
_rate_scalar = 1.0
@property
def fixed_rate(self) -> DualTypes_:
"""The fixed rate parameter of the composited
:class:`~rateslib.legs.FixedLeg`."""
return self.leg1.fixed_rate
@fixed_rate.setter
def fixed_rate(self, value: DualTypes_) -> None:
self.kwargs.leg1["fixed_rate"] = value
self.leg1.fixed_rate = value
@property
def leg1(self) -> FixedLeg:
"""The :class:`~rateslib.legs.FixedLeg` of the *Instrument*."""
return self._leg1
@property
def leg2(self) -> FloatLeg:
"""The :class:`~rateslib.legs.FloatLeg` of the *Instrument*."""
return self._leg2
@property
def legs(self) -> list[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
An STIRFuture has two curve requirements: a leg2_rate_curve and a disc_curve used by
both legs.
When given as only 1 element this curve is applied to all of the those components
When given as 2 elements the first is treated as the rate curve and the 2nd as disc curve.
"""
if isinstance(curves, NoInput):
return _Curves()
elif isinstance(curves, list | tuple):
if len(curves) == 2:
return _Curves(
leg2_rate_curve=curves[0],
disc_curve=curves[1],
leg2_disc_curve=curves[1],
)
elif len(curves) == 1:
return _Curves(
leg2_rate_curve=curves[0],
disc_curve=curves[0],
leg2_disc_curve=curves[0],
)
elif len(curves) == 4:
return _Curves(
rate_curve=curves[0],
disc_curve=curves[1],
leg2_rate_curve=curves[2],
leg2_disc_curve=curves[3],
)
else:
raise ValueError(
f"{type(self).__name__} requires only 2 curve types. Got {len(curves)}."
)
elif isinstance(curves, dict):
return _Curves(
rate_curve=curves.get("rate_curve", NoInput(0)),
disc_curve=curves.get("disc_curve", NoInput(0)),
leg2_rate_curve=_drb(
curves.get("rate_curve", NoInput(0)),
curves.get("leg2_rate_curve", NoInput(0)),
),
leg2_disc_curve=_drb(
curves.get("disc_curve", NoInput(0)),
curves.get("leg2_disc_curve", NoInput(0)),
),
)
elif isinstance(curves, _Curves):
return curves
else: # `curves` is just a single input which is copied across all curves
return _Curves(
leg2_rate_curve=curves, # type: ignore[arg-type]
disc_curve=curves, # type: ignore[arg-type]
leg2_disc_curve=curves, # type: ignore[arg-type]
)
def __init__(
self,
# scheduling
effective: datetime_ = NoInput(0),
termination: datetime | str_ = NoInput(0),
frequency: Frequency | str_ = NoInput(0),
*,
roll: int | RollDay | str_ = NoInput(0),
eom: bool_ = NoInput(0),
modifier: str_ = NoInput(0),
calendar: CalInput = NoInput(0),
payment_lag: int_ = NoInput(0),
ex_div: int_ = NoInput(0),
convention: str_ = NoInput(0),
# settlement parameters
currency: str_ = NoInput(0),
contracts: int = 1,
nominal: float | NoInput = NoInput(0),
# rate parameters
price: DualTypes_ = NoInput(0),
leg2_float_spread: DualTypes_ = NoInput(0),
leg2_spread_compound_method: str_ = NoInput(0),
leg2_rate_fixings: FixingsRates_ = NoInput(0),
leg2_fixing_method: str_ = NoInput(0),
leg2_fixing_frequency: Frequency | str_ = NoInput(0),
leg2_fixing_series: FloatRateSeries | str_ = NoInput(0),
# meta parameters
curves: CurvesT_ = NoInput(0),
spec: str_ = NoInput(0),
metric: str_ = NoInput(0),
) -> None:
user_args = dict(
# scheduling
effective=effective,
termination=termination,
frequency=frequency,
roll=roll,
eom=eom,
modifier=modifier,
calendar=calendar,
payment_lag=payment_lag,
ex_div=ex_div,
convention=convention,
# settlement
currency=currency,
nominal=nominal,
contracts=contracts,
# rate
price=price,
leg2_float_spread=leg2_float_spread,
leg2_spread_compound_method=leg2_spread_compound_method,
leg2_rate_fixings=leg2_rate_fixings,
leg2_fixing_method=leg2_fixing_method,
leg2_fixing_series=leg2_fixing_series,
leg2_fixing_frequency=leg2_fixing_frequency,
# meta
curves=self._parse_curves(curves),
metric=metric,
)
instrument_args = dict(
leg2_effective=NoInput.inherit,
leg2_termination=NoInput.inherit,
leg2_frequency=NoInput.inherit,
leg2_roll=NoInput.inherit,
leg2_eom=NoInput.inherit,
leg2_modifier=NoInput.inherit,
leg2_calendar=NoInput.inherit,
leg2_payment_lag=NoInput.inherit,
leg2_ex_div=NoInput.inherit,
leg2_convention=NoInput.inherit,
leg2_currency=NoInput.inherit,
fixed_rate=NoInput(0) if isinstance(price, NoInput) else 100 - price,
vol=_Vol(),
)
default_args = dict(
payment_lag=defaults.payment_lag_specific[type(self).__name__],
nominal=defaults.notional,
metric="rate",
)
self._kwargs = _KWArgs(
spec=spec,
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=["curves", "contracts", "nominal", "price", "metric", "vol"],
)
self._kwargs.leg1["notional"] = -self.kwargs.meta["nominal"] * self.kwargs.meta["contracts"]
self._kwargs.leg2["notional"] = self.kwargs.meta["nominal"] * self.kwargs.meta["contracts"]
self._leg1 = FixedLeg(**_convert_to_schedule_kwargs(self.kwargs.leg1, 1))
self._leg2 = FloatLeg(**_convert_to_schedule_kwargs(self.kwargs.leg2, 1))
self._legs = [self.leg1, self.leg2]
if self._leg1.schedule.n_periods != 1:
raise ValueError(
"The scheduling parameters of the STIRFuture must define exactly "
f"one regular period. Got '{self.leg1.schedule.n_periods}'."
)
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
self._set_pricing_mid(curves=curves, solver=solver, settlement=settlement, forward=forward)
local_npv = super().npv( # type: ignore[index]
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=True,
settlement=settlement,
forward=forward,
)[self.leg1.settlement_params.currency]
c = _parse_curves(self, curves, solver)
disc_curve = _get_curve("disc_curve", False, False, *c)
npv_immediate = local_npv / disc_curve[self.leg1.settlement_params.payment]
if not local:
return _maybe_fx_converted(
value=npv_immediate,
currency=self.leg1.settlement_params.currency,
fx=_get_fx_maybe_from_solver(solver=solver, fx=fx),
base=_drb(self.leg1.settlement_params.currency, base),
forward=forward,
)
else:
return {self.leg1.settlement_params.currency: npv_immediate}
def _set_pricing_mid(
self,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> None:
# the test for an unpriced IRS is that its fixed rate is not set.
if isinstance(self.kwargs.leg1["fixed_rate"], NoInput):
# set a fixed rate for the purpose of generic methods NPV will be zero.
mid_market_rate = self.rate(
curves=curves,
solver=solver,
settlement=settlement,
forward=forward,
metric="rate",
)
self.leg1.fixed_rate = _dual_float(mid_market_rate)
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
leg2_rate_curve = _get_curve("leg2_rate_curve", True, True, *c)
metric_ = _drb(self.kwargs.meta["metric"], metric).lower()
rate = self.leg2._regular_periods[0].rate(rate_curve=leg2_rate_curve)
if metric_ == "price":
return 100 - rate
elif metric_ == "rate":
return rate
else:
raise ValueError("`metric` must be in {'rate', 'price'}.")
def analytic_delta(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
leg: int = 1,
) -> DualTypes | dict[str, DualTypes]:
unadjusted_local_analytic_delta = super().analytic_delta( # type: ignore[index]
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=True,
settlement=settlement,
forward=forward,
leg=leg,
)[self.leg1.settlement_params.currency]
c = _parse_curves(self, curves, solver)
prefix = "" if leg == 1 else "leg2_"
disc_curve = _get_curve(f"{prefix}disc_curve", False, False, *c)
adjusted_local_analytic_delta = (
unadjusted_local_analytic_delta / disc_curve[self.leg1.settlement_params.payment]
)
return _maybe_local(
value=adjusted_local_analytic_delta,
local=local,
currency=self.leg1.settlement_params.currency,
fx=_get_fx_maybe_from_solver(solver=solver, fx=fx),
base=base,
forward=forward,
)
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
df = self._local_analytic_rate_fixings_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
)
c = _parse_curves(self, curves, solver)
disc_curve = _get_curve("leg2_disc_curve", False, False, *c)
return df / disc_curve[self.leg1.settlement_params.payment] # type: ignore[operator]
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
df = super()._cashflows_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
df[defaults.headers["payment"]] = None
c = _parse_curves(self, curves, solver)
disc_curve = _get_curve("disc_curve", False, True, *c)
if isinstance(disc_curve, NoInput):
pass
else:
df[defaults.headers["payment"]] = disc_curve.nodes.initial
df[defaults.headers["npv"]] = df[defaults.headers["npv"]] / df[defaults.headers["df"]]
df[defaults.headers["npv_fx"]] = (
df[defaults.headers["npv_fx"]] / df[defaults.headers["df"]]
)
df[defaults.headers["df"]] = 1.0
return df
================================================
FILE: python/rateslib/instruments/value.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, NoReturn
from rateslib.curves.utils import _CurveType
from rateslib.dual.utils import dual_log
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import IndexMethod
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.kwargs import _KWArgs
from rateslib.instruments.protocols.pricing import _Curves, _get_curve, _parse_curves
from rateslib.scheduling import dcf
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
CurvesT_,
DualTypes,
FXForwards_,
Solver_,
VolT_,
datetime,
datetime_,
str_,
)
class Value(_BaseInstrument):
"""
A pseudo *Instrument* used to calibrate a *Curve* within a :class:`~rateslib.solver.Solver`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.instruments import Value
from datetime import datetime as dt
from rateslib import Curve, Solver
The below :class:`~rateslib.curves.Curve` is solved directly
from a calibrating DF value on 1st Nov 2022.
.. ipython:: python
val = Value(dt(2022, 11, 1), curves=["v"], metric="curve_value")
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 1.0}, id="v")
solver = Solver(curves=[curve], instruments=[val], s=[0.99])
curve[dt(2022, 11, 1)]
.. rubric:: Pricing
A *Value* requires, and will calibrate, just one *Curve*. This *Curve*, appropriating
a *rate curve* or an *index curve*, is dependent upon the ``metric``.
Allowable inputs are:
.. code-block:: python
curves = curve | [curve] # a single curve is repeated for all required curves
curves = {"rate_curve": rate_curve} | {"index_curve": index_curve} # dict form is explicit
The various *rate* ``metric`` that can be calculated for a *Curve* are as follows;
- *'curve_value'*: returns the discount factor or a value from a DF-based or value-based
*rate curve*.
- *'index_value'*: returns a daily interpolated index value using an index lag derived from the
*index curve*.
- *'cc_zero_rate'*: returns a continuously compounded zero rate to the provided *effective*
date from a DF based *rate curve*.
- *'o/n_rate'*: returns a 1 calendar day rate starting on the effective date with the provided
*convention* from a *rate curve*.
.. role:: red
.. role:: green
Parameters
----------
effective : datetime, :red:`required`
The datetime index for which the `rate`, which is just the curve value, is
returned.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
metric : str, :green:`optional` (set as 'curve_value')
The pricing metric returned by :meth:`~rateslib.instruments.Value.rate`. See
**Pricing**.
"""
_rate_scalars = {
"curve_value": 100.0,
"index_value": 100.0,
"cc_zero_rate": 1.0,
"o/n_rate": 1.0,
}
def __init__(
self,
effective: datetime,
*,
metric: str_ = NoInput(0),
curves: CurvesT_ = NoInput(0),
) -> None:
user_args = dict(
effective=effective,
curves=self._parse_curves(curves),
metric=metric,
)
default_args = dict(metric="curve_value")
self._kwargs = _KWArgs(
spec=NoInput(0),
user_args=user_args,
default_args=default_args,
meta_args=["curves", "metric"],
)
self._rate_scalar = self._rate_scalars.get(self.kwargs.meta["metric"], 1.0)
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
A Value requires only one 1 curve, which is set as all element values
"""
if isinstance(curves, NoInput):
return _Curves()
elif isinstance(curves, dict):
return _Curves(
rate_curve=curves.get("rate_curve", NoInput(0)),
index_curve=curves.get("index_curve", NoInput(0)),
disc_curve=curves.get("disc_curve", NoInput(0)),
)
elif isinstance(curves, list | tuple):
if len(curves) != 1:
raise ValueError(
f"{type(self).__name__} requires only 1 curve types. Got {len(curves)}."
)
else:
return _Curves(
rate_curve=curves[0],
disc_curve=curves[0],
index_curve=curves[0],
)
elif isinstance(curves, _Curves):
return curves
else: # `curves` is just a single input
return _Curves(
rate_curve=curves, # type: ignore[arg-type]
disc_curve=curves, # type: ignore[arg-type]
index_curve=curves, # type: ignore[arg-type]
)
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
metric_ = _drb(self.kwargs.meta["metric"], metric).lower()
effective: datetime = self.kwargs.leg1["effective"]
if metric_ == "curve_value":
curve = _get_curve("rate_curve", False, False, *c)
ret: DualTypes = curve[effective]
elif metric_ == "cc_zero_rate":
curve = _get_curve("rate_curve", False, False, *c)
if curve._base_type != _CurveType.dfs:
raise TypeError(
"`curve` used with `metric`='cc_zero_rate' must be discount factor based.",
)
dcf_ = dcf(start=curve.nodes.initial, end=effective, convention=curve.meta.convention)
ret = (dual_log(curve[effective]) / -dcf_) * 100
elif metric_ == "index_value":
curve = _get_curve("index_curve", False, False, *c)
ret = curve.index_value(
index_date=effective,
index_lag=curve.meta.index_lag,
index_method=IndexMethod.Daily,
)
elif metric_ == "o/n_rate":
curve = _get_curve("rate_curve", False, False, *c)
ret = curve.rate(effective, "1D") # type: ignore[assignment]
else:
raise ValueError(
"`metric`must be in {'curve_value', 'cc_zero_rate', 'index_value', 'o/n_rate'}."
)
return ret
def npv(self, *args: Any, **kwargs: Any) -> NoReturn:
raise NotImplementedError("`Value` instrument has no concept of NPV.")
def cashflows(self, *args: Any, **kwargs: Any) -> NoReturn:
raise NotImplementedError("`Value` instrument has no concept of cashflows.")
def analytic_delta(self, *args: Any, **kwargs: Any) -> NoReturn:
raise NotImplementedError("`Value` instrument has no concept of analytic delta.")
================================================
FILE: python/rateslib/instruments/xcs.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.data.fixings import _get_fx_index
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import LegMtm
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.kwargs import _convert_to_schedule_kwargs, _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_get_fx_forwards_maybe_from_solver,
_parse_curves,
_Vol,
)
from rateslib.legs import FixedLeg, FloatLeg
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CalInput,
CurvesT_,
DataFrame,
DualTypes,
DualTypes_,
FixingsRates_,
FloatRateSeries,
Frequency,
FXForwards_,
FXIndex,
LegFixings,
RollDay,
Sequence,
Solver_,
VolT_,
_BaseLeg,
bool_,
datetime,
datetime_,
float_,
int_,
str_,
)
class XCS(_BaseInstrument):
"""
A *cross-currency swap (XCS)* composing either
:class:`~rateslib.legs.FixedLeg`
and/or :class:`~rateslib.legs.FloatLeg` in different currencies.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.instruments import XCS
from datetime import datetime as dt
from rateslib import fixings
from pandas import Series
.. ipython:: python
fixings.add("WMR_LDN11AM_EURUSD", Series(index=[dt(2025, 4, 4)], data=[1.175]))
xcs = XCS(
effective=dt(2025, 1, 8),
termination="6m",
spec="eurusd_xcs",
notional=5e6,
leg2_fx_fixings=(1.15, "WMR_LDN11AM"),
leg2_mtm=True,
)
xcs.cashflows()
.. ipython:: python
:suppress:
fixings.pop("WMR_LDN11AM_EURUSD")
.. rubric:: Pricing
The methods of a *XCS* require an :class:`~rateslib.fx.FXForwards` object for ``fx`` .
They also require a *disc curve* and a *leg2 disc curve* which are appropriate curves for the
relevant currency, typically under the same collateral. For *FloatLegs*, an additional
*rate curve* and *leg2 rate curve* are required. The following input
formats are allowed:
.. code-block:: python
curves = [rate_curve, disc_curve, leg2_rate_curve, leg2_disc_curve] # four curves
curves = { # dict form is explicit
"rate_curve": rate_curve,
"disc_curve": disc_curve,
"leg2_rate_curve": leg2_rate_curve,
"leg2_disc_curve": leg2_disc_curve,
}
The available pricing ``metric`` are in *{'leg1', 'leg2'}* which will return a *float spread*
or a *fixed rate* on the specified leg, for the appropriate *Leg* type.
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **scheduling** parameters.
effective : datetime, :red:`required`
The unadjusted effective date. If given as adjusted, unadjusted alternatives may be
inferred.
termination : datetime, str, :red:`required`
The unadjusted termination date. If given as adjusted, unadjusted alternatives may be
inferred. If given as string tenor will be calculated from ``effective``.
frequency : Frequency, str, :red:`required`
The frequency of the schedule.
If given as string will derive a :class:`~rateslib.scheduling.Frequency` aligning with:
monthly ("M"), quarterly ("Q"), semi-annually ("S"), annually("A") or zero-coupon ("Z"), or
a set number of calendar or business days ("_D", "_B"), weeks ("_W"), months ("_M") or
years ("_Y").
Where required, the :class:`~rateslib.scheduling.RollDay` is derived as per ``roll``
and business day calendar as per ``calendar``.
stub : StubInference, str in {"ShortFront", "LongFront", "ShortBack", "LongBack"}, :green:`optional`
The stub type used if stub inference is required. If given as string will derive a
:class:`~rateslib.scheduling.StubInference`.
front_stub : datetime, :green:`optional`
The unadjusted date for the start stub period. If given as adjusted, unadjusted
alternatives may be inferred.
back_stub : datetime, :green:`optional`
The unadjusted date for the back stub period. If given as adjusted, unadjusted
alternatives may be inferred.
See notes for combining ``stub``, ``front_stub`` and ``back_stub``
and any automatic stub inference.
roll : RollDay, int in [1, 31], str in {"eom", "imm", "som"}, :green:`optional`
The roll day of the schedule. If not given or not available in ``frequency`` will be
inferred for monthly frequency variants.
eom : bool, :green:`optional`
Use an end of month preference rather than regular rolls for ``roll`` inference. Set by
default. Not required if ``roll`` is defined.
modifier : Adjuster, str in {"NONE", "F", "MF", "P", "MP"}, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` used for adjusting unadjusted schedule dates
into adjusted dates. If given as string must define simple date rolling rules.
calendar : calendar, str, :green:`optional`
The business day calendar object to use. If string will call
:meth:`~rateslib.scheduling.get_calendar`.
payment_lag: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
a payment date. If given as integer will define the number of business days to
lag payments by.
payment_lag_exchange: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional payment date. If given as integer will define the number of business days to
lag payments by.
ex_div: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional dates, which may be used, for example by fixings schedules. If given as integer
will define the number of business days to lag dates by.
convention: str, :green:`optional (set by 'defaults')`
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
leg2_effective : datetime, :green:`optional (inherited from leg1)`
leg2_termination : datetime, str, :green:`optional (inherited from leg1)`
leg2_frequency : Frequency, str, :green:`optional (inherited from leg1)`
leg2_stub : StubInference, str, :green:`optional (inherited from leg1)`
leg2_front_stub : datetime, :green:`optional (inherited from leg1)`
leg2_back_stub : datetime, :green:`optional (inherited from leg1)`
leg2_roll : RollDay, int, str, :green:`optional (inherited from leg1)`
leg2_eom : bool, :green:`optional (inherited from leg1)`
leg2_modifier : Adjuster, str, :green:`optional (inherited from leg1)`
leg2_calendar : calendar, str, :green:`optional (inherited from leg1)`
leg2_payment_lag: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_payment_lag_exchange: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_ex_div: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_convention: str, :green:`optional (inherited from leg1)`
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of leg1 (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set from 'leg2_notional' or 'defaults' )`
The initial leg1 notional, defined in units of the currency of the leg. Only one
of ``notional`` and ``leg2_notional`` can be given. The alternate leg notional is derived
via non-deliverability :class:`~rateslib.data.fixings.FXFixing`.
amortization: float, Dual, Dual2, Variable, str, Amortization, :green:`optional (set as zero)`
Set a non-constant notional per *Period*. If a scalar value, adjusts the ``notional`` of
each successive period by that same value. Should have
sign equal to that of notional if the notional is to reduce towards zero.
pair: FXIndex, str, :red:`required`
The :class:`~rateslib.data.fixings.FXIndex` implying the *leg2 currency*.
Must include ``currency`` as either LHS or RHS.
leg2_notional : float, Dual, Dual2, Variable, :green:`optional (negatively inherited from leg1)`
leg2_amortization : float, Dual, Dual2, Variable, str, Amortization, :green:`optional (negatively inherited from leg1)`
.. note::
The following are **rate parameters**.
fixed : bool, :green:`optional (set as False)`
Whether leg1 is a :class:`~rateslib.legs.FixedLeg` or a :class:`~rateslib.legs.FloatLeg`.
fixed_rate : float or None
The fixed rate applied to the :class:`~rateslib.legs.FixedLeg`. If `None`
will be set to mid-market when curves are provided.
fixing_method: FloatFixingMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.FloatFixingMethod` describing the determination
of the floating rate for each period.
fixing_frequency: Frequency, str, :green:`optional (set by 'frequency' or '1B')`
The :class:`~rateslib.scheduling.Frequency` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given is assumed to match the
frequency of the schedule for an IBOR type ``fixing_method`` or '1B' if RFR type.
fixing_series: FloatRateSeries, str, :green:`optional (implied by other parameters)`
The :class:`~rateslib.data.fixings.FloatRateSeries` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given inherits attributes given
such as the ``calendar``, ``convention``, ``fixing_method`` etc.
float_spread: float, Dual, Dual2, Variable, :green:`optional (set as 0.0)`
The amount (in bps) added to the rate in each period rate determination.
spread_compound_method: SpreadCompoundMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.SpreadCompoundMethod` used in the calculation
of the period rate when combining a ``float_spread``. Used **only** with RFR type
``fixing_method``.
rate_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
See :ref:`Fixings `.
The value of the rate fixing. If a scalar, is used directly. If a string identifier, links
to the central ``fixings`` object and data loader.
leg2_fixed : bool, :green:`optional (set as False)`
leg2_fixed_rate : float or None
leg2_fixing_method: FloatFixingMethod, str, :green:`optional (set by 'defaults')`
leg2_fixing_frequency: Frequency, str, :green:`optional (set by 'frequency' or '1B')`
leg2_fixing_series: FloatRateSeries, str, :green:`optional (implied by other parameters)`
leg2_float_spread: float, Dual, Dual2, Variable, :green:`optional (set as 0.0)`
leg2_spread_compound_method: SpreadCompoundMethod, str, :green:`optional (set by 'defaults')`
leg2_rate_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
.. note::
The following are the cross-currency **non-deliverable** parameters.
fx_fixings: float, Dual, Dual2, Variable, Series, str, 2-tuple or list, :green:`optional`
The value of the :class:`~rateslib.data.fixings.FXFixing` for each *Period* according
to non-deliverability. This can only be provided if ``leg2_notional`` is given. The
currency pair is expressed in direction 'currency:leg2_currency'.
mtm: bool, :green:`optional (set to False)`
Define the *XCS* is mark-to-market on leg1. Only one leg can be mark-to-market.
leg2_fx_fixings:
This can only be provided if ``notional`` is given. The
currency pair is expressed in direction 'currency:leg2_currency'.
leg2_mtm: bool, :green:`optional (set to False)`
.. note::
The following are **meta parameters**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
spec: str, :green:`optional`
A collective group of parameters. See
:ref:`default argument specifications `.
metric: str, :green:`optional (set as 'leg1')`
Determines which calculation metric to return by default when using the
:meth:`~rateslib.instruments.XCS.rate` method.
Notes
-----
A *XCS* is a flexible instrument.
- Each *Leg* can either be ``fixed`` or, rather, floating.
- One *Leg* can be ``mtm`` or both legs can be non-mtm.
*Legs* are handled by using the mechanics of non-deliverability. If a *Leg* is set to be
*mtm* then the ``notional`` on the opposing *Leg* **must** be specified: this is because
a *mtm-Leg* has a varying notional which must be derived from some fixed reference notional.
Values should always be expressed in currency units of that *Leg* itself.
- ``fx_fixings`` are required on the *Leg* which does not specify a *notional*. This is
true either for a *mtm* or *non-mtm Leg*. It is common for the initial rate of exchange
to be agreed at execution time, meaning the most common form of entry for ``fx_fixings`` is
as a tuple: an arbitrary execution rate and the fixing series, e.g. *(1.224, "WMR_LDN11AM")*.
Fixings should always be expressed according to the direction in ``pair``.
- ``amortization`` can be added in the normal way on the same *Leg* as a *notional* is
specified.
- The pricing ``metric`` can specify which *Leg* a mid-market price is returned by the
:meth:`~rateslib.instruments.XCS.rate` method.
**Is it USD/CAD or CAD/USD or EUR/USD or USD/EUR?**
Actually any *XCS* can be constructed systematically:
i. Set the FX ``pair`` that is standard for the fixings, e.g. *'USDCAD'*.
ii. Set the ``currency`` required on *Leg1* and set the ``mtm`` or ``leg2_mtm``
flag respectively if required.
iii. Set the ``notional`` or ``leg2_notional`` as necessary or chosen.
iv. Set the ``fx_fixings`` or ``leg2_fx_fixings`` as necessary.
v. Set the ``metric``.
**For example**, we initialize a MTM GBP/USD XCS in £100m. The MTM leg is USD so the notional
must be expressed on the GBP leg. The pricing spread is applied to the GBP leg.
.. ipython:: python
:suppress:
fixings.add("WMR_LDN11AM_GBPUSD", Series(index=[dt(1999, 1, 1)], data=[100.0]))
fixings.add("WMR_LDN11AM_USDJPY", Series(index=[dt(1999, 1, 1)], data=[100.0]))
.. ipython:: python
xcs = XCS(
effective=dt(2025, 1, 8),
termination="6m",
frequency="Q",
currency="gbp",
notional=100e6,
leg2_mtm=True,
pair="gbpusd",
leg2_fx_fixings=(1.35, "WMR_LDN11AM"),
metric="leg1",
)
Or, we initialise a MTM USD/JPY XCS in ¥1bn with ¥100m amortization. The MTM leg is USD so the
notional must be expressed on the JPY leg. The pricing spread is applied to the JPY leg.
.. ipython:: python
xcs = XCS(
effective=dt(2025, 1, 8),
termination="6m",
frequency="Q",
currency="usd",
mtm=True,
fx_fixings=(155.0, "WMR_LDN11AM"),
pair="usdjpy",
leg2_notional=1e9,
leg2_amortization=100e6,
metric="leg2",
)
xcs.cashflows()
.. ipython:: python
:suppress:
fixings.pop("WMR_LDN11AM_GBPUSD")
fixings.pop("WMR_LDN11AM_USDJPY")
""" # noqa: E501
def _rate_scalar_calc(self) -> float:
if self.kwargs.meta["metric"] == "leg1":
return 1.0 if isinstance(self.leg1, FixedLeg) else 100.0
else:
return 1.0 if isinstance(self.leg2, FixedLeg) else 100.0
@property
def fixed_rate(self) -> DualTypes_:
"""The fixed rate parameter of the composited
:class:`~rateslib.legs.FixedLeg`."""
if isinstance(self.leg1, FixedLeg):
return self.leg1.fixed_rate
else:
raise AttributeError(f"Leg1 is of type: {type(self.leg1).__name__}")
@fixed_rate.setter
def fixed_rate(self, value: DualTypes_) -> None:
if isinstance(self.leg1, FixedLeg):
self.kwargs.leg1["fixed_rate"] = value
self.leg1.fixed_rate = value
else:
raise AttributeError(f"Leg1 is of type: {type(self.leg1).__name__}")
@property
def float_spread(self) -> DualTypes:
"""The float spread parameter of the composited
:class:`~rateslib.legs.FloatLeg`."""
if isinstance(self.leg1, FloatLeg):
return self.leg1.float_spread
else:
raise AttributeError(f"Leg1 is of type: {type(self.leg1).__name__}")
@float_spread.setter
def float_spread(self, value: DualTypes) -> None:
if isinstance(self.leg1, FloatLeg):
self.kwargs.leg1["float_spread"] = value
self.leg1.float_spread = value
else:
raise AttributeError(f"Leg1 is of type: {type(self.leg1).__name__}")
@property
def leg2_fixed_rate(self) -> DualTypes_:
"""The float spread parameter of the composited
:class:`~rateslib.legs.FloatLeg`."""
if isinstance(self.leg2, FixedLeg):
return self.leg2.fixed_rate
else:
raise AttributeError(f"Leg2 is of type: {type(self.leg2).__name__}")
@leg2_fixed_rate.setter
def leg2_fixed_rate(self, value: DualTypes_) -> None:
if isinstance(self.leg2, FixedLeg):
self.kwargs.leg2["fixed_rate"] = value
self.leg2.fixed_rate = value
else:
raise AttributeError(f"Leg2 is of type: {type(self.leg2).__name__}")
@property
def leg2_float_spread(self) -> DualTypes_:
"""The float spread parameter of the composited
:class:`~rateslib.legs.FloatLeg`."""
if isinstance(self.leg2, FloatLeg):
return self.leg2.float_spread
else:
raise AttributeError(f"Leg2 is of type: {type(self.leg2).__name__}")
@leg2_float_spread.setter
def leg2_float_spread(self, value: DualTypes) -> None:
if isinstance(self.leg2, FloatLeg):
self.kwargs.leg2["float_spread"] = value
self.leg2.float_spread = value
else:
raise AttributeError(f"Leg2 is of type: {type(self.leg2).__name__}")
@property
def leg1(self) -> FixedLeg | FloatLeg:
"""The first :class:`~rateslib.legs.FixedLeg` or
:class:`~rateslib.legs.FloatLeg` of the *Instrument*."""
return self._leg1
@property
def leg2(self) -> FixedLeg | FloatLeg:
"""The second :class:`~rateslib.legs.FixedLeg` or
:class:`~rateslib.legs.FloatLeg` of the *Instrument*."""
return self._leg2
@property
def legs(self) -> Sequence[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
def __init__(
self,
# scheduling
effective: datetime_ = NoInput(0),
termination: datetime | str_ = NoInput(0),
frequency: Frequency | str_ = NoInput(0),
*,
stub: str_ = NoInput(0),
front_stub: datetime_ = NoInput(0),
back_stub: datetime_ = NoInput(0),
roll: int | RollDay | str_ = NoInput(0),
eom: bool_ = NoInput(0),
modifier: str_ = NoInput(0),
calendar: CalInput = NoInput(0),
payment_lag: int_ = NoInput(0),
payment_lag_exchange: int_ = NoInput(0),
ex_div: int_ = NoInput(0),
convention: str_ = NoInput(0),
leg2_effective: datetime_ = NoInput(1),
leg2_termination: datetime | str_ = NoInput(1),
leg2_frequency: Frequency | str_ = NoInput(1),
leg2_stub: str_ = NoInput(1),
leg2_front_stub: datetime_ = NoInput(1),
leg2_back_stub: datetime_ = NoInput(1),
leg2_roll: int | RollDay | str_ = NoInput(1),
leg2_eom: bool_ = NoInput(1),
leg2_modifier: str_ = NoInput(1),
leg2_calendar: CalInput = NoInput(1),
leg2_payment_lag: int_ = NoInput(1),
leg2_payment_lag_exchange: int_ = NoInput(1),
leg2_ex_div: int_ = NoInput(1),
leg2_convention: str_ = NoInput(1),
# settlement parameters
currency: str_ = NoInput(0),
notional: DualTypes_ = NoInput(0),
amortization: float_ = NoInput(0),
pair: FXIndex | str_ = NoInput(0),
leg2_notional: DualTypes_ = NoInput(0),
leg2_amortization: float_ = NoInput(0),
# rate parameters
fixed: bool_ = NoInput(0),
mtm: bool_ = NoInput(0),
fixed_rate: DualTypes_ = NoInput(0),
float_spread: DualTypes_ = NoInput(0),
spread_compound_method: str_ = NoInput(0),
rate_fixings: FixingsRates_ = NoInput(0),
fixing_method: str_ = NoInput(0),
fixing_frequency: Frequency | str_ = NoInput(0),
fixing_series: FloatRateSeries | str_ = NoInput(0),
fx_fixings: LegFixings = NoInput(0),
leg2_fixed: bool_ = NoInput(0),
leg2_mtm: bool_ = NoInput(0),
leg2_fixed_rate: DualTypes_ = NoInput(0),
leg2_float_spread: DualTypes_ = NoInput(0),
leg2_spread_compound_method: str_ = NoInput(0),
leg2_rate_fixings: LegFixings = NoInput(0),
leg2_fixing_method: str_ = NoInput(0),
leg2_fixing_frequency: Frequency | str_ = NoInput(0),
leg2_fixing_series: FloatRateSeries | str_ = NoInput(0),
leg2_fx_fixings: LegFixings = NoInput(0),
# meta parameters
curves: CurvesT_ = NoInput(0),
spec: str_ = NoInput(0),
metric: str_ = NoInput(0),
) -> None:
currency_, leg2_currency_, pair_, mtm_, leg2_mtm_, notional_, leg2_notional_ = (
_validated_xcs_input_combinations(
currency=currency,
pair=pair,
mtm=mtm,
leg2_mtm=leg2_mtm,
notional=notional,
leg2_notional=leg2_notional,
fx_fixings=fx_fixings,
leg2_fx_fixings=leg2_fx_fixings,
spec=spec,
)
)
del mtm
del leg2_mtm
del pair
del currency
del notional
del leg2_notional
user_args = dict(
# scheduling
effective=effective,
leg2_effective=leg2_effective,
termination=termination,
leg2_termination=leg2_termination,
frequency=frequency,
leg2_frequency=leg2_frequency,
stub=stub,
leg2_stub=leg2_stub,
front_stub=front_stub,
leg2_front_stub=leg2_front_stub,
back_stub=back_stub,
leg2_back_stub=leg2_back_stub,
roll=roll,
leg2_roll=leg2_roll,
eom=eom,
leg2_eom=leg2_eom,
modifier=modifier,
leg2_modifier=leg2_modifier,
calendar=calendar,
leg2_calendar=leg2_calendar,
payment_lag=payment_lag,
leg2_payment_lag=leg2_payment_lag,
payment_lag_exchange=payment_lag_exchange,
leg2_payment_lag_exchange=leg2_payment_lag_exchange,
ex_div=ex_div,
leg2_ex_div=leg2_ex_div,
convention=convention,
leg2_convention=leg2_convention,
# settlement
currency=currency_,
leg2_currency=leg2_currency_,
notional=notional_,
leg2_notional=leg2_notional_,
amortization=amortization,
leg2_amortization=leg2_amortization,
# non-deliverability
fx_fixings=fx_fixings,
leg2_fx_fixings=leg2_fx_fixings,
mtm=mtm_,
leg2_mtm=leg2_mtm_,
# rate
fixed_rate=fixed_rate,
float_spread=float_spread,
spread_compound_method=spread_compound_method,
rate_fixings=rate_fixings,
fixing_method=fixing_method,
fixing_frequency=fixing_frequency,
fixing_series=fixing_series,
leg2_fixed_rate=leg2_fixed_rate,
leg2_float_spread=leg2_float_spread,
leg2_spread_compound_method=leg2_spread_compound_method,
leg2_rate_fixings=leg2_rate_fixings,
leg2_fixing_method=leg2_fixing_method,
leg2_fixing_frequency=leg2_fixing_frequency,
leg2_fixing_series=leg2_fixing_series,
# meta
pair=pair_,
fixed=fixed,
leg2_fixed=leg2_fixed,
curves=self._parse_curves(curves),
metric=metric,
)
instrument_args = dict( # these are hard coded arguments specific to this instrument
initial_exchange=True,
final_exchange=True,
leg2_initial_exchange=True,
leg2_final_exchange=True,
vol=_Vol(),
)
default_args = dict(
currency=defaults.base_currency,
payment_lag=defaults.payment_lag_specific[type(self).__name__],
payment_lag_exchange=defaults.payment_lag_exchange,
mtm=False,
leg2_mtm=False,
fixed=False,
leg2_fixed=False,
metric="leg1",
)
self._kwargs = _KWArgs(
spec=spec,
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=["curves", "metric", "fixed", "leg2_fixed", "vol", "pair"],
)
# narrowing of fixed or floating
float_attrs = [
"float_spread",
"spread_compound_method",
"rate_fixings",
"fixing_method",
"fixing_frequency",
"fixing_series",
]
if self.kwargs.meta["fixed"]:
for item in float_attrs:
self.kwargs.leg1.pop(item)
else:
self.kwargs.leg1.pop("fixed_rate")
if self.kwargs.meta["leg2_fixed"]:
for item in float_attrs:
self.kwargs.leg2.pop(item)
else:
self.kwargs.leg2.pop("fixed_rate")
# populate non-deliverable leg, based on which leg notional is given
if isinstance(self.kwargs.leg1["notional"], NoInput):
self._kwargs.leg1["notional"] = -1.0 * self._kwargs.leg2["notional"]
self._kwargs.leg1["amortization"] = (
NoInput(0)
if isinstance(self._kwargs.leg2["amortization"], NoInput)
else -1.0 * self._kwargs.leg2["amortization"]
)
self._kwargs.leg1["pair"] = self.kwargs.meta["pair"]
if isinstance(self.kwargs.leg2["notional"], NoInput):
self._kwargs.leg2["notional"] = -1.0 * self._kwargs.leg1["notional"]
self._kwargs.leg2["amortization"] = (
NoInput(0)
if isinstance(self._kwargs.leg1["amortization"], NoInput)
else -1.0 * self._kwargs.leg1["amortization"]
)
self._kwargs.leg2["pair"] = self.kwargs.meta["pair"]
if self.kwargs.meta["fixed"]:
self._leg1: FixedLeg | FloatLeg = FixedLeg(
**_convert_to_schedule_kwargs(self.kwargs.leg1, 1)
)
else:
self._leg1 = FloatLeg(**_convert_to_schedule_kwargs(self.kwargs.leg1, 1))
if self.kwargs.meta["leg2_fixed"]:
self._leg2: FixedLeg | FloatLeg = FixedLeg(
**_convert_to_schedule_kwargs(self.kwargs.leg2, 1)
)
else:
self._leg2 = FloatLeg(**_convert_to_schedule_kwargs(self.kwargs.leg2, 1))
self._legs = [self.leg1, self.leg2]
self._rate_scalar = self._rate_scalar_calc()
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
leg2_rate_curve = _get_curve("leg2_rate_curve", True, True, *c)
leg2_disc_curve = _get_curve("leg2_disc_curve", False, True, *c)
rate_curve = _get_curve("rate_curve", True, True, *c)
disc_curve = _get_curve("disc_curve", False, True, *c)
metric_ = _drb(self.kwargs.meta["metric"], metric)
fx_ = _get_fx_forwards_maybe_from_solver(fx=fx, solver=solver)
if metric_ == "leg1":
leg2_npv: DualTypes = self.leg2.npv( # type: ignore[assignment]
rate_curve=leg2_rate_curve,
disc_curve=leg2_disc_curve,
base=self.leg1.settlement_params.currency,
fx=fx_,
settlement=settlement,
forward=forward,
)
spread = self.leg1.spread(
target_npv=-leg2_npv,
rate_curve=rate_curve,
disc_curve=disc_curve,
settlement=settlement,
fx=fx_,
forward=forward,
)
if self.kwargs.meta["fixed"]:
return spread / 100.0
else:
return spread
elif metric_ == "leg2":
leg1_npv: DualTypes = self.leg1.npv( # type: ignore[assignment]
rate_curve=rate_curve,
disc_curve=disc_curve,
base=self.leg2.settlement_params.currency,
fx=fx_,
settlement=settlement,
forward=forward,
)
spread = self.leg2.spread(
target_npv=-leg1_npv,
rate_curve=leg2_rate_curve,
disc_curve=leg2_disc_curve,
settlement=settlement,
forward=forward,
fx=fx_,
)
if self.kwargs.meta["leg2_fixed"]:
return spread / 100.0
else:
return spread
else:
raise ValueError("`metric` must be in {'leg1', 'leg2'}")
# def _set_rate(self, value: DualTypes, leg: int) -> DualTypes:
# if leg == 1:
# if self.kwargs.meta["fixed"]:
# ret = self.leg1.fixed_rate
# self.leg1.fixed_rate = value
# else:
# ret = self.leg1.float_spread
# self.leg1.float_spread = value
# else: # leg 2
# if self.kwargs.meta["leg2_fixed"]:
# ret = self.leg2.fixed_rate
# self.leg2.fixed_rate = value
# else:
# ret = self.leg2.float_spread
# self.leg2.float_spread = value
# return ret
def spread(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
return self.rate(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
metric=metric,
)
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
self._set_pricing_mid(
curves=curves,
solver=solver,
settlement=settlement,
forward=forward,
fx=fx,
)
return super().npv(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=local,
settlement=settlement,
forward=forward,
)
def _set_pricing_mid(
self,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> None:
# all float_spread are assumed to be equal to zero if not given.
# missing fixed rates will be priced and set if possible.
if isinstance(self.leg1, FixedLeg) and isinstance(self.kwargs.leg1["fixed_rate"], NoInput):
if isinstance(self.leg2, FixedLeg) and isinstance(
self.kwargs.leg2["fixed_rate"], NoInput
):
raise ValueError("At least one leg must have a defined `fixed_rate`.")
mid_price = self.rate(
curves=curves,
solver=solver,
fx=fx,
settlement=settlement,
forward=forward,
metric="leg1",
)
self.leg1.fixed_rate = _dual_float(mid_price)
elif isinstance(self.leg2, FixedLeg) and isinstance(
self.kwargs.leg2["fixed_rate"], NoInput
):
# leg1 cannot be fixed with NoInput - this branch is covered above
mid_price = self.rate(
curves=curves,
solver=solver,
fx=fx,
settlement=settlement,
forward=forward,
metric="leg2",
)
self.leg2.fixed_rate = _dual_float(mid_price)
elif (
isinstance(self.leg1, FloatLeg)
and isinstance(self.kwargs.leg1["float_spread"], NoInput)
and isinstance(self.leg2, FloatLeg)
and isinstance(self.kwargs.leg2["float_spread"], NoInput)
):
# then no FloatLeg pricing parameters are provided
mid_price = self.rate(
curves=curves,
solver=solver,
fx=fx,
settlement=settlement,
forward=forward,
)
if self.kwargs.meta["metric"].lower() == "leg1":
self.leg1.float_spread = _dual_float(mid_price)
else:
self.leg2.float_spread = _dual_float(mid_price)
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
A XCS requires 4 curves (mostly if float-float, otherwise it needs 2)
"""
if isinstance(curves, NoInput):
return _Curves()
elif isinstance(curves, dict):
return _Curves(
rate_curve=curves.get("rate_curve", NoInput(0)),
disc_curve=curves.get("disc_curve", NoInput(0)),
leg2_rate_curve=curves.get("leg2_rate_curve", NoInput(0)),
leg2_disc_curve=curves.get("leg2_disc_curve", NoInput(0)),
)
elif isinstance(curves, list | tuple):
if len(curves) == 4:
return _Curves(
rate_curve=NoInput(0) if curves[0] is None else curves[0],
disc_curve=curves[1],
leg2_rate_curve=NoInput(0) if curves[2] is None else curves[2],
leg2_disc_curve=curves[3],
)
else:
raise ValueError(
f"{type(self).__name__} requires 4 curve type input. Got {len(curves)}."
)
elif isinstance(curves, _Curves):
return curves
else:
raise ValueError(f"{type(self).__name__} requires 4 curve type input. Got 1.")
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return super()._cashflows_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return self._local_analytic_rate_fixings_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
)
def _validated_xcs_input_combinations(
currency: str_,
pair: FXIndex | str_,
mtm: bool_,
leg2_mtm: bool_,
notional: DualTypes_,
leg2_notional: DualTypes_,
fx_fixings: LegFixings,
leg2_fx_fixings: LegFixings,
spec: str_,
) -> tuple[str, str, FXIndex, LegMtm, LegMtm, DualTypes_, DualTypes_]:
kw = _KWArgs(
user_args=dict(
currency=currency,
pair=pair,
mtm=mtm,
leg2_mtm=leg2_mtm,
notional=notional,
leg2_notional=leg2_notional,
fx_fixings=fx_fixings,
leg2_fx_fixings=leg2_fx_fixings,
),
default_args=dict(
mtm=False,
leg2_mtm=False,
),
spec=spec,
meta_args=["pair"],
)
if kw.leg1["mtm"] and kw.leg2["mtm"]:
raise ValueError("`mtm` and `leg2_mtm` must define at most one MTM leg.")
mtm_obj: LegMtm = LegMtm.XCS if kw.leg1["mtm"] else LegMtm.Initial
leg2_mtm_obj: LegMtm = LegMtm.XCS if kw.leg2["mtm"] else LegMtm.Initial
# set a default `notional` if no notional on any leg is given
if isinstance(kw.leg1["notional"], NoInput) and isinstance(kw.leg2["notional"], NoInput):
notional_: DualTypes_ = defaults.notional
leg2_notional_: DualTypes_ = leg2_notional
elif not isinstance(kw.leg1["notional"], NoInput) and not isinstance(
kw.leg2["notional"], NoInput
):
raise ValueError(
"The `notional` can only be provided on one leg, expressed in its `currency`.\n"
"For a XCS, the other leg's cashflows are derived via `fx_fixings` and "
"non-deliverability."
)
else:
notional_ = notional
leg2_notional_ = leg2_notional
if not isinstance(notional_, NoInput) and not isinstance(kw.leg1["fx_fixings"], NoInput):
raise ValueError(
"When `notional` is given, that leg is assumed to be deliverable and `fx_fixings` "
"should not be given.\nOnly `leg2_fx_fixings` are required to derive "
"cashflows on leg2 via non-deliverability from leg1's `notional`."
)
if not isinstance(leg2_notional_, NoInput) and not isinstance(kw.leg2["fx_fixings"], NoInput):
raise ValueError(
"When `leg2_notional` is given, that leg is assumed to be deliverable and "
"`leg2_fx_fixings` should not be given.\nOnly `fx_fixings` are required to derive "
"cashflows on leg1 via non-deliverability from leg2's `notional`."
)
if isinstance(kw.meta["pair"], NoInput):
raise ValueError(
"A `pair` must be supplied to a XCS along with the leg1 `currency` to imply the "
"second currency."
)
fx_index_ = _get_fx_index(kw.meta["pair"])
currency_ = _drb(defaults.base_currency, kw.leg1["currency"]).lower()
if currency_ not in fx_index_.pair:
raise ValueError(
"For a XCS, the `currency` must be one of the currencies in the FX index `pair`.\n"
f"Got '{currency_}' and '{fx_index_.pair}'."
)
leg2_currency_ = fx_index_.pair[:3] if currency_ == fx_index_.pair[3:] else fx_index_.pair[3:]
return (
currency_,
leg2_currency_,
fx_index_,
mtm_obj,
leg2_mtm_obj,
notional_,
leg2_notional_,
)
================================================
FILE: python/rateslib/instruments/yoyis.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import LegIndexBase
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.kwargs import _convert_to_schedule_kwargs, _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_parse_curves,
_Vol,
)
from rateslib.legs import FixedLeg
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CalInput,
CurvesT_,
DataFrame,
DualTypes,
DualTypes_,
Frequency,
FXForwards_,
IndexMethod,
LegFixings,
RollDay,
Sequence,
Solver_,
VolT_,
_BaseLeg,
bool_,
datetime,
datetime_,
float_,
int_,
str_,
)
class YoYIS(_BaseInstrument):
r"""
A *year-on-year indexed swap (YoYIS)* composing two :class:`~rateslib.legs.FixedLeg` with the
second having ``index_params``.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.instruments import YoYIS
from rateslib import fixings
from datetime import datetime as dt
from pandas import Series
.. ipython:: python
fixings.add("CPI_UK", Series(index=[dt(1999, 10, 1), dt(2000, 10, 1), dt(2001, 10, 1), dt(2002, 10, 1)], data=[110.0, 120.0, 125.0, 127.0]))
yoyis = YoYIS(
effective=dt(2000, 1, 1),
termination="3y",
frequency="A",
fixed_rate=2.0,
convention="One",
leg2_index_fixings="CPI_UK",
leg2_index_lag=3,
leg2_index_method="monthly",
)
yoyis.cashflows()
.. ipython:: python
:suppress:
fixings.pop("CPI_UK")
.. rubric:: Pricing
An *YoYIS* requires a *disc curve* on both legs (which should be the same *Curve*), and a
*leg2 index curve* for index forecasting on the second *FixedLeg*.
The following input formats are allowed:
.. code-block:: python
curves = [index_curve, disc_curve] # two curves are applied in order
curves = { # dict form is explicit
"disc_curve": disc_curve,
"leg2_index_curve": index_curve,
}
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **scheduling** parameters.
effective : datetime, :red:`required`
The unadjusted effective date. If given as adjusted, unadjusted alternatives may be
inferred.
termination : datetime, str, :red:`required`
The unadjusted termination date. If given as adjusted, unadjusted alternatives may be
inferred. If given as string tenor will be calculated from ``effective``.
frequency : Frequency, str, :red:`required`
The frequency of the schedule.
If given as string will derive a :class:`~rateslib.scheduling.Frequency` aligning with:
monthly ("M"), quarterly ("Q"), semi-annually ("S"), annually("A") or zero-coupon ("Z"), or
a set number of calendar or business days ("_D", "_B"), weeks ("_W"), months ("_M") or
years ("_Y").
Where required, the :class:`~rateslib.scheduling.RollDay` is derived as per ``roll``
and business day calendar as per ``calendar``.
stub : StubInference, str in {"ShortFront", "LongFront", "ShortBack", "LongBack"}, :green:`optional`
The stub type used if stub inference is required. If given as string will derive a
:class:`~rateslib.scheduling.StubInference`.
front_stub : datetime, :green:`optional`
The unadjusted date for the start stub period. If given as adjusted, unadjusted
alternatives may be inferred.
back_stub : datetime, :green:`optional`
The unadjusted date for the back stub period. If given as adjusted, unadjusted
alternatives may be inferred.
See notes for combining ``stub``, ``front_stub`` and ``back_stub``
and any automatic stub inference.
roll : RollDay, int in [1, 31], str in {"eom", "imm", "som"}, :green:`optional`
The roll day of the schedule. If not given or not available in ``frequency`` will be
inferred for monthly frequency variants.
eom : bool, :green:`optional`
Use an end of month preference rather than regular rolls for ``roll`` inference. Set by
default. Not required if ``roll`` is defined.
modifier : Adjuster, str in {"NONE", "F", "MF", "P", "MP"}, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` used for adjusting unadjusted schedule dates
into adjusted dates. If given as string must define simple date rolling rules.
calendar : calendar, str, :green:`optional`
The business day calendar object to use. If string will call
:meth:`~rateslib.scheduling.get_calendar`.
payment_lag: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
a payment date. If given as integer will define the number of business days to
lag payments by.
payment_lag_exchange: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional payment date. If given as integer will define the number of business days to
lag payments by.
ex_div: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional dates, which may be used, for example by fixings schedules. If given as integer
will define the number of business days to lag dates by.
convention: str, :green:`optional (set by 'defaults')`
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
leg2_effective : datetime, :green:`optional (inherited from leg1)`
leg2_termination : datetime, str, :green:`optional (inherited from leg1)`
leg2_frequency : Frequency, str, :green:`optional (inherited from leg1)`
leg2_stub : StubInference, str, :green:`optional (inherited from leg1)`
leg2_front_stub : datetime, :green:`optional (inherited from leg1)`
leg2_back_stub : datetime, :green:`optional (inherited from leg1)`
leg2_roll : RollDay, int, str, :green:`optional (inherited from leg1)`
leg2_eom : bool, :green:`optional (inherited from leg1)`
leg2_modifier : Adjuster, str, :green:`optional (inherited from leg1)`
leg2_calendar : calendar, str, :green:`optional (inherited from leg1)`
leg2_payment_lag: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_payment_lag_exchange: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_ex_div: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_convention: str, :green:`optional (inherited from leg1)`
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the *Instrument* (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The initial leg notional, defined in units of *reference currency*.
amortization: float, Dual, Dual2, Variable, str, Amortization, :green:`optional (set as zero)`
Set a non-constant notional per *Period*. If a scalar value, adjusts the ``notional`` of
each successive period by that same value. Should have
sign equal to that of notional if the notional is to reduce towards zero.
leg2_notional : float, Dual, Dual2, Variable, :green:`optional (negatively inherited from leg1)`
leg2_amortization : float, Dual, Dual2, Variable, str, Amortization, :green:`optional (negatively inherited from leg1)`
.. note::
The following are **rate parameters**.
fixed_rate : float or None
The fixed rate applied to the :class:`~rateslib.legs.FixedLeg`. If `None`
will be set to mid-market when curves are provided.
.. note::
The following parameters define **indexation**.
leg2_index_method : IndexMethod, str, :green:`optional (set by 'defaults')`
The interpolation method, or otherwise, to determine index values from reference dates.
leg2_index_lag: int, :green:`optional (set by 'defaults')`
The indexation lag, in months, applied to the determination of index values.
leg2_index_fixings: float, Dual, Dual2, Variable, Series, str, 2-tuple or list, :green:`optional`
The index value for the reference date.
Best practice is to supply this value as string identifier relating to the global
``fixings`` object.
.. note::
The following are **meta parameters**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
spec: str, :green:`optional`
A collective group of parameters. See
:ref:`default argument specifications `.
Notes
-------
A *YoYIS* has a nominal :class:`~rateslib.legs.FixedLeg` with a specific ``fixed_rate``, and
a second *Leg* whose cash flows are defined by some index values. *Rateslib* constructs this
object as a second :class:`~rateslib.legs.FixedLeg` which inherits specific properties,
namely:
- The ``leg2_index_base_type`` is *LegIndexBase.PeriodOnPeriod*, to ensure that indexing is not
calculated from one single ``leg2_index_base`` value, but by consecutive dates.
- The ``leg2_fixed_rate`` is 100% to provide a coupon amount that matches the notional.
- The ``leg2_index_only`` parameter is *True* to ensure that the cashflow paid only accounts for
indexation and does not pay that 100% of notional.
Under this definition the unindexed reference cashflow of each period of *Leg2* is the notional
adjusted by the DCF:
.. math::
\mathbb{E^Q} [\bar{C}_t] = -N_i d_i
and the indexed reference cashflow, accounting for indexation only, is:
.. math::
-N_i d_i ( \frac{I_v(m_i)}{I_v(m_{i-1})} - 1 )
which matches the definition of the indexed *Leg* of a *YoYIS*.
""" # noqa: E501
_rate_scalar = 1.0
@property
def fixed_rate(self) -> DualTypes_:
"""The fixed rate of *Leg1*."""
return self.leg1.fixed_rate
@fixed_rate.setter
def fixed_rate(self, value: DualTypes_) -> None:
self.kwargs.leg1["fixed_rate"] = value
self.leg1.fixed_rate = value
@property
def leg1(self) -> FixedLeg:
"""The :class:`~rateslib.legs.FixedLeg` of the *Instrument*."""
return self._leg1
@property
def leg2(self) -> FixedLeg:
"""The second :class:`~rateslib.legs.FixedLeg` of the *Instrument* with indexation."""
return self._leg2
@property
def legs(self) -> Sequence[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
def __init__(
self,
effective: datetime_ = NoInput(0),
termination: datetime | str_ = NoInput(0),
frequency: Frequency | str_ = NoInput(0),
*,
stub: str_ = NoInput(0),
front_stub: datetime_ = NoInput(0),
back_stub: datetime_ = NoInput(0),
roll: int | RollDay | str_ = NoInput(0),
eom: bool_ = NoInput(0),
modifier: str_ = NoInput(0),
calendar: CalInput = NoInput(0),
payment_lag: int_ = NoInput(0),
payment_lag_exchange: int_ = NoInput(0),
ex_div: int_ = NoInput(0),
convention: str_ = NoInput(0),
leg2_effective: datetime_ = NoInput(1),
leg2_termination: datetime | str_ = NoInput(1),
leg2_frequency: Frequency | str_ = NoInput(1),
leg2_stub: str_ = NoInput(1),
leg2_front_stub: datetime_ = NoInput(1),
leg2_back_stub: datetime_ = NoInput(1),
leg2_roll: int | RollDay | str_ = NoInput(1),
leg2_eom: bool_ = NoInput(1),
leg2_modifier: str_ = NoInput(1),
leg2_calendar: CalInput = NoInput(1),
leg2_payment_lag: int_ = NoInput(1),
leg2_payment_lag_exchange: int_ = NoInput(1),
leg2_convention: str_ = NoInput(1),
leg2_ex_div: int_ = NoInput(1),
# settlement params
currency: str_ = NoInput(0),
notional: float_ = NoInput(0),
amortization: float_ = NoInput(0),
leg2_notional: float_ = NoInput(-1),
leg2_amortization: float_ = NoInput(-1),
# index params
leg2_index_lag: int_ = NoInput(0),
leg2_index_method: IndexMethod | str_ = NoInput(0),
leg2_index_fixings: LegFixings = NoInput(0),
# rate params
fixed_rate: DualTypes_ = NoInput(0),
# meta params
curves: CurvesT_ = NoInput(0),
spec: str_ = NoInput(0),
) -> None:
user_args = dict(
effective=effective,
termination=termination,
frequency=frequency,
fixed_rate=fixed_rate,
leg2_index_lag=leg2_index_lag,
leg2_index_method=leg2_index_method,
leg2_index_fixings=leg2_index_fixings,
stub=stub,
front_stub=front_stub,
back_stub=back_stub,
roll=roll,
eom=eom,
modifier=modifier,
calendar=calendar,
payment_lag=payment_lag,
payment_lag_exchange=payment_lag_exchange,
ex_div=ex_div,
notional=notional,
currency=currency,
amortization=amortization,
convention=convention,
leg2_effective=leg2_effective,
leg2_termination=leg2_termination,
leg2_frequency=leg2_frequency,
leg2_stub=leg2_stub,
leg2_front_stub=leg2_front_stub,
leg2_back_stub=leg2_back_stub,
leg2_roll=leg2_roll,
leg2_eom=leg2_eom,
leg2_modifier=leg2_modifier,
leg2_calendar=leg2_calendar,
leg2_payment_lag=leg2_payment_lag,
leg2_payment_lag_exchange=leg2_payment_lag_exchange,
leg2_ex_div=leg2_ex_div,
leg2_notional=leg2_notional,
leg2_amortization=leg2_amortization,
leg2_convention=leg2_convention,
curves=self._parse_curves(curves),
)
instrument_args = dict( # these are hard coded arguments specific to this instrument
leg2_currency=NoInput(1),
initial_exchange=False,
leg2_initial_exchange=False,
final_exchange=False,
leg2_final_exchange=False,
leg2_index_base_type=LegIndexBase.PeriodOnPeriod,
leg2_fixed_rate=100.0, # combined with index_only this acts similarly to a cashflow
leg2_index_only=True, # but it is impacted by the DCF of the period.
vol=_Vol(),
)
default_args = dict(
notional=defaults.notional,
payment_lag=defaults.payment_lag_specific[type(self).__name__],
payment_lag_exchange=defaults.payment_lag_exchange,
index_lag=defaults.index_lag,
index_method=defaults.index_method,
)
self._kwargs = _KWArgs(
spec=spec,
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=["curves", "vol"],
)
self._leg1 = FixedLeg(**_convert_to_schedule_kwargs(self.kwargs.leg1, 1))
self._leg2 = FixedLeg(**_convert_to_schedule_kwargs(self.kwargs.leg2, 1))
self._legs = [self._leg1, self._leg2]
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
leg2_disc_curve = _get_curve("leg2_disc_curve", False, True, *c)
leg2_index_curve = _get_curve("leg2_index_curve", False, True, *c)
disc_curve = _get_curve("disc_curve", False, True, *c)
leg2_npv: DualTypes = self.leg2.local_npv(
rate_curve=NoInput(0),
disc_curve=leg2_disc_curve,
index_curve=leg2_index_curve,
settlement=settlement,
forward=forward,
)
return (
self.leg1.spread(
target_npv=-leg2_npv, # - leg1_npv,
rate_curve=NoInput(0),
disc_curve=disc_curve,
index_curve=NoInput(0),
settlement=settlement,
forward=forward,
)
/ 100
)
def spread(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
leg2_disc_curve = _get_curve("leg2_disc_curve", False, True, *c)
leg2_index_curve = _get_curve("leg2_index_curve", False, True, *c)
disc_curve = _get_curve("disc_curve", False, True, *c)
leg1_npv: DualTypes = self.leg1.local_npv(
rate_curve=NoInput(0),
disc_curve=disc_curve,
index_curve=NoInput(0),
settlement=settlement,
forward=forward,
)
return self.leg2.spread(
target_npv=-leg1_npv,
rate_curve=NoInput(0),
disc_curve=leg2_disc_curve,
index_curve=leg2_index_curve,
settlement=settlement,
forward=forward,
)
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
self._set_pricing_mid(
curves=curves,
solver=solver,
settlement=settlement,
forward=forward,
)
return super().npv(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=local,
settlement=settlement,
forward=forward,
)
def _set_pricing_mid(
self,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> None:
# the test for an unpriced IIRS is that its fixed rate is not set.
if isinstance(self.kwargs.leg1["fixed_rate"], NoInput):
# set a fixed rate for the purpose of generic methods NPV will be zero.
mid_market_rate = self.rate(
curves=curves,
solver=solver,
settlement=settlement,
forward=forward,
)
self.leg1.fixed_rate = _dual_float(mid_market_rate)
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
An IIRS has three curve requirements: an index_curve, a leg2_rate_curve and a
disc_curve used by both legs.
"""
if isinstance(curves, NoInput):
return _Curves()
elif isinstance(curves, dict):
return _Curves(
disc_curve=curves.get("disc_curve", NoInput(0)),
index_curve=curves.get("index_curve", NoInput(0)),
leg2_index_curve=_drb(
curves.get("index_curve", NoInput(0)),
curves.get("leg2_index_curve", NoInput(0)),
),
leg2_disc_curve=_drb(
curves.get("disc_curve", NoInput(0)),
curves.get("leg2_disc_curve", NoInput(0)),
),
)
elif isinstance(curves, list | tuple):
if len(curves) == 2:
return _Curves(
disc_curve=curves[1],
leg2_index_curve=curves[0],
leg2_disc_curve=curves[1],
)
else:
raise ValueError(
f"{type(self).__name__} requires 2 curve types. Got {len(curves)}."
)
elif isinstance(curves, _Curves):
return curves
else: # `curves` is just a single input which is copied across all curves
raise ValueError(f"{type(self).__name__} requires 2 curve types. Got 1.")
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return super()._cashflows_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return self._local_analytic_rate_fixings_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
)
================================================
FILE: python/rateslib/instruments/zcis.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.kwargs import _convert_to_schedule_kwargs, _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_parse_curves,
_Vol,
)
from rateslib.legs import FixedLeg, ZeroFixedLeg
from rateslib.scheduling import Frequency
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CalInput,
CurvesT_,
DataFrame,
DualTypes,
DualTypes_,
Frequency,
FXForwards_,
IndexMethod,
LegFixings,
RollDay,
Solver_,
VolT_,
_BaseLeg,
bool_,
datetime,
datetime_,
float_,
int_,
str_,
)
class ZCIS(_BaseInstrument):
"""
An *indexed zero coupon swap (ZCIS)* composing a :class:`~rateslib.legs.ZeroFixedLeg`
and a :class:`~rateslib.legs.ZeroIndexLeg`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.instruments import ZCIS
from datetime import datetime as dt
from rateslib import fixings
from pandas import Series
.. ipython:: python
fixings.add("CPI_UK", Series(index=[dt(1999, 10, 1), dt(1999, 11, 1)], data=[110.0, 112.0]))
zcis = ZCIS(
effective=dt(2000, 1, 10),
termination="2Y",
frequency="A",
fixed_rate=3.5,
currency="gbp",
leg2_index_fixings="CPI_UK",
leg2_index_method="daily",
)
zcis.cashflows()
.. ipython:: python
:suppress:
fixings.pop("CPI_UK")
.. rubric:: Pricing
The methods of a *ZCIS* require a *disc curve* applicable to both legs and a *leg2 index curve*.
The following input formats are allowed:
.. code-block:: python
curves = [index_curve, disc_curve] # two curves
curves = [None, disc_curve, leg2_index_curve, disc_curve] # four curves
curves = { # dict form is explicit
"disc_curve": disc_curve,
"leg2_index_curve": leg2_index_curve,
}
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **scheduling** parameters.
effective : datetime, :red:`required`
The unadjusted effective date. If given as adjusted, unadjusted alternatives may be
inferred.
termination : datetime, str, :red:`required`
The unadjusted termination date. If given as adjusted, unadjusted alternatives may be
inferred. If given as string tenor will be calculated from ``effective``.
frequency : Frequency, str, :red:`required`
The frequency of the schedule.
If given as string will derive a :class:`~rateslib.scheduling.Frequency` aligning with:
monthly ("M"), quarterly ("Q"), semi-annually ("S"), annually("A") or zero-coupon ("Z"), or
a set number of calendar or business days ("_D", "_B"), weeks ("_W"), months ("_M") or
years ("_Y").
Where required, the :class:`~rateslib.scheduling.RollDay` is derived as per ``roll``
and business day calendar as per ``calendar``.
stub : StubInference, str in {"ShortFront", "LongFront", "ShortBack", "LongBack"}, :green:`optional`
The stub type used if stub inference is required. If given as string will derive a
:class:`~rateslib.scheduling.StubInference`.
front_stub : datetime, :green:`optional`
The unadjusted date for the start stub period. If given as adjusted, unadjusted
alternatives may be inferred.
back_stub : datetime, :green:`optional`
The unadjusted date for the back stub period. If given as adjusted, unadjusted
alternatives may be inferred.
See notes for combining ``stub``, ``front_stub`` and ``back_stub``
and any automatic stub inference.
roll : RollDay, int in [1, 31], str in {"eom", "imm", "som"}, :green:`optional`
The roll day of the schedule. If not given or not available in ``frequency`` will be
inferred for monthly frequency variants.
eom : bool, :green:`optional`
Use an end of month preference rather than regular rolls for ``roll`` inference. Set by
default. Not required if ``roll`` is defined.
modifier : Adjuster, str in {"NONE", "F", "MF", "P", "MP"}, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` used for adjusting unadjusted schedule dates
into adjusted dates. If given as string must define simple date rolling rules.
calendar : calendar, str, :green:`optional`
The business day calendar object to use. If string will call
:meth:`~rateslib.scheduling.get_calendar`.
payment_lag: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
a payment date. If given as integer will define the number of business days to
lag payments by.
payment_lag_exchange: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional payment date. If given as integer will define the number of business days to
lag payments by.
ex_div: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional dates, which may be used, for example by fixings schedules. If given as integer
will define the number of business days to lag dates by.
convention: str, :green:`optional (set by 'defaults')`
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
leg2_effective : datetime, :green:`optional (inherited from leg1)`
leg2_termination : datetime, str, :green:`optional (inherited from leg1)`
leg2_frequency : Frequency, str, :green:`optional (inherited from leg1)`
leg2_stub : StubInference, str, :green:`optional (inherited from leg1)`
leg2_front_stub : datetime, :green:`optional (inherited from leg1)`
leg2_back_stub : datetime, :green:`optional (inherited from leg1)`
leg2_roll : RollDay, int, str, :green:`optional (inherited from leg1)`
leg2_eom : bool, :green:`optional (inherited from leg1)`
leg2_modifier : Adjuster, str, :green:`optional (inherited from leg1)`
leg2_calendar : calendar, str, :green:`optional (inherited from leg1)`
leg2_payment_lag: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_payment_lag_exchange: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_ex_div: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_convention: str, :green:`optional (inherited from leg1)`
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the *Instrument* (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The initial leg notional, defined in units of *reference currency*.
leg2_notional : float, Dual, Dual2, Variable, :green:`optional (negatively inherited from leg1)`
.. note::
The following are **rate parameters**.
fixed_rate : float or None
The fixed rate applied to the :class:`~rateslib.legs.ZeroFixedLeg`. If `None`
will be set to mid-market when curves are provided.
.. note::
The following parameters define **indexation**.
leg2_index_method : IndexMethod, str, :green:`optional (set by 'defaults')`
The interpolation method, or otherwise, to determine index values from reference dates.
leg2_index_lag: int, :green:`optional (set by 'defaults')`
The indexation lag, in months, applied to the determination of index values.
leg2_index_base: float, Dual, Dual2, Variable, :green:`optional`
The specific value applied as the base index value for all *Periods*.
If not given and ``index_fixings`` is a string fixings identifier that will be
used to determine the base index value.
leg2_index_fixings: float, Dual, Dual2, Variable, Series, str, 2-tuple or list, :green:`optional`
The index value for the reference date.
Best practice is to supply this value as string identifier relating to the global
``fixings`` object.
.. note::
The following are **meta parameters**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
spec: str, :green:`optional`
A collective group of parameters. See
:ref:`default argument specifications `.
""" # noqa: E501
_rate_scalar = 1.0
@property
def fixed_rate(self) -> DualTypes_:
"""The fixed rate parameter of the composited
:class:`~rateslib.legs.FixedLeg`."""
return self.leg1.fixed_rate
@fixed_rate.setter
def fixed_rate(self, value: DualTypes_) -> None:
self.kwargs.leg1["fixed_rate"] = value
self.leg1.fixed_rate = value
@property
def leg1(self) -> ZeroFixedLeg:
"""The :class:`~rateslib.legs.ZeroFixedLeg` of the *Instrument*."""
return self._leg1
@property
def leg2(self) -> FixedLeg:
"""The :class:`~rateslib.legs.ZeroFloatLeg` of the *Instrument*."""
return self._leg2
@property
def legs(self) -> list[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
def __init__(
self,
# scheduling
effective: datetime_ = NoInput(0),
termination: datetime | str_ = NoInput(0),
frequency: Frequency | str_ = NoInput(0),
*,
stub: str_ = NoInput(0),
front_stub: datetime_ = NoInput(0),
back_stub: datetime_ = NoInput(0),
roll: int | RollDay | str_ = NoInput(0),
eom: bool_ = NoInput(0),
modifier: str_ = NoInput(0),
calendar: CalInput = NoInput(0),
payment_lag: int_ = NoInput(0),
payment_lag_exchange: int_ = NoInput(0),
ex_div: int_ = NoInput(0),
convention: str_ = NoInput(0),
leg2_effective: datetime_ = NoInput(1),
leg2_termination: datetime | str_ = NoInput(1),
# leg2_frequency: Frequency | str_ = NoInput(1),
leg2_stub: str_ = NoInput(1),
leg2_front_stub: datetime_ = NoInput(1),
leg2_back_stub: datetime_ = NoInput(1),
leg2_roll: int | RollDay | str_ = NoInput(1),
leg2_eom: bool_ = NoInput(1),
leg2_modifier: str_ = NoInput(1),
leg2_calendar: CalInput = NoInput(1),
leg2_payment_lag: int_ = NoInput(1),
leg2_payment_lag_exchange: int_ = NoInput(1),
leg2_ex_div: int_ = NoInput(1),
leg2_convention: str_ = NoInput(1),
# settlement parameters
currency: str_ = NoInput(0),
notional: float_ = NoInput(0),
leg2_notional: float_ = NoInput(-1),
# rate parameters
fixed_rate: DualTypes_ = NoInput(0),
# indexing
leg2_index_base: DualTypes_ = NoInput(0),
leg2_index_lag: int_ = NoInput(0),
leg2_index_method: IndexMethod | str_ = NoInput(0),
leg2_index_fixings: LegFixings = NoInput(0),
# meta parameters
curves: CurvesT_ = NoInput(0),
spec: str_ = NoInput(0),
) -> None:
user_args = dict(
# scheduling
effective=effective,
leg2_effective=leg2_effective,
termination=termination,
leg2_termination=leg2_termination,
frequency=frequency,
# leg2_frequency=leg2_frequency,
stub=stub,
leg2_stub=leg2_stub,
front_stub=front_stub,
leg2_front_stub=leg2_front_stub,
back_stub=back_stub,
leg2_back_stub=leg2_back_stub,
roll=roll,
leg2_roll=leg2_roll,
eom=eom,
leg2_eom=leg2_eom,
modifier=modifier,
leg2_modifier=leg2_modifier,
calendar=calendar,
leg2_calendar=leg2_calendar,
payment_lag=payment_lag,
leg2_payment_lag=leg2_payment_lag,
payment_lag_exchange=payment_lag_exchange,
leg2_payment_lag_exchange=leg2_payment_lag_exchange,
ex_div=ex_div,
leg2_ex_div=leg2_ex_div,
convention=convention,
leg2_convention=leg2_convention,
# settlement
currency=currency,
notional=notional,
leg2_notional=leg2_notional,
# rate
fixed_rate=fixed_rate,
# indexing
leg2_index_base=leg2_index_base,
leg2_index_lag=leg2_index_lag,
leg2_index_method=leg2_index_method,
leg2_index_fixings=leg2_index_fixings,
# meta
curves=self._parse_curves(curves),
)
instrument_args = dict( # these are hard coded arguments specific to this instrument
leg2_currency=NoInput(1),
initial_exchange=False,
final_exchange=False,
leg2_initial_exchange=False,
leg2_final_exchange=True,
leg2_index_only=True,
leg2_fixed_rate=0.0,
leg2_frequency=Frequency.Zero(),
vol=_Vol(),
)
default_args = dict(
notional=defaults.notional,
payment_lag=defaults.payment_lag_specific[type(self).__name__],
payment_lag_exchange=defaults.payment_lag_exchange,
leg2_index_lag=defaults.index_lag,
leg2_index_method=defaults.index_method,
)
self._kwargs = _KWArgs(
spec=spec,
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=["curves", "vol"],
)
self._leg1 = ZeroFixedLeg(**_convert_to_schedule_kwargs(self.kwargs.leg1, 1))
self._leg2 = FixedLeg(**_convert_to_schedule_kwargs(self.kwargs.leg2, 1))
self._legs = [self.leg1, self.leg2]
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
leg2_disc_curve = _get_curve("leg2_disc_curve", False, True, *c)
leg2_index_curve = _get_curve("leg2_index_curve", False, True, *c)
disc_curve = _get_curve("disc_curve", False, True, *c)
leg2_npv: DualTypes = self.leg2.local_npv(
rate_curve=NoInput(0),
disc_curve=leg2_disc_curve,
index_curve=leg2_index_curve,
settlement=settlement,
forward=forward,
)
return (
self.leg1.spread(
target_npv=-leg2_npv,
rate_curve=NoInput(0),
disc_curve=disc_curve,
index_curve=NoInput(0),
settlement=settlement,
forward=forward,
)
/ 100
)
def spread(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
raise NotImplementedError("ZCIS has no concept of `spread` - use `rate` instead.")
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
self._set_pricing_mid(
curves=curves,
solver=solver,
settlement=settlement,
forward=forward,
)
return super().npv(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=local,
settlement=settlement,
forward=forward,
)
def _set_pricing_mid(
self,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> None:
# the test for an unpriced IRS is that its fixed rate is not set.
if isinstance(self.kwargs.leg1["fixed_rate"], NoInput):
# set a fixed rate for the purpose of generic methods NPV will be zero.
mid_market_rate = self.rate(
curves=curves,
solver=solver,
settlement=settlement,
forward=forward,
)
self.leg1.fixed_rate = _dual_float(mid_market_rate)
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
An ZCIS has two curve requirements: a leg2_index_curve and a disc_curve used by both legs.
When given as 2 elements the first is treated as the rate curve and the 2nd as disc curve.
"""
if isinstance(curves, NoInput):
return _Curves()
if isinstance(curves, dict):
return _Curves(
index_curve=curves.get("index_curve", NoInput(0)),
disc_curve=curves.get("disc_curve", NoInput(0)),
leg2_index_curve=_drb(
curves.get("index_curve", NoInput(0)),
curves.get("leg2_index_curve", NoInput(0)),
),
leg2_disc_curve=_drb(
curves.get("disc_curve", NoInput(0)),
curves.get("leg2_disc_curve", NoInput(0)),
),
)
elif isinstance(curves, list | tuple):
if len(curves) == 2:
return _Curves(
leg2_index_curve=curves[0],
disc_curve=curves[1],
leg2_disc_curve=curves[1],
)
elif len(curves) == 4:
return _Curves(
leg2_index_curve=curves[2],
disc_curve=curves[1],
leg2_disc_curve=curves[3],
)
else:
raise ValueError(
f"{type(self).__name__} requires only 2 curve types. Got {len(curves)}."
)
elif isinstance(curves, _Curves):
return curves
else:
raise ValueError(f"{type(self).__name__} requires only 2 curve types. Got 1.")
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return super()._cashflows_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return self._local_analytic_rate_fixings_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
)
================================================
FILE: python/rateslib/instruments/zcs.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.instruments.protocols import _BaseInstrument
from rateslib.instruments.protocols.kwargs import _convert_to_schedule_kwargs, _KWArgs
from rateslib.instruments.protocols.pricing import (
_Curves,
_get_curve,
_parse_curves,
_Vol,
)
from rateslib.legs import ZeroFixedLeg, ZeroFloatLeg
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CalInput,
CurvesT_,
DataFrame,
DualTypes,
DualTypes_,
FixingsRates_,
FloatRateSeries,
Frequency,
FXForwards_,
RollDay,
Solver_,
VolT_,
_BaseLeg,
bool_,
datetime,
datetime_,
float_,
int_,
str_,
)
class ZCS(_BaseInstrument):
"""
A *zero coupon swap (ZCS)* composing a :class:`~rateslib.legs.ZeroFixedLeg`
and a :class:`~rateslib.legs.ZeroFloatLeg`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.instruments import ZCS
from datetime import datetime as dt
.. ipython:: python
zcs = ZCS(
effective=dt(2000, 1, 1),
termination="2y",
frequency="S",
fixed_rate=2.0,
)
zcs.cashflows()
.. rubric:: Pricing
A *ZCS* requires a *disc curve* on both legs (which should be the same *Curve*) and a
*leg2 rate curve* to forecast rates on the *ZeroFloatLeg*. The following input formats are
allowed:
.. code-block:: python
curves = curve | [curve] # a single curve is repeated for all required curves
curves = [rate_curve, disc_curve] # two curves are applied in the given order
curves = [None, disc_curve, rate_curve, disc_curve] # four curves applied to each leg
curves = {"leg2_rate_curve": rate_curve, "disc_curve": disc_curve} # dict form is explicit
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **scheduling** parameters.
effective : datetime, :red:`required`
The unadjusted effective date. If given as adjusted, unadjusted alternatives may be
inferred.
termination : datetime, str, :red:`required`
The unadjusted termination date. If given as adjusted, unadjusted alternatives may be
inferred. If given as string tenor will be calculated from ``effective``.
frequency : Frequency, str, :red:`required`
The frequency of the schedule.
If given as string will derive a :class:`~rateslib.scheduling.Frequency` aligning with:
monthly ("M"), quarterly ("Q"), semi-annually ("S"), annually("A") or zero-coupon ("Z"), or
a set number of calendar or business days ("_D", "_B"), weeks ("_W"), months ("_M") or
years ("_Y").
Where required, the :class:`~rateslib.scheduling.RollDay` is derived as per ``roll``
and business day calendar as per ``calendar``.
stub : StubInference, str in {"ShortFront", "LongFront", "ShortBack", "LongBack"}, :green:`optional`
The stub type used if stub inference is required. If given as string will derive a
:class:`~rateslib.scheduling.StubInference`.
front_stub : datetime, :green:`optional`
The unadjusted date for the start stub period. If given as adjusted, unadjusted
alternatives may be inferred.
back_stub : datetime, :green:`optional`
The unadjusted date for the back stub period. If given as adjusted, unadjusted
alternatives may be inferred.
See notes for combining ``stub``, ``front_stub`` and ``back_stub``
and any automatic stub inference.
roll : RollDay, int in [1, 31], str in {"eom", "imm", "som"}, :green:`optional`
The roll day of the schedule. If not given or not available in ``frequency`` will be
inferred for monthly frequency variants.
eom : bool, :green:`optional`
Use an end of month preference rather than regular rolls for ``roll`` inference. Set by
default. Not required if ``roll`` is defined.
modifier : Adjuster, str in {"NONE", "F", "MF", "P", "MP"}, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` used for adjusting unadjusted schedule dates
into adjusted dates. If given as string must define simple date rolling rules.
calendar : calendar, str, :green:`optional`
The business day calendar object to use. If string will call
:meth:`~rateslib.scheduling.get_calendar`.
payment_lag: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
a payment date. If given as integer will define the number of business days to
lag payments by.
payment_lag_exchange: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional payment date. If given as integer will define the number of business days to
lag payments by.
ex_div: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional dates, which may be used, for example by fixings schedules. If given as integer
will define the number of business days to lag dates by.
convention: str, :green:`optional (set by 'defaults')`
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
leg2_effective : datetime, :green:`optional (inherited from leg1)`
leg2_termination : datetime, str, :green:`optional (inherited from leg1)`
leg2_frequency : Frequency, str, :green:`optional (inherited from leg1)`
leg2_stub : StubInference, str, :green:`optional (inherited from leg1)`
leg2_front_stub : datetime, :green:`optional (inherited from leg1)`
leg2_back_stub : datetime, :green:`optional (inherited from leg1)`
leg2_roll : RollDay, int, str, :green:`optional (inherited from leg1)`
leg2_eom : bool, :green:`optional (inherited from leg1)`
leg2_modifier : Adjuster, str, :green:`optional (inherited from leg1)`
leg2_calendar : calendar, str, :green:`optional (inherited from leg1)`
leg2_payment_lag: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_payment_lag_exchange: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_ex_div: Adjuster, int, :green:`optional (inherited from leg1)`
leg2_convention: str, :green:`optional (inherited from leg1)`
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the *Instrument* (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The initial leg notional, defined in units of *reference currency*.
leg2_notional : float, Dual, Dual2, Variable, :green:`optional (negatively inherited from leg1)`
.. note::
The following are **rate parameters**.
fixed_rate : float or None
The fixed rate applied to the :class:`~rateslib.legs.ZeroFixedLeg`. If `None`
will be set to mid-market when curves are provided.
leg2_fixing_method: FloatFixingMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.FloatFixingMethod` describing the determination
of the floating rate for each period.
leg2_fixing_frequency: Frequency, str, :green:`optional (set by 'frequency' or '1B')`
The :class:`~rateslib.scheduling.Frequency` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given is assumed to match the
frequency of the schedule for an IBOR type ``fixing_method`` or '1B' if RFR type.
leg2_fixing_series: FloatRateSeries, str, :green:`optional (implied by other parameters)`
The :class:`~rateslib.data.fixings.FloatRateSeries` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given inherits attributes given
such as the ``calendar``, ``convention``, ``fixing_method`` etc.
leg2_float_spread: float, Dual, Dual2, Variable, :green:`optional (set as 0.0)`
The amount (in bps) added to the rate in each period rate determination.
leg2_spread_compound_method: SpreadCompoundMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.SpreadCompoundMethod` used in the calculation
of the period rate when combining a ``float_spread``. Used **only** with RFR type
``fixing_method``.
leg2_rate_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
See :ref:`Fixings `.
The value of the rate fixing. If a scalar, is used directly. If a string identifier, links
to the central ``fixings`` object and data loader.
.. note::
The following are **meta parameters**.
curves : _BaseCurve, str, dict, _Curves, Sequence, :green:`optional`
Pricing objects passed directly to the *Instrument's* methods' ``curves`` argument. See
**Pricing**.
spec: str, :green:`optional`
A collective group of parameters. See
:ref:`default argument specifications `.
""" # noqa: E501
_rate_scalar = 1.0
@property
def fixed_rate(self) -> DualTypes_:
"""The fixed rate parameter of the composited
:class:`~rateslib.legs.FixedLeg`."""
return self.leg1.fixed_rate
@fixed_rate.setter
def fixed_rate(self, value: DualTypes_) -> None:
self.kwargs.leg1["fixed_rate"] = value
self.leg1.fixed_rate = value
# @property
# def float_spread(self) -> NoReturn:
# """The float spread parameter of the composited
# :class:`~rateslib.legs.FloatLeg`."""
# raise AttributeError(f"Attribute not available on {type(self).__name__}")
# @property
# def leg2_fixed_rate(self) -> NoReturn:
# raise AttributeError(f"Attribute not available on {type(self).__name__}")
@property
def leg2_float_spread(self) -> DualTypes_:
"""The float spread parameter of the composited
:class:`~rateslib.legs.FloatLeg`."""
return self.leg2.float_spread
@leg2_float_spread.setter
def leg2_float_spread(self, value: DualTypes) -> None:
self.kwargs.leg2["float_spread"] = value
self.leg2.float_spread = value
@property
def leg1(self) -> ZeroFixedLeg:
"""The :class:`~rateslib.legs.ZeroFixedLeg` of the *Instrument*."""
return self._leg1
@property
def leg2(self) -> ZeroFloatLeg:
"""The :class:`~rateslib.legs.ZeroFloatLeg` of the *Instrument*."""
return self._leg2
@property
def legs(self) -> list[_BaseLeg]:
"""A list of the *Legs* of the *Instrument*."""
return self._legs
def __init__(
self,
# scheduling
effective: datetime_ = NoInput(0),
termination: datetime | str_ = NoInput(0),
frequency: Frequency | str_ = NoInput(0),
*,
stub: str_ = NoInput(0),
front_stub: datetime_ = NoInput(0),
back_stub: datetime_ = NoInput(0),
roll: int | RollDay | str_ = NoInput(0),
eom: bool_ = NoInput(0),
modifier: str_ = NoInput(0),
calendar: CalInput = NoInput(0),
payment_lag: int_ = NoInput(0),
payment_lag_exchange: int_ = NoInput(0),
ex_div: int_ = NoInput(0),
convention: str_ = NoInput(0),
leg2_effective: datetime_ = NoInput(1),
leg2_termination: datetime | str_ = NoInput(1),
leg2_frequency: Frequency | str_ = NoInput(1),
leg2_stub: str_ = NoInput(1),
leg2_front_stub: datetime_ = NoInput(1),
leg2_back_stub: datetime_ = NoInput(1),
leg2_roll: int | RollDay | str_ = NoInput(1),
leg2_eom: bool_ = NoInput(1),
leg2_modifier: str_ = NoInput(1),
leg2_calendar: CalInput = NoInput(1),
leg2_payment_lag: int_ = NoInput(1),
leg2_payment_lag_exchange: int_ = NoInput(1),
leg2_ex_div: int_ = NoInput(1),
leg2_convention: str_ = NoInput(1),
# settlement parameters
currency: str_ = NoInput(0),
notional: float_ = NoInput(0),
# amortization: float_ = NoInput(0),
leg2_notional: float_ = NoInput(-1),
# leg2_amortization: float_ = NoInput(-1),
# rate parameters
fixed_rate: DualTypes_ = NoInput(0),
leg2_float_spread: DualTypes_ = NoInput(0),
leg2_spread_compound_method: str_ = NoInput(0),
leg2_rate_fixings: FixingsRates_ = NoInput(0),
leg2_fixing_method: str_ = NoInput(0),
leg2_fixing_frequency: Frequency | str_ = NoInput(0),
leg2_fixing_series: FloatRateSeries | str_ = NoInput(0),
# meta parameters
curves: CurvesT_ = NoInput(0),
spec: str_ = NoInput(0),
) -> None:
user_args = dict(
# scheduling
effective=effective,
leg2_effective=leg2_effective,
termination=termination,
leg2_termination=leg2_termination,
frequency=frequency,
leg2_frequency=leg2_frequency,
stub=stub,
leg2_stub=leg2_stub,
front_stub=front_stub,
leg2_front_stub=leg2_front_stub,
back_stub=back_stub,
leg2_back_stub=leg2_back_stub,
roll=roll,
leg2_roll=leg2_roll,
eom=eom,
leg2_eom=leg2_eom,
modifier=modifier,
leg2_modifier=leg2_modifier,
calendar=calendar,
leg2_calendar=leg2_calendar,
payment_lag=payment_lag,
leg2_payment_lag=leg2_payment_lag,
payment_lag_exchange=payment_lag_exchange,
leg2_payment_lag_exchange=leg2_payment_lag_exchange,
ex_div=ex_div,
leg2_ex_div=leg2_ex_div,
convention=convention,
leg2_convention=leg2_convention,
# settlement
currency=currency,
notional=notional,
leg2_notional=leg2_notional,
# rate
fixed_rate=fixed_rate,
leg2_float_spread=leg2_float_spread,
leg2_spread_compound_method=leg2_spread_compound_method,
leg2_rate_fixings=leg2_rate_fixings,
leg2_fixing_method=leg2_fixing_method,
leg2_fixing_series=leg2_fixing_series,
leg2_fixing_frequency=leg2_fixing_frequency,
# meta
curves=self._parse_curves(curves),
)
instrument_args = dict( # these are hard coded arguments specific to this instrument
leg2_currency=NoInput(1),
initial_exchange=False,
final_exchange=False,
leg2_initial_exchange=False,
leg2_final_exchange=False,
vol=_Vol(),
# amortization=NoInput(0),
# leg2_amortization=NoInput(0),
)
default_args = dict(
notional=defaults.notional,
payment_lag=defaults.payment_lag_specific[type(self).__name__],
payment_lag_exchange=defaults.payment_lag_exchange,
)
self._kwargs = _KWArgs(
spec=spec,
user_args={**user_args, **instrument_args},
default_args=default_args,
meta_args=["curves", "vol"],
)
self._leg1 = ZeroFixedLeg(**_convert_to_schedule_kwargs(self.kwargs.leg1, 1))
self._leg2 = ZeroFloatLeg(**_convert_to_schedule_kwargs(self.kwargs.leg2, 1))
self._legs = [self.leg1, self.leg2]
def rate(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
metric: str_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
leg2_rate_curve = _get_curve("leg2_rate_curve", True, True, *c)
leg2_disc_curve = _get_curve("leg2_disc_curve", False, False, *c)
disc_curve = _get_curve("disc_curve", False, False, *c)
leg2_npv: DualTypes = self.leg2.local_npv(
rate_curve=leg2_rate_curve,
disc_curve=leg2_disc_curve,
index_curve=NoInput(0),
settlement=settlement,
forward=forward,
)
return (
self.leg1.spread(
target_npv=-leg2_npv,
rate_curve=NoInput(0),
disc_curve=disc_curve,
index_curve=NoInput(0),
settlement=settlement,
forward=forward,
)
/ 100
)
def spread(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
c = _parse_curves(self, curves, solver)
leg2_rate_curve = _get_curve("leg2_rate_curve", True, True, *c)
disc_curve = _get_curve("disc_curve", False, False, *c)
leg2_disc_curve = _get_curve("leg2_disc_curve", False, False, *c)
leg1_npv: DualTypes = self.leg1.local_npv(
rate_curve=NoInput(0),
disc_curve=disc_curve,
index_curve=NoInput(0),
settlement=settlement,
forward=forward,
)
return self.leg2.spread(
target_npv=-leg1_npv,
rate_curve=leg2_rate_curve,
disc_curve=leg2_disc_curve,
index_curve=NoInput(0),
settlement=settlement,
forward=forward,
)
def npv(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
self._set_pricing_mid(
curves=curves,
solver=solver,
settlement=settlement,
forward=forward,
)
return super().npv(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
local=local,
settlement=settlement,
forward=forward,
)
def _set_pricing_mid(
self,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> None:
# the test for an unpriced IRS is that its fixed rate is not set.
if isinstance(self.kwargs.leg1["fixed_rate"], NoInput):
# set a fixed rate for the purpose of generic methods NPV will be zero.
mid_market_rate = self.rate(
curves=curves,
solver=solver,
settlement=settlement,
forward=forward,
)
self.leg1.fixed_rate = _dual_float(mid_market_rate)
def _parse_curves(self, curves: CurvesT_) -> _Curves:
"""
An ZCS has two curve requirements: a leg2_rate_curve and a disc_curve used by both legs.
When given as only 1 element this curve is applied to all of the those components
When given as 2 elements the first is treated as the rate curve and the 2nd as disc curve.
"""
if isinstance(curves, NoInput):
return _Curves()
elif isinstance(curves, dict):
return _Curves(
rate_curve=curves.get("rate_curve", NoInput(0)),
disc_curve=curves.get("disc_curve", NoInput(0)),
leg2_rate_curve=_drb(
curves.get("rate_curve", NoInput(0)),
curves.get("leg2_rate_curve", NoInput(0)),
),
leg2_disc_curve=_drb(
curves.get("disc_curve", NoInput(0)),
curves.get("leg2_disc_curve", NoInput(0)),
),
)
elif isinstance(curves, list | tuple):
if len(curves) == 2:
return _Curves(
leg2_rate_curve=curves[0],
disc_curve=curves[1],
leg2_disc_curve=curves[1],
)
elif len(curves) == 1:
return _Curves(
leg2_rate_curve=curves[0],
disc_curve=curves[0],
leg2_disc_curve=curves[0],
)
else:
raise ValueError(
f"{type(self).__name__} requires only 2 curve types. Got {len(curves)}."
)
elif isinstance(curves, _Curves):
return curves
else: # `curves` is just a single input which is copied across all curves
return _Curves(
leg2_rate_curve=curves, # type: ignore[arg-type]
disc_curve=curves, # type: ignore[arg-type]
leg2_disc_curve=curves, # type: ignore[arg-type]
)
def _parse_vol(self, vol: VolT_) -> _Vol:
return _Vol()
def cashflows(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return super()._cashflows_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
base=base,
settlement=settlement,
forward=forward,
)
def local_analytic_rate_fixings(
self,
*,
curves: CurvesT_ = NoInput(0),
solver: Solver_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
vol: VolT_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
return self._local_analytic_rate_fixings_from_legs(
curves=curves,
solver=solver,
fx=fx,
vol=vol,
settlement=settlement,
forward=forward,
)
================================================
FILE: python/rateslib/legs/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from rateslib.legs.amortization import Amortization
from rateslib.legs.credit import CreditPremiumLeg, CreditProtectionLeg
from rateslib.legs.custom import CustomLeg
from rateslib.legs.fixed import FixedLeg, ZeroFixedLeg
from rateslib.legs.float import FloatLeg, ZeroFloatLeg
from rateslib.legs.protocols import _BaseLeg
__all__ = [
"FixedLeg",
"FloatLeg",
"ZeroFixedLeg",
"ZeroFloatLeg",
"CreditPremiumLeg",
"CreditProtectionLeg",
"CustomLeg",
"Amortization",
"_BaseLeg",
]
================================================
FILE: python/rateslib/legs/amortization.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from enum import Enum
from typing import TYPE_CHECKING
from rateslib.enums.generics import NoInput
if TYPE_CHECKING:
from rateslib.local_types import DualTypes, DualTypes_, NoInput # pragma: no cover
class _AmortizationType(Enum):
"""
Enumerable type to define the possible types of amortization that some legs can handle.
"""
NoAmortization = 0
ConstantPeriod = 1
CustomSchedule = 2
class Amortization:
"""
An amortization schedule for any :class:`~rateslib.legs._BaseLeg`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.legs import Amortization
.. ipython:: python
obj = Amortization(n=5, initial=1e6, amortization="to_zero")
obj.outstanding
obj.amortization
Parameters
----------
n: int
The number of periods in the schedule.
initial: float, Dual, Dual2, Variable
The notional applied to the first period in the schedule.
amortization: float, Dual, Dual2, Variable, list, tuple, str, optional
The amortization structure to apply to the schedule.
Notes
-----
If ``amortization`` is:
- not specified then the schedule is assumed to have no amortization.
- some scalar then the amortization amount will be a constant value per period.
- a list or tuple of *n-1* scalars, then this is defines a custome amortization schedule.
- a string flag then an amortization schedule will be calculated directly:
- *"to_zero"*: each period will be a constant value ending with zero implied ending balance.
- *"{float}%"*: each period will amortize by a constant percentage of the outstanding balance.
.. rubric:: Using Amortization with Instruments
This section exemplifies how to use :class:`~rateslib.legs.Amortization` with instruments.
**Key Points**
- Amortization can be added to *Instruments* using the per leg ``amortization`` argument.
- It supports constant notional amortization, or custom schedules or
the :class:`~rateslib.legs.Amortization` class can be used to calculate other simple
structures.
- Some *Instruments* have not yet integrated amortization into their calculation, such as
*Bonds*.
**Standard Amortization**
The :class:`~rateslib.legs.FixedLeg` and :class:`~rateslib.legs.FloatLeg` classes both
have ``amortization`` as an input argument. An :class:`~rateslib.legs.Amortization` class
can be directly supplied or other values are internally passed to this class for
syntactic convenience.
The simplest, and most common, type of ``amortization`` to apply is a constant notional
per period.
.. ipython:: python
:suppress:
from rateslib import XCS, IRS, IndexFixedRateBond, FixedRateBond
from rateslib.legs import FixedLeg, FloatLeg, Amortization
from rateslib.scheduling import Schedule
from datetime import datetime as dt
.. tabs::
.. tab:: FixedLeg
.. ipython:: python
fxl = FixedLeg(
schedule=Schedule(dt(2000, 1, 1), "1y", "Q"),
notional=10e6,
amortization=1e6, # <- 1mm reduction per period
)
fxl.cashflows()[["Type", "Acc Start", "Notional"]]
.. tab:: FloatLeg
.. ipython:: python
fll = FloatLeg(
schedule=Schedule(dt(2000, 1, 1), "1y", "M"),
notional=10e6,
amortization=0.5e6, # 0.5mm reduction per period
)
fll.cashflows()[["Type", "Acc Start", "Notional"]]
Here, the *amortization* is expressed in a specific notional amount reduction per period so,
when applied to an :class:`~rateslib.instruments.IRS`, each leg with different
frequencies should be input directly.
If a *Leg* has a *final notional exchange* then any amortized amount would, under standard
convention, be paid out at the same time as the notional change. The final cashflow will be
reduced by the amount of interim exchanges that have already occurred. This can be
exemplified on a :class:`~rateslib.instruments.XCS`.
.. tabs::
.. tab:: IRS
.. ipython:: python
irs = IRS(
effective=dt(2000, 1, 1),
termination="1Y",
frequency="Q",
leg2_frequency="S",
notional=1e6,
amortization=2e5, # <- Reduces notional on 1st July to 600,000
leg2_amortization=-4e5, # <- Aligns the notional on 1st July
)
irs.cashflows()[["Type", "Acc Start", "Notional"]]
.. tab:: Non-MTM XCS
.. ipython:: python
xcs = XCS(
effective=dt(2000, 1, 1),
termination="1y",
spec="eurusd_xcs",
notional=5e6,
amortization=1e6, # <- 1mm reduction and notional exchange per period
leg2_mtm=False,
)
xcs.cashflows()[["Type", "Period", "Acc Start", "Payment", "Ccy", "Notional", "Reference Ccy"]]
.. tab:: MTM XCS
Mark-to-market :class:`~rateslib.instruments.XCS` also support ``amortization`` which
affects the MTM cashflows respectively.
.. ipython:: python
xcs = XCS(
effective=dt(2000, 1, 1),
termination="1y",
spec="eurusd_xcs",
notional=5e6,
amortization=1e6, # <- 1mm reduction and notional exchange per period
leg2_mtm=True,
)
xcs.cashflows()[["Type", "Period", "Acc Start", "Payment", "Ccy", "Notional", "Reference Ccy"]]
.. rubric:: Custom Amortization
By using the :class:`~rateslib.legs.Amortization` class custom amortization can be directly
input to an *Instrument*. The following examples are the same, with the first being
syntactic convenience for the second. The above examples are also syntactic convenience for
applying the same amortization amount each period.
.. tabs::
.. tab:: Amortization List
.. ipython:: python
irs = IRS(
effective=dt(2000, 1, 1),
termination="1Y",
frequency="Q",
leg2_frequency="S",
notional=1e6,
amortization=[100000, 300000, -5000], # <- Reduces notional on 1st July to 600,000
leg2_amortization=[-400000], # <- Aligns the notional on 1st July
)
irs.cashflows()[["Type", "Acc Start", "Notional"]]
.. tab:: Amortization Object
.. ipython:: python
irs = IRS(
effective=dt(2000, 1, 1),
termination="1Y",
frequency="Q",
leg2_frequency="S",
notional=1e6,
amortization=Amortization(4, 1e6, [100000, 300000, -5000]),
leg2_amortization=Amortization(2, -1e6, [-400000])
)
irs.cashflows()[["Type", "Acc Start", "Notional"]]
.. rubric:: Unsupported Instruments
*Instruments* that currently do **not** support amortization are *Bonds*.
.. tabs::
.. tab:: FixedRateBond
.. ipython:: python
try:
FixedRateBond(
effective=dt(2000, 1, 1),
termination="1y",
spec="us_gb",
notional=5e6,
amortization=1e6,
fixed_rate=2.0,
)
except Exception as e:
print(e)
.. tab:: IndexFixedRateBond
.. ipython:: python
try:
IndexFixedRateBond(
effective=dt(2000, 1, 1),
termination="1y",
spec="us_gb",
notional=5e6,
amortization=1e6,
fixed_rate=2.0,
index_base=100.0,
)
except Exception as e:
print(e)
""" # noqa: E501
_type: _AmortizationType
@property
def amortization(self) -> tuple[DualTypes, ...]:
"""A tuple of (n-1) amortization amounts for each *Period*."""
return self._amortization
@property
def outstanding(self) -> tuple[DualTypes, ...]:
"""A tuple of n outstanding notional amounts for each *Period*."""
return self._outstanding
def __init__(
self,
n: int,
initial: DualTypes,
amortization: DualTypes_ | list[DualTypes] | tuple[DualTypes, ...] | str = NoInput(0),
) -> None:
if isinstance(amortization, NoInput):
self._type = _AmortizationType.NoAmortization
self._amortization: tuple[DualTypes, ...] = (0.0,) * (n - 1)
self._outstanding: tuple[DualTypes, ...] = (initial,) * n
elif isinstance(amortization, list | tuple):
self._type = _AmortizationType.CustomSchedule
if len(amortization) != (n - 1):
raise ValueError(
"Custom amortisation schedules must have `n-1` amortization amounts for `n` "
f"periods.\nGot '{len(amortization)}' amounts for '{n}' periods."
)
self._amortization = tuple(amortization)
outstanding = [initial]
for value in amortization:
outstanding.append(outstanding[-1] - value)
self._outstanding = tuple(outstanding)
elif isinstance(amortization, str):
if amortization.lower() == "to_zero":
self._type = _AmortizationType.ConstantPeriod
self._amortization = (initial / n,) * (n - 1)
self._outstanding = (initial,) + tuple([initial * (1 - i / n) for i in range(1, n)])
elif amortization[-1] == "%":
self._type = _AmortizationType.CustomSchedule
amortization_ = [initial * float(amortization[:-1]) / 100]
outstanding_ = [initial]
for i in range(1, n):
outstanding_.append(outstanding_[-1] - amortization_[-1])
if i != n - 1:
amortization_.append(outstanding_[-1] * float(amortization[:-1]) / 100)
self._outstanding = tuple(outstanding_)
self._amortization = tuple(amortization_)
else:
raise ValueError("`amortization` as string must be one of 'to_zero', '{float}%'.")
else: # isinstance(amortization, DualTypes)
self._type = _AmortizationType.ConstantPeriod
self._amortization = (amortization,) * (n - 1)
self._outstanding = (initial,) + tuple(
[initial - amortization * i for i in range(1, n)]
)
def __mul__(self, other: DualTypes) -> Amortization:
return Amortization(
n=len(self.outstanding),
initial=self.outstanding[0] * other,
amortization=[_ * other for _ in self.amortization],
)
def __rmul__(self, other: DualTypes) -> Amortization:
return self.__mul__(other)
def _get_amortization(
amortization: DualTypes_ | list[DualTypes] | tuple[DualTypes, ...] | str | Amortization,
initial: DualTypes,
n: int,
) -> Amortization:
if isinstance(amortization, Amortization):
return amortization
else:
return Amortization(n, initial, amortization)
================================================
FILE: python/rateslib/legs/credit.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.curves import index_left
from rateslib.enums.generics import NoInput, _drb
from rateslib.legs.amortization import Amortization, _get_amortization
from rateslib.legs.protocols import _BaseLeg, _WithExDiv
from rateslib.periods import CreditPremiumPeriod, CreditProtectionPeriod
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
CurveOption_,
DualTypes,
DualTypes_,
FXForwards_,
Schedule,
_BaseCurve_,
_FXVolOption_,
_SettlementParams,
bool_,
datetime,
datetime_,
str_,
)
class CreditPremiumLeg(_BaseLeg, _WithExDiv):
"""
A *Leg* containing :class:`~rateslib.periods.CreditPremiumPeriod`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import Schedule
from rateslib.legs import CreditPremiumLeg
from datetime import datetime as dt
.. ipython:: python
cpl = CreditPremiumLeg(
schedule=Schedule(
effective=dt(2000, 3, 20),
termination=dt(2001, 3, 20),
frequency="Q",
modifier="FEX",
),
convention="Act360",
fixed_rate=1.0,
notional=10e6,
)
cpl.cashflows()
.. role:: red
.. role:: green
Parameters
----------
schedule: Schedule, :red:`required`
The :class:`~rateslib.scheduling.Schedule` object which structures contiguous *Periods*.
The schedule object also contains data for payment dates, payment dates for notional
exchanges and ex-dividend dates for each period.
.. note::
The following are **period parameters** combined with the ``schedule``.
convention: str, optional
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the leg (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The initial leg notional, defined in units of *reference currency*.
amortization: float, Dual, Dual2, Variable, str, Amortization, :green:`optional (set as zero)`
Set a non-constant notional per *Period*. If a scalar value, adjusts the ``notional`` of
each successive period by that same value. Should have
sign equal to that of notional if the notional is to reduce towards zero.
.. note::
The following define **rate parameters**.
fixed_rate: float, Dual, Dual2, Variable, :green:`optional`
The fixed rate of each composited :class:`~rateslib.periods.CreditPremiumPeriod`.
.. note::
The following parameters define **credit specific** elements.
premium_accrued: bool, :green:`optional (set by 'defaults')`
Whether an accrued premium is paid on the event of mid-period credit default.
"""
@property
def settlement_params(self) -> _SettlementParams:
"""The :class:`~rateslib.periods.parameters._SettlementParams` associated with
the first :class:`~rateslib.periods.FloatPeriod`."""
return self._regular_periods[0].settlement_params
@property
def periods(self) -> list[CreditPremiumPeriod]:
"""Combine all period collection types into an ordered list."""
return list(self._regular_periods)
@property
def fixed_rate(self) -> DualTypes_:
"""The fixed rate parameter of each composited
:class:`~rateslib.periods.CreditPremiumPeriod`."""
return self._fixed_rate
@fixed_rate.setter
def fixed_rate(self, value: DualTypes_) -> None:
self._fixed_rate = value
for period in self._regular_periods:
period.rate_params.fixed_rate = value
@property
def schedule(self) -> Schedule:
"""The :class:`~rateslib.scheduling.Schedule` object of *Leg*."""
return self._schedule
@property
def amortization(self) -> Amortization:
"""
The :class:`~rateslib.legs.Amortization` object associated with the schedule.
"""
return self._amortization
def accrued(self, settlement: datetime) -> DualTypes:
"""
Calculate the amount of premium accrued until a specific date within the relevant *Period*.
Parameters
----------
settlement: datetime
The date against which accrued is measured.
Returns
-------
float, Dual, Dual2, Variable
"""
_ = index_left(
self.schedule.uschedule,
len(self.schedule.uschedule),
settlement,
)
# This index is valid because this Leg only contains CreditPremiumPeriods and no exchanges.
return self.periods[_].accrued(settlement)
def __init__(
self,
schedule: Schedule,
*,
fixed_rate: NoInput = NoInput(0),
premium_accrued: bool_ = NoInput(0),
# settlement and currency
notional: DualTypes_ = NoInput(0),
amortization: DualTypes_ | list[DualTypes] | Amortization | str = NoInput(0),
currency: str_ = NoInput(0),
# period
convention: str_ = NoInput(0),
) -> None:
self._fixed_rate = fixed_rate
self._schedule = schedule
self._notional: DualTypes = _drb(defaults.notional, notional)
self._amortization: Amortization = _get_amortization(
amortization, self._notional, self.schedule.n_periods
)
self._currency: str = _drb(defaults.base_currency, currency).lower()
self._convention: str = _drb(defaults.convention, convention)
self._regular_periods = tuple(
[
CreditPremiumPeriod(
fixed_rate=fixed_rate,
premium_accrued=premium_accrued,
# currency args
payment=self.schedule.pschedule[i + 1],
currency=self._currency,
notional=self.amortization.outstanding[i],
ex_dividend=self.schedule.pschedule3[i + 1],
# period params
start=self.schedule.aschedule[i],
end=self.schedule.aschedule[i + 1],
frequency=self.schedule.frequency_obj,
convention=self._convention,
termination=self.schedule.aschedule[-1],
stub=self.schedule._stubs[i],
roll=NoInput(0), # defined by Frequency
calendar=self.schedule.calendar,
adjuster=self.schedule.accrual_adjuster,
)
for i in range(self.schedule.n_periods)
]
)
# # No amortization exchanges
# self._interim_exchange_periods = None
# self._exchange_periods = (None, None)
# self._mtm_exchange_periods = None
def spread(
self,
*,
target_npv: DualTypes,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
a_delta = self.local_analytic_delta(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
fx=fx,
forward=forward,
settlement=settlement,
)
return -target_npv / a_delta
class CreditProtectionLeg(_BaseLeg):
"""
A *Leg* containing :class:`~rateslib.periods.CreditProtectionPeriod`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import dt, CreditProtectionLeg, Schedule
.. ipython:: python
cpl = CreditProtectionLeg(
schedule=Schedule(
effective=dt(2000, 3, 20),
termination=dt(2001, 3, 30),
frequency="Z",
),
notional=10e6,
)
cpl.cashflows()
.. role:: red
.. role:: green
Parameters
----------
schedule: Schedule, :red:`required`
The :class:`~rateslib.scheduling.Schedule` object which structures contiguous *Periods*.
The schedule object also contains data for payment dates, payment dates for notional
exchanges and ex-dividend dates for each period.
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the leg (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The initial leg notional, defined in units of *reference currency*.
amortization: float, Dual, Dual2, Variable, str, Amortization, :green:`optional (set as zero)`
Set a non-constant notional per *Period*. If a scalar value, adjusts the ``notional`` of
each successive period by that same value. Should have
sign equal to that of notional if the notional is to reduce towards zero.
""" # noqa: E501
@property
def settlement_params(self) -> _SettlementParams:
"""The :class:`~rateslib.periods.parameters._SettlementParams` associated with
the first :class:`~rateslib.periods.FloatPeriod`."""
return self._regular_periods[0].settlement_params
@property
def periods(self) -> list[CreditProtectionPeriod]:
"""Combine all period collection types into an ordered list."""
return list(self._regular_periods)
@property
def schedule(self) -> Schedule:
"""The :class:`~rateslib.scheduling.Schedule` object of *Leg*."""
return self._schedule
@property
def amortization(self) -> Amortization:
"""
The :class:`~rateslib.legs.Amortization` object associated with the schedule.
"""
return self._amortization
def __init__(
self,
schedule: Schedule,
*,
# settlement and currency
notional: DualTypes_ = NoInput(0),
amortization: DualTypes_ | list[DualTypes] | Amortization | str = NoInput(0),
currency: str_ = NoInput(0),
# period
# convention: str_ = NoInput(0),
) -> None:
self._schedule = schedule
self._notional: DualTypes = _drb(defaults.notional, notional)
self._amortization: Amortization = _get_amortization(
amortization, self._notional, self.schedule.n_periods
)
self._currency: str = _drb(defaults.base_currency, currency).lower()
# self._convention: str = _drb(defaults.convention, convention)
self._regular_periods = tuple(
[
CreditProtectionPeriod(
# currency args
payment=self.schedule.pschedule[i + 1],
currency=self._currency,
notional=self.amortization.outstanding[i],
ex_dividend=self.schedule.pschedule3[i + 1],
# period params
start=self.schedule.aschedule[i],
end=self.schedule.aschedule[i + 1],
frequency=self.schedule.frequency_obj,
# convention=self._convention,
termination=self.schedule.aschedule[-1],
stub=self.schedule._stubs[i],
roll=NoInput(0), # defined by Frequency
calendar=self.schedule.calendar,
adjuster=self.schedule.accrual_adjuster,
)
for i in range(self.schedule.n_periods)
]
)
# # No amortization exchanges
# self._interim_exchange_periods = None
# self._exchange_periods = (None, None)
# self._mtm_exchange_periods = None
def analytic_rec_risk(
self,
rate_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
base: str_ = NoInput(0),
) -> float:
"""
Return the analytic recovery risk of the *CreditProtectionLeg* via summing all periods.
For arguments see
:meth:`BasePeriod.analytic_delta()`.
"""
_ = (
period.analytic_rec_risk(
rate_curve=rate_curve,
disc_curve=disc_curve,
fx=fx,
base=base,
)
for period in self.periods
)
ret: float = sum(_)
return ret
def spread(self, *args: Any, **kwargs: Any) -> DualTypes:
raise NotImplementedError(f"{type(self).__name__} does not implement `spread`.")
================================================
FILE: python/rateslib/legs/custom.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib.legs.protocols import _BaseLeg
from rateslib.periods.protocols import _BasePeriod
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
DualTypes,
Sequence,
)
class CustomLeg(_BaseLeg):
"""
A *Leg* containing user specified :class:`~rateslib.periods._BasePeriod`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.legs import CustomLeg
from rateslib.periods import FixedPeriod
.. ipython:: python
fp1 = FixedPeriod(
start=dt(2021,1,1),
end=dt(2021,7,1),
payment=dt(2021,7,2),
frequency="Q",
notional=1e6,
convention="Act365F",
fixed_rate=2.10
)
fp2 = FixedPeriod(
start=dt(2021,3,7),
end=dt(2021,9,7),
payment=dt(2021,9,8),
frequency="Q",
notional=-5e6,
convention="Act365F",
fixed_rate=3.10
)
custom_leg = CustomLeg(periods=[fp1, fp2])
custom_leg.cashflows()
Parameters
----------
periods : iterable of _BasePeriod
A sequence of *Periods* to attach to the leg.
""" # noqa: E501
@property
def periods(self) -> Sequence[_BasePeriod]:
"""Combine all period collection types into an ordered list."""
return self._periods
def __init__(self, periods: Sequence[_BasePeriod]) -> None:
if not all(isinstance(p, _BasePeriod) for p in periods):
raise ValueError(
"Each object in `periods` must be an instance of `_BasePeriod`.",
)
self._periods = periods
def spread(self, *args: Any, **kwargs: Any) -> DualTypes:
return super().spread(*args, **kwargs) # type: ignore[safe-super]
================================================
FILE: python/rateslib/legs/fixed.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from functools import cached_property
from typing import TYPE_CHECKING
import rateslib.errors as err
from rateslib import defaults
from rateslib.curves._parsers import (
_disc_required_maybe_from_curve,
)
from rateslib.data.fixings import _leg_fixings_to_list
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import LegIndexBase, LegMtm, _get_leg_index_base, _get_leg_mtm
from rateslib.legs.amortization import Amortization, _AmortizationType, _get_amortization
from rateslib.legs.protocols import (
_BaseLeg,
_WithExDiv,
)
from rateslib.periods import (
Cashflow,
FixedPeriod,
MtmCashflow,
ZeroFixedPeriod,
)
from rateslib.periods.protocols import (
_BasePeriod,
)
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CurveOption_,
DualTypes,
DualTypes_,
FXForwards_,
FXIndex,
IndexMethod,
LegFixings,
Schedule,
Series,
_BaseCurve_,
_FXVolOption_,
_SettlementParams,
datetime,
datetime_,
int_,
str_,
)
class FixedLeg(_BaseLeg, _WithExDiv):
"""
A *Leg* containing :class:`~rateslib.periods.FixedPeriod`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import fixings, Schedule
from pandas import Series
from rateslib.legs import FixedLeg
from datetime import datetime as dt
.. ipython:: python
fl = FixedLeg(
schedule=Schedule(
effective=dt(2000, 2, 1),
termination=dt(2002, 2, 1),
frequency="S",
),
convention="ActActICMA",
fixed_rate=2.5,
notional=10e6,
)
fl.cashflows()
.. role:: red
.. role:: green
Parameters
----------
schedule: Schedule, :red:`required`
The :class:`~rateslib.scheduling.Schedule` object which structures contiguous *Periods*.
The schedule object also contains data for payment dates, payment dates for notional
exchanges and ex-dividend dates for each period.
.. note::
The following are **period parameters** combined with the ``schedule``.
convention: str, :green:`optional (set by 'defaults')`
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the leg (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The initial leg notional, defined in units of *reference currency*.
amortization: float, Dual, Dual2, Variable, str, Amortization, :green:`optional (set as zero)`
Set a non-constant notional per *Period*. If a scalar value, adjusts the ``notional`` of
each successive period by that same value. Should have
sign equal to that of notional if the notional is to reduce towards zero.
initial_exchange : bool, :green:`optional (set as False)`
Whether to also include an initial notional exchange. If *True* then ``final_exchange``
**will** also be set to *True*.
final_exchange : bool, :green:`optional (set as initial_exchange)`
Whether to also include a final notional exchange and interim amortization
notional exchanges.
.. note::
The following define **rate parameters**.
fixed_rate: float, Dual, Dual2, Variable, :green:`optional`
The fixed rate of each composited :class:`~rateslib.periods.FixedPeriod`.
.. note::
The following define **non-deliverable** parameters. If the *Leg* is directly
deliverable then do not set a non-deliverable ``pair`` or any ``fx_fixings``.
pair: FXIndex, str, :green:`optional`
The :class:`~rateslib.data.fixings.FXIndex` for :class:`~rateslib.data.fixings.FXFixing`
defining the currency pair that determines *Period*
settlement. The *reference currency* is implied from ``pair``. Must include ``currency``.
fx_fixings: float, Dual, Dual2, Variable, Series, str, 2-tuple or list, :green:`optional`
The value of the :class:`~rateslib.data.fixings.FXFixing` for each *Period* according
to non-deliverability. Review the **notes** section non-deliverability, and
:ref:`fixings `. This should only ever be entered as either:
- scalar value: 1.15,
- fixings series: "Reuters_ZBS",
- tuple of transaction rate and fixing series: (1.25, "Reuters_ZBC")
mtm: LegMtm or str, :green:`optional (set to 'initial')`
Define how the fixing dates are determined for each :class:`~rateslib.data.fixings.FXFixing`
See **Notes** regarding non-deliverability.
.. note::
The following parameters define **indexation**. The *Period* will be considered
indexed if any of ``index_method``, ``index_lag``, ``index_base``, ``index_fixings``
are given.
index_method : IndexMethod, str, :green:`optional (set by 'defaults')`
The interpolation method, or otherwise, to determine index values from reference dates.
index_lag: int, :green:`optional (set by 'defaults')`
The indexation lag, in months, applied to the determination of index values.
index_base: float, Dual, Dual2, Variable, :green:`optional`
The specific value applied as the base index value for all *Periods*.
If not given and ``index_fixings`` is a string fixings identifier that will be
used to determine the base index value.
index_fixings: float, Dual, Dual2, Variable, Series, str, 2-tuple or list, :green:`optional`
The index value for the reference date.
Best practice is to supply this value as string identifier relating to the global
``fixings`` object. See :ref:`fixings `.
index_only: bool, :green:`optional (set as False)`
A flag which indicates that the nominal amount is deducted from the cashflow leaving only
the indexed up quantity.
index_base_type: LegIndexBase, str, :green:`optional (set as 'initial')`
A parameter to define how the ``index_base_date`` is set on each period. See notes.
Notes
-----
**Typical Fixed Legs**
A typical *FixedLeg* has no amortization, no indexation, is directly deliverable and offers
no notional exchanges. This represents one component of, for example, an
:class:`~rateslib.instruments.IRS`.
.. ipython:: python
leg = FixedLeg(
schedule=Schedule(dt(2000, 1, 1), dt(2000, 7, 1), "Q"),
fixed_rate=2.0,
convention="Act360",
notional=5000000,
)
print(leg.cashflows())
**Notional Exchanges**
Notional exchanges are common elements on securities, e.g. a
:class:`~rateslib.instruments.FixedRateBond`. These can be specifically included using the
``final_exchange`` and ``initial_exchange`` parameters.
.. ipython:: python
leg = FixedLeg(
schedule=Schedule(dt(2000, 1, 1), dt(2000, 7, 1), "Q"),
fixed_rate=2.0,
convention="Act360",
notional=5000000,
final_exchange=True,
)
print(leg.cashflows())
Initial and final notional exchanges have opposite directions.
**Amortization**
Amortization can be applied either with customised schedules, or with simpler consistent
amounts per period.
If ``final_exchange`` is *True* then amortization will also create interim notional exchange
cashflows. Note that a same sign ``amortization`` value is translated into
a notional reduction. If ``final_exchange`` is *False*, or amortization is zero, there are no
interim notional exchange cashflows generated.
.. ipython:: python
leg = FixedLeg(
schedule=Schedule(dt(2000, 1, 1), dt(2000, 7, 1), "Q"),
fixed_rate=2.0,
convention="Act360",
notional=5000000,
amortization=1000000,
final_exchange=True,
)
print(leg.cashflows())
**Indexation**
An :class:`~rateslib.instruments.IndexFixedRateBond` is the most common instrument that
uses an index-linked *FixedLeg*. Setting *index* parameters creates the necessary
indexation of cashflows. Note that all previous features such as notional exchanges and
amortization are all adjusted appropriately.
.. ipython:: python
fixings.add("MY_RPI", Series(
index=[dt(2000, 1, 1), dt(2000, 4, 1), dt(2000, 7, 1)],
data=[101.0, 102.0, 103.0]
))
leg = FixedLeg(
schedule=Schedule(dt(2000, 1, 1), dt(2000, 7, 1), "Q"),
fixed_rate=2.0,
convention="Act360",
notional=5000000,
amortization=1000000,
final_exchange=True,
index_fixings="MY_RPI",
index_lag=0,
index_method="monthly",
)
print(leg.cashflows())
Any interim notional exchange cashflows generated by ``amortization`` are also indexed.
If ``index_base_type`` is set to :class:`LegIndexBase.Initial` then every period will have its
*index base date* set to the first date of the adjusted accrual schedule (``aschedule[0]``).
If ``index_base_type`` is set to :class:`LegIndexBase.PeriodOnPeriod` then each *index base
date* is set to the adjusted accrual date of the start of each period.
**Non-Deliverability**
The leg uses a ``mtm`` argument to define the types of non-deliverability that it can
construct. Currently there are three kinds which cater to the various type of requirements
for, *ND-IRS*, *MTM-XCS*, *non-MTM XCS*, *ND-XCS*
.. tabs::
.. tab:: Initial
This uses the *Initial* variant of a :class:`~rateslib.enums.LegMtm` and it
defines all :class:`~rateslib.data.fixings.FXFixing` on the *Leg* to be a single date
at the start of the *Leg* (derived from ``schedule.pschedule2[0]``). Usually this fixing is
directly specified being agreed at execution of the transaction and not dependent
upon a published financial fixing.
This type of *non-deliverability* is suitable to define a *Leg* of one currency, but
expressed by a notional in another currency, and is used for a *non-MTM XCS*.
Since only one fixing is required, ``fx_fixings`` can be entered either as
a known scalar value or string series identifier.
.. ipython:: python
leg = FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2000, 7, 1),
frequency="Q",
payment_lag=1,
payment_lag_exchange=0,
),
fixed_rate=1.0,
initial_exchange=True,
mtm="initial",
currency="usd",
pair="eurusd",
notional=10e6, # <- Leg is a USD leg but expressed with a EUR notional
fx_fixings=1.25, # <- All periods are treated as 12.5mm USD
)
print(leg.cashflows())
.. tab:: Payment
Under the *Payment* variant of a :class:`~rateslib.enums.LegMtm`
all reference currency cashflows are converted to settlement
currency using an :class:`~rateslib.data.fixings.FXFixing` with a date of the payment.
This is probably the most traditional type of non-deliverability and is suitable
for *NDIRS* and *NDXCS* *Instruments*.
The best practice entry for ``fx_fixings`` depends if the *Leg* has
notional exchanges or not. If there is an initial notional exchange then
a 2-tuple, with the first element being the transacted exchange rate and
the second element referring to the fixing
series for future *FX Fixings*. If only future fixings are required then a string
series is used.
.. ipython:: python
fixings.add("WMR_10AM_TY0_T+2_EURUSD", Series(
index=[dt(2000, 1, 1), dt(2000, 4, 2), dt(2000, 7, 1), dt(2000, 7, 2)],
data=[1.26, 1.27, 1.29, 1.295])
)
leg = FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2000, 7, 1),
frequency="Q",
payment_lag=1,
payment_lag_exchange=0,
),
fixed_rate=1.0,
initial_exchange=True,
mtm="payment",
currency="usd",
pair="eurusd",
notional=10e6, # <- Leg settles in USD leg but reference cashflows in EUR
fx_fixings=(1.25, "WMR_10AM_TY0_T+2"), # <- Initial exchange rate and future fixings
)
print(leg.cashflows())
.. tab:: XCS
The *XCS* variant of a :class:`~rateslib.enums.LegMtm` is specially configured
for *MTM-XCS*. These *Legs* have their
cashflows determined with :class:`~rateslib.data.fixings.FXFixing` at the start of
each *Period*, in a manner slightly similar to the *Initial* variant, and specifically
generated :class:`~rateslib.periods.MtmCashflow` *Periods* adjusting the value of the
notional by an *FXFixing* at the end of each *Period*.
The best practice entry for ``fx_fixings`` is as a 2-tuple, with the first
element the transacted exchange rate and the second element referring to the fixing
series for future *FX Fixings*.
.. ipython:: python
fixings.add("WMR_4PM_GMT_T+2_EURUSD", Series(
index=[dt(2000, 4, 1), dt(2000, 4, 2), dt(2000, 7, 2)],
data=[1.265, 1.27, 1.29])
)
leg = FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2000, 7, 1),
frequency="Q",
payment_lag=1,
payment_lag_exchange=0,
),
fixed_rate=1.0,
initial_exchange=True,
currency="usd",
pair="eurusd",
mtm="xcs",
notional=10e6,
fx_fixings=(1.25, "WMR_4PM_GMT_T+2"),
)
print(leg.cashflows())
**Amortization and Non-Deliverability**
When amortization is combined with non-deliverability, the interim notional exchange cashflows
are adjusted appropriately in both the non-mtm and mtm cases.
.. tabs::
.. tab:: Initial
Amortization under this method adopts the same singular fixing as all other *Periods*.
.. ipython:: python
leg = FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2000, 7, 1),
frequency="Q",
payment_lag=1,
payment_lag_exchange=0,
),
fixed_rate=1.0,
initial_exchange=True,
mtm="initial",
currency="usd",
pair="eurusd",
notional=10e6, # <- Leg is a USD leg but expressed with a EUR notional
amortization=4e6,
fx_fixings=1.25, # <- All periods are treated as 12.5mm USD
)
print(leg.cashflows())
.. tab:: Payment
Amortization under this method settles according to the payment date.
.. ipython:: python
leg = FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2000, 7, 1),
frequency="Q",
payment_lag=1,
payment_lag_exchange=0,
),
fixed_rate=1.0,
initial_exchange=True,
mtm="payment",
currency="usd",
pair="eurusd",
notional=10e6, # <- Leg settles in USD leg but reference cashflows in EUR
amortization=4e6,
fx_fixings=(1.25, "WMR_10AM_TY0_T+2"), # <- Initial exchange rate and future fixings
)
print(leg.cashflows())
.. tab:: XCS
Amortization for a *XCS* takes places after the :class:`~rateslib.periods.MtmCashflow`.
.. ipython:: python
leg = FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2000, 7, 1),
frequency="Q",
payment_lag=1,
payment_lag_exchange=0,
),
fixed_rate=1.0,
initial_exchange=True,
currency="usd",
pair="eurusd",
mtm="xcs",
notional=10e6,
amortization=4e6,
fx_fixings=(1.25, "WMR_4PM_GMT_T+2"),
)
print(leg.cashflows())
**Indexation, Non-Deliverability and Amortization**
In the most complicated case, which rarely even relates to real tradable instruments all
of the parameters may be combined. The :meth:`~rateslib.legs.FixedLeg.cashflows`
method outlines the relevant fixing values and dates used in calculations.
.. ipython:: python
leg = FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2000, 7, 1),
frequency="Q",
payment_lag=2,
payment_lag_exchange=1
),
fixed_rate=1.0,
currency="usd",
pair="eurusd",
initial_exchange=True,
notional=5e6,
amortization=1000000,
mtm="xcs",
fx_fixings=(1.25, "WMR_10AM_TY0_T+2"),
index_lag=0,
index_fixings="MY_RPI",
index_method="monthly",
)
print(leg.cashflows())
.. ipython:: python
:suppress:
fixings.pop("WMR_10AM_TY0_T+2_EURUSD")
fixings.pop("WMR_4PM_GMT_T+2_EURUSD")
fixings.pop("MY_RPI")
""" # noqa: E501
@property
def settlement_params(self) -> _SettlementParams:
"""The :class:`~rateslib.periods.parameters._SettlementParams` associated with
the first :class:`~rateslib.periods.FixedPeriod`."""
return self._regular_periods[0].settlement_params
@cached_property
def periods(self) -> list[_BasePeriod]:
"""A list of all contained *Periods*."""
periods_: list[_BasePeriod] = []
if self._exchange_periods[0] is not None:
periods_.append(self._exchange_periods[0])
args: tuple[tuple[_BasePeriod], ...] = (self._regular_periods[:-1],) # type: ignore[assignment]
if self._mtm_exchange_periods is not None:
args = args + (self._mtm_exchange_periods,) # type: ignore[operator]
if self._amortization_exchange_periods is not None:
args = args + (self._amortization_exchange_periods,) # type: ignore[operator]
interleaved_periods_: list[_BasePeriod] = [
item for combination in zip(*args, strict=True) for item in combination
]
interleaved_periods_.append(self._regular_periods[-1]) # add last regular period
periods_.extend(interleaved_periods_)
if self._exchange_periods[1] is not None:
periods_.append(self._exchange_periods[1])
return periods_
@property
def fixed_rate(self) -> DualTypes_:
"""The fixed rate parameter of each composited
:class:`~rateslib.periods.FixedPeriod`."""
return self._fixed_rate
@fixed_rate.setter
def fixed_rate(self, value: DualTypes_) -> None:
self._fixed_rate = value
for period in self._regular_periods:
period.rate_params.fixed_rate = value
@property
def schedule(self) -> Schedule:
"""The :class:`~rateslib.scheduling.Schedule` object of *Leg*."""
return self._schedule
@property
def amortization(self) -> Amortization:
"""
The :class:`~rateslib.legs.Amortization` object associated with the schedule.
"""
return self._amortization
def __init__(
self,
schedule: Schedule,
*,
# settlement and currency
notional: DualTypes_ = NoInput(0),
amortization: DualTypes_ | list[DualTypes] | Amortization | str = NoInput(0),
currency: str_ = NoInput(0),
# non-deliverable
pair: FXIndex | str_ = NoInput(0),
fx_fixings: LegFixings = NoInput(0),
mtm: LegMtm | str = LegMtm.Initial,
# period
convention: str_ = NoInput(0),
initial_exchange: bool = False,
final_exchange: bool = False,
# rate params
fixed_rate: NoInput = NoInput(0),
# index params
index_base: DualTypes_ = NoInput(0),
index_lag: int_ = NoInput(0),
index_method: IndexMethod | str_ = NoInput(0),
index_fixings: LegFixings = NoInput(0),
index_only: bool = False,
index_base_type: LegIndexBase | str_ = NoInput(0),
) -> None:
self._fixed_rate = fixed_rate
del fixed_rate
self._schedule = schedule
del schedule
self._notional: DualTypes = _drb(defaults.notional, notional)
del notional
self._amortization: Amortization = _get_amortization(
amortization, self._notional, self.schedule.n_periods
)
del amortization
self._currency: str = _drb(defaults.base_currency, currency).lower()
del currency
self._convention: str = _drb(defaults.convention, convention)
del convention
self._mtm = _get_leg_mtm(mtm)
del mtm
index_fixings_ = _leg_fixings_to_list(index_fixings, self.schedule.n_periods)
del index_fixings
index_base_type_ = _get_leg_index_base(_drb(defaults.index_base_type, index_base_type))
del index_base_type
# if initial and final exchange with MtM.Payment then there is an extra fixing date
_mtm_param = 1 if (self._mtm == LegMtm.Payment and initial_exchange) else 0
fx_fixings_ = _leg_fixings_to_list(fx_fixings, self.schedule.n_periods + _mtm_param)
del fx_fixings
# Exchange periods
if not initial_exchange:
_ini_cf: Cashflow | None = None
else:
_ini_cf = Cashflow(
payment=self.schedule.pschedule2[0],
notional=-self._amortization.outstanding[0],
currency=self._currency,
ex_dividend=self.schedule.pschedule3[0],
# non-deliverable
pair=pair,
fx_fixings=fx_fixings_[0],
delivery=self.schedule.pschedule2[0],
# index params
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings_[0],
index_base_date=self.schedule.aschedule[0],
index_reference_date=self.schedule.aschedule[0],
index_only=index_only,
)
final_exchange_ = final_exchange or initial_exchange
if not final_exchange_:
_final_cf: Cashflow | None = None
else:
delivery_ = {
LegMtm.Initial: self.schedule.pschedule2[0],
LegMtm.XCS: self.schedule.pschedule2[-2],
LegMtm.Payment: self.schedule.pschedule2[-1],
}
_final_cf = Cashflow(
payment=self.schedule.pschedule2[-1],
notional=self._amortization.outstanding[-1],
currency=self._currency,
ex_dividend=self.schedule.pschedule3[-1],
# non-deliverable
pair=pair,
fx_fixings=fx_fixings_[0] if self._mtm == LegMtm.Initial else fx_fixings_[-1],
delivery=delivery_[self._mtm],
# index parameters
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings_[-1],
index_base_date=self.schedule.aschedule[0]
if index_base_type_ is LegIndexBase.Initial
else self.schedule.aschedule[-2],
index_reference_date=self.schedule.aschedule[-1],
index_only=index_only,
)
self._exchange_periods = (_ini_cf, _final_cf)
self._regular_periods: tuple[FixedPeriod, ...] = tuple(
[
FixedPeriod(
fixed_rate=self.fixed_rate,
# currency args
payment=self.schedule.pschedule[i + 1],
currency=self._currency,
notional=self.amortization.outstanding[i],
ex_dividend=self.schedule.pschedule3[i + 1],
# period params
start=self.schedule.aschedule[i],
end=self.schedule.aschedule[i + 1],
frequency=self.schedule.frequency_obj,
convention=self._convention,
termination=self.schedule.aschedule[-1],
stub=self.schedule._stubs[i],
roll=NoInput(0), # defined by Frequency
calendar=self.schedule.calendar,
adjuster=self.schedule.accrual_adjuster,
# non-deliverable : Not allowed with notional exchange
pair=pair,
fx_fixings=fx_fixings_[0]
if self._mtm == LegMtm.Initial
else fx_fixings_[i + _mtm_param],
delivery=_fx_delivery(i, self._mtm, self.schedule, False, False),
# index params
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings_[i],
index_base_date=self.schedule.aschedule[0]
if index_base_type_ is LegIndexBase.Initial
else self.schedule.aschedule[i],
index_reference_date=self.schedule.aschedule[i + 1],
index_only=index_only,
)
for i in range(self.schedule.n_periods)
]
)
# amortization exchanges
if not final_exchange_ or self.amortization._type == _AmortizationType.NoAmortization:
self._amortization_exchange_periods: tuple[_BasePeriod, ...] | None = None
else:
# only with notional exchange and some Amortization amount
self._amortization_exchange_periods = tuple(
[
Cashflow(
notional=self.amortization.amortization[i],
payment=self.schedule.pschedule2[i + 1],
currency=self._currency,
ex_dividend=self.schedule.pschedule3[i + 1],
# non-deliverable params
pair=pair,
fx_fixings=fx_fixings_[0]
if self._mtm == LegMtm.Initial
else fx_fixings_[i + 1],
delivery=_fx_delivery(
i, self._mtm, self.schedule, True, True
), # schedule for exchanges
# index params
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings_[i],
index_base_date=self.schedule.aschedule[0]
if index_base_type_ is LegIndexBase.Initial
else self.schedule.aschedule[i],
index_reference_date=self.schedule.aschedule[i + 1],
index_only=index_only,
)
for i in range(self.schedule.n_periods - 1)
]
)
# mtm exchanges
if self._mtm == LegMtm.XCS and final_exchange_:
if isinstance(pair, NoInput):
raise ValueError(err.VE_PAIR_AND_LEG_MTM)
self._mtm_exchange_periods: tuple[_BasePeriod, ...] | None = tuple(
[
MtmCashflow(
payment=self.schedule.pschedule2[i + 1],
notional=-self.amortization.outstanding[i],
pair=pair,
start=self.schedule.pschedule2[i],
end=self.schedule.pschedule2[i + 1],
currency=self._currency,
ex_dividend=self.schedule.pschedule3[i + 1],
fx_fixings_start=fx_fixings_[i],
fx_fixings_end=fx_fixings_[i + 1],
# index params
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings_[i],
index_base_date=self.schedule.aschedule[0]
if index_base_type_ is LegIndexBase.Initial
else self.schedule.aschedule[i],
index_reference_date=self.schedule.aschedule[i + 1],
index_only=index_only,
)
for i in range(self.schedule.n_periods - 1)
]
)
else:
self._mtm_exchange_periods = None
def spread(
self,
*,
target_npv: DualTypes,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
# local_npv is calculated to identify the isolated NPV component of cashflow exchanges.
_ = self.fixed_rate
self.fixed_rate = 0.0
local_npv = self.local_npv(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
fx=fx,
forward=forward,
settlement=settlement,
)
self.fixed_rate = _
a_delta = self.local_analytic_delta(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
fx=fx,
forward=forward,
settlement=settlement,
)
return -(target_npv - local_npv) / a_delta
class ZeroFixedLeg(_BaseLeg):
"""
A zero coupon *Leg* composed of a single
:class:`~rateslib.periods.ZeroFixedPeriod` .
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.legs import ZeroFixedLeg
from rateslib.scheduling import Schedule
from datetime import datetime as dt
from pandas import Series
.. ipython:: python
zfl = ZeroFixedLeg(
schedule=Schedule(
effective=dt(2000, 2, 1),
termination=dt(2002, 2, 1),
frequency="S",
),
fixed_rate=2.5,
notional=10e6,
)
zfl.cashflows()
.. role:: red
.. role:: green
Parameters
----------
schedule: Schedule, :red:`required`
The :class:`~rateslib.scheduling.Schedule` object which structures contiguous *Periods*.
The schedule object also contains data for payment dates, payment dates for notional
exchanges and ex-dividend dates for each period.
.. note::
The following are **period parameters** combined with the ``schedule``.
convention: str, :green:`optional (set by 'defaults')`
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the leg (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The initial leg notional, defined in units of *reference currency*.
initial_exchange : bool, :green:`optional (set as False)`
Whether to also include an initial notional exchange. If *True* then ``final_exchange``
**will** also be set to *True*.
final_exchange : bool, :green:`optional (set as initial_exchange)`
Whether to also include a final notional exchange and interim amortization
notional exchanges.
.. note::
The following define **rate parameters**.
fixed_rate: float, Dual, Dual2, Variable, :green:`optional`
The IRR of the composited :class:`~rateslib.periods.ZeroFixedPeriod`.
.. note::
The following define **non-deliverable** parameters. If the *Leg* is directly
deliverable then do not set a non-deliverable ``pair`` or any ``fx_fixings``.
pair: FXIndex, str, :green:`optional`
The :class:`~rateslib.data.fixings.FXIndex` for :class:`~rateslib.data.fixings.FXFixing`
defining the currency pair that determines *Period*
settlement. The *reference currency* is implied from ``pair``. Must include ``currency``.
fx_fixings: float, Dual, Dual2, Variable, Series, str, 2-tuple or list, :green:`optional`
The value of the :class:`~rateslib.data.fixings.FXFixing` for each *Period* according
to non-deliverability.
Review the **notes** section non-deliverability on a :class:`~rateslib.legs.FixedLeg`.
See also :ref:`fixings `.
mtm: LegMtm or str, :green:`optional (set to 'initial')`
Define how the fixing dates are determined for each :class:`~rateslib.data.fixings.FXFixing`
See **Notes** regarding non-deliverability. *XCS* is not allowed on a *Zero* type *Leg*.
Review the **notes** section non-deliverability on a :class:`~rateslib.legs.FixedLeg`.
.. note::
The following parameters define **indexation**. The *Period* will be considered
indexed if any of ``index_method``, ``index_lag``, ``index_base``, ``index_fixings``
are given.
index_method : IndexMethod, str, :green:`optional (set by 'defaults')`
The interpolation method, or otherwise, to determine index values from reference dates.
index_lag: int, :green:`optional (set by 'defaults')`
The indexation lag, in months, applied to the determination of index values.
index_base: float, Dual, Dual2, Variable, :green:`optional`
The specific value applied as the base index value for all *Periods*.
If not given and ``index_fixings`` is a string fixings identifier that will be
used to determine the base index value.
index_fixings: float, Dual, Dual2, Variable, Series, str, 2-tuple or list, :green:`optional`
The index value for the reference date.
Best practice is to supply this value as string identifier relating to the global
``fixings`` object. See :ref:`fixings `.
index_only: bool, :green:`optional (set as False)`
A flag which indicates that the nominal amount is deducted from the cashflow leaving only
the indexed up quantity.
"""
@property
def settlement_params(self) -> _SettlementParams:
"""The :class:`~rateslib.periods.parameters._SettlementParams` associated with
the :class:`~rateslib.periods.ZeroFixedPeriod`."""
return self._regular_periods[0].settlement_params
@cached_property
def periods(self) -> list[_BasePeriod]:
"""A list of all contained *Periods*."""
periods_: list[_BasePeriod] = []
if self._exchange_periods[0] is not None:
periods_.append(self._exchange_periods[0])
periods_.extend(self._regular_periods)
if self._exchange_periods[1] is not None:
periods_.append(self._exchange_periods[1])
return periods_
@property
def schedule(self) -> Schedule:
"""The :class:`~rateslib.scheduling.Schedule` object of *Leg*."""
return self._schedule
@property
def amortization(self) -> Amortization:
"""
The :class:`~rateslib.legs.Amortization` object associated with the schedule.
"""
return self._amortization
def __init__(
self,
schedule: Schedule,
*,
# period
convention: str_ = NoInput(0),
# rate params
fixed_rate: NoInput = NoInput(0),
# settlement and currency
notional: DualTypes_ = NoInput(0),
currency: str_ = NoInput(0),
initial_exchange: bool = False,
final_exchange: bool = False,
# non-deliverable
pair: FXIndex | str_ = NoInput(0),
fx_fixings: LegFixings = NoInput(0),
mtm: LegMtm | str_ = NoInput(0),
# index params
index_base: DualTypes_ = NoInput(0),
index_lag: int_ = NoInput(0),
index_method: IndexMethod | str_ = NoInput(0),
index_fixings: Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
index_only: bool = False,
) -> None:
mtm_ = _get_leg_mtm(_drb("initial", mtm))
del mtm
if mtm_ is LegMtm.XCS:
raise ValueError("`mtm` cannot be XCS variant for a ZeroFixedLeg type.")
self._schedule = schedule
if self.schedule.frequency == "Z":
raise ValueError(
"`frequency` for a ZeroFixedLeg should not be 'Z'. The Leg is zero frequency by "
"construction. Set the `frequency` equal to the compounding frequency of the "
"expressed fixed rate, e.g. 'S' for semi-annual compounding.",
)
self._notional: DualTypes = _drb(defaults.notional, notional)
self._currency: str = _drb(defaults.base_currency, currency).lower()
self._convention: str = _drb(defaults.convention, convention)
self._amortization = Amortization(n=self.schedule.n_periods, initial=self._notional)
index_fixings_ = _leg_fixings_to_list(index_fixings, self.schedule.n_periods)
fx_fixings_ = _leg_fixings_to_list(fx_fixings, self.schedule.n_periods)
# Exchange periods
if not initial_exchange:
_ini_cf: Cashflow | None = None
else:
_ini_cf = Cashflow(
payment=self.schedule.pschedule2[0],
notional=-self._amortization.outstanding[0],
currency=self._currency,
ex_dividend=self.schedule.pschedule3[0],
# non-deliverable
pair=pair,
fx_fixings=fx_fixings_[0],
delivery=self.schedule.pschedule2[0],
# index params
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings_[0],
index_base_date=self.schedule.aschedule[0],
index_reference_date=self.schedule.aschedule[0],
index_only=index_only,
)
final_exchange_ = final_exchange or initial_exchange
if not final_exchange_:
_final_cf: Cashflow | None = None
else:
_final_cf = Cashflow(
payment=self.schedule.pschedule2[-1],
notional=self._amortization.outstanding[-1],
currency=self._currency,
ex_dividend=self.schedule.pschedule3[-1],
# non-deliverable
pair=pair,
fx_fixings=fx_fixings_[0] if mtm_ is LegMtm.Initial else fx_fixings_[-1],
delivery=self.schedule.pschedule2[0]
if mtm_ is LegMtm.Initial
else self.schedule.pschedule2[-1],
# index parameters
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings_[0],
index_base_date=self.schedule.aschedule[0],
index_reference_date=self.schedule.aschedule[-1],
index_only=index_only,
)
self._exchange_periods = (_ini_cf, _final_cf)
self._regular_periods = (
ZeroFixedPeriod(
fixed_rate=NoInput(0),
schedule=self.schedule,
# currency args
currency=self._currency,
notional=self._notional,
# period params
convention=self._convention,
# non-deliverable : Not allowed with notional exchange
pair=pair,
fx_fixings=fx_fixings_[0],
delivery=self.schedule.pschedule2[0]
if mtm_ is LegMtm.Initial
else self.schedule.pschedule2[-1],
# index params
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings_[0],
index_only=index_only,
),
)
self.fixed_rate = fixed_rate
@property
def fixed_rate(self) -> DualTypes_:
"""The fixed rate parameter of the composited
:class:`~rateslib.periods.ZeroFixedPeriod`."""
return self._fixed_rate
@fixed_rate.setter
def fixed_rate(self, value: DualTypes_) -> None:
self._fixed_rate = value
for period in self._regular_periods:
period.rate_params.fixed_rate = value
def spread(
self,
*,
target_npv: DualTypes,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
# scale target_npv accounting for notional exchanges
_ = self.fixed_rate
self.fixed_rate = 0.0
local_npv = self.local_npv(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
fx=fx,
forward=forward,
settlement=settlement,
)
self.fixed_rate = _
rate_target_npv = target_npv - local_npv
# evaluate settlement relative to ex-div
disc_curve_ = _disc_required_maybe_from_curve(rate_curve, disc_curve)
if not isinstance(settlement, NoInput):
if settlement > self.settlement_params.ex_dividend:
raise ZeroDivisionError(
"A `spread` cannot be determined when the *Leg* always has zero value.\n"
"The given `settlement` is after the `ex_dividend` date."
)
else:
w_fwd = disc_curve_[_drb(settlement, forward)]
else:
if isinstance(forward, NoInput):
w_fwd = 1.0
else:
w_fwd = disc_curve_[forward]
immediate_target_npv = rate_target_npv * w_fwd
unindexed_target_npv = immediate_target_npv / self._regular_periods[0].index_up(
1.0, index_curve=index_curve
)
unindexed_reference_target_npv = unindexed_target_npv / self._regular_periods[
0
].convert_deliverable(1.0, fx=fx)
target_cashflow = (
unindexed_reference_target_npv / disc_curve_[self.settlement_params.payment]
)
f = self.schedule.periods_per_annum
d = self._regular_periods[0].dcf
N = self.settlement_params.notional
R = ((-target_cashflow / N + 1) ** (1 / (d * f)) - 1) * f * 10000.0
return R
def _fx_delivery(
i: int,
mtm: LegMtm,
schedule: Schedule,
is_exchange: bool,
is_amortisation: bool,
) -> datetime:
"""Based on the `mtm` parameter determine the FX fixing dates for regular period 'i'."""
if mtm == LegMtm.Initial:
# then ND type is a one-fixing only, so is determined by only a single rate of exchange
# this date is set to the initial payment exchange date of the schedule
return schedule.pschedule2[0]
elif mtm == LegMtm.Payment:
# then the ND type is a NDXCS or a NDIRS which determines FX at payment
if is_exchange:
return schedule.pschedule2[i + 1]
else:
return schedule.pschedule[i + 1]
else: # LegMtm.XCS
# then the ND type is a MTM-XCS which has special MTMCashflow periods
# the relevant FX fixing is set in advance of the period using notional exchange dates
if is_amortisation:
return schedule.pschedule2[i + 1]
else:
return schedule.pschedule2[i]
================================================
FILE: python/rateslib/legs/float.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from functools import cached_property
from typing import TYPE_CHECKING
from pandas import Series
import rateslib.errors as err
from rateslib import defaults
from rateslib.data.fixings import _leg_fixings_to_list
from rateslib.dual import ift_1dim
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import (
FloatFixingMethod,
LegIndexBase,
LegMtm,
SpreadCompoundMethod,
_get_float_fixing_method,
_get_leg_index_base,
_get_leg_mtm,
)
from rateslib.legs.amortization import Amortization, _AmortizationType, _get_amortization
from rateslib.legs.custom import CustomLeg
from rateslib.legs.fixed import _fx_delivery
from rateslib.legs.protocols import _BaseLeg, _WithExDiv
from rateslib.periods import Cashflow, FloatPeriod, MtmCashflow, ZeroFloatPeriod
from rateslib.periods.parameters import _FloatRateParams, _SettlementParams
from rateslib.periods.parameters.rate import _init_float_rate_series
from rateslib.scheduling.schedule import Schedule
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CurveOption_,
DualTypes,
DualTypes_,
FloatRateSeries,
Frequency,
FXForwards_,
FXIndex,
IndexMethod,
LegFixings,
Sequence,
_BaseCurve_,
_BasePeriod,
_FXVolOption_,
bool_,
datetime_,
int_,
str_,
)
class FloatLeg(_BaseLeg, _WithExDiv):
"""
A *Leg* containing :class:`~rateslib.periods.FloatPeriod`
(or optionally multiple :class:`~rateslib.periods.ZeroFloatPeriod`).
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import fixings, Schedule, Curve, FloatRateSeries
from pandas import Series
from rateslib.legs import FloatLeg, CustomLeg
from datetime import datetime as dt
.. ipython:: python
fl = FloatLeg(
schedule=Schedule(
effective=dt(2000, 2, 1),
termination=dt(2002, 2, 1),
frequency="S",
),
convention="Act360",
float_spread=25.0,
notional=10e6,
)
fl.cashflows()
.. role:: red
.. role:: green
Parameters
----------
schedule: Schedule, :red:`required`
The :class:`~rateslib.scheduling.Schedule` object which structures contiguous *Periods*.
The schedule object also contains data for payment dates, payment dates for notional
exchanges and ex-dividend dates for each period.
.. note::
The following are **period parameters** combined with the ``schedule``.
convention: str, :green:`optional (set by 'defaults')`
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the leg (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The initial leg notional, defined in units of *reference currency*.
amortization: float, Dual, Dual2, Variable, str, Amortization, :green:`optional (set as zero)`
Set a non-constant notional per *Period*. If a scalar value, adjusts the ``notional`` of
each successive period by that same value. Should have
sign equal to that of notional if the notional is to reduce towards zero.
initial_exchange : bool, :green:`optional (set as False)`
Whether to also include an initial notional exchange. If *True* then ``final_exchange``
**will** also be set to *True*.
final_exchange : bool, :green:`optional (set as initial_exchange)`
Whether to also include a final notional exchange and interim amortization
notional exchanges.
.. note::
The following define **rate parameters**.
fixing_method: FloatFixingMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.FloatFixingMethod` describing the determination
of the floating rate for each period.
fixing_frequency: Frequency, str, :green:`optional (set by 'frequency' or '1B')`
The :class:`~rateslib.scheduling.Frequency` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given is assumed to match the
frequency of the schedule for an IBOR type ``fixing_method`` or '1B' if RFR type.
fixing_series: FloatRateSeries, str, :green:`optional (implied by other parameters)`
The :class:`~rateslib.data.fixings.FloatRateSeries` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given inherits attributes given
such as the ``calendar``, ``convention``, ``fixing_method`` etc.
float_spread: float, Dual, Dual2, Variable, :green:`optional (set as 0.0)`
The amount (in bps) added to the rate in each period rate determination.
spread_compound_method: SpreadCompoundMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.SpreadCompoundMethod` used in the calculation
of the period rate when combining a ``float_spread``. Used **only** with (non-averaged)
RFR type ``fixing_method``, and when ``zero_periods`` is False.
rate_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
See :ref:`Fixings `.
The value of the rate fixing. If a scalar, is used directly. If a string identifier, links
to the central ``fixings`` object and data loader.
zero_periods: bool, :green:`optional (set as False)`
If *True* a :class:`~rateslib.periods.ZeroFloatPeriod` is used as the regular period
instead of a :class:`~rateslib.periods.FloatPeriod`. See notes.
.. note::
The following define **non-deliverable** parameters. If the *Leg* is directly
deliverable then do not set a non-deliverable ``pair`` or any ``fx_fixings``.
pair: FXIndex, str, :green:`optional`
The :class:`~rateslib.data.fixings.FXIndex` for :class:`~rateslib.data.fixings.FXFixing`
defining the currency pair that determines *Period*
settlement. The *reference currency* is implied from ``pair``. Must include ``currency``.
fx_fixings: float, Dual, Dual2, Variable, Series, str, 2-tuple or list, :green:`optional`
The value of the :class:`~rateslib.data.fixings.FXFixing` for each *Period* according
to non-deliverability.
Review the **notes** section non-deliverability on a :class:`~rateslib.legs.FixedLeg`, and
see also :ref:`fixings `.
This should only ever be entered as either:
- scalar value: 1.15,
- fixings series: "Reuters_ZBS",
- tuple of transaction rate and fixing series: (1.25, "Reuters_ZBC")
mtm: LegMtm or str, :green:`optional (set to 'initial')`
Define how the fixing dates are determined for each :class:`~rateslib.data.fixings.FXFixing`
See **Notes** regarding non-deliverability.
.. note::
The following parameters define **indexation**. The *Period* will be considered
indexed if any of ``index_method``, ``index_lag``, ``index_base``, ``index_fixings``
are given.
index_method : IndexMethod, str, :green:`optional (set by 'defaults')`
The interpolation method, or otherwise, to determine index values from reference dates.
index_lag: int, :green:`optional (set by 'defaults')`
The indexation lag, in months, applied to the determination of index values.
index_base: float, Dual, Dual2, Variable, :green:`optional`
The specific value applied as the base index value for all *Periods*.
If not given and ``index_fixings`` is a string fixings identifier that will be
used to determine the base index value.
index_fixings: float, Dual, Dual2, Variable, Series, str, 2-tuple or list, :green:`optional`
The index value for the reference date.
Best practice is to supply this value as string identifier relating to the global
``fixings`` object.
index_only: bool, :green:`optional (set as False)`
A flag which indicates that the nominal amount is deducted from the cashflow leaving only
the indexed up quantity.
index_base_type: LegIndexBase, str, :green:`optional (set as 'initial')`
A parameter to define how the ``index_base_date`` is set on each period. See notes.
Notes
-----
The various combinations of **amortisation**, **non-deliverability**, **indexation**,
and **notional exchanges** are identical to, and demonstrated in the documentation for, a
:class:`~rateslib.legs.FixedLeg` object.
**Classifications**
There are generally five types of index classification that can be constructed with this *Leg*.
.. ipython:: python
:suppress:
curve = Curve({dt(2026, 1, 1): 1.0, dt(2029, 1, 1): 0.86070797}, calendar="nyc", convention="act360")
.. tabs::
.. tab:: RFR
To construct a standard **RFR** (otherwise known as OIS) type leg, use
any of the non-averaging *'RFR'* variants of the
:class:`~rateslib.enums.FloatFixingMethod` for the ``fixing_method`` parameter.
Using this ``fixing_method`` the ``fixing_frequency`` is always assumed to be *'1B'* for
overnight (o/n) rates.
Any ``spread_compound_method`` can be used in combination with these ``fixing_method``.
Each :class:`~rateslib.periods.FloatPeriod` has an **RFR** classification.
Below is an example of the conventional float leg on a USD-SOFR IRS.
.. ipython:: python
rfr_standard = FloatLeg(
schedule=Schedule(
effective=dt(2026, 1, 22),
termination="2Y",
frequency="A",
calendar="nyc",
payment_lag=2
),
convention="Act360",
fixing_method="rfr_payment_delay",
)
rfr_standard.cashflows(rate_curve=curve)
.. warning::
Do **not** use ``zero_periods`` in the construction of RFR type legs.
Although it is, technically, possible to construct this type of *Leg* using
``zero_periods``. Doing so creates an individual :class:`~rateslib.periods.FloatPeriod`
for every single overnight RFR fixing making up each
:class:`~rateslib.periods.ZeroFloatPeriod`. This is inefficient and removes
other features.
.. tab:: RFR Avg.
To construct an **RFR averaged** type use an *'average'* type variant of the
:class:`~rateslib.enums.FloatFixingMethod` for the ``fixing_method`` parameter.
Each :class:`~rateslib.periods.FloatPeriod` has an **average RFR** classification.
Below is an example of the conventional float leg on an averaged USD-SOFR IRS.
.. ipython:: python
rfr_averaged = FloatLeg(
schedule=Schedule(
effective=dt(2026, 1, 22),
termination="2Y",
frequency="A",
calendar="nyc",
payment_lag=2
),
convention="Act360",
fixing_method="rfr_payment_delay_avg",
)
rfr_averaged.cashflows(rate_curve=curve)
.. warning::
Rates are calculated directly from the provided ``rate_curve``. There are *no
convexity adjustments* applied to account for the difference between compounded
numéraire and averaged result.
.. tab:: IBOR
To construct a standard **IBOR** type leg use the *'ibor'* variant of
the :class:`~rateslib.enums.FloatFixingMethod` for the ``fixing_method`` parameter.
The ``fixing_frequency`` defining tenor of the index will default to that of the schedule.
Each :class:`~rateslib.periods.FloatPeriod` has an **IBOR** or **IBOR Stub**
classification. Stubs can only appear at the front or back of the *Leg* and depend upon
the ``schedule`` directly identifying those periods as *stubs*.
Below is an example of a standard EURIBOR 3M float leg.
.. ipython:: python
ibor_standard = FloatLeg(
schedule=Schedule(
effective=dt(2026, 1, 22),
termination="1Y",
frequency="Q",
calendar="tgt",
payment_lag=0
),
currency="eur",
convention="Act360",
fixing_method="ibor(2)",
)
ibor_standard.cashflows(rate_curve=curve)
.. tab:: Unaligned IBOR
To construct a *Leg* with a different tenor **IBOR** index to that of the schedule,
specify the ``fixing_frequency`` directly.
Each :class:`~rateslib.periods.FloatPeriod` has an **Misaligned IBOR** or **IBOR Stub**
classification. Stubs can only appear at the front or back of the *Leg* and depend upon
the ``schedule`` directly identifying those periods as *stubs*. Stub *Periods* will have
the usual tenor interpolation applied, as with regular IBOR *Legs*, and does not factor
the misalignment into the calculation.
Below is an example of a 1Y float leg with quarterly payments with each fixing to
four distinct EURIBOR 6M rates.
.. ipython:: python
ibor_misaligned = FloatLeg(
schedule=Schedule(
effective=dt(2026, 1, 22),
termination="1Y",
frequency="Q",
calendar="tgt",
payment_lag=0,
),
convention="Act360",
fixing_method="ibor(2)",
fixing_series="eur_ibor",
fixing_frequency="S", # <- frequency of fixing does not match schedule.
)
ibor_misaligned.cashflows(rate_curve=curve)
.. tab:: Multi-IBOR
To construct a *Leg* with multiple IBOR tenor indexes compounded over a single
*Period* set ``zero_periods`` to True. Each *Period* will then be a
:class:`~rateslib.periods.ZeroFloatPeriod`.
This means that each :class:`~rateslib.periods.ZeroFloatPeriod` will need to construct
a sub- :class:`~rateslib.scheduling.Schedule` to define its IBOR publications. Each
sub- :class:`~rateslib.scheduling.Schedule` has a *frequency* equal to
``fixing_frequency`` and each *effective* and *termination* dates match the
*start* and *end* unadjusted accrual dates for each *Period* of the main
``schedule``. When a stub is required, these sub-schedules take steer directly from the
:class:`~rateslib.data.fixings.FloatRateSeries` parameters.
Note the ``float_spread`` is added to each individual
:class:`~rateslib.periods.FloatPeriod` and then all resultant rates are compounded to
yield the final rate for the :class:`~rateslib.periods.ZeroFloatPeriod` (this an
ISDA compounded type calculation).
Two use cases of this have been identified;
- Legacy US-LIBOR single currency basis swaps where the 3M-LIBOR was compounded over
a 6M period to net cashflows with the 6M *Leg*. An example is below:
.. ipython:: python
float_leg = FloatLeg(
schedule=Schedule(
effective=dt(2026, 1, 22),
termination="1Y",
frequency="S",
calendar="nyc",
payment_lag=0
),
convention="Act360",
fixing_series="usd_ibor",
fixing_method="ibor(2)",
zero_periods=True,
fixing_frequency="Q",
float_spread=75.0,
)
float_leg.cashflows(rate_curve=curve)
CustomLeg(float_leg.periods[0].float_periods).cashflows(rate_curve=curve)
- CNY *IRS* with quarterly payments setting to 7D tenor rate. Note that these periods
are often not perfectly divisible, resulting in stub periods within each
:class:`~rateslib.periods.ZeroFloatPeriod`. The position and treatment of these
stubs can be controlled under the :class:`~rateslib.data.fixings.FloatRateSeries`.
.. ipython:: python
float_leg = FloatLeg(
schedule=Schedule(
effective=dt(2026, 1, 21),
termination=dt(2027, 1, 21),
frequency="Q",
calendar="bjs",
),
currency="CNY",
fixing_frequency="7d",
fixing_method="ibor(1)",
fixing_series=FloatRateSeries(
lag=1,
convention="Act365F",
calendar="bjs",
tenors=["7D"],
zero_period_stub="shortback",
modifier="F",
eom=False,
),
zero_periods=True,
)
The individual fixing dates of each of these 7D periods are stored on each
*rate fixing* of each :class:`~rateslib.periods.FloatPeriod`.
.. ipython:: python
for float_period in float_leg.periods[0].float_periods:
print(float_period.rate_params.rate_fixing.date)
""" # noqa: E501
@property
def rate_params(self) -> _FloatRateParams:
"""The :class:`~rateslib.periods.parameters._FloatRateParams` associated with
the first :class:`~rateslib.periods.FloatPeriod`."""
return self._regular_periods[0].rate_params
@property
def settlement_params(self) -> _SettlementParams:
"""The :class:`~rateslib.periods.parameters._SettlementParams` associated with
the first :class:`~rateslib.periods.FloatPeriod`."""
return self._regular_periods[0].settlement_params
@property
def periods(self) -> list[_BasePeriod]:
"""A list of all contained *Periods*."""
periods_: list[_BasePeriod] = []
if self._exchange_periods[0] is not None:
periods_.append(self._exchange_periods[0])
args: tuple[tuple[ZeroFloatPeriod | FloatPeriod | MtmCashflow | Cashflow, ...], ...] = (
self._regular_periods[:-1],
)
if self._mtm_exchange_periods is not None:
args += (self._mtm_exchange_periods,)
if self._amortization_exchange_periods is not None:
args += (self._amortization_exchange_periods,)
interleaved_periods_: list[_BasePeriod] = [
item for combination in zip(*args, strict=True) for item in combination
]
interleaved_periods_.append(self._regular_periods[-1]) # add last regular period
periods_.extend(interleaved_periods_)
if self._exchange_periods[1] is not None:
periods_.append(self._exchange_periods[1])
return periods_
@property
def float_spread(self) -> DualTypes:
"""The float spread parameter of each composited
:class:`~rateslib.periods.FloatPeriod`."""
return self._regular_periods[0].rate_params.float_spread
@float_spread.setter
def float_spread(self, value: DualTypes) -> None:
for period in self._regular_periods:
period.rate_params.float_spread = value
@property
def schedule(self) -> Schedule:
"""The :class:`~rateslib.scheduling.Schedule` object of *Leg*."""
return self._schedule
@property
def amortization(self) -> Amortization:
"""
The :class:`~rateslib.legs.Amortization` object associated with the schedule.
"""
return self._amortization
def __init__(
self,
schedule: Schedule,
*,
# settlement and currency
notional: DualTypes_ = NoInput(0),
amortization: DualTypes_ | list[DualTypes] | Amortization | str = NoInput(0),
currency: str_ = NoInput(0),
# non-deliverable
pair: FXIndex | str_ = NoInput(0),
fx_fixings: LegFixings = NoInput(0),
mtm: LegMtm | str = LegMtm.Initial,
# period
convention: str_ = NoInput(0),
initial_exchange: bool = False,
final_exchange: bool = False,
# rate params
float_spread: DualTypes_ = NoInput(0),
rate_fixings: LegFixings = NoInput(0),
fixing_method: FloatFixingMethod | str_ = NoInput(0),
spread_compound_method: SpreadCompoundMethod | str_ = NoInput(0),
fixing_frequency: Frequency | str_ = NoInput(0),
fixing_series: FloatRateSeries | str_ = NoInput(0),
zero_periods: bool_ = NoInput(0),
# index params
index_base: DualTypes_ = NoInput(0),
index_lag: int_ = NoInput(0),
index_method: IndexMethod | str_ = NoInput(0),
index_fixings: LegFixings = NoInput(0),
index_only: bool = False,
index_base_type: LegIndexBase | str_ = NoInput(0),
) -> None:
zero_periods_ = _drb(False, zero_periods)
del zero_periods
fixing_method_ = _get_float_fixing_method(
method=_drb(defaults.fixing_method, fixing_method)
)
del fixing_method
self._schedule = schedule
del schedule
self._notional: DualTypes = _drb(defaults.notional, notional)
del notional
self._amortization: Amortization = _get_amortization(
amortization, self._notional, self._schedule.n_periods
)
del amortization
self._currency: str = _drb(defaults.base_currency, currency).lower()
del currency
self._convention: str = _drb(defaults.convention, convention)
del convention
self._mtm = _get_leg_mtm(mtm)
del mtm
index_fixings_ = _leg_fixings_to_list(index_fixings, self.schedule.n_periods)
del index_fixings
index_base_type_ = _get_leg_index_base(_drb(defaults.index_base_type, index_base_type))
del index_base_type
# if initial and final exchange with MtM.Payment then there is an extra fixing date
_mtm_param = 1 if (self._mtm == LegMtm.Payment and initial_exchange) else 0
fx_fixings_ = _leg_fixings_to_list(fx_fixings, self.schedule.n_periods + _mtm_param)
del fx_fixings
# Exchange periods
if not initial_exchange:
_ini_cf: Cashflow | None = None
else:
_ini_cf = Cashflow(
payment=self.schedule.pschedule2[0],
notional=-self._amortization.outstanding[0],
currency=self._currency,
ex_dividend=self.schedule.pschedule3[0],
# non-deliverable
pair=pair,
fx_fixings=fx_fixings_[0],
delivery=self.schedule.pschedule2[0],
# index params
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings_[0],
index_base_date=self.schedule.aschedule[0],
index_reference_date=self.schedule.aschedule[0],
index_only=index_only,
)
final_exchange_ = final_exchange or initial_exchange
if not final_exchange_:
_final_cf: Cashflow | None = None
else:
delivery_ = {
LegMtm.Initial: self.schedule.pschedule2[0],
LegMtm.XCS: self.schedule.pschedule2[-2],
LegMtm.Payment: self.schedule.pschedule2[-1],
}
_final_cf = Cashflow(
payment=self.schedule.pschedule2[-1],
notional=self._amortization.outstanding[-1],
currency=self._currency,
ex_dividend=self.schedule.pschedule3[-1],
# non-deliverable
pair=pair,
fx_fixings=fx_fixings_[0] if self._mtm == LegMtm.Initial else fx_fixings_[-1],
delivery=delivery_[self._mtm],
# index parameters
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings_[-1],
index_base_date=self.schedule.aschedule[0]
if index_base_type_ is LegIndexBase.Initial
else self.schedule.aschedule[-2],
index_reference_date=self.schedule.aschedule[-1],
index_only=index_only,
)
self._exchange_periods = (_ini_cf, _final_cf)
if not zero_periods_:
rate_fixings_list = _leg_fixings_to_list(rate_fixings, self._schedule.n_periods)
self._regular_periods: tuple[FloatPeriod | ZeroFloatPeriod, ...] = tuple(
[
FloatPeriod(
float_spread=float_spread,
rate_fixings=rate_fixings_list[i],
fixing_method=fixing_method_,
spread_compound_method=spread_compound_method,
fixing_frequency=fixing_frequency,
fixing_series=fixing_series,
# currency args
payment=self.schedule.pschedule[i + 1],
currency=self._currency,
notional=self.amortization.outstanding[i],
ex_dividend=self.schedule.pschedule3[i + 1],
# period params
start=self.schedule.aschedule[i],
end=self.schedule.aschedule[i + 1],
frequency=self.schedule.frequency_obj,
convention=self._convention,
termination=self.schedule.aschedule[-1],
stub=self.schedule._stubs[i],
roll=NoInput(0), # defined by Frequency
calendar=self.schedule.calendar,
adjuster=self.schedule.accrual_adjuster,
# non-deliverable : Not allowed with notional exchange
pair=pair,
fx_fixings=fx_fixings_[0]
if self._mtm == LegMtm.Initial
else fx_fixings_[i + _mtm_param],
delivery=_fx_delivery(i, self._mtm, self.schedule, False, False),
# index params
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings_[i],
index_base_date=self.schedule.aschedule[0]
if index_base_type_ is LegIndexBase.Initial
else self.schedule.aschedule[i],
index_reference_date=self._schedule.aschedule[i + 1],
index_only=index_only,
)
for i in range(self._schedule.n_periods)
]
)
else:
if isinstance(fixing_frequency, NoInput):
raise ValueError(
"A `fixing_frequency` must be given to `FloatLeg` when "
"`zero_periods` is True.\nWhen using `zero_periods` the intention is to "
"create multiple floating rate periods on the leg which themselves are "
"constructed from multiple floating rate fixings compounded up.\n"
"Therefore more parameters are required to properly specify the scheduling.\n"
"See Notes."
)
fixing_series_ = _init_float_rate_series(
fixing_series=fixing_series,
calendar=self._schedule.calendar,
convention=self._convention,
fixing_method=fixing_method_,
adjuster=self.schedule.accrual_adjuster,
)
del fixing_series
# TODO: this fixings to list must account for sub zero periods - quite tricky
rate_fixings_list = _leg_fixings_to_list(rate_fixings, self._schedule.n_periods)
self._regular_periods = tuple(
[
ZeroFloatPeriod(
schedule=Schedule(
# BBG appears to use the `aschedule` for defining these periods.
# rateslib uses the `uschedule` because it is more consistent from
# outer period to outer period, but more real life example are
# required to fully qualify what should be used here.
# Additionally if adjusted dates were used, rateslib inference means it
# might assert unadjusted start dates which may not align with the
# outer schedule. Matching unadjusted dates mitigates inconsistency.
effective=self.schedule.uschedule[i],
termination=self.schedule.uschedule[i + 1],
frequency=fixing_frequency,
payment_lag=self.schedule.payment_adjuster,
payment_lag_exchange=self.schedule.payment_adjuster2,
extra_lag=self.schedule.payment_adjuster3
if self.schedule.payment_adjuster3 is not None
else NoInput(0),
calendar=self.schedule.calendar,
stub=fixing_series_.zero_period_stub,
),
float_spread=float_spread,
rate_fixings=rate_fixings_list[i],
fixing_method=fixing_method_,
spread_compound_method=spread_compound_method,
fixing_frequency=fixing_frequency,
fixing_series=fixing_series_,
# currency args
currency=self._currency,
notional=self.amortization.outstanding[i],
# period params
convention=self._convention,
# non-deliverable : Not allowed with notional exchange
pair=pair,
fx_fixings=fx_fixings_[0]
if self._mtm == LegMtm.Initial
else fx_fixings_[i + _mtm_param],
delivery=_fx_delivery(i, self._mtm, self.schedule, False, False),
# index params
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings_[i],
index_base_date=self.schedule.aschedule[0]
if index_base_type_ is LegIndexBase.Initial
else self.schedule.aschedule[i],
index_reference_date=self._schedule.aschedule[i + 1],
index_only=index_only,
# meta
metric="simple", # to ensure correct cals in the cashflow for the Leg
)
for i in range(self._schedule.n_periods)
]
)
# amortization exchanges
if not final_exchange_ or self.amortization._type == _AmortizationType.NoAmortization:
self._amortization_exchange_periods: tuple[Cashflow, ...] | None = None
else:
self._amortization_exchange_periods = tuple(
[
Cashflow(
notional=self.amortization.amortization[i],
payment=self.schedule.pschedule2[i + 1],
currency=self._currency,
ex_dividend=self.schedule.pschedule3[i + 1],
# non-deliverable params
pair=pair,
fx_fixings=fx_fixings_[0]
if self._mtm == LegMtm.Initial
else fx_fixings_[i + 1],
delivery=_fx_delivery(
i, self._mtm, self.schedule, True, True
), # schedule for exchanges
# index params
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings_[i],
index_base_date=self.schedule.aschedule[0]
if index_base_type_ is LegIndexBase.Initial
else self.schedule.aschedule[i],
index_reference_date=self._schedule.aschedule[i + 1],
index_only=True,
)
for i in range(self._schedule.n_periods - 1)
]
)
# mtm exchanges
if self._mtm == LegMtm.XCS and final_exchange_:
if isinstance(pair, NoInput):
raise ValueError(err.VE_PAIR_AND_LEG_MTM)
self._mtm_exchange_periods: tuple[MtmCashflow, ...] | None = tuple(
[
MtmCashflow(
payment=self.schedule.pschedule2[i + 1],
notional=-self.amortization.outstanding[i],
pair=pair,
start=self.schedule.pschedule2[i],
end=self.schedule.pschedule2[i + 1],
currency=self._currency,
ex_dividend=self.schedule.pschedule3[i + 1],
fx_fixings_start=fx_fixings_[i],
fx_fixings_end=fx_fixings_[i + 1],
# index params
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings_[i],
index_base_date=self.schedule.aschedule[0]
if index_base_type_ is LegIndexBase.Initial
else self.schedule.aschedule[i],
index_reference_date=self.schedule.aschedule[i + 1],
index_only=index_only,
)
for i in range(self.schedule.n_periods - 1)
]
)
else:
self._mtm_exchange_periods = None
@property
def _is_linear(self) -> bool:
"""
Tests if analytic delta spread is a linear function affecting NPV.
This is non-linear if the spread is itself compounded, which only occurs
on RFR trades with *"isda_compounding"* or *"isda_flat_compounding"*, which
should typically be avoided anyway.
Returns
-------
bool
"""
# ruff: noqa: SIM103
if (
not isinstance(self.rate_params.fixing_method, FloatFixingMethod.IBOR)
and self.rate_params.spread_compound_method != SpreadCompoundMethod.NoneSimple
):
return False
return True
def spread(
self,
*,
target_npv: DualTypes,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
if self._is_linear:
local_npv = self.local_npv(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
fx=fx,
forward=forward,
settlement=settlement,
)
a_delta = self.local_analytic_delta(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
fx=fx,
forward=forward,
settlement=settlement,
)
return -(target_npv - local_npv) / a_delta + self.float_spread
else:
original_z = self.float_spread
def s(g: DualTypes) -> DualTypes:
"""
This determines the NPV change subject to a given float spread change denoted, g.
"""
self.float_spread = g
return self.local_npv(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
fx=fx,
forward=forward,
settlement=settlement,
)
result = ift_1dim(
s=s,
s_tgt=target_npv,
h="ytm_quadratic",
ini_h_args=(-300, 300, 1200),
# h="modified_brent",
# ini_h_args=(-10000, 10000),
func_tol=1e-6,
conv_tol=1e-6,
)
self.float_spread = original_z
_: DualTypes = result["g"]
return _
class ZeroFloatLeg(_BaseLeg):
"""
A zero coupon *Leg* composed of a single :class:`~rateslib.periods.ZeroFloatPeriod`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.legs import ZeroFloatLeg
from rateslib.scheduling import Schedule
from datetime import datetime as dt
from pandas import Series
.. ipython:: python
zfl = ZeroFloatLeg(
schedule=Schedule(
effective=dt(2000, 2, 1),
termination=dt(2002, 2, 1),
frequency="S",
),
notional=10e6,
)
zfl.cashflows()
zfl.float_periods.cashflows()
.. role:: red
.. role:: green
Parameters
----------
schedule: Schedule, :red:`required`
The :class:`~rateslib.scheduling.Schedule` object which structures contiguous *Periods*.
The schedule object also contains data for payment dates, payment dates for notional
exchanges and ex-dividend dates for each period.
.. note::
The following are **period parameters** combined with the ``schedule``.
convention: str, :green:`optional (set by 'defaults')`
The day count convention applied to calculations of period accrual dates.
See :meth:`~rateslib.scheduling.dcf`.
.. note::
The following define generalised **settlement** parameters.
currency : str, :green:`optional (set by 'defaults')`
The local settlement currency of the leg (3-digit code).
notional : float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The initial leg notional, defined in units of *reference currency*.
initial_exchange : bool, :green:`optional (set as False)`
Whether to also include an initial notional exchange. If *True* then ``final_exchange``
**will** also be set to *True*.
final_exchange : bool, :green:`optional (set as initial_exchange)`
Whether to also include a final notional exchange and interim amortization
notional exchanges.
.. note::
The following define **rate parameters**.
fixing_method: FloatFixingMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.FloatFixingMethod` describing the determination
of the floating rate for each period.
fixing_frequency: Frequency, str, :green:`optional (set by 'frequency' or '1B')`
The :class:`~rateslib.scheduling.Frequency` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given is assumed to match the
frequency of the schedule for an IBOR type ``fixing_method`` or '1B' if RFR type.
fixing_series: FloatRateSeries, str, :green:`optional (implied by other parameters)`
The :class:`~rateslib.data.fixings.FloatRateSeries` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given inherits attributes given
such as the ``calendar``, ``convention``, ``fixing_method`` etc.
float_spread: float, Dual, Dual2, Variable, :green:`optional (set as 0.0)`
The amount (in bps) added to the rate in each period rate determination.
spread_compound_method: SpreadCompoundMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.SpreadCompoundMethod` used in the calculation
of the period rate when combining a ``float_spread``. Used **only** with RFR type
``fixing_method``.
rate_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
See :ref:`Fixings `.
The value of the rate fixing. If a scalar, is used directly. If a string identifier, links
to the central ``fixings`` object and data loader. See also :ref:`fixings `.
.. note::
The following define **non-deliverable** parameters. If the *Leg* is directly
deliverable then do not set a non-deliverable ``pair`` or any ``fx_fixings``.
pair: FXIndex, str, :green:`optional`
The :class:`~rateslib.data.fixings.FXIndex` for :class:`~rateslib.data.fixings.FXFixing`
defining the currency pair that determines *Period*
settlement. The *reference currency* is implied from ``pair``. Must include ``currency``.
fx_fixings: float, Dual, Dual2, Variable, Series, str, 2-tuple or list, :green:`optional`
The value of the :class:`~rateslib.data.fixings.FXFixing` for each *Period* according
to non-deliverability. Review the **notes** section non-deliverability
on a :class:`~rateslib.legs.FixedLeg` and see also :ref:`fixings `.
mtm: bool, :green:`optional (set to False)`
Define whether the non-deliverability depends on a single
:class:`~rateslib.data.fixings.FXFixing` defined at the start of the *Leg*, or the end.
Review the **notes** section non-deliverability on a :class:`~rateslib.legs.FixedLeg`.
.. note::
The following parameters define **indexation**. The *Period* will be considered
indexed if any of ``index_method``, ``index_lag``, ``index_base``, ``index_fixings``
are given.
index_method : IndexMethod, str, :green:`optional (set by 'defaults')`
The interpolation method, or otherwise, to determine index values from reference dates.
index_lag: int, :green:`optional (set by 'defaults')`
The indexation lag, in months, applied to the determination of index values.
index_base: float, Dual, Dual2, Variable, :green:`optional`
The specific value applied as the base index value for all *Periods*.
If not given and ``index_fixings`` is a string fixings identifier that will be
used to determine the base index value.
index_fixings: float, Dual, Dual2, Variable, Series, str, 2-tuple or list, :green:`optional`
The index value for the reference date.
Best practice is to supply this value as string identifier relating to the global
``fixings`` object. See also :ref:`fixings `.
"""
@property
def settlement_params(self) -> _SettlementParams:
"""The :class:`~rateslib.periods.parameters._SettlementParams` associated with
the first :class:`~rateslib.periods.FloatPeriod`."""
return self._regular_periods[0].settlement_params
@cached_property
def periods(self) -> Sequence[_BasePeriod]:
"""A list of all contained *Periods*."""
periods_: list[_BasePeriod] = []
if self._exchange_periods[0] is not None:
periods_.append(self._exchange_periods[0])
periods_.extend(self._regular_periods)
if self._exchange_periods[1] is not None:
periods_.append(self._exchange_periods[1])
return periods_
@property
def schedule(self) -> Schedule:
"""The :class:`~rateslib.scheduling.Schedule` object of *Leg*."""
return self._schedule
@property
def amortization(self) -> Amortization:
"""
The :class:`~rateslib.legs.Amortization` object associated with the schedule.
"""
return self._amortization
@property
def rate_params(self) -> _FloatRateParams:
"""The :class:`~rateslib.periods.parameters._FloatRateParams` associated with
the first :class:`~rateslib.periods.FloatPeriod`."""
return self._regular_periods[0].rate_params
@property
def float_spread(self) -> DualTypes:
"""The float spread parameter of each composited
:class:`~rateslib.periods.FloatPeriod`."""
return self._regular_periods[0].rate_params.float_spread
@float_spread.setter
def float_spread(self, value: DualTypes) -> None:
for period in self._regular_periods:
period.rate_params.float_spread = value
@property
def float_periods(self) -> CustomLeg:
"""A :class:`~rateslib.legs.CustomLeg` containing the individual
:class:`~rateslib.periods.FloatPeriod`."""
return CustomLeg(self._regular_periods[0].float_periods)
def __init__(
self,
schedule: Schedule,
*,
float_spread: DualTypes_ = NoInput(0),
rate_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
fixing_method: FloatFixingMethod | str_ = NoInput(0),
spread_compound_method: SpreadCompoundMethod | str_ = NoInput(0),
fixing_frequency: Frequency | str_ = NoInput(0),
fixing_series: FloatRateSeries | str_ = NoInput(0),
# settlement and currency
notional: DualTypes_ = NoInput(0),
currency: str_ = NoInput(0),
# non-deliverable
pair: FXIndex | str_ = NoInput(0),
fx_fixings: LegFixings = NoInput(0),
mtm: bool = False,
# period
convention: str_ = NoInput(0),
initial_exchange: bool = False,
final_exchange: bool = False,
# index params
index_base: DualTypes_ = NoInput(0),
index_lag: int_ = NoInput(0),
index_method: IndexMethod | str_ = NoInput(0),
index_fixings: Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
) -> None:
self._schedule = schedule
if self.schedule.frequency == "Z":
raise ValueError(
"`frequency` for a ZeroFloatLeg should not be 'Z'. The Leg is zero frequency by "
"construction. Set the `frequency` equal to the compounding frequency of the "
"expressed fixed rate, e.g. 'S' for semi-annual compounding.",
)
self._notional: DualTypes = _drb(defaults.notional, notional)
self._currency: str = _drb(defaults.base_currency, currency).lower()
self._convention: str = _drb(defaults.convention, convention)
self._amortization = Amortization(n=self.schedule.n_periods, initial=self._notional)
index_fixings_ = _leg_fixings_to_list(index_fixings, self.schedule.n_periods)
fx_fixings_ = _leg_fixings_to_list(fx_fixings, self.schedule.n_periods)
# Exchange periods
if not initial_exchange:
_ini_cf: Cashflow | None = None
else:
_ini_cf = Cashflow(
payment=self.schedule.pschedule2[0],
notional=-self._amortization.outstanding[0],
currency=self._currency,
ex_dividend=self.schedule.pschedule3[0],
# non-deliverable
pair=pair,
fx_fixings=fx_fixings_[0],
delivery=self.schedule.pschedule2[0],
# index params
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings_[0],
index_base_date=self.schedule.aschedule[0],
index_reference_date=self.schedule.aschedule[0],
)
final_exchange_ = final_exchange or initial_exchange
if not final_exchange_:
_final_cf: Cashflow | None = None
else:
_final_cf = Cashflow(
payment=self.schedule.pschedule2[-1],
notional=self._amortization.outstanding[-1],
currency=self._currency,
ex_dividend=self.schedule.pschedule3[-1],
# non-deliverable
pair=pair,
fx_fixings=fx_fixings_[0] if not mtm else fx_fixings_[-1],
delivery=self.schedule.pschedule2[0] if not mtm else self.schedule.pschedule2[-2],
# index parameters
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings_[-1],
index_base_date=self.schedule.aschedule[0],
index_reference_date=self.schedule.aschedule[-1],
)
self._exchange_periods = (_ini_cf, _final_cf)
self._regular_periods = (
ZeroFloatPeriod(
float_spread=float_spread,
rate_fixings=rate_fixings,
fixing_method=fixing_method,
spread_compound_method=spread_compound_method,
fixing_frequency=fixing_frequency,
fixing_series=fixing_series,
schedule=self.schedule,
# currency args
currency=self._currency,
notional=self._notional,
# period params
convention=self._convention,
# non-deliverable: Not allowed with notional exchange
pair=pair,
fx_fixings=fx_fixings_[0],
delivery=self.schedule.pschedule2[0]
if (not mtm or final_exchange)
else self.schedule.pschedule2[-1],
# index params
index_base=index_base,
index_lag=index_lag,
index_method=index_method,
index_fixings=index_fixings_[0],
),
)
def spread(
self,
*,
target_npv: DualTypes,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
original_z = self.float_spread
def s(g: DualTypes) -> DualTypes:
"""
This determines the NPV of the *Leg* subject to a given float spread change denoted, g.
"""
self.float_spread = g
iteration_local_npv = self.local_npv(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
fx=fx,
forward=forward,
settlement=settlement,
)
return iteration_local_npv
result = ift_1dim(
s=s,
s_tgt=target_npv,
h="ytm_quadratic",
ini_h_args=(-300, 300, 1200),
# h="modified_brent",
# ini_h_args=(-10000, 10000),
func_tol=1e-6,
conv_tol=1e-6,
)
self.float_spread = original_z
_: DualTypes = result["g"]
return _
================================================
FILE: python/rateslib/legs/protocols/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from abc import ABCMeta
from rateslib.legs.protocols.analytic_delta import _WithAnalyticDelta
from rateslib.legs.protocols.analytic_fixings import _WithAnalyticRateFixings
from rateslib.legs.protocols.cashflows import _WithCashflows, _WithExDiv
from rateslib.legs.protocols.fixings import _WithFixings
from rateslib.legs.protocols.npv import _WithNPV
class _BaseLeg(
_WithFixings, # inherits _WIthNPV so first in MRO
_WithNPV,
_WithCashflows,
_WithAnalyticDelta,
_WithAnalyticRateFixings,
metaclass=ABCMeta,
):
"""Abstract base class used in the construction of *Legs*."""
pass
__all__ = [
"_WithNPV",
"_WithCashflows",
"_WithFixings",
"_WithAnalyticDelta",
"_WithAnalyticRateFixings",
"_WithExDiv",
"_BaseLeg",
]
================================================
FILE: python/rateslib/legs/protocols/analytic_delta.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from rateslib.enums.generics import NoInput
from rateslib.periods.utils import (
_maybe_local,
)
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CurveOption_,
DualTypes,
FXForwards_,
Sequence,
_BaseCurve_,
_BasePeriod,
_FXVolOption_,
datetime_,
str_,
)
class _WithAnalyticDelta(Protocol):
"""
Protocol to calculate analytical rate delta sensitivities of any *Leg* type.
"""
@property
def periods(self) -> Sequence[_BasePeriod]: ...
def local_analytic_delta(
self,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
"""
Calculate the analytic rate delta of a *Period* expressed in its local settlement currency.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
settlement: datetime, optional
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, optional
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
float, Dual, Dual2, Variable
"""
local_analytic_delta: DualTypes = sum(
_.try_local_analytic_delta(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
settlement=settlement,
forward=forward,
).unwrap()
for _ in self.periods
)
return local_analytic_delta
def analytic_delta(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
"""
Calculate the analytic rate delta of a *Period* expressed in a base currency.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
base: str, optional
The currency to convert the *local settlement* NPV to.
local: bool, optional
An override flag to return a dict of NPV values indexed by string currency.
settlement: datetime, optional
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, optional
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
float, Dual, Dual2, Variable
"""
local_analytic_delta: DualTypes = sum(
_.try_local_analytic_delta(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
settlement=settlement,
forward=forward,
).unwrap()
for _ in self.periods
)
return _maybe_local(
value=local_analytic_delta,
local=local,
currency=self.periods[0].settlement_params.currency,
fx=fx,
base=base,
forward=forward,
)
================================================
FILE: python/rateslib/legs/protocols/analytic_fixings.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import warnings
from typing import TYPE_CHECKING, Protocol
from pandas import DataFrame, concat
from rateslib.enums.generics import NoInput
if TYPE_CHECKING:
from rateslib.local_types import (
CurveOption_,
FXForwards_,
Sequence,
_BaseCurve_,
_BasePeriod,
_FXVolOption_,
datetime_,
)
class _WithAnalyticRateFixings(Protocol):
"""
Protocol to calculate analytical rate fixing sensitivities of any *Leg* type.
"""
@property
def periods(self) -> Sequence[_BasePeriod]: ...
def local_analytic_rate_fixings(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
"""
Return a DataFrame of financial sensitivity to published interest rate fixings,
expressed in local **settlement currency** of the *Period*.
If the *Period* has no sensitivity to rates fixings this *DataFrame* is empty.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
settlement: datetime, optional
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, optional
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
DataFrame
"""
dfs = []
for period in self.periods:
dfs.append(
period.local_analytic_rate_fixings(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
settlement=settlement,
forward=forward,
)
)
with warnings.catch_warnings():
# TODO: pandas 2.1.0 has a FutureWarning for concatenating DataFrames with Null entries
warnings.filterwarnings("ignore", category=FutureWarning)
return concat(dfs)
================================================
FILE: python/rateslib/legs/protocols/cashflows.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from pandas import DataFrame
from rateslib.curves import index_left
from rateslib.enums.generics import NoInput
if TYPE_CHECKING:
from rateslib.local_types import (
CurveOption_,
FXForwards_,
Schedule,
Sequence,
_BaseCurve_,
_BasePeriod,
_FXVolOption_,
_IRVolOption_,
datetime,
datetime_,
str_,
)
class _WithCashflows(Protocol):
"""
Protocol to generate cashflows of any *Leg* type.
"""
@property
def periods(self) -> Sequence[_BasePeriod]: ...
def cashflows(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
"""
Return aggregated cashflow data for the *Leg*.
.. warning::
This method is a convenience method to provide a visual representation of all
associated calculation data. Calling this method to extracting certain values
should be avoided. It is more efficent to source relevant parameters or calculations
from object attributes or other methods directly.
.. role:: red
.. role:: green
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, :green:`optional`
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, :green:`optional`
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, :green:`optional`
Used to discount cashflows.
fx: FXForwards, :green:`optional`
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, :green:`optional`
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
ir_vol: IRSabrSmile, :green:`optional`
The IR volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
base: str, :green:`optional`
The currency to convert relevant values into.
settlement: datetime, :green:`optional`
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, :green:`optional`
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
DataFrame
"""
seq = [
period.cashflows(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
fx=fx,
fx_vol=fx_vol,
ir_vol=ir_vol,
base=base,
settlement=settlement,
forward=forward,
)
for period in self.periods
]
return DataFrame.from_records(seq)
class _WithExDiv(Protocol):
"""
Protocol to determine if a *Leg* is ex-dividend on a given settlement.
"""
@property
def schedule(self) -> Schedule: ...
def _period_index(self, settlement: datetime) -> int:
"""
Get the period index for that which the settlement date fall within.
Uses adjusted dates.
"""
_: int = index_left(
self.schedule.aschedule,
len(self.schedule.aschedule),
settlement,
)
return _
def ex_div(self, settlement: datetime) -> bool:
"""
Return a boolean whether the security is ex-div at the given settlement.
Parameters
----------
settlement : datetime
The settlement date to test.
Returns
-------
bool
Notes
-----
Uses the UK DMO convention of returning *False* if ``settlement``
**is on or before** the ex-div date for a regular coupon period.
This is evaluated by analysing the attribute ``pschedule3`` of the associated
:class:`~rateslib.scheduling.Schedule` object of the *Leg*.
"""
left_period_index = self._period_index(settlement)
ex_div_date = self.schedule.pschedule3[left_period_index + 1]
return settlement > ex_div_date
================================================
FILE: python/rateslib/legs/protocols/fixings.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from pandas import DataFrame, Series
from rateslib.enums.generics import NoInput
from rateslib.legs.protocols.npv import _WithNPV
from rateslib.periods.protocols.fixings import (
_replace_fixings_with_ad_variables,
_reset_fixings_data,
_structure_sensitivity_data,
)
if TYPE_CHECKING:
from rateslib.local_types import (
CurveOption_,
DualTypes,
FXForwards_,
Sequence,
_BaseCurve_,
_BasePeriod,
_FXVolOption_,
datetime_,
int_,
)
class _WithFixings(_WithNPV, Protocol):
"""
Protocol for determining fixing sensitivity for a *Period* with AD.
.. rubric:: Provided methods
.. autosummary::
~_WithFixings.reset_fixings
"""
@property
def periods(self) -> Sequence[_BasePeriod]: ...
def reset_fixings(self, state: int_ = NoInput(0)) -> None:
"""
Resets any fixings values of the *Leg* derived using the given data state.
.. role:: green
Parameters
----------
state: int, :green:`optional`
The *state id* of the data series that set the fixing. Only fixings determined by this
data will be reset. If not given resets all fixings.
Returns
-------
None
"""
for period in self.periods:
period.reset_fixings(state)
def local_fixings(
self,
identifiers: Sequence[tuple[str, Series]],
scalars: Sequence[float] | NoInput = NoInput(0),
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
"""
Calculate the sensitivity to fixings of the *Instrument*, expressed in local
settlement currency.
.. role:: red
.. role:: green
Parameters
----------
indentifiers: Sequence of tuple[str, Series], :red:`required`
These are the series string identifiers and the data values that will be used in each
Series to determine the sensitivity against.
scalars: Sequence of floats, :green:`optional (each set as 1.0)`
A sequence of scalars to multiply the sensitivities by for each on of the
``identifiers``.
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
settlement: datetime, optional (set as immediate date)
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, optional (set as ``settlement``)
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
DataFrame
"""
original_data, index, state = _replace_fixings_with_ad_variables(identifiers)
# Extract sensitivity data
pv: dict[str, DualTypes] = {
self.settlement_params.currency: self.local_npv(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
settlement=settlement,
forward=forward,
)
}
df = _structure_sensitivity_data(pv, index, identifiers, scalars)
_reset_fixings_data(self, original_data, state, identifiers)
return df
================================================
FILE: python/rateslib/legs/protocols/npv.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from rateslib.enums.generics import NoInput
from rateslib.periods.parameters import _SettlementParams
from rateslib.periods.utils import (
_maybe_local,
)
if TYPE_CHECKING:
from rateslib.local_types import (
CurveOption_,
DualTypes,
FXForwards_,
Sequence,
_BaseCurve_,
_BasePeriod,
_FXVolOption_,
datetime_,
str_,
)
class _WithNPV(Protocol):
"""
Protocol to establish value of any *Leg* type.
.. rubric:: Required methods
.. autosummary::
~_WithNPV.spread
.. rubric:: Provided methods
.. autosummary::
~_WithNPV.local_npv
~_WithNPV.npv
"""
@property
def periods(self) -> Sequence[_BasePeriod]:
...
# """List of *Periods* associated with the *Leg*."""
# return self._periods
def __repr__(self) -> str:
return f""
@property
def settlement_params(self) -> _SettlementParams:
"""The :class:`~rateslib.periods.parameters._SettlementParams` of the
first *Period* of the *Leg*."""
return self.periods[0].settlement_params
def local_npv(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
"""
Calculate the NPV of the *Leg* expressed in local settlement currency.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
settlement: datetime, optional
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, optional
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
float, Dual, Dual2, Variable
"""
# a Leg only has cashflows in one single currency, so some up those values first
# then format for necessary dict output if required.
local_npv: DualTypes = sum(
_.local_npv(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
settlement=settlement,
forward=forward,
)
for _ in self.periods
)
return local_npv
def npv(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
"""
Calculate the NPV of the *Period* converted to any other *base* accounting currency.
.. hint::
If the cashflows are unspecified or incalculable due to missing information this method
will raise an exception. For a function that returns a `Result` indicating success or
failure use :meth:`~rateslib.periods._WithNPV.try_local_npv`.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
base: str, optional
The currency to convert the *local settlement* NPV to.
local: bool, optional
An override flag to return a dict of NPV values indexed by string currency.
settlement: datetime, optional
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, optional
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
float, Dual, Dual2, Variable or dict of such indexed by string currency.
Notes
-----
If ``base`` is not provided then this function will return the value obtained from
:meth:`~rateslib.periods._WithNPV.try_local_npv`.
If ``base`` is provided this then an :class:`~rateslib.fx.FXForwards` object may be
required to perform conversions. An :class:`~rateslib.fx.FXRates` object is also allowed
for this conversion although best practice does not recommend it due to possible
settlement date conflicts.
"""
# a Leg only has cashflows in one single currency, so some up those values first
# then format for necessary dict output if required.
local_npv: DualTypes = sum(
_.local_npv(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
settlement=settlement,
forward=forward,
)
for _ in self.periods
)
return _maybe_local(
value=local_npv,
local=local,
currency=self.settlement_params.currency,
fx=fx,
base=base,
forward=forward,
)
def spread(
self,
*,
target_npv: DualTypes,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
"""
Calculate a spread metric which when applied to the *Leg* allows it to attain the target
value.
Parameters
----------
target_npv: DualTypes, required
The target value of the *Leg* measured using all of the other given arguments.
Must be expressed in local settlement currency units.
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
settlement: datetime, optional
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, optional
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
float, Dual, Dual2, Variable
"""
raise NotImplementedError(f"Method: `spread` is not available for {type(self).__name__}.")
================================================
FILE: python/rateslib/local_types.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
# This module is reserved only for typing purposes.
# It avoids all circular import by performing a TYPE_CHECKING check on any component.
from collections.abc import Callable as Callable
from collections.abc import Iterable as Iterable
from collections.abc import Sequence as Sequence
from datetime import datetime as datetime
from typing import Any as Any
from typing import NoReturn as NoReturn
from typing import Protocol, TypeAlias
import numpy as np
from pandas import DataFrame as DataFrame
from pandas import Series as Series
from rateslib.curves import RolledCurve as RolledCurve
from rateslib.curves import ShiftedCurve as ShiftedCurve
from rateslib.curves import TranslatedCurve as TranslatedCurve
from rateslib.curves import _BaseCurve as _BaseCurve
from rateslib.curves import _CurveMeta as _CurveMeta
from rateslib.data.fixings import FloatRateIndex as FloatRateIndex
from rateslib.data.fixings import FloatRateSeries as FloatRateSeries
from rateslib.data.fixings import FXFixing as FXFixing
from rateslib.data.fixings import FXIndex as FXIndex
from rateslib.data.fixings import IBORFixing as IBORFixing
from rateslib.data.fixings import IBORStubFixing as IBORStubFixing
from rateslib.data.fixings import IndexFixing as IndexFixing
from rateslib.data.fixings import IRSSeries as IRSSeries
from rateslib.data.fixings import RFRFixing as RFRFixing
from rateslib.data.loader import Fixings as Fixings
from rateslib.data.loader import _BaseFixingsLoader as _BaseFixingsLoader
from rateslib.default import PlotOutput as PlotOutput
from rateslib.dual.variable import Variable as Variable
from rateslib.enums.generics import NoInput as NoInput
from rateslib.enums.generics import Result as Result
from rateslib.enums.parameters import FloatFixingMethod as FloatFixingMethod
from rateslib.enums.parameters import FXDeltaMethod as FXDeltaMethod
from rateslib.enums.parameters import FXOptionMetric as FXOptionMetric
from rateslib.enums.parameters import IndexMethod as IndexMethod
from rateslib.enums.parameters import IROptionMetric as IROptionMetric
from rateslib.enums.parameters import OptionPricingModel as OptionPricingModel
from rateslib.enums.parameters import OptionType as OptionType
from rateslib.enums.parameters import SpreadCompoundMethod as SpreadCompoundMethod
from rateslib.enums.parameters import SwaptionSettlementMethod as SwaptionSettlementMethod
from rateslib.fx import FXForwards as FXForwards
from rateslib.fx import FXRates as FXRates
from rateslib.instruments import CDS as CDS
from rateslib.instruments import FRA as FRA
from rateslib.instruments import IIRS as IIRS
from rateslib.instruments import IRS as IRS
from rateslib.instruments import SBS as SBS
from rateslib.instruments import XCS as XCS
from rateslib.instruments import ZCIS as ZCIS
from rateslib.instruments import ZCS as ZCS
from rateslib.instruments import Bill as Bill
from rateslib.instruments import FixedRateBond as FixedRateBond
from rateslib.instruments import FloatRateNote as FloatRateNote
from rateslib.instruments import Fly as Fly
from rateslib.instruments import FXBrokerFly as FXBrokerFly
from rateslib.instruments import FXCall as FXCall
from rateslib.instruments import FXPut as FXPut
from rateslib.instruments import FXRiskReversal as FXRiskReversal
from rateslib.instruments import FXStraddle as FXStraddle
from rateslib.instruments import FXStrangle as FXStrangle
from rateslib.instruments import FXSwap as FXSwap
from rateslib.instruments import IndexFixedRateBond as IndexFixedRateBond
from rateslib.instruments import Portfolio as Portfolio
from rateslib.instruments import Spread as Spread
from rateslib.instruments import STIRFuture as STIRFuture
from rateslib.instruments import Value as Value
from rateslib.instruments import _BaseInstrument as _BaseInstrument
from rateslib.instruments.ir_options import _BaseIRSOption as _BaseIRSOption
from rateslib.instruments.protocols.kwargs import _KWArgs as _KWArgs
from rateslib.instruments.protocols.pricing import _Curves as _Curves
from rateslib.instruments.protocols.pricing import _Vol as _Vol
from rateslib.legs import CreditPremiumLeg as CreditPremiumLeg
from rateslib.legs import CreditProtectionLeg as CreditProtectionLeg
from rateslib.legs import FixedLeg as FixedLeg
from rateslib.legs import FloatLeg as FloatLeg
from rateslib.legs import ZeroFixedLeg as ZeroFixedLeg
from rateslib.legs import ZeroFloatLeg as ZeroFloatLeg
from rateslib.legs.protocols import _BaseLeg as _BaseLeg
from rateslib.periods import Cashflow as Cashflow
from rateslib.periods import CreditPremiumPeriod as CreditPremiumPeriod
from rateslib.periods import CreditProtectionPeriod as CreditProtectionPeriod
from rateslib.periods import FixedPeriod as FixedPeriod
from rateslib.periods import FloatPeriod as FloatPeriod
from rateslib.periods import FXCallPeriod as FXCallPeriod
from rateslib.periods import FXPutPeriod as FXPutPeriod
from rateslib.periods import ZeroFloatPeriod as ZeroFloatPeriod
from rateslib.periods import _BaseFXOptionPeriod as _BaseFXOptionPeriod
from rateslib.periods import _BaseIRSOptionPeriod as _BaseIRSOptionPeriod
from rateslib.periods.parameters import _FloatRateParams as _FloatRateParams
from rateslib.periods.parameters import _IndexParams as _IndexParams
from rateslib.periods.parameters import _NonDeliverableParams as _NonDeliverableParams
from rateslib.periods.parameters import _PeriodParams as _PeriodParams
from rateslib.periods.parameters import _SettlementParams as _SettlementParams
from rateslib.periods.protocols import _BasePeriod as _BasePeriod
from rateslib.rs import Adjuster as Adjuster
from rateslib.rs import (
FlatBackwardInterpolator,
FlatForwardInterpolator,
LinearInterpolator,
LinearZeroRateInterpolator,
LogLinearInterpolator,
NullInterpolator,
)
from rateslib.rs import StubInference as StubInference
from rateslib.volatility import FXDeltaVolSmile as FXDeltaVolSmile
from rateslib.volatility import FXDeltaVolSurface as FXDeltaVolSurface
from rateslib.volatility import FXSabrSmile as FXSabrSmile
from rateslib.volatility import FXSabrSurface as FXSabrSurface
from rateslib.volatility import IRSabrCube as IRSabrCube
from rateslib.volatility import IRSabrSmile as IRSabrSmile
from rateslib.volatility import IRSplineCube as IRSplineCube
from rateslib.volatility import IRSplineSmile as IRSplineSmile
from rateslib.volatility import _BaseIRCube as _BaseIRCube
from rateslib.volatility import _BaseIRSmile as _BaseIRSmile
from rateslib.volatility import _IRVolPricingParams as _IRVolPricingParams
CurveInterpolator: TypeAlias = "FlatBackwardInterpolator | FlatForwardInterpolator | LinearInterpolator | LogLinearInterpolator | LinearZeroRateInterpolator | NullInterpolator"
from rateslib.rs import Cal as Cal
from rateslib.rs import Convention as Convention
from rateslib.rs import Dual as Dual
from rateslib.rs import Dual2 as Dual2
from rateslib.rs import Frequency as Frequency
from rateslib.rs import LegIndexBase as LegIndexBase
from rateslib.rs import NamedCal as NamedCal
from rateslib.rs import PPSplineDual as PPSplineDual
from rateslib.rs import PPSplineDual2 as PPSplineDual2
from rateslib.rs import PPSplineF64 as PPSplineF64
from rateslib.rs import RollDay as RollDay
from rateslib.rs import UnionCal as UnionCal
from rateslib.scheduling import Schedule as Schedule
from rateslib.solver import Solver as Solver
Solver_: TypeAlias = "Solver | NoInput"
CalTypes: TypeAlias = "Cal | UnionCal | NamedCal"
CalInput: TypeAlias = "CalTypes | str | NoInput"
Adjuster_: TypeAlias = "Adjuster | NoInput"
FXIndex_: TypeAlias = "FXIndex | NoInput"
DualTypes: TypeAlias = "float | Dual | Dual2 | Variable"
DualTypes_: TypeAlias = "DualTypes | NoInput"
Number: TypeAlias = "float | Dual | Dual2"
# https://stackoverflow.com/questions/68916893/
Arr1dF64: TypeAlias = "np.ndarray[tuple[int], np.dtype[np.float64]]"
Arr2dF64: TypeAlias = "np.ndarray[tuple[int, int], np.dtype[np.float64]]"
Arr1dObj: TypeAlias = "np.ndarray[tuple[int], np.dtype[np.object_]]"
Arr2dObj: TypeAlias = "np.ndarray[tuple[int, int], np.dtype[np.object_]]"
Arr3dObj: TypeAlias = "np.ndarray[tuple[int, int, int], np.dtype[np.object_]]"
PeriodFixings: TypeAlias = "DualTypes | Series[DualTypes] | str | NoInput"
LegFixings: TypeAlias = "PeriodFixings | list[PeriodFixings] | tuple[PeriodFixings, PeriodFixings]"
FixingsRates: TypeAlias = "Series[DualTypes] | list[DualTypes | list[DualTypes] | Series[DualTypes] | NoInput] | tuple[DualTypes, Series[DualTypes]] | DualTypes"
FixingsRates_: TypeAlias = "FixingsRates | NoInput"
FixingsFx: TypeAlias = (
"DualTypes | list[DualTypes] | Series[DualTypes] | tuple[DualTypes, Series[DualTypes]]"
)
FixingsFx_: TypeAlias = "FixingsFx | NoInput"
str_: TypeAlias = "str | NoInput"
bool_: TypeAlias = "bool | NoInput"
int_: TypeAlias = "int | NoInput"
datetime_: TypeAlias = "datetime | NoInput"
float_: TypeAlias = "float | NoInput"
# _BaseCurve is an ABC
_BaseCurve_: TypeAlias = "_BaseCurve | NoInput"
_BaseCurveOrId: TypeAlias = "_BaseCurve | str" # used as best practice for Solver mappings
_BaseCurveOrId_: TypeAlias = "_BaseCurveOrId | NoInput"
_BaseCurveOrIdDict: TypeAlias = (
"dict[str, _BaseCurve | str] | dict[str, _BaseCurve] | dict[str, str]"
)
_BaseCurveDict: TypeAlias = "dict[str, _BaseCurve]"
_BaseCurveOrDict: TypeAlias = "_BaseCurve | _BaseCurveDict"
_BaseCurveOrIdOrIdDict: TypeAlias = "_BaseCurveOrId | _BaseCurveOrIdDict"
_BaseCurveOrDict_: TypeAlias = "_BaseCurve | _BaseCurveDict | NoInput"
_BaseCurveOrIdOrIdDict_: TypeAlias = "_BaseCurveOrId | _BaseCurveOrIdDict | NoInput"
CurvesT: TypeAlias = "_BaseCurveOrIdOrIdDict | Sequence[CurveOrId | CurveDict] | _Curves"
CurvesT_: TypeAlias = "CurvesT | NoInput"
_FXVolObj: TypeAlias = "FXDeltaVolSurface | FXDeltaVolSmile | FXSabrSmile | FXSabrSurface"
_FXVolOption: TypeAlias = "_FXVolObj | DualTypes"
_FXVolOption_: TypeAlias = "_FXVolOption | NoInput"
FXVol: TypeAlias = "_FXVolOption | str"
FXVol_: TypeAlias = "FXVol | NoInput"
_IRVolObj: TypeAlias = "_BaseIRSmile | _BaseIRCube[Any]"
_IRVolOption: TypeAlias = "_IRVolObj | DualTypes"
_IRVolOption_: TypeAlias = "_IRVolOption | NoInput"
IRVol: TypeAlias = "_IRVolOption | str"
IRVol_: TypeAlias = "IRVol | NoInput"
VolT: TypeAlias = "IRVol | FXVol | _Vol"
VolT_: TypeAlias = "VolT | NoInput"
VolStrat_: TypeAlias = "Sequence[VolStrat_] | VolT | NoInput"
SeqVolT_: TypeAlias = "Sequence[VolT_]"
CurveDict: TypeAlias = "dict[str, _BaseCurve | str] | dict[str, _BaseCurve] | dict[str, str]"
CurveOrId: TypeAlias = "_BaseCurve | str"
CurveOrId_: TypeAlias = "CurveOrId | NoInput"
CurveInput: TypeAlias = "CurveOrId | CurveDict"
CurveInput_: TypeAlias = "CurveInput | NoInput"
CurveOption: TypeAlias = "_BaseCurve | dict[str, _BaseCurve]"
CurveOption_: TypeAlias = "CurveOption | NoInput"
Curves: TypeAlias = "CurveOrId | CurveDict | Sequence[CurveOrId | CurveDict]"
Curves_: TypeAlias = "CurveOrId_ | CurveDict | Sequence[CurveOrId_ | CurveDict]"
Curves_Tuple: TypeAlias = "tuple[CurveOption_, CurveOption_, CurveOption_, CurveOption_]"
Curves_DiscTuple: TypeAlias = "tuple[CurveOption_, _BaseCurve_, CurveOption_, _BaseCurve_]"
# this is a type for a wrapped `rate_curve`, `disc_curve` and `index_curve`
PeriodCurves: TypeAlias = "tuple[CurveOption_, _BaseCurve_, _BaseCurve_]"
FX: TypeAlias = "DualTypes | FXRates | FXForwards"
FX_: TypeAlias = "FX | NoInput"
FXRevised_: TypeAlias = "FXRates | FXForwards | NoInput"
FXForwards_: TypeAlias = "FXForwards | NoInput"
# NPV: TypeAlias = "DualTypes | dict[str, DualTypes]"
#
# Leg: TypeAlias = "FixedLeg | FloatLeg | ZeroFloatLeg | ZeroFixedLeg | ZeroIndexLeg | CreditPremiumLeg | CreditProtectionLeg"
# Period: TypeAlias = "FixedPeriod | FloatPeriod | Cashflow | CreditPremiumPeriod | CreditProtectionPeriod"
#
# Security: TypeAlias = "FixedRateBond | FloatRateNote | Bill | IndexFixedRateBond"
# FXOptionTypes: TypeAlias = (
# "FXCall | FXPut | FXRiskReversal | FXStraddle | FXStrangle | FXBrokerFly | FXOptionStrat"
# )
# RatesDerivative: TypeAlias = "IRS | SBS | FRA | ZCS | STIRFuture"
# IndexDerivative: TypeAlias = "IIRS | ZCIS"
# CurrencyDerivative: TypeAlias = "XCS | FXSwap | FXForward"
# Combinations: TypeAlias = "Portfolio | Fly | Spread | Value | VolValue"
#
# Instrument: TypeAlias = (
# "Combinations | Security | FXOptionTypes | RatesDerivative | CDS | CurrencyDerivative"
# )
class SupportsSolverMutability(Protocol):
@property
def _n(self) -> int: ...
@property
def _ini_solve(self) -> int: ...
def _set_ad_order(self, ad: int) -> None: ...
def _set_node_vector(self, vector: Arr1dObj, ad: int) -> None: ...
def _get_node_vars(self) -> tuple[str, ...]: ...
def _get_node_vector(self) -> Arr1dObj: ...
class SupportsRate(Protocol):
def rate(self, *args: Any, **kwargs: Any) -> DualTypes: ...
@property
def rate_scalar(self) -> float: ...
class SupportsMetrics:
def rate(self, *args: Any, **kwargs: Any) -> DualTypes: ... # type: ignore[empty-body]
def npv(self, *args: Any, **kwargs: Any) -> DualTypes | dict[str, DualTypes]: ... # type: ignore[empty-body]
def delta(self, *args: Any, **kwargs: Any) -> DataFrame: ... # type: ignore[empty-body]
def gamma(self, *args: Any, **kwargs: Any) -> DataFrame: ... # type: ignore[empty-body]
def cashflows(self, *args: Any, **kwargs: Any) -> DataFrame: ... # type: ignore[empty-body]
def cashflows_table(self, *args: Any, **kwargs: Any) -> DataFrame: ... # type: ignore[empty-body]
class _SupportsFixedFloatLeg1(Protocol):
@property
def leg1(self) -> FixedLeg | FloatLeg: ...
================================================
FILE: python/rateslib/mutability/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import os
from collections import OrderedDict
from collections.abc import Callable
from functools import wraps
from typing import TYPE_CHECKING, Generic, ParamSpec, TypeVar
from rateslib import defaults
if TYPE_CHECKING:
pass
P = ParamSpec("P")
R = TypeVar("R")
def _no_interior_validation(func: Callable[P, R]) -> Callable[P, R]:
"""
Used with a Solver to provide a context to set a flag to prevent repetitive validation,
for example during iteration. After conclusion of the function re-activate validation.
"""
@wraps(func)
def wrapper_no_interior_validation(*args: P.args, **kwargs: P.kwargs) -> R:
self = args[0]
if self._do_not_validate: # type: ignore[attr-defined]
# make no changes: handle recursive no interior validations.
result = func(*args, **kwargs)
else:
# set to no further validation and reset at end of method
self._do_not_validate = True # type: ignore[attr-defined]
result = func(*args, **kwargs)
self._do_not_validate = False # type: ignore[attr-defined]
return result
return wrapper_no_interior_validation
def _validate_states(func: Callable[P, R]) -> Callable[P, R]:
"""
Add a decorator to a class instance method to first validate the object state before performing
additional operations. If a change is detected the implemented `validate_state` function
is responsible for resetting the cache and updating any `state_id`s.
"""
@wraps(func)
def wrapper_validate_states(*args: P.args, **kwargs: P.kwargs) -> R:
self = args[0]
self._validate_state() # type: ignore[attr-defined]
return func(*args, **kwargs)
return wrapper_validate_states
def _clear_cache_post(func: Callable[P, R]) -> Callable[P, R]:
"""
Add a decorator to a class instance method to clear the cache and set a new state
post performing the function.
"""
@wraps(func)
def wrapper_clear_cache(*args: P.args, **kwargs: P.kwargs) -> R:
self = args[0]
result = func(*args, **kwargs)
self._clear_cache() # type: ignore[attr-defined]
return result
return wrapper_clear_cache
def _new_state_post(func: Callable[P, R]) -> Callable[P, R]:
"""
Add a decorator to a class instance method to clear the cache and set a new state
post performing the function.
"""
@wraps(func)
def wrapper_new_state(*args: P.args, **kwargs: P.kwargs) -> R:
self = args[0]
result = func(*args, **kwargs)
self._set_new_state() # type: ignore[attr-defined]
return result
return wrapper_new_state
class _WithState:
"""
Record and manage the `state_id` of mutable classes.
Attributes
----------
_state: int: This is the most recent recorded state reference of this object.
_mutable_by_association: bool: This is a rateslib definition of whether this object is
directly mutable and therefore generates its own state id, or whether its state is
derived from the most recently evaluated state of its associated objects.
"""
_state: int = 0
_mutable_by_association: bool = False
_do_not_validate: bool = False
def _set_new_state(self) -> None:
"""Set the state_id of a superclass. Some objects which are 'mutable by association'
will overload the `get_compoisted_state` method to derive a state from their
associated items."""
if self._mutable_by_association:
self._state = self._get_composited_state()
else:
self._state = hash(os.urandom(8)) # 64-bit entropy
def _validate_state(self) -> None:
"""Used by 'mutable by association' objects to evaluate if their own record of
associated objects states matches the current state of those objects.
Mutable by update objects have no concept of state validation, they simply maintain
a *state* id.
"""
return None
def _get_composited_state(self) -> int:
"""Used by 'mutable by association' objects to record the state of their associated
objects and set this as the object's own state."""
raise NotImplementedError("Must be implemented for 'mutable by association' types")
KT = TypeVar("KT")
VT = TypeVar("VT")
class _WithCache(Generic[KT, VT]):
_cache: OrderedDict[KT, VT]
_cache_len: int
def _cached_value(self, key: KT, val: VT) -> VT:
"""Used to add a value to the cache and control memory size when returning some
parameter from an object using cache and state management."""
if defaults.curve_caching and key not in self._cache:
if self._cache_len < defaults.curve_caching_max:
self._cache[key] = val
self._cache_len += 1
else:
self._cache.popitem(last=False)
self._cache[key] = val
return val
def _clear_cache(self) -> None:
"""Clear the cache of values on a object controlled by cache and state management.
Returns
-------
None
Notes
-----
This should be used if any modification has been made to the *Curve*.
Users are advised against making direct modification to *Curve* classes once
constructed to avoid the issue of un-cleared caches returning erroneous values.
Alternatively the curve caching as a feature can be set to *False* in ``defaults``.
"""
self._cache = OrderedDict()
self._cache_len = 0
================================================
FILE: python/rateslib/periods/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from rateslib.periods.cashflow import (
Cashflow,
MtmCashflow,
# IndexCashflow,
# NonDeliverableCashflow,
# NonDeliverableIndexCashflow,
)
from rateslib.periods.credit import CreditPremiumPeriod, CreditProtectionPeriod
from rateslib.periods.fixed_period import (
FixedPeriod,
# IndexFixedPeriod,
# NonDeliverableFixedPeriod,
# NonDeliverableIndexFixedPeriod,
ZeroFixedPeriod,
)
from rateslib.periods.float_period import (
FloatPeriod,
# IndexFloatPeriod,
# NonDeliverableFloatPeriod,
# NonDeliverableIndexFloatPeriod,
ZeroFloatPeriod,
)
from rateslib.periods.fx_volatility import FXCallPeriod, FXPutPeriod, _BaseFXOptionPeriod
from rateslib.periods.ir_volatility import IRSCallPeriod, IRSPutPeriod, _BaseIRSOptionPeriod
from rateslib.periods.protocols import _BasePeriod, _BasePeriodStatic
__all__ = [
"FixedPeriod",
"FloatPeriod",
"ZeroFixedPeriod",
"ZeroFloatPeriod",
"Cashflow",
"MtmCashflow",
"CreditPremiumPeriod",
"CreditProtectionPeriod",
"FXCallPeriod",
"FXPutPeriod",
"IRSCallPeriod",
"IRSPutPeriod",
"_BasePeriod",
"_BasePeriodStatic",
"_BaseFXOptionPeriod",
"_BaseIRSOptionPeriod",
]
================================================
FILE: python/rateslib/periods/cashflow.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from pandas import DataFrame
from rateslib import defaults
from rateslib.data.fixings import _get_fx_index, _maybe_get_fx_index
from rateslib.enums.generics import NoInput, Ok, _drb
from rateslib.enums.parameters import IndexMethod
from rateslib.periods.parameters import (
_init_MtmParams,
_init_or_none_IndexParams,
_init_or_none_NonDeliverableParams,
_init_SettlementParams_with_fx_pair,
)
from rateslib.periods.parameters.mtm import _MtmParams
from rateslib.periods.protocols import (
_BasePeriodStatic,
)
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
CurveOption_,
DualTypes,
DualTypes_,
FXForwards_,
FXIndex,
Result,
Series,
_BaseCurve_,
_FXVolOption_,
bool_,
datetime,
datetime_,
int_,
str_,
)
class Cashflow(_BasePeriodStatic):
r"""
A *Period* defined by a specific amount.
The expected unindexed reference cashflow under the risk neutral distribution is defined as,
.. math::
\mathbb{E^Q} [\bar{C}_t] = -N
There is no *analytical delta* for this *Period* type and hence :math:`\xi` is not defined.
Examples
--------
.. ipython:: python
:suppress:
from rateslib.periods import Cashflow
from datetime import datetime as dt
.. ipython:: python
period = Cashflow(
payment=dt(2025, 10, 22),
ex_dividend=dt(2025, 10, 21),
currency="eur",
notional=125000,
)
period.cashflows()
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **settlement** parameters.
currency: str, :green:`optional (set by 'defaults')`
The physical *settlement currency* of the *Period*.
notional: float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The notional amount of the *Period* expressed in ``notional currency``.
payment: datetime, :red:`required`
The payment date of the *Period* cashflow.
ex_dividend: datetime, :green:`optional (set as 'payment')`
The ex-dividend date of the *Period*. Settlements occurring **after** this date
are assumed to be non-receivable.
.. note::
The following parameters define **non-deliverability**. If the *Period* is directly
deliverable do not supply these parameters.
pair: FXIndex, str, :green:`optional`
The currency pair of the :class:`~rateslib.data.fixings.FXFixing` that determines
settlement. The *reference currency* is implied from ``pair``. Must include ``currency``.
fx_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of the :class:`~rateslib.data.fixings.FXFixing`. If a scalar is used directly.
If a string identifier will link to the central ``fixings`` object and data loader.
See :ref:`fixings `.
delivery: datetime, :green:`optional (set as 'payment')`
The settlement delivery date of the :class:`~rateslib.data.fixings.FXFixing`.
.. note::
The following parameters define **indexation**. The *Period* will be considered
indexed if any of ``index_method``, ``index_lag``, ``index_base``, ``index_fixings``
are given.
index_method : IndexMethod, str, :green:`optional (set by 'defaults')`
The interpolation method, or otherwise, to determine index values from reference dates.
index_lag: int, :green:`optional (set by 'defaults')`
The indexation lag, in months, applied to the determination of index values.
index_base: float, Dual, Dual2, Variable, :green:`optional`
The specific value set of the base index value.
If not given and ``index_fixings`` is a str fixings identifier that will be
used to determine the base index value.
index_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The index value for the reference date.
If a scalar value this is used directly. If a string identifier will link to the
central ``fixings`` object and data loader. See :ref:`fixings `.
index_base_date: datetime, :green:`optional`
The reference date for determining the base index value. Not required if ``_index_base``
value is given directly.
index_reference_date: datetime, :green:`optional (set as 'end')`
The reference date for determining the index value. Not required if ``_index_fixings``
is given as a scalar value.
index_only: bool, :green:`optional (set as False)`
A flag which determines non-payment of notional on supported *Periods*.
"""
def __init__(
self,
*,
# currency args:
payment: datetime,
notional: DualTypes,
currency: str_ = NoInput(0),
ex_dividend: datetime_ = NoInput(0),
# non-deliverable args:
pair: FXIndex | str_ = NoInput(0),
fx_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
delivery: datetime_ = NoInput(0),
# index-args:
index_base: DualTypes_ = NoInput(0),
index_lag: int_ = NoInput(0),
index_method: IndexMethod | str_ = NoInput(0),
index_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
index_only: bool_ = NoInput(0),
index_base_date: datetime_ = NoInput(0),
index_reference_date: datetime_ = NoInput(0),
):
self._settlement_params = _init_SettlementParams_with_fx_pair(
_notional=notional,
_payment=payment,
_currency=_drb(defaults.base_currency, currency).lower(),
_ex_dividend=_drb(payment, ex_dividend),
_fx_pair=_maybe_get_fx_index(pair),
)
self._non_deliverable_params = _init_or_none_NonDeliverableParams(
_currency=self.settlement_params.currency,
_fx_index=pair,
_fx_fixings=fx_fixings,
_delivery=_drb(self.settlement_params.payment, delivery),
)
self._index_params = _init_or_none_IndexParams(
_index_base=index_base,
_index_lag=index_lag,
_index_method=index_method,
_index_fixings=index_fixings,
_index_base_date=index_base_date,
_index_reference_date=_drb(self.settlement_params.payment, index_reference_date),
_index_only=index_only,
)
def unindexed_reference_cashflow(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
**kwargs: Any,
) -> DualTypes:
return -self.settlement_params.notional
def try_unindexed_reference_cashflow_analytic_delta(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
) -> Result[DualTypes]:
return Ok(0.0)
def try_unindexed_reference_cashflow_analytic_rate_fixings(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
) -> Result[DataFrame]:
return Ok(DataFrame())
class MtmCashflow(_BasePeriodStatic):
r"""
A *Period* defined by a specific amount calculated from the difference between two
:class:`~rateslib.data.fixings.FXFixing`.
This type does not permit non-deliverability, although its notional is expressed in a
notional currency which is different to the settlement currency.
The expected unindexed reference cashflow under the risk neutral distribution is defined as,
.. math::
\mathbb{E^Q} [\bar{C}_t] = -N ( f_{ref:loc}(m_{a.e}) - f_{ref:loc}(m_{a.s}) )
There is no *analytical delta* for this *Period* type and hence :math:`\xi` is not defined.
Examples
--------
This *MTMCashflow* is the movement of the EURUSD FX rate from 1.1 to 1.2 on a notional of
125,000 EUR resulting in a cashflow of -12,500 USD.
.. ipython:: python
:suppress:
from rateslib.periods import MtmCashflow
from datetime import datetime as dt
.. ipython:: python
period = MtmCashflow(
payment=dt(2025, 10, 22),
start=dt(2025, 7, 22),
currency="usd",
pair="eurusd",
notional=125000,
fx_fixings_start=1.10,
fx_fixings_end=1.20,
)
period.cashflows()
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **settlement** parameters.
currency: str, :green:`optional (set by 'defaults')`
The physical *settlement currency* of the *Period*.
notional: float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The notional amount of the *Period* expressed in ``notional currency``.
payment: datetime, :red:`required`
The payment date of the *Period* cashflow.
ex_dividend: datetime, :green:`optional (set as 'payment')`
The ex-dividend date of the *Period*. Settlements occurring **after** this date
are assumed to be non-receivable.
.. note::
The following parameters define the specific **mtm** aspects of the *cashflow*.
pair: FXIndex, str, :red:`required`
The currency pair of the two :class:`~rateslib.data.fixings.FXFixing` that determines
settlement. The *reference currency* is implied from ``pair``. Must include ``currency``.
start: datetime, :red:`required`
The delivery date of the first :class:`~rateslib.data.fixings.FXFixing` at the start of
the *Period*.
end: datetime, :green:`optional (set as 'payment')`
The delivery date of the second :class:`~rateslib.data.fixings.FXFixing` at the end of
the *Period*.
fx_fixings_start: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of the first :class:`~rateslib.data.fixings.FXFixing`. If a scalar, is used
directly. If a string identifier will link to the central ``fixings`` object and
data loader. See :ref:`fixings `.
fx_fixings_end: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of the second :class:`~rateslib.data.fixings.FXFixing`. If a scalar, is used
directly. If a string identifier will link to the central ``fixings`` object and
data loader. See :ref:`fixings `.
.. note::
The following parameters define **indexation**. The *Period* will be considered
indexed if any of ``index_method``, ``index_lag``, ``index_base``, ``index_fixings``
are given.
index_method : IndexMethod, str, :green:`optional (set by 'defaults')`
The interpolation method, or otherwise, to determine index values from reference dates.
index_lag: int, :green:`optional (set by 'defaults')`
The indexation lag, in months, applied to the determination of index values.
index_base: float, Dual, Dual2, Variable, :green:`optional`
The specific value set of the base index value.
If not given and ``index_fixings`` is a str fixings identifier that will be
used to determine the base index value.
index_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The index value for the reference date.
If a scalar value this is used directly. If a string identifier will link to the
central ``fixings`` object and data loader. See :ref:`fixings `.
index_base_date: datetime, :green:`optional`
The reference date for determining the base index value. Not required if ``_index_base``
value is given directly.
index_reference_date: datetime, :green:`optional (set as 'end')`
The reference date for determining the index value. Not required if ``_index_fixings``
is given as a scalar value.
index_only: bool, :green:`optional (set as False)`
A flag which determines non-payment of notional on supported *Periods*.
"""
@property
def mtm_params(self) -> _MtmParams:
"""The :class:`~rateslib.periods.parameters._MtmParams` of the
*Period*."""
return self._mtm_params
def __init__(
self,
*,
payment: datetime,
notional: DualTypes,
pair: FXIndex | str,
start: datetime,
end: datetime_ = NoInput(0),
currency: str_ = NoInput(0),
ex_dividend: datetime_ = NoInput(0),
fx_fixings_start: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
fx_fixings_end: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
# index-args:
index_base: DualTypes_ = NoInput(0),
index_lag: int_ = NoInput(0),
index_method: IndexMethod | str_ = NoInput(0),
index_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
index_only: bool_ = NoInput(0),
index_base_date: datetime_ = NoInput(0),
index_reference_date: datetime_ = NoInput(0),
):
fx_index = _get_fx_index(pair)
self._settlement_params = _init_SettlementParams_with_fx_pair(
_notional=notional,
_payment=payment,
_currency=_drb(defaults.base_currency, currency).lower(),
_ex_dividend=_drb(payment, ex_dividend),
_fx_pair=fx_index,
)
self._mtm_params = _init_MtmParams(
_fx_index=fx_index,
_currency=_drb(defaults.base_currency, currency).lower(),
_start=start,
_end=_drb(payment, end),
_fx_fixings_start=fx_fixings_start,
_fx_fixings_end=fx_fixings_end,
)
self._non_deliverable_params = None
self._index_params = _init_or_none_IndexParams(
_index_base=index_base,
_index_lag=index_lag,
_index_method=index_method,
_index_fixings=index_fixings,
_index_base_date=index_base_date,
_index_reference_date=_drb(self.settlement_params.payment, index_reference_date),
_index_only=index_only,
)
def unindexed_reference_cashflow( # type: ignore[override]
self,
*,
fx: FXForwards_ = NoInput(0),
**kwargs: Any,
) -> DualTypes:
fx0 = self.mtm_params.fx_fixing_start.try_value_or_forecast(fx).unwrap()
fx1 = self.mtm_params.fx_fixing_end.try_value_or_forecast(fx).unwrap()
if self.mtm_params.fx_reversed:
diff = 1.0 / fx1 - 1.0 / fx0
else:
diff = fx1 - fx0
return -self.settlement_params.notional * diff
def try_unindexed_reference_cashflow_analytic_delta(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
) -> Result[DualTypes]:
return Ok(0.0)
def try_unindexed_reference_cashflow_analytic_rate_fixings(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
) -> Result[DataFrame]:
return Ok(DataFrame())
================================================
FILE: python/rateslib/periods/credit.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from datetime import timedelta
from typing import TYPE_CHECKING
import rateslib.errors as err
from rateslib import defaults
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import Err, NoInput, Ok, Result, _drb
from rateslib.periods.parameters import (
_CreditParams,
_FixedRateParams,
_PeriodParams,
_SettlementParams,
)
from rateslib.periods.protocols import _BasePeriod
from rateslib.periods.protocols.npv import _screen_ex_div_and_forward
from rateslib.periods.utils import _maybe_local, _try_validate_base_curve, _validate_credit_curves
from rateslib.scheduling import Convention, Frequency, get_calendar
from rateslib.scheduling.adjuster import _get_adjuster
from rateslib.scheduling.convention import _get_convention
from rateslib.scheduling.frequency import _get_frequency
if TYPE_CHECKING: # pragma: no cover
from rateslib.local_types import (
Adjuster,
CalInput,
CurveOption_,
DualTypes,
DualTypes_,
FXForwards_,
FXRevised_,
RollDay,
_BaseCurve,
_BaseCurve_,
_FXVolOption_,
_IRVolOption_,
_IRVolPricingParams,
bool_,
datetime,
datetime_,
str_,
)
class CreditPremiumPeriod(_BasePeriod):
r"""
A *Period* defined by a fixed interest rate and contingent credit event.
The immediate expected valuation of the *Period* cashflow is defined as;
.. math::
\mathbb{E^Q} [V(m_T)C_T] = -N S d (Q(m_{a.s}) v(m_t) + V_{I_{pa}} )
where,
.. math::
V_{I_{pa}} = C_t I_{pa} v(m_{a.e}) \times \left \{ \begin{matrix} \frac{1}{2} \left ( Q(m_{a.s}) - Q(m_{a.e}) \right ) & m_{a.s} >= m_{today} \\ \frac{\tilde{n}+r}{2\tilde{n}} \left ( 1 - Q(m_{a.e}) \right ) & m_{a.s} < m_{today} \\ \end{matrix} \right .
For *analytic delta* purposes the :math:`\xi=-S`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.periods import CreditPremiumPeriod
from datetime import datetime as dt
.. ipython:: python
cp = CreditPremiumPeriod(
start=dt(2000, 3, 20),
end=dt(2000, 6, 20),
payment=dt(2000, 6, 20),
frequency="Q",
fixed_rate=1.00,
)
cp.cashflows()
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **settlement** parameters.
currency: str, :green:`optional (set by 'defaults')`
The physical *settlement currency* of the *Period*.
notional: float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The notional amount of the *Period* expressed in ``notional currency``.
payment: datetime, :red:`required`
The payment date of the *Period* cashflow.
ex_dividend: datetime, :green:`optional (set as 'payment')`
The ex-dividend date of the *Period*. Settlements occurring **after** this date
are assumed to be non-receivable.
.. note::
The following parameters are scheduling **period** parameters
start: datetime, :red:`required`
The identified start date of the *Period*.
end: datetime, :red:`required`
The identified end date of the *Period*.
frequency: Frequency, str, :red:`required`
The :class:`~rateslib.scheduling.Frequency` associated with the *Period*.
convention: Convention, str, :green:`optional` (set by 'defaults')
The day count :class:`~rateslib.scheduling.Convention` associated with the *Period*.
termination: datetime, :green:`optional`
The termination date of an external :class:`~rateslib.scheduling.Schedule`.
calendar: Calendar, :green:`optional`
The calendar associated with the *Period*.
stub: bool, str, :green:`optional (set as False)`
Whether the *Period* is defined as a stub according to some external
:class:`~rateslib.scheduling.Schedule`.
roll: RollDay, int, str, :green:`optional (set by 'frequency')`
The rollday associated with any monthly :class:`~rateslib.scheduling.Frequency`, if
not directly associated with that object.
adjuster: Adjuster, :green:`optional`
The date :class:`~rateslib.scheduling.Adjuster` applied to unadjusted dates in the
external :class:`~rateslib.scheduling.Schedule` to arrive at adjusted accrual dates.
.. note::
The following define **fixed rate** parameters.
fixed_rate: float, Dual, Dual2, Variable, :green:`optional`
The fixed rate to determine the *Period* cashflow.
.. note::
The following parameters define **credit specific** elements.
premium_accrued: bool, :green:`optional (set by 'defaults')`
Whether an accrued premium is paid on the event of mid-period credit default.
""" # noqa: E501
@property
def credit_params(self) -> _CreditParams:
"""The :class:`~rateslib.periods.parameters._CreditParams` of the *Period*."""
return self._credit_params
@property
def rate_params(self) -> _FixedRateParams:
"""The :class:`~rateslib.periods.parameters._FixedRateParams` of the *Period*."""
return self._rate_params
@property
def period_params(self) -> _PeriodParams:
"""The :class:`~rateslib.periods.parameters._PeriodParams` of the *Period*."""
return self._period_params
def __init__(
self,
*,
# currency args:
payment: datetime,
notional: DualTypes_ = NoInput(0),
currency: str_ = NoInput(0),
ex_dividend: datetime_ = NoInput(0),
# period params
start: datetime,
end: datetime,
frequency: Frequency | str,
convention: str_ = NoInput(0),
termination: datetime_ = NoInput(0),
stub: bool = False,
roll: RollDay | int | str_ = NoInput(0),
calendar: CalInput = NoInput(0),
adjuster: Adjuster | str_ = NoInput(0),
# specific params
fixed_rate: DualTypes_ = NoInput(0),
premium_accrued: bool_ = NoInput(0),
) -> None:
self._settlement_params = _SettlementParams(
_currency=_drb(defaults.base_currency, currency).lower(),
_notional_currency=_drb(defaults.base_currency, currency).lower(),
_payment=payment,
_notional=_drb(defaults.notional, notional),
_ex_dividend=_drb(payment, ex_dividend),
)
self._rate_params = _FixedRateParams(
_fixed_rate=fixed_rate,
)
self._credit_params = _CreditParams(
_premium_accrued=_drb(defaults.cds_premium_accrued, premium_accrued),
)
self._period_params = _PeriodParams(
_start=start,
_end=end,
_frequency=_get_frequency(frequency, roll, calendar),
_calendar=get_calendar(calendar),
_adjuster=NoInput(0) if isinstance(adjuster, NoInput) else _get_adjuster(adjuster),
_stub=stub,
_convention=_get_convention(_drb(defaults.convention, convention)),
_termination=termination,
)
def immediate_local_npv(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ | _IRVolPricingParams = NoInput(0),
) -> DualTypes:
rate_curve_, disc_curve_ = _validate_credit_curves(rate_curve, disc_curve).unwrap()
cf = self.cashflow()
return cf * self._probability_adjusted_df(rate_curve_, disc_curve_)
def try_immediate_local_analytic_delta(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXRevised_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
) -> Result[DualTypes]:
c = 0.0001 * self.period_params.dcf * self.settlement_params.notional
c_res = _validate_credit_curves(rate_curve, disc_curve)
if isinstance(c_res, Err):
return c_res
else:
rate_curve_, disc_curve_ = c_res.unwrap()
return Ok(c * self._probability_adjusted_df(rate_curve_, disc_curve_))
def cashflow(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
) -> DualTypes:
if isinstance(self.rate_params.fixed_rate, NoInput):
raise ValueError(err.VE_NEEDS_FIXEDRATE)
return (
-self.rate_params.fixed_rate
* 0.01
* self.period_params.dcf
* self.settlement_params.notional
)
def try_cashflow(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
) -> Result[DualTypes]:
r"""
Replicate :meth:`~rateslib.periods.protocols._WithNPVStatic.cashflow`
with lazy exception handling.
Returns
-------
Result[float, Dual, Dual2, Variable]
"""
try:
v = self.cashflow(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx_vol=fx_vol,
fx=fx,
)
except Exception as e:
return Err(e)
else:
return Ok(v)
def _probability_adjusted_df(self, rate_curve: _BaseCurve, disc_curve: _BaseCurve) -> DualTypes:
v_payment = disc_curve[self.settlement_params.payment]
q_end = rate_curve[self.period_params.end]
if self.credit_params.premium_accrued:
v_end = disc_curve[self.period_params.end]
n = _dual_float((self.period_params.end - self.period_params.start).days)
if self.period_params.start < disc_curve.nodes.initial:
# then mid-period valuation
r: float = _dual_float((disc_curve.nodes.initial - self.period_params.start).days)
q_start: DualTypes = 1.0
_v_start: DualTypes = 1.0
else:
r = 0.0
q_start = rate_curve[self.period_params.start]
_v_start = disc_curve[self.period_params.start]
# method 1:
accrued_: DualTypes = 0.5 * (1 + r / n)
accrued_ *= q_start - q_end
accrued_ *= v_end
# # method 4 EXACT
# _ = 0.0
# for i in range(1, int(s)):
# m_i, m_i2 = m_today + timedelta(days=i-1), m_today + timedelta(days=i)
# _ += (
# (i + r) / n * disc_curve[m_today + timedelta(days=i)] * (curve[m_i] - curve[m_i2])
# )
else:
accrued_ = 0.0
return q_end * v_payment + accrued_
def try_accrued(self, settlement: datetime) -> Result[DualTypes]:
"""
Calculate the amount of premium accrued until a specific date within the *Period*, with
lazy error raising.
Parameters
----------
settlement: datetime
The date against which accrued is measured.
Returns
-------
Result[float]
"""
if isinstance(self.rate_params.fixed_rate, NoInput):
return Err(ValueError(err.VE_NEEDS_FIXEDRATE))
c = (
-self.rate_params.fixed_rate
* 0.01
* self.period_params.dcf
* self.settlement_params.notional
)
start, end = self.period_params.start, self.period_params.end
if settlement <= start or settlement >= end:
return Ok(0.0)
return Ok(c * (settlement - start).days / (end - start).days)
def accrued(self, settlement: datetime) -> DualTypes:
"""
Calculate the amount of premium accrued until a specific date within the *Period*.
Parameters
----------
settlement: datetime
The date against which accrued is measured.
Returns
-------
float
"""
return self.try_accrued(settlement).unwrap()
class CreditProtectionPeriod(_BasePeriod):
r"""
A *Period* defined by a credit event and contingent notional payment.
The immediate expected valuation of the *Period* cashflow is defined as;
.. math::
\mathbb{E^Q}[V(m_T)C_T] = -N(1-RR) \int_{max(m_{a.s}, m_{today})}^{m_{a.e}} w_{loc:col}(m_s) Q(m_s) \lambda(s) ds
where the integral is numerically determined.
There is no *analytical delta* for this *Period* type and hence :math:`\xi` is not defined.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.periods import CreditProtectionPeriod
from datetime import datetime as dt
.. ipython:: python
cp = CreditProtectionPeriod(
start=dt(2000, 3, 20),
end=dt(2000, 6, 20),
payment=dt(2000, 6, 20),
frequency="Q",
)
cp.cashflows()
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **settlement** parameters.
currency: str, :green:`optional (set by 'defaults')`
The physical *settlement currency* of the *Period*.
notional: float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The notional amount of the *Period* expressed in ``notional currency``.
payment: datetime, :red:`required`
The payment date of the *Period* cashflow.
ex_dividend: datetime, :green:`optional (set as 'payment')`
The ex-dividend date of the *Period*. Settlements occurring **after** this date
are assumed to be non-receivable.
.. note::
The following parameters are scheduling **period** parameters
start: datetime, :red:`required`
The identified start date of the *Period*.
end: datetime, :red:`required`
The identified end date of the *Period*.
frequency: Frequency, str, :red:`required`
The :class:`~rateslib.scheduling.Frequency` associated with the *Period*.
termination: datetime, :green:`optional`
The termination date of an external :class:`~rateslib.scheduling.Schedule`.
calendar: Calendar, :green:`optional`
The calendar associated with the *Period*.
stub: bool, str, :green:`optional (set as False)`
Whether the *Period* is defined as a stub according to some external
:class:`~rateslib.scheduling.Schedule`.
roll: RollDay, int, str, :green:`optional (set by 'frequency')`
The rollday associated with any monthly :class:`~rateslib.scheduling.Frequency`, if
not directly associated with that object.
adjuster: Adjuster, :green:`optional`
The date :class:`~rateslib.scheduling.Adjuster` applied to unadjusted dates in the
external :class:`~rateslib.scheduling.Schedule` to arrive at adjusted accrual dates.
""" # noqa: E501
@property
def credit_params(self) -> _CreditParams:
"""The :class:`~rateslib.periods.parameters._CreditParams` of the *Period*."""
return self._credit_params
@property
def period_params(self) -> _PeriodParams:
"""The :class:`~rateslib.periods.parameters._PeriodParams` of the *Period*."""
return self._period_params
def __init__(
self,
*,
# currency args:
payment: datetime,
notional: DualTypes_ = NoInput(0),
currency: str_ = NoInput(0),
ex_dividend: datetime_ = NoInput(0),
# period params
start: datetime,
end: datetime,
frequency: Frequency | str,
# convention: str_ = NoInput(0),
termination: datetime_ = NoInput(0),
stub: bool = False,
roll: RollDay | int | str_ = NoInput(0),
calendar: CalInput = NoInput(0),
adjuster: Adjuster | str_ = NoInput(0),
) -> None:
self._settlement_params = _SettlementParams(
_currency=_drb(defaults.base_currency, currency).lower(),
_notional_currency=_drb(defaults.base_currency, currency).lower(),
_payment=payment,
_notional=_drb(defaults.notional, notional),
_ex_dividend=_drb(payment, ex_dividend),
)
self._credit_params = _CreditParams(
_premium_accrued=True
) # arg irrelevant for Period type.
self._period_params = _PeriodParams(
_start=start,
_end=end,
_frequency=_get_frequency(frequency, roll, calendar),
_calendar=get_calendar(calendar),
_adjuster=NoInput(0) if isinstance(adjuster, NoInput) else _get_adjuster(adjuster),
_stub=stub,
_convention=Convention.One, # _get_convention(_drb(defaults.convention, convention)),
_termination=termination,
)
def cashflow(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
) -> DualTypes:
rate_curve_ = _try_validate_base_curve(rate_curve).unwrap()
return -self.settlement_params.notional * (1 - rate_curve_.meta.credit_recovery_rate)
def try_cashflow(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
) -> Result[DualTypes]:
r"""
Replicate :meth:`~rateslib.periods.protocols._WithNPVStatic.cashflow`
with lazy exception handling.
Returns
-------
Result[float, Dual, Dual2, Variable]
"""
try:
v = self.cashflow(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx_vol=fx_vol,
fx=fx,
)
except Exception as e:
return Err(e)
else:
return Ok(v)
def immediate_local_npv(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ | _IRVolPricingParams = NoInput(0),
) -> DualTypes:
rate_curve_, disc_curve_ = _validate_credit_curves(rate_curve, disc_curve).unwrap()
quadrature = self._quadrature(rate_curve_, disc_curve_)
cf = self.cashflow(rate_curve=rate_curve)
return quadrature * cf
def _quadrature(
self,
rate_curve_: _BaseCurve,
disc_curve_: _BaseCurve,
) -> DualTypes:
"""determine the integral component of the NPV function using discretised intervals"""
discretization = rate_curve_.meta.credit_discretization
if self.period_params.start < rate_curve_.nodes.initial:
s2 = rate_curve_.nodes.initial
else:
s2 = self.period_params.start
value: DualTypes = 0.0
q2: DualTypes = rate_curve_[s2]
v2: DualTypes = disc_curve_[s2]
while s2 < self.period_params.end:
q1, v1 = q2, v2
s2 = s2 + timedelta(days=discretization)
if s2 > self.period_params.end:
s2 = self.period_params.end
q2, v2 = rate_curve_[s2], disc_curve_[s2]
value += 0.5 * (v1 + v2) * (q1 - q2)
# value += v2 * (q1 - q2)
return value
def try_immediate_local_analytic_delta(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXRevised_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
) -> Result[DualTypes]:
return Ok(0.0)
def analytic_rec_risk(
self,
rate_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
"""
Calculate the exposure of the NPV to a change in recovery rate.
.. role:: red
.. role:: green
Parameters
----------
rate_curve: _BaseCurve, :red:`required`
Used to forecast credit parameters, such as hazard rates and recovery rates.
disc_curve: _BaseCurve, :red:`required`
Used to discount cashflows.
fx: FXForwards, :green:`optional`
The :class:`~rateslib.fx.FXForwards` object used for currency conversion.
base: str, :green:`optional`
The currency to convert the *local settlement* value into.
settlement: datetime, :green:`optional`
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, :green:`optional`
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
float, Dual, Dual2
"""
rate_curve_, disc_curve_ = _validate_credit_curves(rate_curve, disc_curve).unwrap()
quadrature = self._quadrature(rate_curve_, disc_curve_)
local_immediate_value = quadrature * self.settlement_params.notional * 0.01
local_value = _screen_ex_div_and_forward(
local_value=Ok(local_immediate_value),
rate_curve=rate_curve,
disc_curve=disc_curve,
ex_dividend=self.settlement_params.ex_dividend,
settlement=settlement,
forward=forward,
)
ret: DualTypes = _maybe_local( # type: ignore[assignment] # local is False
value=local_value.unwrap(),
local=False,
currency=self.settlement_params.currency,
fx=fx,
base=base,
forward=forward,
)
return ret
================================================
FILE: python/rateslib/periods/fixed_period.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from functools import cached_property
from typing import TYPE_CHECKING
from pandas import DataFrame
import rateslib.errors as err
from rateslib import defaults
from rateslib.data.fixings import _maybe_get_fx_index
from rateslib.enums.generics import Err, NoInput, Ok, _drb
from rateslib.enums.parameters import IndexMethod
from rateslib.periods.parameters import (
_FixedRateParams,
_init_or_none_IndexParams,
_init_or_none_NonDeliverableParams,
_init_SettlementParams_with_fx_pair,
_PeriodParams,
)
from rateslib.periods.protocols import _BasePeriodStatic
from rateslib.scheduling import Adjuster, Frequency, dcf, get_calendar
from rateslib.scheduling.adjuster import _get_adjuster
from rateslib.scheduling.convention import _get_convention
from rateslib.scheduling.frequency import _get_frequency
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
CalInput,
CurveOption_,
DualTypes,
DualTypes_,
FXForwards_,
FXIndex,
Result,
RollDay,
Schedule,
Series,
_BaseCurve_,
_FXVolOption_,
_IRVolOption_,
bool_,
datetime,
datetime_,
int_,
str_,
)
class FixedPeriod(_BasePeriodStatic):
r"""
A *Period* defined by a fixed interest rate.
The expected unindexed reference cashflow under the risk neutral distribution is defined as,
.. math::
\mathbb{E^Q} [\bar{C}_t] = -N d R
For *analytic delta* purposes the :math:`\xi=-R`.
.. role:: red
.. role:: green
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.periods import FixedPeriod
from datetime import datetime as dt
.. ipython:: python
period = FixedPeriod(
start=dt(2000, 1, 1),
end=dt(2001, 1, 1),
payment=dt(2001, 1, 1),
fixed_rate=5.0,
notional=1e6,
convention="ActActICMA",
frequency="A",
)
period.cashflows()
Parameters
----------
.
.. note::
The following define generalised **settlement** parameters.
currency: str, :green:`optional (set by 'defaults')`
The physical *settlement currency* of the *Period*.
notional: float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The notional amount of the *Period* expressed in ``notional currency``.
payment: datetime, :red:`required`
The payment date of the *Period* cashflow.
ex_dividend: datetime, :green:`optional (set as 'payment')`
The ex-dividend date of the *Period*. Settlements occurring **after** this date
are assumed to be non-receivable.
.. note::
The following parameters are scheduling **period** parameters
start: datetime, :red:`required`
The identified start date of the *Period*.
end: datetime, :red:`required`
The identified end date of the *Period*.
frequency: Frequency, str, :red:`required`
The :class:`~rateslib.scheduling.Frequency` associated with the *Period*.
convention: Convention, str, :green:`optional` (set by 'defaults')
The day count :class:`~rateslib.scheduling.Convention` associated with the *Period*.
termination: datetime, :green:`optional`
The termination date of an external :class:`~rateslib.scheduling.Schedule`.
calendar: Calendar, :green:`optional`
The calendar associated with the *Period*.
stub: bool, str, :green:`optional (set as False)`
Whether the *Period* is defined as a stub according to some external
:class:`~rateslib.scheduling.Schedule`.
roll: RollDay, int, str, :green:`optional (set by 'frequency')`
The rollday associated with any monthly :class:`~rateslib.scheduling.Frequency`, if
not directly associated with that object.
adjuster: Adjuster, :green:`optional`
The date :class:`~rateslib.scheduling.Adjuster` applied to unadjusted dates in the
external :class:`~rateslib.scheduling.Schedule` to arrive at adjusted accrual dates.
.. note::
The following define **fixed rate** parameters.
fixed_rate: float, Dual, Dual2, Variable, :green:`optional`
The fixed rate to determine the *Period* cashflow.
.. note::
The following parameters define **non-deliverability**. If the *Period* is directly
deliverable do not supply these parameters.
pair: str, :green:`optional`
The currency pair of the :class:`~rateslib.data.fixings.FXFixing` that determines
settlement. The *reference currency* is implied from ``pair``. Must include ``currency``.
fx_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of the :class:`~rateslib.data.fixings.FXFixing`. If a scalar is used directly.
If a string identifier will link to the central ``fixings`` object and data loader.
See :ref:`fixings `.
delivery: datetime, :green:`optional (set as 'payment')`
The settlement delivery date of the :class:`~rateslib.data.fixings.FXFixing`.
.. note::
The following parameters define **indexation**. The *Period* will be considered
indexed if any of ``index_method``, ``index_lag``, ``index_base``, ``index_fixings``
are given.
index_method : IndexMethod, str, :green:`optional (set by 'defaults')`
The interpolation method, or otherwise, to determine index values from reference dates.
index_lag: int, :green:`optional (set by 'defaults')`
The indexation lag, in months, applied to the determination of index values.
index_base: float, Dual, Dual2, Variable, :green:`optional`
The specific value set of the base index value.
If not given and ``index_fixings`` is a str fixings identifier that will be
used to determine the base index value.
index_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The index value for the reference date.
If a scalar value this is used directly. If a string identifier will link to the
central ``fixings`` object and data loader. See :ref:`fixings `.
index_base_date: datetime, :green:`optional`
The reference date for determining the base index value. Not required if ``_index_base``
value is given directly.
index_reference_date: datetime, :green:`optional (set as 'end')`
The reference date for determining the index value. Not required if ``_index_fixings``
is given as a scalar value.
index_only: bool, :green:`optional (set as False)`
A flag which determines non-payment of notional on supported *Periods*.
.. Examples
--------
A typical RFR type :class:`~rateslib.periods.FloatPeriod`.
.. ipython:: python
:supress:
from rateslib.periods import FloatPeriod
from rateslib.data.fixings import FloatRateIndex
from datetime import datetime as dt
.. ipython:: python
period = FloatPeriod(
start=dt(2025, 9, 22),
end=dt(2025, 10, 20),
payment=dt(2025, 10, 22),
frequency="1M",
)
A typical IBOR tenor type :class:`~rateslib.periods.FloatPeriod`.
.. ipython:: python
period = FloatPeriod(
start=dt(2025, 9, 22),
end=dt(2025, 10, 22),
payment=dt(2025, 10, 22),
frequency="1M",
currency="eur",
fixing_method="IBOR",
fixing_series="eur_IBOR",
)
"""
@property
def rate_params(self) -> _FixedRateParams:
"""The :class:`~rateslib.periods.parameters._FixedRateParams` of the *Period*."""
return self._rate_params
@property
def period_params(self) -> _PeriodParams:
"""The :class:`~rateslib.periods.parameters._PeriodParams` of the *Period*."""
return self._period_params
def __init__(
self,
*,
fixed_rate: DualTypes_ = NoInput(0),
# currency args:
payment: datetime,
notional: DualTypes_ = NoInput(0),
currency: str_ = NoInput(0),
ex_dividend: datetime_ = NoInput(0),
# period params
start: datetime,
end: datetime,
frequency: Frequency | str,
convention: str_ = NoInput(0),
termination: datetime_ = NoInput(0),
stub: bool = False,
roll: RollDay | int | str_ = NoInput(0),
calendar: CalInput = NoInput(0),
adjuster: Adjuster | str_ = NoInput(0),
# non-deliverable args:
pair: FXIndex | str_ = NoInput(0),
fx_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
delivery: datetime_ = NoInput(0),
# index-args:
index_base: DualTypes_ = NoInput(0),
index_lag: int_ = NoInput(0),
index_method: IndexMethod | str_ = NoInput(0),
index_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
index_only: bool_ = NoInput(0),
index_base_date: datetime_ = NoInput(0),
index_reference_date: datetime_ = NoInput(0),
) -> None:
self._settlement_params = _init_SettlementParams_with_fx_pair(
_currency=_drb(defaults.base_currency, currency).lower(),
_payment=payment,
_notional=_drb(defaults.notional, notional),
_ex_dividend=_drb(payment, ex_dividend),
_fx_pair=_maybe_get_fx_index(pair),
)
self._non_deliverable_params = _init_or_none_NonDeliverableParams(
_currency=self.settlement_params.currency,
_fx_index=pair,
_delivery=_drb(self.settlement_params.payment, delivery),
_fx_fixings=fx_fixings,
)
self._period_params = _PeriodParams(
_start=start,
_end=end,
_frequency=_get_frequency(frequency, roll, calendar),
_calendar=get_calendar(calendar),
_adjuster=NoInput(0) if isinstance(adjuster, NoInput) else _get_adjuster(adjuster),
_stub=stub,
_convention=_get_convention(_drb(defaults.convention, convention)),
_termination=termination,
)
self._index_params = _init_or_none_IndexParams(
_index_base=index_base,
_index_lag=index_lag,
_index_method=index_method,
_index_fixings=index_fixings,
_index_only=index_only,
_index_base_date=index_base_date,
_index_reference_date=_drb(self.period_params.end, index_reference_date),
)
self._rate_params = _FixedRateParams(fixed_rate)
def unindexed_reference_cashflow(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
**kwargs: Any,
) -> DualTypes:
if isinstance(self.rate_params.fixed_rate, NoInput):
raise ValueError(err.VE_NEEDS_FIXEDRATE)
else:
return (
-self.settlement_params.notional
* self.rate_params.fixed_rate
* 0.01
* self.period_params.dcf
)
# def try_cashflow(
# self,
# *,
# rate_curve: CurveOption_ = NoInput(0),
# disc_curve: _BaseCurve_ = NoInput(0),
# index_curve: _BaseCurve_ = NoInput(0),
# fx: FXForwards_ = NoInput(0),
# fx_vol: _FXVolOption_ = NoInput(0),
# ) -> Result[DualTypes]:
# if self.index_params is None:
# if self.non_deliverable_params is None:
# return self.try_unindexed_reference_cashflow(
# rate_curve=rate_curve,
# disc_curve=disc_curve,
# index_curve=index_curve,
# fx=fx,
# fx_vol=fx_vol,
# )
# else:
# return self.try_unindexed_cashflow(
# rate_curve=rate_curve,
# disc_curve=disc_curve,
# index_curve=index_curve,
# fx=fx,
# fx_vol=fx_vol,
# )
# else:
# if self.non_deliverable_params is None:
# return self.try_reference_cashflow(
# rate_curve=rate_curve,
# disc_curve=disc_curve,
# index_curve=index_curve,
# fx=fx,
# fx_vol=fx_vol,
# )
# else:
# rc = self.try_reference_cashflow(
# rate_curve=rate_curve,
# index_curve=index_curve,
# disc_curve=disc_curve,
# fx=fx,
# fx_vol=fx_vol,
# )
# return self.try_convert_deliverable(value=rc, fx=fx)
def try_unindexed_reference_cashflow_analytic_delta(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
) -> Result[DualTypes]:
return Ok(self.settlement_params.notional * 0.0001 * self.period_params.dcf)
def try_unindexed_reference_cashflow_analytic_rate_fixings(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
) -> Result[DataFrame]:
return Ok(DataFrame())
class ZeroFixedPeriod(_BasePeriodStatic):
r"""
A *Period* defined by a fixed interest rate, as a representation of multiple compounded *Periods*.
The expected unindexed reference cashflow under the risk neutral distribution is defined as,
.. math::
\mathbb{E^Q}[\bar{C}_t] = - N \left ( \left ( 1 + \frac{R}{f} \right )^{df} - 1 \right ), \qquad d = \sum_{i=1}^n d_i
For *analytic delta* purposes the :math:`\xi=-R`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.periods import ZeroFixedPeriod
from rateslib.legs import CustomLeg
from rateslib.scheduling import Schedule
from datetime import datetime as dt
.. ipython:: python
period = ZeroFixedPeriod(
schedule=Schedule(dt(2000, 1, 1), "5Y", "A"),
fixed_rate=5.0,
convention="1",
)
period.cashflows()
For more details of the individual compounded periods one can compose a
:class:`~rateslib.legs.CustomLeg` and view the pseudo-cashflows.
.. ipython:: python
CustomLeg(period.fixed_periods).cashflows()
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define generalised **settlement** parameters.
currency: str, :green:`optional (set by 'defaults')`
The physical *settlement currency* of the *Period*.
notional: float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The notional amount of the *Period* expressed in ``notional currency``.
.. note::
The following parameters are scheduling **period** parameters
schedule: Schedule, :red:`required`
The :class:`~rateslib.scheduling.Schedule` defining the individual *Periods*, including
the *payment* and *ex-dividend* dates.
.. note::
The following define **fixed rate** parameters.
fixed_rate: float, Dual, Dual2, Variable, :green:`optional`
The fixed rate to determine the *Period* cashflow.
.. note::
The following parameters define **non-deliverability**. If the *Period* is directly
deliverable do not supply these parameters.
pair: str, :green:`optional`
The currency pair of the :class:`~rateslib.data.fixings.FXFixing` that determines
settlement. The *reference currency* is implied from ``pair``. Must include ``currency``.
fx_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of the :class:`~rateslib.data.fixings.FXFixing`. If a scalar is used directly.
If a string identifier will link to the central ``fixings`` object and data loader.
See :ref:`fixings `.
delivery: datetime, :green:`optional (set as 'payment')`
The settlement delivery date of the :class:`~rateslib.data.fixings.FXFixing`.
.. note::
The following parameters define **indexation**. The *Period* will be considered
indexed if any of ``index_method``, ``index_lag``, ``index_base``, ``index_fixings``
are given.
index_method : IndexMethod, str, :green:`optional (set by 'defaults')`
The interpolation method, or otherwise, to determine index values from reference dates.
index_lag: int, :green:`optional (set by 'defaults')`
The indexation lag, in months, applied to the determination of index values.
index_base: float, Dual, Dual2, Variable, :green:`optional`
The specific value set of the base index value.
If not given and ``index_fixings`` is a str fixings identifier that will be
used to determine the base index value.
index_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The index value for the reference date.
If a scalar value this is used directly. If a string identifier will link to the
central ``fixings`` object and data loader.
See :ref:`fixings `.
index_only: bool, :green:`optional (set as False)`
A flag which determines non-payment of notional on supported *Periods*.
""" # noqa: E501
@property
def rate_params(self) -> _FixedRateParams:
"""The :class:`~rateslib.periods.parameters._FixedRateParams` of the *Period*."""
return self._rate_params
@property
def period_params(self) -> _PeriodParams:
"""The :class:`~rateslib.periods.parameters._PeriodParams` of the *Period*."""
return self._period_params
@property
def schedule(self) -> Schedule:
"""The :class:`~rateslib.scheduling.Schedule` object for this *Period*."""
return self._schedule
@cached_property
def dcf(self) -> float:
"""An overload for the calculation of the DCF, replacing `period_params.dcf`."""
return sum(
dcf(
start=self.schedule.aschedule[i],
end=self.schedule.aschedule[i + 1],
convention=self.period_params.convention,
termination=self.schedule.aschedule[-1],
frequency=self.schedule.frequency_obj,
stub=self.schedule._stubs[i],
roll=NoInput(0), # taken from Frequency obj
calendar=self.schedule.calendar,
adjuster=self.schedule.modifier,
)
for i in range(self.schedule.n_periods)
)
@property
def fixed_periods(self) -> list[FixedPeriod]:
"""
The individual :class:`~rateslib.periods.FixedPeriod` that are
compounded.
"""
return self._fixed_periods
def __init__(
self,
*,
fixed_rate: DualTypes_ = NoInput(0),
schedule: Schedule,
# currency args:
notional: DualTypes_ = NoInput(0),
currency: str_ = NoInput(0),
# period params
convention: str_ = NoInput(0),
# non-deliverable args:
pair: FXIndex | str_ = NoInput(0),
fx_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
delivery: datetime_ = NoInput(0),
# index-args:
index_base: DualTypes_ = NoInput(0),
index_lag: int_ = NoInput(0),
index_method: IndexMethod | str_ = NoInput(0),
index_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
index_only: bool_ = NoInput(0),
) -> None:
self._schedule = schedule
self._settlement_params = _init_SettlementParams_with_fx_pair(
_currency=_drb(defaults.base_currency, currency).lower(),
_payment=self.schedule.pschedule[-1],
_notional=_drb(defaults.notional, notional),
_ex_dividend=self.schedule.pschedule3[-1],
_fx_pair=_maybe_get_fx_index(pair),
)
self._non_deliverable_params = _init_or_none_NonDeliverableParams(
_currency=self.settlement_params.currency,
_fx_index=pair,
_delivery=_drb(self.settlement_params.payment, delivery),
_fx_fixings=fx_fixings,
)
self._period_params = _PeriodParams(
_start=self.schedule.aschedule[0],
_end=self.schedule.aschedule[-1],
_frequency=self.schedule.frequency_obj,
_calendar=self.schedule.calendar,
_adjuster=self.schedule.modifier,
_stub=True,
_convention=_get_convention(_drb(defaults.convention, convention)),
_termination=self.schedule.aschedule[-1],
)
self._index_params = _init_or_none_IndexParams(
_index_base=index_base,
_index_lag=index_lag,
_index_method=index_method,
_index_fixings=index_fixings,
_index_only=index_only,
_index_base_date=self.schedule.aschedule[0],
_index_reference_date=self.schedule.aschedule[-1],
)
self._rate_params = _FixedRateParams(fixed_rate)
self._fixed_periods: list[FixedPeriod] = [
FixedPeriod(
fixed_rate=fixed_rate,
# currency args:
payment=self.schedule.pschedule[i + 1],
notional=notional,
currency=currency,
ex_dividend=self.schedule.pschedule3[i + 1],
# period params
start=self.schedule.aschedule[i],
end=self.schedule.aschedule[i + 1],
frequency=self.schedule.frequency_obj,
convention=convention,
termination=self.schedule.aschedule[-1],
stub=self.schedule._stubs[i],
roll=NoInput(0), # inferred from frequency obj
calendar=self.schedule.calendar,
adjuster=self.schedule.modifier,
# Each individual period is not genuine Period, only psuedo periods to derive the
# cashflow calculation so no 'non-deliverable' or 'index' params are required.
)
for i in range(self.schedule.n_periods)
]
def unindexed_reference_cashflow(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
**kwargs: Any,
) -> DualTypes:
if isinstance(self.rate_params.fixed_rate, NoInput):
raise ValueError(err.VE_NEEDS_FIXEDRATE)
else:
f = self.schedule.periods_per_annum
return -self.settlement_params.notional * (
(1 + self.rate_params.fixed_rate / (f * 100)) ** (self.dcf * f) - 1
)
def try_unindexed_reference_cashflow_analytic_delta(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
) -> Result[DualTypes]:
if isinstance(self.rate_params.fixed_rate, NoInput):
return Err(ValueError(err.VE_NEEDS_FIXEDRATE))
else:
f = self.schedule.periods_per_annum
return Ok(
self.settlement_params.notional
* 0.0001
* self.dcf
* ((1 + self.rate_params.fixed_rate / (f * 100)) ** (self.dcf * f - 1))
)
def try_unindexed_reference_cashflow_analytic_rate_fixings(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
) -> Result[DataFrame]:
return Ok(DataFrame())
def cashflows(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> dict[str, Any]:
"""
Return aggregated cashflow data for the *Period*.
.. warning::
This method is a convenience method to provide a visual representation of all
associated calculation data. Calling this method to extracting certain values
should be avoided. It is more efficient to source relevant parameters or calculations
from object attributes or other methods directly.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
base: str, optional
The currency to convert the *local settlement* NPV to.
settlement: datetime, optional
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, optional
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
dict[Any]
"""
d = super().cashflows(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
settlement=settlement,
forward=forward,
base=base,
)
d[defaults.headers["dcf"]] = self.dcf # reinsert the overload
return d
================================================
FILE: python/rateslib/periods/float_period.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from functools import cached_property
from typing import TYPE_CHECKING
import numpy as np
from pandas import DataFrame, Index, MultiIndex, Series, concat, merge
import rateslib.errors as err
from rateslib import defaults
from rateslib.curves import _BaseCurve
from rateslib.curves.utils import average_rate
from rateslib.data.fixings import (
FloatRateSeries,
_leg_fixings_to_list,
_maybe_get_fx_index,
_RFRRate,
)
from rateslib.data.loader import _find_neighbouring_tenors
from rateslib.dual import Variable, gradient
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import Err, NoInput, Ok, _drb
from rateslib.enums.parameters import FloatFixingMethod, IndexMethod, SpreadCompoundMethod
from rateslib.periods.float_rate import (
try_rate_value,
)
from rateslib.periods.parameters import (
_init_FloatRateParams,
_init_or_none_IndexParams,
_init_or_none_NonDeliverableParams,
_init_SettlementParams_with_fx_pair,
_PeriodParams,
)
from rateslib.periods.protocols import _BasePeriodStatic
from rateslib.periods.utils import _get_rfr_curve_from_dict
from rateslib.scheduling import Adjuster, Frequency, dcf, get_calendar
from rateslib.scheduling.adjuster import _get_adjuster
from rateslib.scheduling.convention import _get_convention
from rateslib.scheduling.frequency import _get_frequency, _get_tenor_from_frequency
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
Arr1dObj,
CalInput,
Convention,
CurveOption_,
DualTypes,
DualTypes_,
Frequency,
FXForwards_,
FXIndex,
Result,
RFRFixing,
RollDay,
Schedule,
Series,
_BaseCurve_,
_FloatRateParams,
_FXVolOption_,
_IRVolOption_,
bool_,
datetime,
datetime_,
int_,
str_,
)
class FloatPeriod(_BasePeriodStatic):
r"""
A *Period* defined by a floating interest rate.
The expected unindexed reference cashflow under the risk neutral distribution is defined as,
.. math::
\mathbb{E^Q} [\bar{C}_t] = -N d r(\mathbf{C}, z, R_i)
For *analytic delta* purposes the :math:`\xi=-z`.
.. role:: red
.. role:: green
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import FloatPeriod, Frequency, fixings, FloatRateSeries
from rateslib.enums import SpreadCompoundMethod, FloatFixingMethod
from datetime import datetime as dt
from pandas import Series
.. ipython:: python
fixings.add("MY_RATE_INDEX_6M", Series(index=[dt(2000, 1, 1)], data=[2.66]))
period = FloatPeriod(
start=dt(2000, 1, 1),
end=dt(2000, 7, 1),
payment=dt(2000, 7, 1),
notional=1e6,
convention="Act360",
frequency="S",
fixing_method="ibor(0)",
rate_fixings="MY_RATE_INDEX"
)
period.cashflows()
.. ipython:: python
:suppress:
fixings.pop("MY_RATE_INDEX_6M")
Parameters
----------
.
.. note::
The following define generalised **settlement** parameters.
currency: str, :green:`optional (set by 'defaults')`
The physical *settlement currency* of the *Period*.
notional: float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The notional amount of the *Period* expressed in ``notional currency``.
payment: datetime, :red:`required`
The payment date of the *Period* cashflow.
ex_dividend: datetime, :green:`optional (set as 'payment')`
The ex-dividend date of the *Period*. Settlements occurring **after** this date
are assumed to be non-receivable.
.. note::
The following parameters are scheduling **period** parameters
start: datetime, :red:`required`
The identified start date of the *Period*.
end: datetime, :red:`required`
The identified end date of the *Period*.
frequency: Frequency, str, :red:`required`
The :class:`~rateslib.scheduling.Frequency` associated with the *Period*.
convention: Convention, str, :green:`optional` (set by 'defaults')
The day count :class:`~rateslib.scheduling.Convention` associated with the *Period*.
termination: datetime, :green:`optional`
The termination date of an external :class:`~rateslib.scheduling.Schedule`.
calendar: Calendar, :green:`optional`
The calendar associated with the *Period*.
stub: bool, str, :green:`optional (set as False)`
Whether the *Period* is defined as a stub according to some external
:class:`~rateslib.scheduling.Schedule`.
roll: RollDay, int, str, :green:`optional (set by 'frequency')`
The rollday associated with any monthly :class:`~rateslib.scheduling.Frequency`, if
not directly associated with that object.
adjuster: Adjuster, :green:`optional`
The date :class:`~rateslib.scheduling.Adjuster` applied to unadjusted dates in the
external :class:`~rateslib.scheduling.Schedule` to arrive at adjusted accrual dates.
.. note::
The following define **floating rate** parameters.
fixing_method: FloatFixingMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.FloatFixingMethod` describing the determination
of the floating rate for the period.
fixing_frequency: Frequency, str, :green:`optional (set by 'frequency' or '1B')`
The :class:`~rateslib.scheduling.Frequency` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given is assumed to match the
frequency of the period for an IBOR type ``fixing_method`` or '1B' if RFR type.
fixing_series: FloatRateSeries, str, :green:`optional (implied by other parameters)`
The :class:`~rateslib.data.fixings.FloatRateSeries` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given inherits attributes given
such as the ``calendar``, ``convention``, ``fixing_method`` etc.
float_spread: float, Dual, Dual2, Variable, :green:`optional (set as 0.0)`
The amount (in bps) added to the rate in the period rate determination. If not given is
set to zero.
spread_compound_method: SpreadCompoundMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.SpreadCompoundMethod` used in the calculation
of the period rate when combining a ``float_spread``. Used **only** with (non-averaging)
RFR type ``fixing_method``.
rate_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of the rate fixing. If a scalar, is used directly. If a string identifier, links
to the central ``fixings`` object and data loader. See :ref:`fixings `.
.. note::
The following parameters define **non-deliverability**. If the *Period* is directly
deliverable do not supply these parameters.
pair: str, :green:`optional`
The currency pair of the :class:`~rateslib.data.fixings.FXFixing` that determines
settlement. The *reference currency* is implied from ``pair``. Must include ``currency``.
fx_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of the :class:`~rateslib.data.fixings.FXFixing`. If a scalar is used directly.
If a string identifier will link to the central ``fixings`` object and data loader.
See :ref:`fixings `.
delivery: datetime, :green:`optional (set as 'payment')`
The settlement delivery date of the :class:`~rateslib.data.fixings.FXFixing`.
.. note::
The following parameters define **indexation**. The *Period* will be considered
indexed if any of ``index_method``, ``index_lag``, ``index_base``, ``index_fixings``
are given.
index_method : IndexMethod, str, :green:`optional (set by 'defaults')`
The interpolation method, or otherwise, to determine index values from reference dates.
index_lag: int, :green:`optional (set by 'defaults')`
The indexation lag, in months, applied to the determination of index values.
index_base: float, Dual, Dual2, Variable, :green:`optional`
The specific value set of the base index value.
If not given and ``index_fixings`` is a str fixings identifier that will be
used to determine the base index value.
index_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The index value for the reference date.
If a scalar value this is used directly. If a string identifier will link to the
central ``fixings`` object and data loader. See :ref:`fixings `.
index_base_date: datetime, :green:`optional`
The reference date for determining the base index value. Not required if ``_index_base``
value is given directly.
index_reference_date: datetime, :green:`optional (set as 'end')`
The reference date for determining the index value. Not required if ``_index_fixings``
is given as a scalar value.
index_only: bool, :green:`optional (set as False)`
A flag which determines non-payment of notional on supported *Periods*.
Notes
-----
**Five** different classifications of *FloatPeriod* are possible to construct.
.. tabs::
.. tab:: RFR
A standard *RFR* period consists of multiple *'1B'* overnight fixings compounded
over the *Period* to determine the *rate*. It is specified by using any non-averaging
*RFR* ``fixing_method``. This variant constructs an
:class:`~rateslib.data.fixings.RFRFixing` as the object to coordinate *rate*
calculation. It will depend on ``spread_compound_method`` to incorporate a
``float_spread`` into the calculation. The ``fixing_frequency`` is *'1B'* under this
method.
.. ipython:: python
fp = FloatPeriod(
start=dt(2026, 1, 22),
end=dt(2027, 1, 22),
payment=dt(2027, 1, 25),
frequency=Frequency.Months(12, None), # <- or "A"
fixing_method=FloatFixingMethod.RFRPaymentDelay(), # <- or "rfr_payment_delay"
float_spread=5.0,
spread_compound_method=SpreadCompoundMethod.NoneSimple, # <- or "NoneSimple"
)
fp.rate_params.rate_fixing
.. tab:: Average RFR
This type is the same as **RFR** but uses an averaging ``fixing_method`` variant.
``spread_compound_method`` cannot be used and can only be *'NoneSimple'*.
.. ipython:: python
fp = FloatPeriod(
start=dt(2026, 1, 22),
end=dt(2027, 1, 22),
payment=dt(2027, 1, 25),
frequency=Frequency.Months(12, None), # <- or "A"
fixing_method=FloatFixingMethod.RFRPaymentDelayAverage(), # <- or "rfr_payment_delay_avg"
float_spread=5.0,
)
fp.rate_params.rate_fixing
.. warning::
The :meth:`~rateslib.periods.FloatPeriod.rate` method does **not** make any
*convexity adjustments* for an averaging type versus the numéraire compounding type
and determines a *rate* under the direct calculations from a provided *rate Curve*.
.. tab:: IBOR
This type, for legacy tenor **IBOR** rates, such as US-LIBOR, GBP-LIBOR, and existing
tenor rates such as EURIBOR, STIBOR, NIBOR, BB3M etc. uses a single fixing period. It is
specified by an *'ibor'* ``fixing_method``.
When the period is regular it will create an :class:`~rateslib.data.fixings.IBORFixing`
with a ``fixing_frequency`` that aligns with that of the *Period*.
.. ipython:: python
fp = FloatPeriod(
start=dt(2026, 1, 22),
end=dt(2026, 4, 22),
payment=dt(2026, 4, 22),
frequency=Frequency.Months(3, None), # <- or "Q"
fixing_method=FloatFixingMethod.IBOR(2), # <- or "ibor(2)"
float_spread=5.0,
)
fp.rate_params.rate_fixing
.. tab:: Misaligned IBOR
The ``fixing_frequency`` and ``fixing_series`` allow custom definitions of an IBOR
*FloatPeriod* to be created, such as using a 6M tenor with a 3M period, or using
mixed accrual calendars that do not align with the IBOR definition.
.. ipython:: python
fp = FloatPeriod(
start=dt(2026, 2, 4),
end=dt(2026, 5, 7), # <- Tokyo holidays on 4th, 5th, 6th May
payment=dt(2026, 5, 7),
frequency=Frequency.Months(3, None), # <- or "Q"
fixing_method=FloatFixingMethod.IBOR(2), # <- or "ibor(2)"
calendar="tyo,nyc",
float_spread=5.0,
fixing_series="usd_ibor", # <- or define your own FloatRateSeries
fixing_frequency=Frequency.Months(6, None), # <- or "S"
)
fp.rate_params.rate_fixing
.. tab:: IBOR Stubs
IBOR stub periods can also be created which utilise an
:class:`~rateslib.data.fixings.IBORStubFixing`, for *rate* determination. These
must be identified by the ``stub`` flag.
*IBOR* stubs depend upon the ``tenors`` definition with the ``fixing_series``
(or by ['1W', '1M', '3M', '6M', '12M'] when omitted)
When using these in combinations with ``fixings`` all necessary date timeseries must
be available under the appropriate ``identifier``, e.g. *'STIBOR_1M'* and *'STIBOR_2M'*.
.. ipython:: python
fixings.add("STIBOR_1M", Series(data=[1.0], index=[dt(2026, 2, 2)]))
fixings.add("STIBOR_2M", Series(data=[2.0], index=[dt(2026, 2, 2)]))
fp = FloatPeriod(
start=dt(2026, 2, 4),
end=dt(2026, 3, 12),
payment=dt(2026, 3, 12),
frequency=Frequency.Months(6, None), # <- or "S"
stub=True,
fixing_method=FloatFixingMethod.IBOR(2), # <- or "ibor(2)"
calendar="stk",
float_spread=5.0,
fixing_series=FloatRateSeries(
lag=2, calendar="stk", modifier="MF", convention="act360",
eom=False, tenors=["2D", "1W", "1M", "2M", "3M", "6M"],
),
rate_fixings="STIBOR",
)
fp.rate_params.rate_fixing
fp.rate_params.rate_fixing.value
.. ipython:: python
:suppress:
fixings.pop("STIBOR_1M")
fixings.pop("STIBOR_2M")
""" # noqa: E501
@property
def rate_params(self) -> _FloatRateParams:
"""The :class:`~rateslib.periods.parameters._FloatRateParams` of the *Period*."""
return self._rate_params
@property
def period_params(self) -> _PeriodParams:
return self._period_params
def __init__(
self,
*,
float_spread: DualTypes_ = NoInput(0),
rate_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
fixing_method: FloatFixingMethod | str_ = NoInput(0),
spread_compound_method: SpreadCompoundMethod | str_ = NoInput(0),
fixing_frequency: Frequency | str_ = NoInput(0),
fixing_series: FloatRateSeries | str_ = NoInput(0),
# currency args:
payment: datetime,
notional: DualTypes_ = NoInput(0),
currency: str_ = NoInput(0),
ex_dividend: datetime_ = NoInput(0),
# period params
start: datetime,
end: datetime,
frequency: Frequency | str,
convention: Convention | str_ = NoInput(0),
termination: datetime_ = NoInput(0),
stub: bool = False,
roll: RollDay | int | str_ = NoInput(0),
calendar: CalInput = NoInput(0),
adjuster: Adjuster | str_ = NoInput(0),
# non-deliverable args:
pair: FXIndex | str_ = NoInput(0),
fx_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
delivery: datetime_ = NoInput(0),
# index-args:
index_base: DualTypes_ = NoInput(0),
index_lag: int_ = NoInput(0),
index_method: IndexMethod | str_ = NoInput(0),
index_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
index_only: bool_ = NoInput(0),
index_base_date: datetime_ = NoInput(0),
index_reference_date: datetime_ = NoInput(0),
) -> None:
self._settlement_params = _init_SettlementParams_with_fx_pair(
_currency=_drb(defaults.base_currency, currency).lower(),
_payment=payment,
_notional=_drb(defaults.notional, notional),
_ex_dividend=_drb(payment, ex_dividend),
_fx_pair=_maybe_get_fx_index(pair),
)
self._non_deliverable_params = _init_or_none_NonDeliverableParams(
_currency=self.settlement_params.currency,
_fx_index=pair,
_delivery=_drb(self.settlement_params.payment, delivery),
_fx_fixings=fx_fixings,
)
self._period_params = _PeriodParams(
_start=start,
_end=end,
_frequency=_get_frequency(frequency, roll, calendar),
_calendar=get_calendar(calendar),
_adjuster=NoInput(0) if isinstance(adjuster, NoInput) else _get_adjuster(adjuster),
_stub=stub,
_convention=_get_convention(_drb(defaults.convention, convention)),
_termination=termination,
)
self._index_params = _init_or_none_IndexParams(
_index_base=index_base,
_index_lag=index_lag,
_index_method=index_method,
_index_fixings=index_fixings,
_index_only=index_only,
_index_base_date=index_base_date,
_index_reference_date=_drb(self.period_params.end, index_reference_date),
)
self._rate_params = _init_FloatRateParams(
_float_spread=float_spread,
_spread_compound_method=spread_compound_method,
_fixing_method=fixing_method,
_fixing_series=fixing_series,
_fixing_frequency=fixing_frequency,
_rate_fixings=rate_fixings,
_accrual_start=self.period_params.start,
_accrual_end=self.period_params.end,
_period_calendar=self.period_params.calendar,
_period_convention=self.period_params.convention,
_period_adjuster=self.period_params.adjuster,
_period_frequency=self.period_params.frequency,
_period_stub=self.period_params.stub,
)
if self.rate_params.spread_compound_method in [
SpreadCompoundMethod.ISDACompounding,
SpreadCompoundMethod.ISDAFlatCompounding,
] and type(self.rate_params.fixing_method) in [
FloatFixingMethod.IBOR,
FloatFixingMethod.RFRPaymentDelayAverage,
FloatFixingMethod.RFRLookbackAverage,
FloatFixingMethod.RFRLockoutAverage,
FloatFixingMethod.RFRObservationShiftAverage,
]:
raise ValueError(
f"The input for `spread_compound_method`: "
f"{self.rate_params.spread_compound_method} is not compatible with the "
f"`fixing_method`: {self.rate_params.fixing_method}."
)
def unindexed_reference_cashflow(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
**kwargs: Any,
) -> DualTypes:
r = self.rate(rate_curve)
return -self.settlement_params.notional * r * 0.01 * self.period_params.dcf
def try_unindexed_reference_cashflow_analytic_delta(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
) -> Result[DualTypes]:
"""
Calculate the analytic rate delta of a *Period* expressed in ``reference_currency``
without indexation.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
Returns
-------
float, Dual, Dual2, Variable
"""
if (
self.rate_params.spread_compound_method == SpreadCompoundMethod.NoneSimple
or self.rate_params.float_spread == 0
):
# then analytic_delta is not impacted by float_spread compounding
dr_dz: float = 1.0
else:
_ = self.rate_params.float_spread
self.rate_params.float_spread = Variable(_dual_float(_), ["z_float_spread"])
rate: Result[DualTypes] = self.try_rate(rate_curve)
if rate.is_err:
return rate
dr_dz = gradient(rate.unwrap(), ["z_float_spread"])[0] * 100
self.rate_params.float_spread = _
return Ok(self.settlement_params.notional * 0.0001 * dr_dz * self.period_params.dcf)
def try_unindexed_reference_cashflow_analytic_rate_fixings(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
) -> Result[DataFrame]:
if isinstance(rate_curve, NoInput):
return Err(ValueError(err.VE_NEEDS_RATE_CURVE))
if isinstance(self.rate_params.fixing_method, FloatFixingMethod.IBOR):
return _UnindexedReferenceCashflowFixingsSensitivity._ibor(
self=self, rate_curve=rate_curve
)
else: # is RFR
if isinstance(rate_curve, dict):
rate_curve_: _BaseCurve = _get_rfr_curve_from_dict(rate_curve)
else:
rate_curve_ = rate_curve
return _UnindexedReferenceCashflowFixingsSensitivity._rfr(
self=self, rate_curve=rate_curve_
)
# def try_unindexed_reference_fixings_exposure(
# self,
# rate_curve: CurveOption_ = NoInput(0),
# disc_curve: _BaseCurve_ = NoInput(0),
# right: datetime_ = NoInput(0),
# ) -> Result[DataFrame]:
# if self.rate_params.fixing_method == FloatFixingMethod.IBOR:
# return _FixingsExposureCalculator.ibor(
# p=self,
# rate_curve=rate_curve,
# disc_curve=disc_curve,
# right=right,
# )
# else:
# if isinstance(rate_curve, dict):
# rate_curve_: _BaseCurve_ = _get_rfr_curve_from_dict(rate_curve)
# else:
# rate_curve_ = rate_curve
# return _FixingsExposureCalculator.rfr(
# p=self,
# rate_curve=_validate_obj_not_no_input(rate_curve_, "rate_curve"),
# disc_curve=disc_curve,
# right=right,
# )
def try_rate(self, rate_curve: CurveOption_) -> Result[DualTypes]:
"""
Calculate the period rate, with lazy e
Parameters
----------
rate_curve: XXX
The curve used to forecast rates, if the period has no fixing.
Returns
-------
float, Dual, Dual2, Variable
"""
rate_fixing = self.rate_params.rate_fixing.value
if isinstance(rate_fixing, NoInput):
return try_rate_value(
start=self.rate_params.accrual_start,
end=self.rate_params.accrual_end,
rate_curve=NoInput(0) if rate_curve is None else rate_curve,
rate_fixings=self.rate_params.rate_fixing.identifier,
fixing_method=self.rate_params.fixing_method,
spread_compound_method=self.rate_params.spread_compound_method,
float_spread=self.rate_params.float_spread,
stub=self.period_params.stub,
frequency=self.rate_params.fixing_frequency,
rate_series=self.rate_params.fixing_series,
)
else:
# the fixing value is a scalar so a Curve should not be required for this calculation
return try_rate_value(
start=self.rate_params.accrual_start,
end=self.rate_params.accrual_end,
rate_curve=NoInput(0),
rate_fixings=rate_fixing,
fixing_method=self.rate_params.fixing_method,
spread_compound_method=self.rate_params.spread_compound_method,
float_spread=self.rate_params.float_spread,
stub=self.period_params.stub,
frequency=self.rate_params.fixing_frequency,
rate_series=self.rate_params.fixing_series,
)
def rate(self, rate_curve: CurveOption_) -> DualTypes:
"""
Calculate the period rate.
Parameters
----------
rate_curve: XXX
The curve used to forecast rates, if the period has no fixing.
Returns
-------
float, Dual, Dual2, Variable
"""
return self.try_rate(rate_curve).unwrap()
class ZeroFloatPeriod(_BasePeriodStatic):
r"""
A *Period* defined by compounded floating rate *Periods*.
The expected unindexed reference cashflow under the risk neutral distribution is defined as,
.. math::
\mathbb{E^Q}[\bar{C}_t] = - N \left ( \prod_{i=1}^n \left ( 1 + r_i(\mathbf{C}, R_j, z) d_i \right ) - 1 \right )
For *analytic delta* purposes the :math:`\xi=-z`.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.periods import ZeroFloatPeriod
from rateslib.legs import CustomLeg
from rateslib.scheduling import Schedule
from datetime import datetime as dt
.. ipython:: python
fixings.add("MY_RATE_INDEX_6M", Series(
index=[dt(2000, 1, 1), dt(2000, 7, 1), dt(2001, 1, 1), dt(2001, 7, 1)],
data=[1.0, 2.0, 3.0, 4.0]
))
period = ZeroFloatPeriod(
schedule=Schedule(dt(2000, 1, 1), "2Y", "S"),
fixing_method="IBOR(0)",
rate_fixings="MY_RATE_INDEX",
convention="Act360",
)
period.cashflows()
For more details of the individual compounded periods one can compose a
:class:`~rateslib.legs.CustomLeg` and view the pseudo-cashflows.
.. ipython:: python
CustomLeg(period.float_periods).cashflows()
.. ipython:: python
:suppress:
fixings.pop("MY_RATE_INDEX_6M")
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following parameters are scheduling **period** parameters
schedule: Schedule, :red:`required`
The :class:`~rateslib.scheduling.Schedule` defining the individual *Periods*, including
the *payment* and *ex-dividend* dates.
convention: Convention, str, :green:`optional (set by 'defaults')`
The day count :class:`~rateslib.scheduling.Convention` associated with the *Period*.
.. note::
The following define generalised **settlement** parameters.
currency: str, :green:`optional (set by 'defaults')`
The physical *settlement currency* of the *Period*.
notional: float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The notional amount of the *Period* expressed in ``notional currency``.
.. note::
The following define **floating rate** parameters.
fixing_method: FloatFixingMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.FloatFixingMethod` describing the determination
of the floating rate for the period.
fixing_frequency: Frequency, str, :green:`optional (set by 'frequency' or '1B')`
The :class:`~rateslib.scheduling.Frequency` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given is assumed to match the
frequency of the period for an IBOR type ``fixing_method`` or '1B' if RFR type.
fixing_series: FloatRateSeries, str, :green:`optional (implied by other parameters)`
The :class:`~rateslib.data.fixings.FloatRateSeries` as a component of the
:class:`~rateslib.data.fixings.FloatRateIndex`. If not given inherits attributes given
such as the ``calendar``, ``convention``, ``fixing_method`` etc.
float_spread: float, Dual, Dual2, Variable, :green:`optional (set as 0.0)`
The amount (in bps) added to the rate in the period rate determination. If not given is
set to zero.
spread_compound_method: SpreadCompoundMethod, str, :green:`optional (set by 'defaults')`
The :class:`~rateslib.enums.parameters.SpreadCompoundMethod` used in the calculation
of the period rate when combining a ``float_spread``. Used **only** with RFR type
``fixing_method``.
rate_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of the rate fixing. If a scalar, is used directly. If a string identifier, links
to the central ``fixings`` object and data loader. See :ref:`fixings `.
.. note::
The following parameters define **non-deliverability**. If the *Period* is directly
deliverable do not supply these parameters.
pair: str, :green:`optional`
The currency pair of the :class:`~rateslib.data.fixings.FXFixing` that determines
settlement. The *reference currency* is implied from ``pair``. Must include ``currency``.
fx_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of the :class:`~rateslib.data.fixings.FXFixing`. If a scalar is used directly.
If a string identifier will link to the central ``fixings`` object and data loader.
See :ref:`fixings `.
delivery: datetime, :green:`optional (set as 'payment')`
The settlement delivery date of the :class:`~rateslib.data.fixings.FXFixing`.
.. note::
The following parameters define **indexation**. The *Period* will be considered
indexed if any of ``index_method``, ``index_lag``, ``index_base``, ``index_fixings``
are given.
index_method : IndexMethod, str, :green:`optional (set by 'defaults')`
The interpolation method, or otherwise, to determine index values from reference dates.
index_lag: int, :green:`optional (set by 'defaults')`
The indexation lag, in months, applied to the determination of index values.
index_base: float, Dual, Dual2, Variable, :green:`optional`
The specific value set of the base index value.
If not given and ``index_fixings`` is a str fixings identifier that will be
used to determine the base index value.
index_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The index value for the reference date.
If a scalar value this is used directly. If a string identifier will link to the
central ``fixings`` object and data loader. See :ref:`fixings `.
index_base_date: datetime, :green:`optional (set as aschedule[0])`
The reference date for determining the base index value. Not used if ``_index_base``
value is given directly.
index_reference_date: datetime, :green:`optional (set as aschedule[1])`
The reference date for determining the index value. Not used if ``_index_fixings``
is given as a scalar value.
index_only: bool, :green:`optional (set as False)`
A flag which determines non-payment of notional on supported *Periods*.
.. note::
The following are meta parameters
metric: str, :green:`optional (set as 'compounding')`
The type of calculation to use in the :meth:`~rateslib.periods.ZeroFloatPeriod.rate` method.
""" # noqa: E501
@property
def rate_params(self) -> _FloatRateParams:
"""The :class:`~rateslib.periods.parameters._FixedRateParams` of the *Period*."""
return self.float_periods[0].rate_params
@property
def rate_metric(self) -> str:
"""The type of calculation to perform in :meth:`~rateslib.periods.ZeroFloatPeriod.rate`."""
return self._rate_metric
@property
def period_params(self) -> _PeriodParams:
"""The :class:`~rateslib.periods.parameters._PeriodParams` of the *Period*."""
return self._period_params
@property
def schedule(self) -> Schedule:
"""The :class:`~rateslib.scheduling.Schedule` object for this *Period*."""
return self._schedule
@cached_property
def dcf(self) -> float:
"""An overload for the calculation of the DCF, replacing `period_params.dcf`."""
return sum(
dcf(
start=self.schedule.aschedule[i],
end=self.schedule.aschedule[i + 1],
convention=self.period_params.convention,
termination=self.schedule.aschedule[-1],
frequency=self.schedule.frequency_obj,
stub=self.schedule._stubs[i],
roll=NoInput(0), # taken from Frequency obj
calendar=self.schedule.calendar,
adjuster=self.schedule.modifier,
)
for i in range(self.schedule.n_periods)
)
@property
def float_spread(self) -> DualTypes:
"""The float spread parameter of each :class:`~rateslib.periods.FloatPeriod`."""
return self._float_periods[0].rate_params.float_spread
@float_spread.setter
def float_spread(self, value: DualTypes) -> None:
for period in self._float_periods:
period.rate_params.float_spread = value
@property
def float_periods(self) -> list[FloatPeriod]:
"""
The individual :class:`~rateslib.periods.FloatPeriod` that are
compounded.
"""
return self._float_periods
def __init__(
self,
schedule: Schedule,
*,
float_spread: DualTypes_ = NoInput(0),
rate_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
fixing_method: FloatFixingMethod | str_ = NoInput(0),
spread_compound_method: SpreadCompoundMethod | str_ = NoInput(0),
fixing_frequency: Frequency | str_ = NoInput(0),
fixing_series: FloatRateSeries | str_ = NoInput(0),
# currency args:
notional: DualTypes_ = NoInput(0),
currency: str_ = NoInput(0),
# period params
convention: str_ = NoInput(0),
# non-deliverable args:
pair: FXIndex | str_ = NoInput(0),
fx_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
delivery: datetime_ = NoInput(0),
# index-args:
index_base: DualTypes_ = NoInput(0),
index_lag: int_ = NoInput(0),
index_method: IndexMethod | str_ = NoInput(0),
index_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
index_base_date: datetime_ = NoInput(0),
index_reference_date: datetime_ = NoInput(0),
index_only: bool_ = NoInput(0),
# meta-args:
metric: str_ = NoInput(0),
) -> None:
self._rate_metric: str = _drb("compounding", metric).lower()
self._schedule = schedule
self._settlement_params = _init_SettlementParams_with_fx_pair(
_currency=_drb(defaults.base_currency, currency).lower(),
_payment=self.schedule.pschedule[-1],
_notional=_drb(defaults.notional, notional),
_ex_dividend=self.schedule.pschedule3[-1],
_fx_pair=_maybe_get_fx_index(pair),
)
self._non_deliverable_params = _init_or_none_NonDeliverableParams(
_currency=self.settlement_params.currency,
_fx_index=pair,
_delivery=_drb(self.settlement_params.payment, delivery),
_fx_fixings=fx_fixings,
)
self._period_params = _PeriodParams(
_start=self.schedule.aschedule[0],
_end=self.schedule.aschedule[-1],
_frequency=self.schedule.frequency_obj,
_calendar=self.schedule.calendar,
_adjuster=self.schedule.modifier,
_stub=not self.schedule.frequency_obj.is_uregular(
self.schedule.uschedule[0], self.schedule.uschedule[-1]
),
_convention=_get_convention(_drb(defaults.convention, convention)),
_termination=self.schedule.aschedule[-1],
)
self._index_params = _init_or_none_IndexParams(
_index_base=index_base,
_index_lag=index_lag,
_index_method=index_method,
_index_fixings=index_fixings,
_index_only=index_only,
_index_base_date=_drb(self.schedule.aschedule[0], index_base_date),
_index_reference_date=_drb(self.schedule.aschedule[-1], index_reference_date),
)
rate_fixings_ = _leg_fixings_to_list(rate_fixings, self.schedule.n_periods)
self._float_periods: list[FloatPeriod] = [
FloatPeriod(
float_spread=float_spread,
rate_fixings=rate_fixings_[i],
fixing_method=fixing_method,
spread_compound_method=spread_compound_method,
fixing_frequency=fixing_frequency,
fixing_series=fixing_series,
# currency args:
payment=self.schedule.pschedule[i + 1],
notional=notional,
currency=currency,
ex_dividend=self.schedule.pschedule3[i + 1],
# period params
start=self.schedule.aschedule[i],
end=self.schedule.aschedule[i + 1],
frequency=self.schedule.frequency_obj,
convention=convention,
termination=self.schedule.aschedule[-1],
stub=self.schedule._stubs[i],
roll=NoInput(0), # inferred from frequency obj
calendar=self.schedule.calendar,
adjuster=self.schedule.modifier,
# Each individual period is not genuine Period, only psuedo periods to derive the
# cashflow calculation so no 'non-deliverable' or 'index' params are required.
)
for i in range(self.schedule.n_periods)
]
def try_rate(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
**kwargs: Any,
) -> Result[DualTypes]:
try:
r_i = [period.rate(rate_curve=rate_curve) for period in self.float_periods]
d_i = [period.period_params.dcf for period in self.float_periods]
except Exception as e:
return Err(e)
if self.rate_metric == "compounding":
f = self.schedule.periods_per_annum
r = np.prod(1.0 + np.array(r_i) * np.array(d_i) / 100.0)
r = r ** (1.0 / (self.dcf * f))
r = (r - 1) * f * 100.0
elif self.rate_metric == "simple":
r = np.prod(1.0 + np.array(r_i) * np.array(d_i) / 100.0)
r = (r - 1.0) * 100.0 / self.dcf
else:
return Err(ValueError("`rate_metric` must be 'simple' or 'compounding'."))
return Ok(r)
def rate(self, *, rate_curve: CurveOption_ = NoInput(0)) -> DualTypes:
r"""Calculate a single *rate* representation for the *Period's* cashflow.
The *rate* is determined from the compounded *Period* rates according to:
.. math::
\left ( 1 + \frac{r}{f} \right )^{df} = \prod_{i=1}^n \left ( 1 + r_i(\mathbf{C}, R_j, z) d_i \right )
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
Returns
-------
float, Dual, Dual2 or Variable
""" # noqa: E501
return self.try_rate(rate_curve=rate_curve).unwrap()
def unindexed_reference_cashflow(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
**kwargs: Any,
) -> DualTypes:
# determine each rate from individual Periods
r_i = [period.rate(rate_curve=rate_curve) for period in self.float_periods]
d_i = [period.period_params.dcf for period in self.float_periods]
r: DualTypes = np.prod(1.0 + np.array(r_i) * np.array(d_i) / 100.0) - 1.0
return -self.settlement_params.notional * r
def try_unindexed_reference_cashflow_analytic_delta(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
) -> Result[DualTypes]:
try:
r_i = [period.rate(rate_curve=rate_curve) for period in self._float_periods]
d_i = [period.period_params.dcf for period in self._float_periods]
a_i = [
period.try_unindexed_reference_cashflow_analytic_delta(
rate_curve=rate_curve, disc_curve=disc_curve
).unwrap()
for period in self._float_periods
]
except Exception as e:
return Err(e)
lhs = np.prod(1.0 + np.array(r_i) * np.array(d_i) / 100.0)
rhs = np.sum([a / (1 + r * d / 100.0) for (a, d, r) in zip(a_i, d_i, r_i, strict=False)])
return Ok(lhs * rhs)
def try_unindexed_reference_cashflow_analytic_rate_fixings(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
) -> Result[DataFrame]:
try:
r_i = [period.rate(rate_curve=rate_curve) for period in self.float_periods]
d_i = [period.period_params.dcf for period in self.float_periods]
dfs_i = [
period.try_unindexed_reference_cashflow_analytic_rate_fixings(
rate_curve=rate_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
index_curve=index_curve,
).unwrap()
for period in self.float_periods
]
except Exception as e:
return Err(e)
scalar = np.prod(1.0 + np.array(r_i) * np.array(d_i) / 100.0)
dfs = [
df * scalar / (1 + d * r / 100.0) for (df, d, r) in zip(dfs_i, d_i, r_i, strict=False)
]
return Ok(concat(dfs))
def cashflows(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> dict[str, Any]:
d = super().cashflows(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
settlement=settlement,
forward=forward,
base=base,
)
d[defaults.headers["dcf"]] = self.dcf # reinsert the overload
return d
def _get_ibor_curve_from_dict(fixing_frequency: Frequency, d: dict[str, _BaseCurve]) -> _BaseCurve:
remapped = {k.upper(): v for k, v in d.items()}
try:
freq_str = _get_tenor_from_frequency(fixing_frequency)
return remapped[freq_str]
except KeyError:
raise ValueError(
"If supplying `rate_curve` as dict must provide a tenor mapping key and curve for"
f"the frequency of the given Period. The missing mapping is '{freq_str}'."
)
def _get_ibor_curve_from_dict2(fixing_frequency: str, d: dict[str, _BaseCurve]) -> _BaseCurve:
remapped = {k.upper(): v for k, v in d.items()}
try:
return remapped[fixing_frequency.upper()]
except KeyError:
raise ValueError(
"If supplying `rate_curve` as dict must provide a tenor mapping key and curve for"
f"the frequency of the given Period. The missing mapping is '{fixing_frequency}'."
)
class _UnindexedReferenceCashflowFixingsSensitivity:
@staticmethod
def _ibor(
self: FloatPeriod, rate_curve: _BaseCurve | dict[str, _BaseCurve]
) -> Result[DataFrame]:
if self.period_params.stub:
if isinstance(rate_curve, dict):
rate_curve_: dict[str, _BaseCurve] = rate_curve
else:
rate_curve_ = {
_get_tenor_from_frequency(self.rate_params.fixing_frequency): rate_curve
}
return _UnindexedReferenceCashflowFixingsSensitivity._ibor_stub(
self=self,
rate_curve=rate_curve_,
frequency_str=_get_tenor_from_frequency(self.rate_params.fixing_frequency),
)
else:
if isinstance(rate_curve, dict):
rate_curve__: _BaseCurve = _get_ibor_curve_from_dict(
self.rate_params.fixing_frequency, rate_curve
)
else:
rate_curve__ = rate_curve
return _UnindexedReferenceCashflowFixingsSensitivity._ibor_regular(
self=self,
rate_curve=rate_curve__,
frequency_str=_get_tenor_from_frequency(self.rate_params.fixing_frequency),
)
@staticmethod
def _ibor_regular(
self: FloatPeriod,
rate_curve: _BaseCurve,
frequency_str: str,
) -> Result[DataFrame]:
return Ok(
DataFrame(
index=Index(data=[self.rate_params.rate_fixing.date], name="obs_dates"),
data=[
-self.settlement_params.notional * self.period_params.dcf * 0.0001
if isinstance(self.rate_params.rate_fixing.value, NoInput)
else 0.0
],
columns=MultiIndex.from_tuples(
[
(
rate_curve.id,
self.settlement_params.currency,
self.settlement_params.notional_currency,
frequency_str,
)
],
names=["identifier", "local_ccy", "display_ccy", "frequency"],
),
)
)
@staticmethod
def _ibor_stub(
self: FloatPeriod,
rate_curve: dict[str, _BaseCurve],
frequency_str: str,
) -> Result[DataFrame]:
# get consistent curves for the tenors of the stub fixings
tenors, ends = _find_neighbouring_tenors(
end=self.rate_params.rate_fixing.accrual_end,
start=self.rate_params.rate_fixing.accrual_start,
tenors=[_ for _ in rate_curve if _.upper() != "RFR"],
rate_series=self.rate_params.rate_fixing.series, # type: ignore[union-attr]
)
rate_curve_1: _BaseCurve = _get_ibor_curve_from_dict2(tenors[0], rate_curve)
df1_res = _UnindexedReferenceCashflowFixingsSensitivity._ibor_regular(
self=self,
rate_curve=rate_curve_1,
frequency_str=tenors[0],
)
if len(tenors) == 1 or tenors[0] == tenors[1]:
return df1_res # then no multiple curves for the stub
else:
rate_curve_2: _BaseCurve = _get_ibor_curve_from_dict2(tenors[1], rate_curve)
df2_res = _UnindexedReferenceCashflowFixingsSensitivity._ibor_regular(
self=self,
rate_curve=rate_curve_2,
frequency_str=tenors[1],
)
alpha = (ends[1] - self.period_params.end) / (ends[1] - ends[0])
return Ok(
merge(
left=df1_res.unwrap() * alpha,
right=df2_res.unwrap() * (1 - alpha),
left_index=True,
right_index=True,
)
)
@staticmethod
def _rfr(
self: FloatPeriod,
rate_curve: _BaseCurve,
) -> Result[DataFrame]:
rf: RFRFixing = self.rate_params.rate_fixing # type: ignore[assignment]
if isinstance(rf.value, NoInput):
# then some sensitivity still exists
drdr = _UnindexedReferenceCashflowFixingsSensitivity._rfr_drdr_approximation(
self=self,
rate_curve=rate_curve,
)
else:
# all sensitivity is zero
drdr = np.array([0.0 for _ in range(len(rf.dates_obs) - 1)])
temp = Series(
index=rf.dates_obs[:-1],
data=-self.settlement_params.notional * self.period_params.dcf * 0.0001 * drdr,
)
temp[rf.populated.index] = 0.0
df1 = DataFrame(
index=Index(data=rf.dates_obs[:-1], name="obs_dates"),
data=temp.to_list(),
columns=MultiIndex.from_tuples(
[
(
rate_curve.id,
self.settlement_params.currency,
self.settlement_params.notional_currency,
"1B",
)
],
names=["identifier", "local_ccy", "display_ccy", "frequency"],
),
)
return Ok(df1)
@staticmethod
def _rfr_drdr_approximation(
self: FloatPeriod,
rate_curve: _BaseCurve,
) -> Arr1dObj:
"""
Determine the value :math:`\frac{\\partial r(r_i, z)}{\\partial r_j}` for rate
fixing sensitivity.
For NoneSimple spread compounding this formula is exact, which covers most cases.
For ISDAFlatCompounding this is approximated as the NoneSimple case so is an
approximation.
For ISDACompounding the geometric 1-day average rate is used as a component in the formula
meaning the result is approximate.
These values do **not** distinguish between published and unpublished fixings. This should
be adjusted post.
Returns
-------
ndarray
"""
rf: RFRFixing = self.rate_params.rate_fixing # type: ignore[assignment]
d_hat_i = rf.dcfs_dcf
z = self.rate_params.float_spread
fixing_method = self.rate_params.fixing_method
spread_compound_method = self.rate_params.spread_compound_method
method_param = self.rate_params.fixing_method.method_param()
# approximate sensitivity to each fixing
z = z / 100.0
d = d_hat_i.sum()
if type(fixing_method) in [
FloatFixingMethod.RFRLockoutAverage,
FloatFixingMethod.RFRObservationShiftAverage,
FloatFixingMethod.RFRLookbackAverage,
FloatFixingMethod.RFRPaymentDelayAverage,
]:
drdri: Arr1dObj = d_hat_i / d
else:
unpopulated = rf.unpopulated
populated = rf.populated
r_i = Series(index=rf.dates_obs[:-1], data=np.nan, dtype=object)
r_i.update(populated) # type: ignore[arg-type]
# determine the rate for the period, from the curve if necessary
if unpopulated.index[0] < rate_curve.nodes.initial:
raise ValueError(err.VE_BEFORE_INITIAL)
_RFRRate._forecast_fixing_rates_from_curve(
unpopulated=unpopulated,
populated=populated,
fixing_rates=r_i, # type: ignore[arg-type]
rate_curve=rate_curve,
dates_obs=rf.dates_obs,
dcfs_obs=rf.dcfs_obs,
)
r_star = _RFRRate._inefficient_calculation(
fixing_rates=r_i,
fixing_dcfs=d_hat_i,
fixing_method=fixing_method,
spread_compound_method=spread_compound_method,
float_spread=self.rate_params.float_spread,
).unwrap()
if spread_compound_method == SpreadCompoundMethod.ISDACompounding:
# this makes a number of approximations including reversing a compounded spread
# with a simple formula
r_bar, d_bar, n = average_rate(
effective=self.period_params.start,
termination=self.period_params.end,
convention=self.rate_params.fixing_series.convention,
rate=r_star - z,
dcf=d,
)
drdri = d_hat_i / (1 + d_hat_i * (r_bar + z) / 100.0) * (1 + r_star / 100.0 * d) / d # type: ignore[operator]
# elif spread_compound_method == SpreadCompoundMethod.ISDAFlatCompounding:
# r_star = ((1 + d_bar * (r_bar + z) / 100.0) ** n - 1) * 100.0 / (n * d_bar)
# drdri1 = di / (1 + di * (r_bar + z) / 100.0) * ((r_star / 100.0 * d) + 1) / d
# drdri2 = di / (1 + di * r_bar / 100.0) * ((r_star0 / 100.0 * d) + 1) / d
# drdri = (drdri1 + drdri2) / 2.0
else: # spread_compound_method == SpreadCompoundMethod.NoneSimple:
r_i_ = r_i.to_numpy()
drdri = d_hat_i / (1 + d_hat_i * r_i_ / 100.0) * ((r_star - z) / 100.0 * d + 1) / d
if type(fixing_method) in [
FloatFixingMethod.RFRLockoutAverage,
FloatFixingMethod.RFRLockout,
]:
for i in range(method_param):
drdri[-(method_param + 1)] += drdri[-(i + 1)]
drdri[-(i + 1)] = 0.0
return drdri
================================================
FILE: python/rateslib/periods/float_rate.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from pandas import Series
from rateslib import NoInput
from rateslib.data.fixings import _get_float_rate_series_or_blank, _IBORRate, _RFRRate
from rateslib.enums.generics import Err, Ok, _drb
from rateslib.enums.parameters import (
FloatFixingMethod,
SpreadCompoundMethod,
_get_float_fixing_method,
_get_spread_compound_method,
)
from rateslib.periods.utils import _get_rfr_curve_from_dict
from rateslib.scheduling.frequency import _get_frequency
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CurveOption_,
DualTypes,
DualTypes_,
FloatRateSeries,
Frequency,
Result,
Series,
_BaseCurve_,
datetime,
str_,
)
def rate_value(
start: datetime,
end: datetime,
rate_curve: CurveOption_ = NoInput(0),
*,
rate_fixings: DualTypes_ | str = NoInput(0),
frequency: Frequency | str_ = NoInput(0),
rate_series: FloatRateSeries | str_ = NoInput(0),
fixing_method: FloatFixingMethod | str = FloatFixingMethod.RFRPaymentDelay(),
spread_compound_method: SpreadCompoundMethod | str = SpreadCompoundMethod.NoneSimple,
float_spread: DualTypes = 0.0,
stub: bool = False,
) -> DualTypes:
return try_rate_value(
start=start,
end=end,
rate_curve=rate_curve,
rate_series=rate_series,
frequency=frequency,
rate_fixings=rate_fixings,
fixing_method=fixing_method,
spread_compound_method=spread_compound_method,
float_spread=float_spread,
stub=stub,
).unwrap()
def try_rate_value(
start: datetime,
end: datetime,
rate_curve: CurveOption_ = NoInput(0),
*,
rate_series: FloatRateSeries | str_ = NoInput(0),
frequency: Frequency | str_ = NoInput(0),
rate_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
fixing_method: FloatFixingMethod | str = FloatFixingMethod.RFRPaymentDelay(),
spread_compound_method: SpreadCompoundMethod | str = SpreadCompoundMethod.NoneSimple,
float_spread: DualTypes = 0.0,
stub: bool = False,
) -> Result[DualTypes]:
"""
Derive a floating rate value from a combination of market inputs.
"""
fm = _get_float_fixing_method(fixing_method)
scm = _get_spread_compound_method(spread_compound_method)
rs = _get_float_rate_series_or_blank(rate_series)
if type(fm) is FloatFixingMethod.IBOR:
return _IBORRate._rate(
start=start,
end=end,
rate_curve=rate_curve,
rate_fixings=rate_fixings,
float_spread=_drb(0.0, float_spread),
lag=fm.method_param(),
stub=stub,
rate_series=rs,
frequency=_get_frequency(frequency, NoInput(0), NoInput(0)),
)
else: # RFR based
if isinstance(rate_curve, dict):
rate_curve_: _BaseCurve_ = _get_rfr_curve_from_dict(rate_curve)
else:
rate_curve_ = rate_curve
r_result = _RFRRate._rate(
start=start,
end=end,
rate_curve=rate_curve_,
rate_fixings=rate_fixings,
fixing_method=fm,
spread_compound_method=scm,
float_spread=float_spread,
rate_series=rs,
)
if isinstance(r_result, Err):
return r_result
else:
return Ok(r_result.unwrap()[0])
================================================
FILE: python/rateslib/periods/fx_volatility.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from abc import ABCMeta, abstractmethod
from datetime import timezone
from typing import TYPE_CHECKING
import numpy as np
import rateslib.errors as err
from rateslib import defaults
from rateslib.curves._parsers import _validate_obj_not_no_input
from rateslib.data.fixings import _get_fx_index
from rateslib.dual import dual_exp, dual_log, dual_norm_cdf, dual_norm_pdf, newton_1dim
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, Ok, Result, _drb
from rateslib.enums.parameters import (
FXDeltaMethod,
FXOptionMetric,
OptionType,
_get_fx_delta_type,
_get_fx_option_metric,
)
from rateslib.fx import FXForwards
from rateslib.periods.parameters import (
_FXOptionParams,
_IndexParams,
_NonDeliverableParams,
_SettlementParams,
)
from rateslib.periods.protocols import _BasePeriodStatic, _WithAnalyticFXOptionGreeks
from rateslib.periods.utils import (
_get_fx_vol_value_maybe_from_obj,
_get_vol_delta_type,
_get_vol_smile_or_raise,
_get_vol_smile_or_value,
_validate_fx_as_forwards,
)
from rateslib.volatility import (
FXDeltaVolSmile,
FXDeltaVolSurface,
FXSabrSmile,
FXSabrSurface,
)
from rateslib.volatility.fx import FXVolObj
from rateslib.volatility.fx.delta_vol import (
_moneyness_from_atm_delta_one_dimensional,
_moneyness_from_atm_delta_two_dimensional,
_moneyness_from_delta_one_dimensional,
_moneyness_from_delta_two_dimensional,
)
from rateslib.volatility.fx.utils import (
_delta_type_constants,
_moneyness_from_atm_delta_closed_form,
_moneyness_from_delta_closed_form,
)
from rateslib.volatility.utils import (
_OptionModelBlack76,
_surface_index_left,
)
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
Arr1dF64,
DualTypes,
DualTypes_,
FXForwards_,
FXIndex,
Number,
Series,
_BaseCurve,
_BaseCurve_,
_FXVolOption,
_FXVolOption_,
datetime,
datetime_,
str_,
)
UTC = timezone.utc
class _BaseFXOptionPeriod(_BasePeriodStatic, _WithAnalyticFXOptionGreeks, metaclass=ABCMeta):
r"""
Abstract base class for *FXOptionPeriods* types.
**See Also**: :class:`~rateslib.periods.FXCallPeriod`,
:class:`~rateslib.periods.FXPutPeriod`
"""
def analytic_greeks(
self,
rate_curve: _BaseCurve,
disc_curve: _BaseCurve,
fx: FXForwards,
fx_vol: _FXVolOption_ = NoInput(0),
premium: DualTypes_ = NoInput(0), # expressed in the payment currency
premium_payment: datetime_ = NoInput(0),
) -> dict[str, Any]:
return super()._base_analytic_greeks(
rate_curve=rate_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
premium=premium,
premium_payment=premium_payment,
)
@property
def period_params(self) -> None:
"""This *Period* type has no
:class:`~rateslib.periods.parameters._PeriodParams`."""
return self._period_params
@property
def settlement_params(self) -> _SettlementParams:
"""The :class:`~rateslib.periods.parameters._SettlementParams`
of the *Period*."""
return self._settlement_params
@property
def index_params(self) -> _IndexParams | None:
"""The :class:`~rateslib.periods.parameters._IndexParams` of
the *Period*, if any."""
return self._index_params
@property
def non_deliverable_params(self) -> _NonDeliverableParams | None:
"""The :class:`~rateslib.periods.parameters._NonDeliverableParams` of the
*Period*., if any."""
return self._non_deliverable_params
@property
def rate_params(self) -> None:
"""This *Period* type has no rate parameters."""
return self._rate_params
@property
def fx_option_params(self) -> _FXOptionParams:
"""The :class:`~rateslib.periods.parameters._FXOptionParams` of the
*Period*."""
return self._fx_option_params
@abstractmethod
def __init__(
self,
*,
# option params:
direction: OptionType,
delivery: datetime, # otherwise termed the 'payment' of the period
pair: FXIndex | str,
expiry: datetime,
strike: DualTypes_ = NoInput(0),
notional: DualTypes_ = NoInput(0),
delta_type: FXDeltaMethod | str_ = NoInput(0),
metric: FXOptionMetric | str_ = NoInput(0),
option_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
# currency args:
ex_dividend: datetime_ = NoInput(0),
# # non-deliverable args:
# nd_pair: str_ = NoInput(0),
# fx_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
# # index-args:
# index_base: DualTypes_ = NoInput(0),
# index_lag: int_ = NoInput(0),
# index_method: IndexMethod | str_ = NoInput(0),
# index_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), #type: ignore[type-var]
# index_only: bool_ = NoInput(0),
# index_base_date: datetime_ = NoInput(0),
# index_reference_date: datetime_ = NoInput(0),
) -> None:
# self._index_params = _init_or_none_IndexParams(
# _index_base=index_base,
# _index_lag=index_lag,
# _index_method=index_method,
# _index_fixings=index_fixings,
# _index_only=index_only,
# _index_base_date=index_base_date,
# _index_reference_date=_drb(delivery, index_reference_date),
# )
self._index_params = None
self._fx_option_params = _FXOptionParams(
_direction=direction,
_expiry=expiry,
_delivery=delivery,
_delta_type=_get_fx_delta_type(_drb(defaults.fx_delta_type, delta_type)),
_fx_index=_get_fx_index(pair),
_strike=strike,
_metric=_drb(defaults.fx_option_metric, metric),
_option_fixings=option_fixings,
)
self._rate_params = None
self._period_params = None
nd_pair = NoInput(0)
if isinstance(nd_pair, NoInput):
# then option is directly deliverable
self._non_deliverable_params: _NonDeliverableParams | None = None
self._settlement_params = _SettlementParams(
_notional=_drb(defaults.notional, notional),
_payment=delivery,
_currency=self.fx_option_params.fx_index.pair[3:],
_notional_currency=self.fx_option_params.fx_index.pair[:3],
_ex_dividend=ex_dividend,
)
else:
pass
# fx_ccy1, fx_ccy2 = self.fx_option_params.pair[:3], self.fx_option_params.pair[3:]
# nd_ccy1, nd_ccy2 = nd_pair.lower()[:3], nd_pair.lower()[3:]
#
# if nd_ccy1 != fx_ccy1 and nd_ccy1 != fx_ccy2:
# raise ValueError(
# err.VE_MISMATCHED_FX_PAIR_ND_PAIR.format(nd_ccy1, self.fx_option_params.pair)
# )
# elif nd_ccy2 != fx_ccy1 and nd_ccy2 != fx_ccy2:
# raise ValueError(
# err.VE_MISMATCHED_FX_PAIR_ND_PAIR.format(nd_ccy2, self.fx_option_params.pair)
# )
#
# self._non_deliverable_params = _NonDeliverableParams(
# _currency=fx_ccy1,
# _pair=nd_pair,
# _delivery=delivery,
# _fx_fixings=fx_fixings,
# )
# self._settlement_params = _SettlementParams(
# _notional=_drb(defaults.notional, notional),
# _payment=delivery,
# _currency=fx_ccy1,
# _notional_currency=fx_ccy1,
# _ex_dividend=ex_dividend,
# )
def __repr__(self) -> str:
return f""
def unindexed_reference_cashflow( # type: ignore[override]
self,
*,
rate_curve: _BaseCurve_ = NoInput(0), # w(.) variety
disc_curve: _BaseCurve_ = NoInput(0), # v(.) variety
# index_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
**kwargs: Any,
) -> DualTypes:
# The unindexed_reference_cashflow does not require a discount curve.
# A curve may only be required to determine an evaluation date, which in turn is used to
# derive 'time_to_expiry'. The cashflow is expressed in reference currency on the delivery
# date of the FX forward, i.e. the 'forward FX date'.
if isinstance(self.fx_option_params.strike, NoInput):
raise ValueError(err.VE_NEEDS_STRIKE)
k = self.fx_option_params.strike
if not isinstance(self.fx_option_params.option_fixing.value, NoInput):
# then the cashflow amount is defined by a known fixing
fix: DualTypes = self.fx_option_params.option_fixing.value
phi: OptionType = self.fx_option_params.direction
if phi == OptionType.Call and k < fix:
return (fix - k) * self.settlement_params.notional
elif phi == OptionType.Put and k > fix:
return (k - fix) * self.settlement_params.notional
else:
return 0.0
else:
# value is expressed in reference currency (i.e. pair[3:])
fx_ = _validate_fx_as_forwards(fx)
vol_ = _get_fx_vol_value_maybe_from_obj(
fx_vol=fx_vol,
fx=fx_,
rate_curve=rate_curve,
strike=k,
pair=self.fx_option_params.pair,
delivery=self.fx_option_params.delivery,
expiry=self.fx_option_params.expiry,
)
# Get time to expiry from some object
if not isinstance(disc_curve, NoInput):
t_e = self.fx_option_params.time_to_expiry(disc_curve.nodes.initial)
elif isinstance(fx_vol, FXVolObj):
t_e = self.fx_option_params.time_to_expiry(fx_vol.meta.eval_date)
elif not isinstance(rate_curve, NoInput):
t_e = self.fx_option_params.time_to_expiry(rate_curve.nodes.initial)
else:
raise ValueError(
"Object required to define evaluation date and time to expiry.\n"
"Use one of `disc_curve`, `fx_vol`, or `rate_curve`."
)
expected = _OptionModelBlack76._value(
F=fx_.rate(self.fx_option_params.pair, self.fx_option_params.delivery),
K=k,
rate_shift=0.0,
t_e=t_e,
v2=1.0, # disc_curve_[delivery] / disc_curve_[payment],
vol=vol_ / 100.0,
phi=self.fx_option_params.direction.value, # controls calls or put price
)
return expected * self.settlement_params.notional
def try_rate(
self,
rate_curve: _BaseCurve,
disc_curve: _BaseCurve,
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
metric: FXOptionMetric | str_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> Result[DualTypes]:
"""
Return the pricing metric of the *FXOption*, with lazy error handling.
See :meth:`~rateslib.periods._BaseFXOptionPeriod.rate`.
"""
if not isinstance(metric, NoInput):
metric_ = _get_fx_option_metric(metric)
else: # use metric associated with self
metric_ = self.fx_option_params.metric
cash_res = self.try_unindexed_reference_cashflow(
rate_curve=rate_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
)
if cash_res.is_err:
return cash_res
cash: DualTypes = cash_res.unwrap()
if metric_ == FXOptionMetric.Pips:
points_premium = cash / self.settlement_params.notional
if isinstance(forward, NoInput):
return Ok(points_premium * 10000.0)
else:
return Ok(
points_premium
* 10000.0
* disc_curve[self.settlement_params.payment]
/ disc_curve[forward]
)
else: # metric_ == FXOptionMetric.Percent:
fx_ = _validate_fx_as_forwards(fx)
currency_premium = cash / fx_.rate(
self.fx_option_params.pair, self.settlement_params.payment
)
if isinstance(forward, NoInput):
return Ok(currency_premium / self.settlement_params.notional * 100)
else:
currency_premium *= rate_curve[self.settlement_params.payment] / rate_curve[forward]
return Ok(currency_premium / self.settlement_params.notional * 100)
def rate(
self,
*,
rate_curve: _BaseCurve,
disc_curve: _BaseCurve,
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
metric: str_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
"""
Return the pricing metric of the *FXOption*.
This is priced according to the ``payment`` date of the *OptionPeriod*.
Parameters
----------
rate_curve: Curve
The discount *Curve* for the LHS currency. (Not used).
disc_curve: Curve
The discount *Curve* for the RHS currency.
fx: float, FXRates, FXForwards, optional
The object to project the currency pair FX rate at delivery.
base: str, optional
Not used by `rate`.
fx_vol: float, Dual, Dual2
The percentage log-normal volatility to price the option.
metric: str in {"pips", "percent"}
The metric to return. If "pips" assumes the premium is in foreign (rhs)
currency. If "percent", the premium is assumed to be domestic (lhs).
forward: datetime, optional (set as payment date of option)
The date to project the cashflow value to using the ``disc_curve`` if RHS ("pips") or
using ``rate_curve`` if LHS ("percent").
Returns
-------
float, Dual, Dual2 or dict of such.
"""
return self.try_rate(
rate_curve=rate_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
metric=metric,
forward=forward,
).unwrap()
def implied_vol(
self,
rate_curve: _BaseCurve,
disc_curve: _BaseCurve,
fx: FXForwards,
premium: DualTypes,
metric: FXOptionMetric | str_ = NoInput(0),
) -> Number:
"""
Calculate the implied volatility of the FX option.
Parameters
----------
rate_curve: Curve
Not used by `implied_vol`.
disc_curve: Curve
The discount *Curve* for the RHS currency.
fx: FXForwards
The object to project the currency pair FX rate at delivery.
premium: float, Dual, Dual2
The premium value of the option paid at the appropriate payment date. Expressed
either in *'pips'* or *'percent'* of notional. Must align with ``metric``.
metric: str in {"pips", "percent"}, optional
The manner in which the premium is expressed.
Returns
-------
float, Dual or Dual2
"""
if isinstance(self.fx_option_params.strike, NoInput):
raise ValueError(err.VE_NEEDS_STRIKE)
k = self.fx_option_params.strike
phi = self.fx_option_params.direction
metric_ = _get_fx_option_metric(_drb(self.fx_option_params.metric, metric))
# This function uses newton_1d and is AD safe.
# convert the premium to a standardised immediate pips value.
if metric_ == FXOptionMetric.Percent:
# convert premium to pips form
premium = (
premium
* fx.rate(self.fx_option_params.pair, self.settlement_params.payment)
* 100.0
)
# convert to immediate pips form
imm_premium = premium * disc_curve[self.settlement_params.payment]
t_e = self.fx_option_params.time_to_expiry(disc_curve.nodes.initial)
v2 = disc_curve[self.fx_option_params.delivery]
f_d = fx.rate(self.fx_option_params.pair, self.fx_option_params.delivery)
def root(
vol: DualTypes, f_d: DualTypes, k: DualTypes, t_e: float, v2: DualTypes, phi: float
) -> tuple[DualTypes, DualTypes]:
f0 = _OptionModelBlack76._value(f_d, k, 0.0, t_e, v2, vol, phi) * 10000.0 - imm_premium
sqrt_t = t_e**0.5
d_plus = _OptionModelBlack76._d_plus_min_u(k / f_d, vol * sqrt_t, 0.5)
f1 = v2 * dual_norm_pdf(phi * d_plus) * f_d * sqrt_t * 10000.0
return f0, f1
result = newton_1dim(root, 0.10, args=(f_d, k, t_e, v2, phi))
_: Number = result["g"] * 100.0
return _
# Volatility determinations
def _index_vol_and_strike_from_atm(
self,
delta_type: FXDeltaMethod,
vol: _FXVolOption,
w_deli: DualTypes,
w_spot: DualTypes,
f: DualTypes | FXForwards,
t_e: DualTypes,
) -> tuple[DualTypes | None, DualTypes, DualTypes]:
"""
This function returns strike and vol, where available, a delta index for an option period
defined by ATM delta.
Parameters
----------
delta_type: FXDeltaMethod
The delta type of the option period.
vol: DualTypes | Smile | Surface
The volatility used, either specifici value or a Smile/Surface.
w_deli: DualTypes
The relevant discount factor at delivery.
w_spot: DualTypes
The relevant discount factor at spot.
f: DualTypes, FXForwards
The forward FX rate for delivery. FXForwards is used when a SabrSurface is present.
t_e: DualTypes
The time to expiry
Returns
-------
(delta_index, vol, strike)
"""
# TODO this method branches depending upon eta0 and eta1, but depending upon the
# type of vol these maybe automatically set equal to each other. Refactoring this would
# make eliminate repeated type checking for the vol argument.
vol_delta_type = _get_vol_delta_type(vol, delta_type)
z_w = w_deli / w_spot
eta_0, z_w_0, _ = _delta_type_constants(delta_type, z_w, 0.0) # u: unused
eta_1, z_w_1, _ = _delta_type_constants(vol_delta_type, z_w, 0.0) # u: unused
if isinstance(vol, FXSabrSmile | FXSabrSurface):
return self._index_vol_and_strike_from_atm_sabr(f, eta_0, vol)
else: # DualTypes | FXDeltaVolSmile | FXDeltaVolSurface
f_: DualTypes = f # type: ignore[assignment]
vol_: DualTypes | FXDeltaVolSmile | FXDeltaVolSurface = vol
return self._index_vol_and_strike_from_atm_dv(
f_,
eta_0,
eta_1,
z_w_0,
z_w_1,
vol_,
t_e,
delta_type,
vol_delta_type,
z_w,
)
def _index_vol_and_strike_from_atm_sabr(
self,
f: DualTypes | FXForwards,
eta_0: float,
vol: FXSabrSmile | FXSabrSurface,
) -> tuple[DualTypes | None, DualTypes, DualTypes]:
"""Get vol and strike from ATM delta specification under a SABR model."""
t_e = (self.fx_option_params.expiry - vol.meta.eval_date).days / 365.0
if isinstance(f, FXForwards):
f_d: DualTypes = f.rate(self.fx_option_params.pair, self.fx_option_params.delivery)
# _ad = _set_ad_order_objects([0], [f]) # GH755
else:
f_d = f # type: ignore[assignment]
def root1d(
k: DualTypes, f_d: DualTypes, fx: DualTypes | FXForwards, as_float: bool
) -> tuple[DualTypes, DualTypes]:
# if not as_float and isinstance(fx, FXForwards):
# _set_ad_order_objects(_ad, [fx])
dsigma_dk: Number
sigma, dsigma_dk = vol._d_sabr_d_k_or_f( # type: ignore[assignment]
k=k, f=fx, expiry=self.fx_option_params.expiry, as_float=as_float, derivative=1
)
f0 = -dual_log(k / f_d) + eta_0 * sigma**2 * t_e
f1 = -1 / k + eta_0 * 2 * sigma * dsigma_dk * t_e
return f0, f1
if isinstance(vol, FXSabrSmile):
alpha = vol.nodes.alpha
else: # FXSabrSurface
vol_: FXSabrSurface = vol
expiry_posix = self.fx_option_params.expiry.replace(tzinfo=UTC).timestamp()
e_idx, _ = _surface_index_left(vol_.meta.expiries_posix, expiry_posix)
alpha = vol_.smiles[e_idx].nodes.alpha
root_solver = newton_1dim(
root1d,
f_d * dual_exp(eta_0 * alpha**2 * t_e),
args=(f_d, f),
pre_args=(True,), # solve `as_float` in iterations
final_args=(False,), # capture AD in final iterations
raise_on_fail=True,
)
k: DualTypes = root_solver["g"]
v_ = vol.get_from_strike(k, f, self.fx_option_params.expiry)[1]
return None, v_, k
def _index_vol_and_strike_from_atm_dv( # DeltaVol type models
self,
f: DualTypes,
eta_0: float,
eta_1: float,
z_w_0: DualTypes,
z_w_1: DualTypes,
vol: DualTypes | FXDeltaVolSmile | FXDeltaVolSurface,
t_e: DualTypes,
delta_type: FXDeltaMethod,
vol_delta_type: FXDeltaMethod,
z_w: DualTypes,
) -> tuple[DualTypes | None, DualTypes, DualTypes]:
"""Determine strike from ATM delta specification with DeltaVol models or fixed volatility"""
if eta_0 == 0.5: # then delta type is unadjusted
if eta_1 == 0.5: # then smile delta type matches: closed form eqn available
if isinstance(vol, FXDeltaVolSmile | FXDeltaVolSurface):
d_i: DualTypes = z_w_1 / 2.0
vol_value: DualTypes = _get_vol_smile_or_raise(
vol, self.fx_option_params.expiry
)[d_i]
delta_idx: DualTypes | None = d_i
else:
vol_value = _validate_obj_not_no_input(vol, "vol") # type: ignore[assignment]
delta_idx = None
u = _moneyness_from_atm_delta_closed_form(vol_value, t_e)
return delta_idx, vol_value, u * f
else: # then smile delta type unmatched: 2-d solver required
delta: DualTypes = z_w_0 * self.fx_option_params.direction.value / 2.0
u, delta_idx = _moneyness_from_delta_two_dimensional(
delta,
delta_type,
_get_vol_smile_or_raise(vol, self.fx_option_params.expiry),
t_e,
z_w,
self.fx_option_params.direction.value,
)
else: # then delta type is adjusted,
if eta_1 == -0.5: # then smile type matches: use 1-d solver
u = _moneyness_from_atm_delta_one_dimensional(
delta_type,
vol_delta_type,
_get_vol_smile_or_value(vol, self.fx_option_params.expiry),
t_e,
z_w,
self.fx_option_params.direction,
)
delta_idx = z_w_1 * u * 0.5
else: # smile delta type unmatched: 2-d solver required
u, delta_idx = _moneyness_from_atm_delta_two_dimensional(
delta_type,
_get_vol_smile_or_raise(vol, self.fx_option_params.expiry),
t_e,
z_w,
self.fx_option_params.direction,
)
if isinstance(vol, FXDeltaVolSmile | FXDeltaVolSurface):
vol_value = _get_vol_smile_or_raise(vol, self.fx_option_params.expiry)[delta_idx]
else:
vol_value = _validate_obj_not_no_input(vol, "vol") # type: ignore[assignment]
# u, delta_idx, delta =
# self._moneyness_from_delta_three_dimensional(delta_type, vol, t_e, z_w)
return delta_idx, vol_value, u * f
def _index_vol_and_strike_from_delta(
self,
delta: float,
delta_type: FXDeltaMethod,
vol: _FXVolOption,
w_deli: DualTypes,
w_spot: DualTypes,
f: DualTypes | FXForwards,
t_e: DualTypes,
) -> tuple[DualTypes | None, DualTypes, DualTypes]:
"""
This function returns strike and, where available, a delta index for an option period
defined by a fixed delta percentage.
Parameters
----------
delta: float
The delta percent, e.g 0.25.
delta_type: FXDeltaMethod
The delta type of the option period.
vol: DualTypes | Smile | Surface
The volatility used, either a specific value or a Smile/Surface.
w_deli: DualTypes
The relevant discount factor at delivery.
w_spot: DualTypes
The relevant discount factor at spot.
f: DualTypes | FXForwards
The forward FX rate for delivery. When using a *SabrSurface* this is required in
*FXForwards* form.
t_e: DualTypes
The time to expiry
Returns
-------
(DualTypes, DualTypes)
"""
vol_delta_type = _get_vol_delta_type(vol, delta_type)
z_w = w_deli / w_spot
if isinstance(vol, FXSabrSmile | FXSabrSurface):
return self._index_vol_and_strike_from_delta_sabr(delta, delta_type, vol, z_w, f)
else: # DualTypes | FXDeltaVolSmile | FXDeltaVolSurface
f_: DualTypes = f # type: ignore[assignment]
vol_: DualTypes | FXDeltaVolSmile = vol # type: ignore[assignment]
return self._index_vol_and_strike_from_delta_dv(
f_,
delta,
vol_,
t_e,
delta_type,
vol_delta_type,
z_w,
)
def _index_vol_and_strike_from_delta_dv(
self,
f: DualTypes,
delta: float,
vol: DualTypes | FXDeltaVolSmile | FXDeltaVolSurface,
t_e: DualTypes,
delta_type: FXDeltaMethod,
vol_delta_type: FXDeltaMethod,
z_w: DualTypes,
) -> tuple[DualTypes | None, DualTypes, DualTypes]:
"""Determine strike and delta index for an option by delta % for DeltaVol type models
or constant volatility"""
eta_0, z_w_0, _ = _delta_type_constants(delta_type, z_w, 0.0) # u: unused
eta_1, z_w_1, _ = _delta_type_constants(vol_delta_type, z_w, 0.0) # u: unused
# then delta types are both unadjusted, used closed form.
if eta_0 == eta_1 and eta_0 == 0.5:
if isinstance(vol, FXDeltaVolSmile | FXDeltaVolSurface):
d_i: DualTypes = (-z_w_1 / z_w_0) * (
delta - 0.5 * z_w_0 * (self.fx_option_params.direction + 1.0)
)
vol_value: DualTypes = _get_vol_smile_or_raise(vol, self.fx_option_params.expiry)[
d_i
]
delta_idx: DualTypes | None = d_i
else:
vol_value = _validate_obj_not_no_input(vol, "vol") # type: ignore[assignment]
delta_idx = None
u: DualTypes = _moneyness_from_delta_closed_form(
delta, vol_value, t_e, z_w_0, self.fx_option_params.direction
)
return delta_idx, vol_value, u * f
# then delta types are both adjusted, use 1-d solver.
elif eta_0 == eta_1 and eta_0 == -0.5:
u = _moneyness_from_delta_one_dimensional(
delta,
delta_type,
vol_delta_type,
_get_vol_smile_or_value(vol, self.fx_option_params.expiry),
t_e,
z_w,
self.fx_option_params.direction,
)
delta_idx = (-z_w_1 / z_w_0) * (
delta - z_w_0 * u * (self.fx_option_params.direction + 1.0) * 0.5
)
else: # delta adjustment types are different, use 2-d solver.
u, delta_idx = _moneyness_from_delta_two_dimensional(
delta,
delta_type,
_get_vol_smile_or_raise(vol, self.fx_option_params.expiry),
t_e,
z_w,
self.fx_option_params.direction,
)
_1: DualTypes | None = delta_idx
_2: DualTypes = u * f
if isinstance(vol, FXDeltaVolSmile | FXDeltaVolSurface):
vol_value = _get_vol_smile_or_raise(vol, self.fx_option_params.expiry)[delta_idx]
else:
vol_value = _validate_obj_not_no_input(vol, "vol") # type: ignore[assignment]
return _1, vol_value, _2
def _index_vol_and_strike_from_delta_sabr(
self,
delta: float,
delta_type: FXDeltaMethod,
vol: FXSabrSmile | FXSabrSurface,
z_w: DualTypes,
f: DualTypes | FXForwards,
) -> tuple[DualTypes | None, DualTypes, DualTypes]:
eta_0, z_w_0, _ = _delta_type_constants(delta_type, z_w, 0.0) # u: unused
t_e = (self.fx_option_params.expiry - vol.meta.eval_date).days / 365.0
sqrt_t = t_e**0.5
if isinstance(f, FXForwards):
f_d: DualTypes = f.rate(self.fx_option_params.pair, self.fx_option_params.delivery)
# _ad = _set_ad_order_objects([0], [f]) # GH755
else:
f_d = f # type: ignore[assignment]
def root1d(
k: DualTypes,
f_d: DualTypes,
fx: FXForwards | DualTypes,
z_w_0: DualTypes,
delta: float,
as_float: bool,
) -> tuple[DualTypes, DualTypes]:
# if not as_float and isinstance(fx, FXForwards):
# _set_ad_order_objects(_ad, [fx])
sigma, dsigma_dk = vol._d_sabr_d_k_or_f(
k=k, f=fx, expiry=self.fx_option_params.expiry, as_float=as_float, derivative=1
)
dn0 = -dual_log(k / f_d) / (sigma * sqrt_t) + eta_0 * sigma * sqrt_t
Phi = dual_norm_cdf(self.fx_option_params.direction * dn0)
if eta_0 == -0.5:
z_u_0, dz_u_dk = k / f_d, 1 / f_d
d_1 = -dz_u_dk * z_w_0 * self.fx_option_params.direction * Phi
else:
z_u_0, dz_u_dk = 1.0, 0.0
d_1 = 0.0
ddn_dk = (dual_log(k / f_d) / (sigma**2 * sqrt_t) + eta_0 * sqrt_t) * dsigma_dk - 1 / (
k * sigma * sqrt_t
)
d_2 = -z_u_0 * z_w_0 * dual_norm_pdf(self.fx_option_params.direction * dn0) * ddn_dk
f0 = delta - z_w_0 * z_u_0 * self.fx_option_params.direction * Phi
f1 = d_1 + d_2
return f0, f1
g01 = delta if self.fx_option_params.direction > 0 else max(delta, -0.75)
if isinstance(vol, FXSabrSmile):
alpha = vol.nodes.alpha
else: # FXSabrSurface
vol_: FXSabrSurface = vol
expiry_posix = self.fx_option_params.expiry.replace(tzinfo=UTC).timestamp()
e_idx, _ = _surface_index_left(vol_.meta.expiries_posix, expiry_posix)
alpha = vol_.smiles[e_idx].nodes.alpha
g0 = (
_moneyness_from_delta_closed_form(
g01, alpha * 100.0, t_e, z_w_0, self.fx_option_params.direction
)
* f_d
)
root_solver = newton_1dim(
root1d,
g0,
args=(f_d, f, z_w_0, delta),
pre_args=(True,), # solve iterations `as_float`
final_args=(False,), # solve final iteration with AD
raise_on_fail=True,
)
k: DualTypes = root_solver["g"]
v_ = vol.get_from_strike(k, f, self.fx_option_params.expiry)[1]
return None, v_, k
def _payoff_at_expiry(
self, rng: tuple[float, float] | NoInput = NoInput(0)
) -> tuple[Arr1dF64, Arr1dF64]:
# used by plotting methods
if isinstance(self.fx_option_params.strike, NoInput):
raise ValueError(
"Cannot return payoff for option without a specified `strike`.",
) # pragma: no cover
if isinstance(rng, NoInput):
x = np.linspace(0, 20, 1001)
else:
x = np.linspace(rng[0], rng[1], 1001)
k: float = _dual_float(self.fx_option_params.strike)
_ = (x - k) * self.fx_option_params.direction
__ = np.zeros(1001)
if self.fx_option_params.direction > 0: # call
y = np.where(x < k, __, _) * self.settlement_params.notional
else: # put
y = np.where(x > k, __, _) * self.settlement_params.notional
return x, y
class FXCallPeriod(_BaseFXOptionPeriod):
r"""
A *Period* defined by a European FX call option.
The expected unindexed reference cashflow is given by,
.. math::
\mathbb{E^Q}[\bar{C}_t] = \left \{ \begin{matrix} \max(f_d - K, 0) & \text{after expiry} \\ B76(f_d, K, t, \sigma) & \text{before expiry} \end{matrix} \right .
where :math:`B76(.)` is the Black-76 option pricing formula, using log-normal volatility
calculations with calendar day time reference.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.periods import FXCallPeriod
from datetime import datetime as dt
.. ipython:: python
fxo = FXCallPeriod(
delivery=dt(2000, 3, 1),
pair="eurusd",
expiry=dt(2000, 2, 28),
strike=1.10,
delta_type="forward",
)
fxo.cashflows()
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define **fx option** and generalised **settlement** parameters.
delivery: datetime, :red:`required`
The settlement date of the underlying FX rate of the option. Also used as the implied
payment date of the cashflow valuation date.
pair: str, :red:`required`
The currency pair of the :class:`~rateslib.data.fixings.FXFixing` against which the option
will settle.
expiry: datetime, :red:`required`
The expiry date of the option, when the option fixing is determined.
strike: float, Dual, Dual2, Variable, :green:`optional`
The strike price of the option. Can be set after initialisation.
notional: float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The notional of the option expressed in units of LHS currency of `pair`.
delta_type: FXDeltaMethod, str, :green:`optional (set by 'default')`
The definition of the delta for the option.
metric: FXOptionMetric, str, :green:`optional` (set by 'default')`
The metric used by default in the
:meth:`~rateslib.periods.fx_volatility._BaseFXOptionPeriod.rate` method.
option_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of the option :class:`~rateslib.data.fixings.FXFixing`. If a scalar, is used
directly. If a string identifier, links to the central ``fixings`` object and data loader.
See :ref:`fixings `.
ex_dividend: datetime, :green:`optional (set as 'delivery')`
The ex-dividend date of the settled cashflow.
.. note::
This *Period* type has not implemented **indexation** or **non-deliverability**.
""" # noqa: E501
def __init__(
self,
*,
# option params:
delivery: datetime, # otherwise termed the 'payment' of the period
pair: str,
expiry: datetime,
strike: DualTypes_ = NoInput(0),
notional: DualTypes_ = NoInput(0),
delta_type: FXDeltaMethod | str_ = NoInput(0),
metric: FXOptionMetric | str_ = NoInput(0),
option_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
# currency args:
ex_dividend: datetime_ = NoInput(0),
) -> None:
super().__init__(
direction=OptionType.Call,
delivery=delivery,
pair=pair,
expiry=expiry,
strike=strike,
notional=notional,
delta_type=delta_type,
metric=metric,
option_fixings=option_fixings,
ex_dividend=ex_dividend,
)
class FXPutPeriod(_BaseFXOptionPeriod):
r"""
A *Period* defined by a European FX put option.
The expected unindexed reference cashflow is given by,
.. math::
\mathbb{E^Q}[\bar{C}_t] = \left \{ \begin{matrix} \max(K - f_d, 0) & \text{after expiry} \\ B76(f_d, K, t, \sigma) & \text{before expiry} \end{matrix} \right .
where :math:`B76(.)` is the Black-76 option pricing formula, using log-normal volatility
calculations with calendar day time reference.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.periods import FXPutPeriod
from datetime import datetime as dt
.. ipython:: python
fxo = FXPutPeriod(
delivery=dt(2000, 3, 1),
pair="eurusd",
expiry=dt(2000, 2, 28),
strike=1.10,
delta_type="forward",
)
fxo.cashflows()
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define **fx option** and generalised **settlement** parameters.
delivery: datetime, :red:`required`
The settlement date of the underlying FX rate of the option. Also used as the implied
payment date of the cashflow valuation date.
pair: str, :red:`required`
The currency pair of the :class:`~rateslib.data.fixings.FXFixing` against which the option
will settle.
expiry: datetime, :red:`required`
The expiry date of the option, when the option fixing is determined.
strike: float, Dual, Dual2, Variable, :green:`optional`
The strike price of the option. Can be set after initialisation.
notional: float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The notional of the option expressed in units of LHS currency of `pair`.
delta_type: FXDeltaMethod, str, :green:`optional (set by 'default')`
The definition of the delta for the option.
metric: FXOptionMetric, str, :green:`optional` (set by 'default')`
The metric used by default in the
:meth:`~rateslib.periods.fx_volatility._BaseFXOptionPeriod.rate` method.
option_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of the option :class:`~rateslib.data.fixings.FXFixing`. If a scalar, is used
directly. If a string identifier, links to the central ``fixings`` object and data loader.
See :ref:`fixings `.
ex_dividend: datetime, :green:`optional (set as 'delivery')`
The ex-dividend date of the settled cashflow.
.. note::
This *Period* type has not implemented **indexation** or **non-deliverability**.
""" # noqa: E501
def __init__(
self,
*,
# option params:
delivery: datetime, # otherwise termed the 'payment' of the period
pair: str,
expiry: datetime,
strike: DualTypes_ = NoInput(0),
notional: DualTypes_ = NoInput(0),
delta_type: FXDeltaMethod | str_ = NoInput(0),
metric: FXOptionMetric | str_ = NoInput(0),
option_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
# currency args:
ex_dividend: datetime_ = NoInput(0),
) -> None:
super().__init__(
direction=OptionType.Put,
delivery=delivery,
pair=pair,
expiry=expiry,
strike=strike,
notional=notional,
delta_type=delta_type,
metric=metric,
option_fixings=option_fixings,
ex_dividend=ex_dividend,
)
================================================
FILE: python/rateslib/periods/ir_volatility.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from abc import ABCMeta, abstractmethod
from typing import TYPE_CHECKING
import numpy as np
import rateslib.errors as err
from rateslib import defaults
from rateslib.curves._parsers import (
_disc_required_maybe_from_curve,
_validate_curve_not_no_input,
)
from rateslib.data.fixings import _get_irs_series
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import Err, NoInput, Ok, _drb
from rateslib.enums.parameters import (
IROptionMetric,
OptionPricingModel,
OptionType,
SwaptionSettlementMethod,
_get_ir_option_metric,
)
from rateslib.instruments.protocols.pricing import _Curves
from rateslib.periods.parameters import (
_IndexParams,
_IROptionParams,
_NonDeliverableParams,
_SettlementParams,
)
from rateslib.periods.protocols import _BasePeriodStatic, _WithAnalyticIROptionGreeks
from rateslib.periods.utils import (
_get_ir_vol_value_and_forward_maybe_from_obj,
)
from rateslib.volatility.ir.utils import _IRVolPricingParams
from rateslib.volatility.utils import (
_OptionModelBachelier,
_OptionModelBlack76,
)
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
Arr1dF64,
CurveOption,
CurveOption_,
DualTypes,
DualTypes_,
FXForwards_,
IRSSeries,
Result,
Series,
_BaseCurve,
_BaseCurve_,
_IRVolOption_,
datetime,
datetime_,
str_,
)
class _BaseIRSOptionPeriod(_BasePeriodStatic, _WithAnalyticIROptionGreeks, metaclass=ABCMeta):
r"""
Abstract base class for *IROptionPeriods* types.
**See Also**: :class:`~rateslib.periods.IRSCallPeriod`,
:class:`~rateslib.periods.IRSPutPeriod`
"""
def analytic_greeks(
self,
rate_curve: CurveOption,
disc_curve: _BaseCurve,
index_curve: _BaseCurve,
fx: FXForwards_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
premium: DualTypes_ = NoInput(0), # expressed in the payment currency
premium_payment: datetime_ = NoInput(0),
) -> dict[str, Any]:
return super()._base_analytic_greeks(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
ir_vol=ir_vol,
fx=fx,
premium=premium,
premium_payment=premium_payment,
)
@property
def period_params(self) -> None:
"""This *Period* type has no
:class:`~rateslib.periods.parameters._PeriodParams`."""
return self._period_params # type: ignore[return-value] # pragma: no cover
@property
def settlement_params(self) -> _SettlementParams:
"""The :class:`~rateslib.periods.parameters._SettlementParams`
of the *Period*."""
return self._settlement_params
@property
def index_params(self) -> _IndexParams | None:
"""The :class:`~rateslib.periods.parameters._IndexParams` of
the *Period*, if any."""
return self._index_params
@property
def non_deliverable_params(self) -> _NonDeliverableParams | None:
"""The :class:`~rateslib.periods.parameters._NonDeliverableParams` of the
*Period*., if any."""
return self._non_deliverable_params
@property
def rate_params(self) -> None:
"""This *Period* type has no rate parameters."""
return self._rate_params # type: ignore[return-value] # pragma: no cover
@property
def ir_option_params(self) -> _IROptionParams:
"""The :class:`~rateslib.periods.parameters._IROptionParams` of the
*Period*."""
return self._ir_option_params
@abstractmethod
def __init__(
self,
*,
# option params:
direction: OptionType,
expiry: datetime,
tenor: datetime | str,
irs_series: IRSSeries | str,
strike: DualTypes_ = NoInput(0),
notional: DualTypes_ = NoInput(0),
metric: IROptionMetric | str_ = NoInput(0),
option_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
# currency args:
settlement_method: SwaptionSettlementMethod | str_ = NoInput(0),
ex_dividend: datetime_ = NoInput(0),
) -> None:
self._index_params = None
self._rate_params = None
self._period_params = None
self._ir_option_params = _IROptionParams(
_direction=direction,
_expiry=expiry,
_tenor=tenor,
_irs_series=_get_irs_series(irs_series),
_strike=strike,
_metric=_drb(defaults.ir_option_metric, metric),
_option_fixings=option_fixings,
_settlement_method=_drb(defaults.ir_option_settlement, settlement_method),
)
nd_pair = NoInput(0)
if isinstance(nd_pair, NoInput):
# then option is directly deliverable
self._non_deliverable_params: _NonDeliverableParams | None = None
self._settlement_params = _SettlementParams(
_notional=_drb(defaults.notional, notional),
_payment=self.ir_option_params.option_fixing.effective,
_currency=self.ir_option_params.option_fixing.irs_series.currency,
_notional_currency=self.ir_option_params.option_fixing.irs_series.currency,
_ex_dividend=ex_dividend,
)
else:
raise NotImplementedError("ND IR Options not implement") # pragma: no cover
def __repr__(self) -> str:
return f""
def _unindexed_reference_cashflow_elements(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
ir_vol: _IRVolOption_ | _IRVolPricingParams = NoInput(0),
) -> tuple[DualTypes, DualTypes | None, _IRVolPricingParams | None]:
"""
Perform the unindexed_reference_cashflow calculations but return calculation
components.
Returns
-------
(cashflow, analytic_delta, pricing params)
"""
# The unindexed_reference_cashflow is the value of the IRS after expiry.
# This may be based on number numerous different settlement methods: physical / cash etc.
# Currently we only offer 1 form of valuation which is "physical or physical simulation".
if isinstance(self.ir_option_params.strike, NoInput):
raise ValueError(err.VE_NEEDS_STRIKE)
k = self.ir_option_params.strike
r = self.ir_option_params.option_fixing.value
if not isinstance(r, NoInput):
# the presence of fixing value here is used purely as an indicator of exercise status.
phi: OptionType = self.ir_option_params.direction
if (phi == OptionType.Call and k < r) or (phi == OptionType.Put and k > r):
if self.ir_option_params.settlement_method is SwaptionSettlementMethod.Physical:
local_npv_pay_dt: DualTypes = self.ir_option_params.option_fixing.irs.npv( # type: ignore[assignment]
curves=_Curves(
disc_curve=index_curve,
leg2_rate_curve=rate_curve,
leg2_disc_curve=index_curve,
),
forward=self.settlement_params.payment,
local=False,
)
value = (
local_npv_pay_dt
* self.settlement_params.notional
/ 1e6
* self.ir_option_params.direction.value
)
return value, None, None
else:
# in [
# SwaptionSettlementMethod.CashParTenor,
# SwaptionSettlementMethod.CashCollateralized
# ]
index_curve_ = _validate_curve_not_no_input(index_curve)
del index_curve
a_r = self.ir_option_params.option_fixing.annuity(
settlement_method=self.ir_option_params.settlement_method,
rate_curve=rate_curve,
index_curve=index_curve_,
)
value = (
(r - k)
* 100.0
* a_r
* self.settlement_params.notional
/ 1e6
* self.ir_option_params.direction.value
)
return value, None, None
else:
# no exercise
return 0.0, None, None
else:
disc_curve_ = _disc_required_maybe_from_curve(curve=rate_curve, disc_curve=disc_curve)
del disc_curve
index_curve_ = _validate_curve_not_no_input(index_curve)
del index_curve
pricing_ = _get_ir_vol_value_and_forward_maybe_from_obj(
ir_vol=ir_vol,
index_curve=index_curve_,
rate_curve=rate_curve,
strike=k,
irs=self.ir_option_params.option_fixing.irs,
expiry=self.ir_option_params.expiry,
tenor=self.ir_option_params.option_fixing.termination,
t_e=self.ir_option_params.time_to_expiry(disc_curve_.nodes.initial),
)
match pricing_.pricing_model:
case OptionPricingModel.Black76:
expected = (
_OptionModelBlack76._value(
F=pricing_.f,
K=pricing_.k,
rate_shift=pricing_.rate_shift,
t_e=pricing_.t_e,
v2=1.0, # not required
vol=pricing_.vol / 100.0,
phi=self.ir_option_params.direction.value,
)
* 100.0
) # bps
case OptionPricingModel.Bachelier:
expected = (
_OptionModelBachelier._value(
F=pricing_.f,
K=pricing_.k,
t_e=pricing_.t_e,
v2=1.0, # not required
vol=pricing_.vol / 100.0,
phi=self.ir_option_params.direction.value,
)
* 100.0
)
case _: # pragma: no cover
raise RuntimeError(
f"Option pricing model {pricing_.pricing_model} not implemented. "
f"Please report this issue."
)
a_r = self.ir_option_params.option_fixing.annuity(
settlement_method=self.ir_option_params.settlement_method,
rate_curve=rate_curve,
index_curve=index_curve_,
)
return (
expected * self.settlement_params.notional / 1e6 * a_r,
a_r,
pricing_,
)
def unindexed_reference_cashflow( # type: ignore[override]
self,
*,
rate_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
ir_vol: _IRVolOption_ | _IRVolPricingParams = NoInput(0),
**kwargs: Any,
) -> DualTypes:
return self._unindexed_reference_cashflow_elements(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
ir_vol=ir_vol,
)[0]
def try_rate(
self,
rate_curve: CurveOption_,
disc_curve: _BaseCurve,
index_curve: _BaseCurve,
fx: FXForwards_ = NoInput(0),
ir_vol: _IRVolOption_ | _IRVolPricingParams = NoInput(0),
metric: IROptionMetric | str_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> Result[DualTypes]:
"""
Return the pricing metric of the *FXOption*, with lazy error handling.
See :meth:`~rateslib.periods.FXOptionPeriod.rate`.
"""
try:
return Ok(
self.rate(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
fx=fx,
ir_vol=ir_vol,
metric=metric,
forward=forward,
)
)
except Exception as e:
return Err(e)
def rate(
self,
*,
rate_curve: CurveOption_,
disc_curve: _BaseCurve,
index_curve: _BaseCurve,
fx: FXForwards_ = NoInput(0),
ir_vol: _IRVolOption_ | _IRVolPricingParams = NoInput(0),
metric: IROptionMetric | str_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
"""
Return the pricing metric of the *IRSOption*.
This is priced according to the ``payment`` date of the *OptionPeriod*.
Parameters
----------
rate_curve: Curve
The curve used for forecasting rates on the underlying
:class:`~rateslib.instruments.IRS`.
disc_curve: Curve
The discount *Curve* according to the collateral agreement of the option.
index_curve: Curve
The curve used for price alignment indexing according to the
:class:`~rateslib.enums.SwaptionSettlementMethod`. I.e. the discount curve used on the
underlying :class:`~rateslib.instruments.IRS`.
fx: float, FXRates, FXForwards, optional
The object to project the currency pair FX rate at delivery.
ir_vol: IRSabrSmile, float, Dual, Dual2
The volatility object to price the option. If given as numeric, it is assumed to be
Black (log-normal) volatility with zero shift.
metric: IROptionMetric,
The metric to return. See examples.
forward: datetime, optional (set as payment date of option)
Not currently used by IRSOptionPeriod.rate.
Returns
-------
float, Dual, Dual2 or dict of such.
"""
if not isinstance(metric, NoInput):
metric_ = _get_ir_option_metric(metric)
else: # use metric associated with self
metric_ = self.ir_option_params.metric
del metric
cash, anal_delta, pricing = self._unindexed_reference_cashflow_elements(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
ir_vol=ir_vol,
)
if metric_ == IROptionMetric.Premium():
return cash
elif metric_ == IROptionMetric.PercentNotional():
return cash / self.settlement_params.notional * 100.0
disc_curve_ = _disc_required_maybe_from_curve(curve=rate_curve, disc_curve=disc_curve)
del disc_curve
if pricing is None:
pricing_ = _get_ir_vol_value_and_forward_maybe_from_obj(
ir_vol=ir_vol,
index_curve=index_curve,
rate_curve=rate_curve,
strike=self.ir_option_params.strike, # type: ignore[arg-type]
irs=self.ir_option_params.option_fixing.irs,
expiry=self.ir_option_params.expiry,
tenor=self.ir_option_params.option_fixing.termination,
t_e=self.ir_option_params.time_to_expiry(disc_curve_.nodes.initial),
)
else:
pricing_ = pricing
del pricing
if metric_ == IROptionMetric.NormalVol():
match pricing_.pricing_model:
case OptionPricingModel.Bachelier:
return pricing_.vol
case OptionPricingModel.Black76:
return _OptionModelBlack76.convert_to_bachelier(
f=pricing_.f,
k=pricing_.k,
shift=pricing_.shift,
vol=pricing_.vol,
t_e=pricing_.t_e,
)
case _: # pragma: no cover
raise RuntimeError(
f"Pricing model `{pricing_.pricing_model}` not implemented. "
f"Please report this issue."
)
elif type(metric_) is IROptionMetric.BlackVolShift:
# might need to resolve a volatility value depending upon the required shift
# and the expected shift
required_shift = metric_.shift()
provided_shift = int(_dual_float(pricing_.shift))
match pricing_.pricing_model:
case OptionPricingModel.Bachelier:
return _OptionModelBachelier.convert_to_black76(
f=pricing_.f,
k=pricing_.k,
shift=required_shift,
vol=pricing_.vol,
t_e=pricing_.t_e,
)
case OptionPricingModel.Black76:
return _OptionModelBlack76.convert_to_new_shift(
f=pricing_.f,
k=pricing_.k,
old_shift=provided_shift,
target_shift=required_shift,
vol=pricing_.vol,
t_e=pricing_.t_e,
)
else:
raise NotImplementedError("IROptionMetric` not implemented.") # pragma: no cover
def _payoff_at_expiry(
self, rng: tuple[float, float] | NoInput = NoInput(0)
) -> tuple[Arr1dF64, Arr1dF64]:
# used by plotting methods
if isinstance(self.ir_option_params.strike, NoInput):
raise ValueError(
"Cannot return payoff for option without a specified `strike`.",
) # pragma: no cover
if isinstance(rng, NoInput):
x = np.linspace(0, 20, 1001)
else:
x = np.linspace(rng[0], rng[1], 1001)
k: float = _dual_float(self.ir_option_params.strike)
_ = (x - k) * self.ir_option_params.direction
__ = np.zeros(1001)
if self.ir_option_params.direction > 0: # call
y = np.where(x < k, __, _) * self.settlement_params.notional
else: # put
y = np.where(x > k, __, _) * self.settlement_params.notional
return x, y
class IRSCallPeriod(_BaseIRSOptionPeriod):
r"""
A *Period* defined by a European call option on an IRS.
The expected unindexed reference cashflow is given by,
.. math::
\mathbb{E^Q}[\bar{C}_t] = \left \{ \begin{matrix} \max(f_d - K, 0) & \text{after expiry} \\ B76(f_d, K, t, \sigma) & \text{before expiry} \end{matrix} \right .
where :math:`B76(.)` is the Black-76 option pricing formula, using log-normal volatility
calculations with calendar day time reference.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.periods import FXCallPeriod
from datetime import datetime as dt
.. ipython:: python
fxo = FXCallPeriod(
delivery=dt(2000, 3, 1),
pair="eurusd",
expiry=dt(2000, 2, 28),
strike=1.10,
delta_type="forward",
)
fxo.cashflows()
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define **ir option** and generalised **settlement** parameters.
expiry: datetime, :red:`required`
The expiry date of the option, when the option fixing is determined.
irs_series: IRSSeries, str :red:`required`
This defines the conventions of the underlying :class:`~rateslib.instruments.IRS`.
tenor: datetime, str :red:`required`
The tenor of the underlying :class:`~rateslib.instruments.IRS`.
strike: float, Dual, Dual2, Variable, :green:`optional`
The strike fixed rate of the option. Can be set after initialisation.
notional: float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The notional of the option expressed in reference currency.
metric: IROptionMetric, str, :green:`optional` (set by 'default')`
The metric used by default in the
:meth:`~rateslib.periods.ir_volatility._BaseIRSOptionPeriod.rate` method.
option_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of the option :class:`~rateslib.data.fixings.IRSFixing`. If a scalar, is used
directly. If a string identifier, links to the central ``fixings`` object and data loader.
See :ref:`fixings `.
settlement_method: SwaptionSettlementMethod, str, :green:`optional` (set by 'default')`
The method for deriving the settlement cashflow or underlying value.
ex_dividend: datetime, :green:`optional (set as 'delivery')`
The ex-dividend date of the settled cashflow.
.. note::
This *Period* type has not implemented **indexation** or **non-deliverability**.
""" # noqa: E501
def __init__(
self,
*,
# option params:
expiry: datetime,
tenor: datetime | str,
irs_series: IRSSeries | str,
strike: DualTypes_ = NoInput(0),
notional: DualTypes_ = NoInput(0),
metric: IROptionMetric | str_ = NoInput(0),
option_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
# currency args:
settlement_method: SwaptionSettlementMethod | str_ = NoInput(0),
ex_dividend: datetime_ = NoInput(0),
) -> None:
super().__init__(
direction=OptionType.Call,
tenor=tenor,
irs_series=irs_series,
expiry=expiry,
strike=strike,
notional=notional,
metric=metric,
option_fixings=option_fixings,
settlement_method=settlement_method,
ex_dividend=ex_dividend,
)
class IRSPutPeriod(_BaseIRSOptionPeriod):
r"""
A *Period* defined by a European FX put option.
The expected unindexed reference cashflow is given by,
.. math::
\mathbb{E^Q}[\bar{C}_t] = \left \{ \begin{matrix} \max(K - f_d, 0) & \text{after expiry} \\ B76(f_d, K, t, \sigma) & \text{before expiry} \end{matrix} \right .
where :math:`B76(.)` is the Black-76 option pricing formula, using log-normal volatility
calculations with calendar day time reference.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib.periods import FXPutPeriod
from datetime import datetime as dt
.. ipython:: python
fxo = FXPutPeriod(
delivery=dt(2000, 3, 1),
pair="eurusd",
expiry=dt(2000, 2, 28),
strike=1.10,
delta_type="forward",
)
fxo.cashflows()
.. role:: red
.. role:: green
Parameters
----------
.
.. note::
The following define **fx option** and generalised **settlement** parameters.
delivery: datetime, :red:`required`
The settlement date of the underlying FX rate of the option. Also used as the implied
payment date of the cashflow valuation date.
pair: str, :red:`required`
The currency pair of the :class:`~rateslib.data.fixings.FXFixing` against which the option
will settle.
expiry: datetime, :red:`required`
The expiry date of the option, when the option fixing is determined.
strike: float, Dual, Dual2, Variable, :green:`optional`
The strike price of the option. Can be set after initialisation.
notional: float, Dual, Dual2, Variable, :green:`optional (set by 'defaults')`
The notional of the option expressed in units of LHS currency of `pair`.
delta_type: FXDeltaMethod, str, :green:`optional (set by 'default')`
The definition of the delta for the option.
metric: FXDeltaMethod, str, :green:`optional` (set by 'default')`
The metric used by default in the
:meth:`~rateslib.periods.fx_volatility.FXOptionPeriod.rate` method.
option_fixings: float, Dual, Dual2, Variable, Series, str, :green:`optional`
The value of the option :class:`~rateslib.data.fixings.FXFixing`. If a scalar, is used
directly. If a string identifier, links to the central ``fixings`` object and data loader.
See :ref:`fixings `.
settlement_method: SwaptionSettlementMethod, str, :green:`optional` (set by 'default')`
The method for deriving the settlement cashflow or underlying value.
ex_dividend: datetime, :green:`optional (set as 'delivery')`
The ex-dividend date of the settled cashflow.
.. note::
This *Period* type has not implemented **indexation** or **non-deliverability**.
""" # noqa: E501
def __init__(
self,
*,
# option params:
expiry: datetime,
tenor: datetime | str,
irs_series: IRSSeries | str,
strike: DualTypes_ = NoInput(0),
notional: DualTypes_ = NoInput(0),
metric: IROptionMetric | str_ = NoInput(0),
option_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
# currency args:
settlement_method: SwaptionSettlementMethod | str_ = NoInput(0),
ex_dividend: datetime_ = NoInput(0),
) -> None:
super().__init__(
direction=OptionType.Put,
tenor=tenor,
irs_series=irs_series,
expiry=expiry,
strike=strike,
notional=notional,
metric=metric,
option_fixings=option_fixings,
settlement_method=settlement_method,
ex_dividend=ex_dividend,
)
================================================
FILE: python/rateslib/periods/parameters/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from rateslib.periods.parameters.credit import _CreditParams
from rateslib.periods.parameters.fx_volatility import _FXOptionParams
from rateslib.periods.parameters.index import (
_IndexParams,
_init_or_none_IndexParams,
)
from rateslib.periods.parameters.ir_volatility import _IROptionParams
from rateslib.periods.parameters.mtm import (
_init_MtmParams,
_MtmParams,
)
from rateslib.periods.parameters.period import _PeriodParams
from rateslib.periods.parameters.rate import (
_FixedRateParams,
_FloatRateParams,
_init_FloatRateParams,
)
from rateslib.periods.parameters.settlement import (
_init_or_none_NonDeliverableParams,
_init_SettlementParams_with_fx_pair,
_NonDeliverableParams,
_SettlementParams,
)
__all__ = [
"_IndexParams",
"_init_or_none_IndexParams",
"_init_or_none_NonDeliverableParams",
"_init_SettlementParams_with_fx_pair",
"_init_FloatRateParams",
"_init_MtmParams",
"_SettlementParams",
"_PeriodParams",
"_FixedRateParams",
"_FloatRateParams",
"_NonDeliverableParams",
"_CreditParams",
"_MtmParams",
"_FXOptionParams",
"_IROptionParams",
]
================================================
FILE: python/rateslib/periods/parameters/credit.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
class _CreditParams:
"""
Parameters for *Period* cashflows associated with credit events.
"""
_premium_accrued: bool
def __init__(self, _premium_accrued: bool) -> None:
self._premium_accrued = _premium_accrued
@property
def premium_accrued(self) -> bool:
"""Whether the premium is accrued within the period to default."""
return self._premium_accrued
================================================
FILE: python/rateslib/periods/parameters/fx_volatility.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from pandas import Series
from rateslib.data.fixings import FXFixing
from rateslib.enums.parameters import FXOptionMetric, _get_fx_option_metric
if TYPE_CHECKING:
from rateslib.local_types import (
DualTypes,
DualTypes_,
FXDeltaMethod,
FXIndex,
OptionType,
datetime,
str_,
)
class _FXOptionParams:
"""
Parameters for *FX Option Period* cashflows.
"""
_expiry: datetime
_delivery: datetime
_fx_index: FXIndex
_delta_type: FXDeltaMethod
_metric: FXOptionMetric
_option_fixing: FXFixing
_strike: DualTypes_
_currency: str
_direction: OptionType
def __init__(
self,
_direction: OptionType,
_expiry: datetime,
_delivery: datetime,
_fx_index: FXIndex,
_delta_type: FXDeltaMethod,
_metric: str | FXOptionMetric,
_option_fixings: DualTypes | Series[DualTypes] | str_, # type: ignore[type-var]
_strike: DualTypes_,
):
self._direction = _direction
self._expiry = _expiry
self._delivery = _delivery
self._fx_index = _fx_index
self._delta_type = _delta_type
self._metric = _get_fx_option_metric(_metric)
self._strike = _strike
if isinstance(_option_fixings, Series):
value = FXFixing._lookup(timeseries=_option_fixings, date=self.delivery)
self._option_fixing = FXFixing(
delivery=_delivery,
value=value,
fx_index=_fx_index,
publication=_expiry,
)
elif isinstance(_option_fixings, str):
self._option_fixing = FXFixing(
delivery=_delivery,
identifier=_option_fixings,
fx_index=_fx_index,
publication=_expiry,
)
else:
self._option_fixing = FXFixing(
delivery=_delivery,
value=_option_fixings,
fx_index=_fx_index,
publication=_expiry,
)
@property
def expiry(self) -> datetime:
"""The expiry date of the option."""
return self._expiry
@property
def delivery(self) -> datetime:
"""The date of the FX rate exchange for the FX rate used for settlement of the option."""
return self._delivery
@property
def fx_index(self) -> FXIndex:
"""The FX index defining the FX rate conventions"""
return self._fx_index
@property
def pair(self) -> str:
"""The currency pair for settlement of the option."""
return self.fx_index.pair
@property
def direction(self) -> OptionType:
"""The direction of the option."""
return self._direction
@property
def strike(self) -> DualTypes_:
"""The strike price of the option."""
return self._strike
@strike.setter
def strike(self, val: DualTypes_) -> None:
self._strike = val
@property
def option_fixing(self) -> FXFixing:
"""The FX fixing related to settlement of the option."""
return self._option_fixing
@property
def metric(self) -> FXOptionMetric:
"""The default pricing quoting of the option."""
return self._metric
@property
def delta_type(self) -> FXDeltaMethod:
"""The delta type used by the option to define its delta."""
return self._delta_type
def time_to_expiry(self, now: datetime) -> float:
"""The time to expiry of the option in years measured by calendar days from ``now``."""
# TODO make this a dual, associated with theta
return (self.expiry - now).days / 365.0
================================================
FILE: python/rateslib/periods/parameters/index.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import warnings
from typing import TYPE_CHECKING
from pandas import Series
import rateslib.errors as err
from rateslib import defaults
from rateslib.curves.curves import _try_index_value
from rateslib.data.fixings import IndexFixing
from rateslib.enums.generics import (
Err,
NoInput,
Ok,
_drb,
)
from rateslib.enums.parameters import (
IndexMethod,
_get_index_method,
)
if TYPE_CHECKING:
from rateslib.local_types import (
Any,
DualTypes,
DualTypes_,
Result,
_BaseCurve_,
bool_,
datetime_,
int_,
str_,
)
class _IndexParams:
"""
Parameters for *Period* cashflows adjusted under some indexation.
Parameters
----------
_index_method : IndexMethod
The interpolation method, or otherwise, to determine index values from reference dates.
_index_lag: int
The indexation lag, in months, applied to the determination of index values.
_index_base: float, Dual, Dual2, Variable, optional
The specific value set of the base index value.
If not given and ``index_fixings`` is a str fixings identifier that will be
used to determine the base index value.
_index_fixings: float, Dual, Dual2, Variable, Series, str, optional
The index value for the reference date.
If a scalar value this is used directly. If a string identifier will link to the
central ``fixings`` object and data loader.
_index_base_date: datetime, optional
The reference date for determining the base index value. Not required if ``_index_base``
value is given directly.
_index_reference_date: datetime, optional
The reference date for determining the index value. Not required if ``_index_fixings``
is given as a scalar value.
_index_only: bool, optional
A flag which determines non-payment of notional on supported *Periods*.
"""
_index_lag: int
_index_method: IndexMethod
_index_fixing: IndexFixing
_index_base: IndexFixing
_index_only: bool
def __init__(
self,
*,
_index_method: IndexMethod,
_index_lag: int,
_index_base: DualTypes_,
_index_fixings: DualTypes | Series[DualTypes] | str_, # type: ignore[type-var]
_index_base_date: datetime_,
_index_reference_date: datetime_,
_index_only: bool,
) -> None:
self._index_method = _index_method
self._index_lag = _index_lag
self._index_only = _index_only
if isinstance(_index_fixings, Series):
warnings.warn(err.FW_FIXINGS_AS_SERIES, FutureWarning)
if isinstance(_index_base, NoInput) and isinstance(_index_fixings, Series):
_index_base_value = IndexFixing._lookup(
index_lag=self.index_lag,
index_method=self.index_method,
timeseries=_index_fixings,
date=_index_base_date, # type: ignore[arg-type] # argument combinations
)
self._index_base = IndexFixing(
date=_index_base_date, # type: ignore[arg-type] # argument combinations
index_lag=self.index_lag,
index_method=self.index_method,
value=_index_base_value,
identifier=NoInput(0),
)
else:
self._index_base = IndexFixing(
date=_index_base_date, # type: ignore[arg-type] # argument combinations
index_lag=self.index_lag,
index_method=self.index_method,
value=_index_base,
identifier=_index_fixings if isinstance(_index_fixings, str) else NoInput(0),
)
if isinstance(_index_fixings, Series):
_index_ref_value = IndexFixing._lookup(
index_lag=self.index_lag,
index_method=self.index_method,
timeseries=_index_fixings,
date=_index_reference_date, # type: ignore[arg-type] # argument combinations
)
self._index_fixing = IndexFixing(
date=_index_reference_date, # type: ignore[arg-type] # argument combinations
index_lag=self.index_lag,
index_method=self.index_method,
value=_index_ref_value,
identifier=NoInput(0),
)
else:
self._index_fixing = IndexFixing(
date=_index_reference_date, # type: ignore[arg-type] # argument combinations
index_lag=self.index_lag,
index_method=self.index_method,
value=_index_fixings if not isinstance(_index_fixings, str) else NoInput(0),
identifier=_index_fixings if isinstance(_index_fixings, str) else NoInput(0),
)
@property
def index_base(self) -> IndexFixing:
"""The :class:`~rateslib.data.fixings.IndexFixing` associated with the index base date."""
return self._index_base
@index_base.setter
def index_base(self, value: Any) -> None:
raise ValueError(err.VE_ATTRIBUTE_IS_IMMUTABLE.format("index_base"))
@property
def index_fixing(self) -> IndexFixing:
"""The :class:`~rateslib.data.fixings.IndexFixing` associated with the index
reference date."""
return self._index_fixing
@index_fixing.setter
def index_fixing(self, value: Any) -> None:
raise ValueError(err.VE_ATTRIBUTE_IS_IMMUTABLE.format("index_fixing"))
@property
def index_only(self) -> bool:
"""A flag which determines non-payment of notional on supported *Periods*."""
return self._index_only
@property
def index_lag(self) -> int:
"""The indexation lag, in months, applied to the determination of index values."""
return self._index_lag
@property
def index_method(self) -> IndexMethod:
"""The :class:`~rateslib.enums.parameters.IndexMethod` to determine index values
from reference dates."""
return self._index_method
def try_index_value(
self,
index_curve: _BaseCurve_ = NoInput(0),
) -> Result[DualTypes]:
"""
Determine the index reference value from fixing or forecast curve, with lazy error raising.
Parameters
----------
index_curve : _BaseCurve, optional
The curve from which index values are forecast if required.
Returns
-------
Result[float, Dual, Dual2, Variable]
"""
return _try_index_value(
index_fixings=self.index_fixing.value,
index_date=self.index_fixing.date,
index_curve=index_curve,
index_lag=self.index_lag,
index_method=self.index_method,
)
def try_index_base(
self,
index_curve: _BaseCurve_ = NoInput(0),
) -> Result[DualTypes]:
"""
Determine the index base value from fixing or forecast curve, with lazy error raising.
Parameters
----------
index_curve : _BaseCurve, optional
The curve from which index values are forecast if required.
Returns
-------
Result[float, Dual, Dual2, Variable]
"""
return _try_index_value(
index_fixings=self.index_base.value,
index_date=self.index_base.date,
index_curve=index_curve,
index_lag=self.index_lag,
index_method=self.index_method,
)
def try_index_ratio(
self,
index_curve: _BaseCurve_ = NoInput(0),
) -> Result[tuple[DualTypes, DualTypes, DualTypes]]:
"""
Replicates :meth:`~rateslib.periods.parameters._IndexParams.index_ratio` with
lazy error raising.
Returns
-------
Result[tuple[float, Dual, Dual2, Variable]] for the ratio, numerator, denominator.
"""
denominator_ = self.try_index_base(index_curve=index_curve)
if isinstance(denominator_, Err):
return denominator_
numerator_ = self.try_index_value(index_curve=index_curve)
if isinstance(numerator_, Err):
return numerator_
n_, d_ = numerator_.unwrap(), denominator_.unwrap()
return Ok((n_ / d_, n_, d_))
def index_ratio(
self,
index_curve: _BaseCurve_ = NoInput(0),
) -> tuple[DualTypes, DualTypes, DualTypes]:
"""
Calculate the index ratio for the *Period*, including the numerator and denominator.
.. math::
I(m) = \\frac{I_{val}(m)}{I_{base}}
Parameters
----------
index_curve : _BaseCurve, optional
The curve from which index values are forecast if required.
Returns
-------
tuple[float, Dual, Dual2, Variable] for the ratio, numerator, denominator.
"""
return self.try_index_ratio(index_curve=index_curve).unwrap()
def _init_or_none_IndexParams(
_index_base: DualTypes_,
_index_lag: int_,
_index_method: IndexMethod | str_,
_index_fixings: DualTypes | Series[DualTypes] | str_, # type: ignore[type-var]
_index_only: bool_,
_index_base_date: datetime_,
_index_reference_date: datetime_,
) -> _IndexParams | None:
if all(
isinstance(_, NoInput)
for _ in (
_index_base,
_index_lag,
_index_method,
_index_fixings,
)
):
return None
else:
if isinstance(_index_base, str):
raise ValueError(err.VE_INDEX_BASE_NO_STR)
return _IndexParams(
_index_base=_index_base,
_index_lag=_drb(defaults.index_lag, _index_lag),
_index_method=_get_index_method(_drb(defaults.index_method, _index_method)),
_index_fixings=_index_fixings,
_index_base_date=_index_base_date,
_index_reference_date=_index_reference_date,
_index_only=_drb(False, _index_only),
)
================================================
FILE: python/rateslib/periods/parameters/ir_volatility.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from pandas import Series
from rateslib.data.fixings import IRSFixing
from rateslib.enums.generics import NoInput
from rateslib.enums.parameters import _get_ir_option_metric, _get_swaption_settlement_method
if TYPE_CHECKING:
from rateslib.local_types import (
DualTypes,
DualTypes_,
IROptionMetric,
IRSSeries,
OptionType,
SwaptionSettlementMethod,
datetime,
str_,
)
class _IROptionParams:
"""
Parameters for *IR Option Period* cashflows.
"""
_expiry: datetime
_metric: IROptionMetric
_option_fixing: IRSFixing
_strike: DualTypes_
_direction: OptionType
def __init__(
self,
_direction: OptionType,
_expiry: datetime,
_tenor: str | datetime,
_irs_series: IRSSeries,
_metric: str | IROptionMetric,
_option_fixings: DualTypes | Series[DualTypes] | str_, # type: ignore[type-var]
_strike: DualTypes_,
_settlement_method: SwaptionSettlementMethod | str,
):
self._direction = _direction
self._expiry = _expiry
self._metric = _get_ir_option_metric(_metric)
self._strike = _strike
self._settlement_method = _get_swaption_settlement_method(_settlement_method)
if isinstance(_option_fixings, Series):
value = IRSFixing._lookup(timeseries=_option_fixings, date=self.expiry)
self._option_fixing = IRSFixing(
tenor=_tenor,
value=value,
irs_series=_irs_series,
publication=_expiry,
identifier=NoInput(0),
)
elif isinstance(_option_fixings, str):
self._option_fixing = IRSFixing(
tenor=_tenor,
value=NoInput(0),
irs_series=_irs_series,
publication=_expiry,
identifier=_option_fixings,
)
else:
self._option_fixing = IRSFixing(
tenor=_tenor,
value=_option_fixings,
publication=_expiry,
irs_series=_irs_series,
identifier=NoInput(0),
)
self._option_fixing.irs.fixed_rate = self.strike
@property
def settlement_method(self) -> SwaptionSettlementMethod:
"""The settlement method of the option."""
return self._settlement_method
@property
def expiry(self) -> datetime:
"""The expiry date of the option."""
return self._expiry
@property
def direction(self) -> OptionType:
"""The direction of the option."""
return self._direction
@property
def strike(self) -> DualTypes_:
"""The strike price of the option."""
return self._strike
@strike.setter
def strike(self, val: DualTypes_) -> None:
self.option_fixing.irs.fixed_rate = val
self._strike = val
@property
def option_fixing(self) -> IRSFixing:
"""The :class:`~rateslib.data.fixings.IRSFixing` related to settlement of the option."""
return self._option_fixing
@property
def metric(self) -> IROptionMetric:
"""The default :class:`~rateslib.enums.IROptionMetric` used for the rate of the option."""
return self._metric
def time_to_expiry(self, now: datetime) -> float:
"""The time to expiry of the option in years measured by calendar days from ``now``."""
# TODO make this a dual, associated with theta
return (self.expiry - now).days / 365.0
================================================
FILE: python/rateslib/periods/parameters/mtm.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from functools import cached_property
from typing import TYPE_CHECKING
from pandas import Series
from rateslib.enums import Err, Ok
from rateslib.periods.parameters.settlement import _init_fx_fixing
if TYPE_CHECKING:
from rateslib.local_types import (
DualTypes,
FXFixing,
FXForwards_,
FXIndex,
Result,
datetime,
str_,
)
class _MtmParams:
"""
Parameters for *Period* cashflows associated with multiple
:class:`~rateslib.data.fixings.FXFixing`.
Parameters
----------
_fx_fixing_start: FXFixing
The :class:`~rateslib.data.fixings.FXFixing` that is determined at the start of the
*Period*.
_fx_fixing_end: FXFixing
The :class:`~rateslib.data.fixings.FXFixing` that is determined at the end of the *Period*.
_currency: str
The local *settlement currency* of the *Period*.
"""
_fx_fixing_start: FXFixing
_fx_fixing_end: FXFixing
_currency: str
def __init__(
self,
_fx_fixing_start: FXFixing,
_fx_fixing_end: FXFixing,
_currency: str,
) -> None:
self._fx_fixing_start = _fx_fixing_start
self._fx_fixing_end = _fx_fixing_end
self._currency = _currency
@property
def fx_fixing_start(self) -> FXFixing:
"""The :class:`~rateslib.data.fixings.FXFixing` measured at the start of the period."""
return self._fx_fixing_start
@property
def fx_fixing_end(self) -> FXFixing:
"""The :class:`~rateslib.data.fixings.FXFixing` measured at the end of the period."""
return self._fx_fixing_end
@property
def currency(self) -> str:
"""The settlement currency of the period."""
return self._currency
@property
def pair(self) -> str:
"""The pair that defines each :class:`~rateslib.data.fixings.FXFixing`."""
return self.fx_fixing_start.pair
@property
def reference_currency(self) -> str:
"""The *reference currency* of the period."""
ccy1, ccy2 = self.pair[:3], self.pair[3:]
return ccy1 if ccy2 == self.currency else ccy2
@cached_property
def fx_reversed(self) -> bool:
"""Whether the ``reference_currency`` and ``currency`` are reversed in the ``pair``."""
return self.currency == self.pair[:3]
def try_fixing_change(self, fx: FXForwards_) -> Result[DualTypes]:
"""Calculate the change between the FX fixing at the start and end of the period."""
fx0 = self.fx_fixing_start.try_value_or_forecast(fx=fx)
fx1 = self.fx_fixing_end.try_value_or_forecast(fx=fx)
if isinstance(fx0, Err):
return fx0
if isinstance(fx1, Err):
return fx1
else:
return Ok(fx1.unwrap() - fx0.unwrap())
def _init_MtmParams(
_fx_index: FXIndex,
_start: datetime,
_end: datetime,
_fx_fixings_start: DualTypes | Series[DualTypes] | str_, # type: ignore[type-var]
_fx_fixings_end: DualTypes | Series[DualTypes] | str_, # type: ignore[type-var]
_currency: str,
) -> _MtmParams:
# FX fixing publication dates are derived under the ISDA conventions associated with FXIndex.
return _MtmParams(
_fx_fixing_start=_init_fx_fixing(
delivery=_start,
fx_index=_fx_index,
fixings=_fx_fixings_start,
),
_fx_fixing_end=_init_fx_fixing(
delivery=_end,
fx_index=_fx_index,
fixings=_fx_fixings_end,
),
_currency=_currency,
)
================================================
FILE: python/rateslib/periods/parameters/period.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from functools import cached_property
from typing import TYPE_CHECKING
from rateslib.enums.generics import (
NoInput,
)
from rateslib.scheduling import Convention, Frequency, dcf
if TYPE_CHECKING:
from rateslib.local_types import (
Adjuster_,
CalTypes,
datetime,
datetime_,
)
class _PeriodParams:
"""Parameters of *Period* cashflows associated with some
:class:`~rateslib.scheduling.Schedule`.
Parameters
----------
_start: datetime
The identified start date of the *Period*.
_end: datetime
The identified end date of the *Period*.
_frequency: Frequency
The :class:`~rateslib.scheduling.Frequency` associated with the *Period*.
_convention: Convention
The day count :class:`~rateslib.scheduling.Convention` associated with the *Period*.
_termination: datetime, optional
The termination date of an external :class:`~rateslib.scheduling.Schedule`.
_calendar: Calendar, optional
The calendar associated with the *Period*.
_stub: bool
Whether the *Period* is defined as a stub according to some external
:class:`~rateslib.scheduling.Schedule`.
_adjuster: Adjuster, optional
The date :class:`~rateslib.scheduling.Adjuster` applied to unadjusted dates in the
external :class:`~rateslib.scheduling.Schedule` to arrive at adjusted accrual dates.
"""
_start: datetime
_end: datetime
_frequency: Frequency
_convention: Convention
_termination: datetime_
_calendar: CalTypes
_stub: bool
_adjuster: Adjuster_
def __init__(
self,
_start: datetime,
_end: datetime,
_frequency: Frequency,
_convention: Convention,
_termination: datetime_,
_calendar: CalTypes,
_adjuster: Adjuster_,
_stub: bool,
):
if _end < _start:
raise ValueError("`end` cannot be before `start`.")
self._start = _start
self._end = _end
self._frequency = _frequency
self._convention = _convention
self._termination = _termination
self._calendar = _calendar
self._stub = _stub
self._adjuster = _adjuster
@property
def start(self) -> datetime:
"""The identified start date of the *Period*."""
return self._start
@property
def end(self) -> datetime:
"""The identified end date of the *Period*."""
return self._end
@property
def termination(self) -> datetime_:
"""The termination date of an external :class:`~rateslib.scheduling.Schedule`."""
return self._termination
@property
def adjuster(self) -> Adjuster_:
"""The date :class:`~rateslib.scheduling.Adjuster` applied to unadjusted dates in the
external :class:`~rateslib.scheduling.Schedule` to arrive at adjusted accrual dates."""
return self._adjuster
@property
def calendar(self) -> CalTypes:
"""The calendar associated with the *Period*."""
return self._calendar
@property
def stub(self) -> bool:
"""Whether the *Period* is defined as a stub according to some external
:class:`~rateslib.scheduling.Schedule`"""
return self._stub
@property
def convention(self) -> Convention:
"""The day count :class:`~rateslib.scheduling.Convention` associated with the *Period*."""
return self._convention
@property
def frequency(self) -> Frequency:
"""The :class:`~rateslib.scheduling.Frequency` associated with the *Period*."""
return self._frequency
@cached_property
def dcf(self) -> float:
"""
The DCF of the *Period* determined under its given parameters.
"""
return dcf(
start=self.start,
end=self.end,
convention=self.convention,
termination=self.termination,
frequency=self.frequency,
stub=self.stub,
roll=NoInput(0), # `frequency` is a Frequency.
calendar=self.calendar,
adjuster=self.adjuster,
)
================================================
FILE: python/rateslib/periods/parameters/rate.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from datetime import datetime
from functools import cached_property
from typing import TYPE_CHECKING
import numpy as np
from pandas import Series
from rateslib import defaults
from rateslib.data.fixings import (
FloatRateIndex,
FloatRateSeries,
IBORFixing,
IBORStubFixing,
RFRFixing,
_get_float_rate_series,
_RFRRate,
)
from rateslib.enums.generics import (
NoInput,
_drb,
)
from rateslib.enums.parameters import (
FloatFixingMethod,
SpreadCompoundMethod,
_get_float_fixing_method,
_get_spread_compound_method,
)
from rateslib.scheduling import Convention, Frequency, StubInference
from rateslib.scheduling.adjuster import _convert_to_adjuster
from rateslib.scheduling.frequency import _get_frequency, _get_tenor_from_frequency
if TYPE_CHECKING:
from rateslib.local_types import (
Adjuster,
Adjuster_,
CalTypes,
DualTypes,
DualTypes_,
PeriodFixings,
datetime,
str_,
)
def _init_FloatRateParams(
_float_spread: DualTypes_,
_rate_fixings: PeriodFixings,
_fixing_method: FloatFixingMethod | str_,
_spread_compound_method: SpreadCompoundMethod | str_,
_fixing_frequency: Frequency | str_,
_fixing_series: FloatRateSeries | str_,
_accrual_start: datetime,
_accrual_end: datetime,
_period_calendar: CalTypes,
_period_adjuster: Adjuster_,
_period_convention: Convention,
_period_frequency: Frequency,
_period_stub: bool,
) -> _FloatRateParams:
fixing_method: FloatFixingMethod = _get_float_fixing_method(
_drb(defaults.fixing_method, _fixing_method)
)
spread_compound_method = _get_spread_compound_method(
_drb(defaults.spread_compound_method, _spread_compound_method)
)
fixing_series: FloatRateSeries = _init_float_rate_series(
fixing_series=_fixing_series,
calendar=_period_calendar,
convention=_period_convention,
adjuster=_period_adjuster,
fixing_method=fixing_method,
)
float_spread = _drb(0.0, _float_spread)
if isinstance(_fixing_frequency, NoInput):
if type(fixing_method) is FloatFixingMethod.IBOR:
fixing_frequency = _period_frequency
else:
fixing_frequency = Frequency.BusDays(1, fixing_series.calendar)
else:
fixing_frequency = _get_frequency(
frequency=_fixing_frequency, roll=NoInput(0), calendar=fixing_series.calendar
)
fixing_index = FloatRateIndex(
frequency=fixing_frequency,
series=fixing_series,
)
if type(fixing_method) is FloatFixingMethod.IBOR and not _period_stub:
if isinstance(_rate_fixings, Series):
result = IBORFixing._lookup(
timeseries=_rate_fixings,
date=fixing_index.calendar.lag_bus_days(_accrual_start, -fixing_index.lag, False),
bounds=None,
)
rate_fixing: IBORFixing | IBORStubFixing | RFRFixing = IBORFixing(
rate_index=fixing_index,
accrual_start=_accrual_start,
date=NoInput(0),
value=result,
identifier=NoInput(0),
)
else:
if isinstance(_rate_fixings, str):
identifier: str_ = _rate_fixings + "_" + _get_tenor_from_frequency(fixing_frequency)
else:
identifier = NoInput(0)
rate_fixing = IBORFixing(
rate_index=fixing_index,
accrual_start=_accrual_start,
date=NoInput(0),
value=_rate_fixings if not isinstance(_rate_fixings, str) else NoInput(0),
identifier=identifier,
)
elif type(fixing_method) is FloatFixingMethod.IBOR and _period_stub:
if isinstance(_rate_fixings, Series):
result = IBORFixing._lookup(
timeseries=_rate_fixings,
date=fixing_index.calendar.lag_bus_days(_accrual_start, -fixing_index.lag, False),
bounds=None,
)
rate_fixing = IBORStubFixing(
rate_series=fixing_series,
accrual_start=_accrual_start,
accrual_end=_accrual_end,
date=NoInput(0),
value=result,
identifier=NoInput(0),
)
else:
rate_fixing = IBORStubFixing(
rate_series=fixing_series,
accrual_start=_accrual_start,
accrual_end=_accrual_end,
date=NoInput(0),
value=_rate_fixings if not isinstance(_rate_fixings, str) else NoInput(0),
identifier=_rate_fixings if isinstance(_rate_fixings, str) else NoInput(0),
)
else:
if isinstance(_rate_fixings, Series):
dates_obs, dates_dcf = RFRFixing._get_date_bounds(
accrual_start=_accrual_start,
accrual_end=_accrual_end,
fixing_method=fixing_method,
fixing_calendar=fixing_index.calendar,
)
dcfs_dcf = _RFRRate._get_dcf_values(
dcf_dates=np.array(
fixing_index.calendar.bus_date_range(dates_dcf[0], dates_dcf[1])
),
fixing_convention=fixing_index.convention,
fixing_calendar=fixing_index.calendar,
)
result = RFRFixing._lookup(
timeseries=_rate_fixings,
fixing_method=fixing_method,
spread_compound_method=spread_compound_method,
float_spread=float_spread,
dates_obs=np.array(
fixing_index.calendar.bus_date_range(dates_obs[0], dates_obs[1])
),
dcfs_dcf=dcfs_dcf,
)[0]
rate_fixing = RFRFixing(
rate_index=fixing_index,
float_spread=float_spread,
spread_compound_method=spread_compound_method,
accrual_start=_accrual_start,
accrual_end=_accrual_end,
fixing_method=fixing_method,
value=result,
identifier=NoInput(0),
)
else:
if isinstance(_rate_fixings, str):
identifier = _rate_fixings + "_" + _get_tenor_from_frequency(fixing_index.frequency)
else:
identifier = NoInput(0)
rate_fixing = RFRFixing(
rate_index=fixing_index,
accrual_start=_accrual_start,
accrual_end=_accrual_end,
fixing_method=fixing_method,
float_spread=float_spread,
spread_compound_method=spread_compound_method,
value=_rate_fixings if not isinstance(_rate_fixings, str) else NoInput(0),
identifier=identifier,
)
return _FloatRateParams(
_float_spread=float_spread,
_spread_compound_method=spread_compound_method,
_fixing_series=fixing_series,
_fixing_frequency=fixing_frequency,
_fixing_method=fixing_method,
_rate_fixing=rate_fixing,
)
def _init_float_rate_series(
fixing_series: FloatRateSeries | str_,
calendar: CalTypes,
convention: Convention,
fixing_method: FloatFixingMethod,
adjuster: Adjuster | NoInput,
) -> FloatRateSeries:
if not isinstance(fixing_series, NoInput):
fixing_series_ = _get_float_rate_series(fixing_series)
del fixing_series
if (
isinstance(fixing_method, FloatFixingMethod.IBOR)
and fixing_method.method_param() != fixing_series_.lag
):
raise ValueError(
"A `fixing_series` has been provided with a publication `lag` that does not "
f"match the `param` of the `fixing_method`.\nGot {fixing_series_.lag} and "
f"{fixing_method.method_param()} respectively."
)
return fixing_series_
else:
# modifier is defaulted to days only type if RFR based
if type(fixing_method) is FloatFixingMethod.IBOR:
lag = fixing_method.method_param()
else:
lag = 0
return FloatRateSeries(
lag=lag,
calendar=calendar,
convention=convention,
modifier=_convert_to_adjuster(
modifier=_drb(defaults.modifier, adjuster),
settlement=False,
mod_days=not isinstance(fixing_method, FloatFixingMethod.IBOR),
),
eom=defaults.eom,
zero_period_stub=StubInference.ShortBack, # TODO: hard coded default replaced?
)
class _CreditParams:
"""
Parameters associated with credit related *Periods*.
Parameters
----------
_premium_accrued: bool
Whether premium *Periods* pay accrued in the event of mid-period default.
"""
_premium_accrued: bool
def __init__(self, _premium_accrued: bool):
self.__premium_accrued = _premium_accrued
@property
def premium_accrued(self) -> bool:
"""Whether premium *Periods* pay accrued in the event of mid-period default."""
return self._premium_accrued
class _FixedRateParams:
"""
Parameters for a *Period* containing a fixed rate.
Parameters
----------
_fixed_rate: float, Dual, Dual2, Variable, optional
The fixed rate defining the *Period* cashflow.
"""
def __init__(self, _fixed_rate: DualTypes_) -> None:
self._fixed_rate = _fixed_rate
@property
def fixed_rate(self) -> DualTypes | NoInput:
"""The fixed rate defining the *Period* cashflow."""
return self._fixed_rate
@fixed_rate.setter
def fixed_rate(self, value: DualTypes_) -> None:
self._fixed_rate = value
class _FloatRateParams:
"""
Parameters for a *Period* containing a floating rate.
Parameters
----------
_fixing_method: FloatFixingMethod
The :class:`~rateslib.enums.parameters.FloatFixingMethod` describing the determination
of the floating rate for the period.
_fixing_series: FloatRateSeries,
The :class:`~rateslib.enums.parameters.FloatRateSeries` of the
:class:`~rateslib.enums.parameters.FloatRateIndex` defining the specific interest
rate index and some of its calculation parameters.
_fixing_frequency: Frequency,
The :class:`~rateslib.scheduling.Frequency` of the
:class:`~rateslib.enums.parameters.FloatRateIndex`.
_float_spread: float, Dual, Dual2, Variable
The amount (in bps) added to the rate in the period rate determination.
_spread_compound_method: SpreadCompoundMethod
The :class:`~rateslib.enums.parameters.SpreadCompoundMethod` used in the calculation
of the period rate when combining a ``_float_spread``. Used **only** with RFR type
``fixing_method``.
_rate_fixing: IBORFixing, IBORStubFixing, RFRFixing
The appropriate rate fixing class that is used to determine if known, published
values are available for the *Period*.
"""
def __init__(
self,
*,
_fixing_method: FloatFixingMethod,
_fixing_series: FloatRateSeries,
_fixing_frequency: Frequency,
_float_spread: DualTypes,
_spread_compound_method: SpreadCompoundMethod,
_rate_fixing: IBORFixing | IBORStubFixing | RFRFixing,
) -> None:
self._fixing_method: FloatFixingMethod = _fixing_method
self._spread_compound_method: SpreadCompoundMethod = _spread_compound_method
self._fixing_series = _fixing_series
self._fixing_index = FloatRateIndex(
frequency=_fixing_frequency,
series=_fixing_series,
)
self._float_spread: DualTypes = _float_spread
self._rate_fixing: IBORFixing | IBORStubFixing | RFRFixing = _rate_fixing
self._validate_combinations_args()
@property
def fixing_series(self) -> FloatRateSeries:
"""The :class:`~rateslib.enums.parameters.FloatRateSeries` of the
:class:`~rateslib.enums.parameters.FloatRateIndex`."""
return self._fixing_series
@property
def fixing_index(self) -> FloatRateIndex:
"""The :class:`~rateslib.enums.parameters.FloatRateIndex` assoociated with the
determination of the floating rate for the *Period*."""
return self._fixing_index
@cached_property
def fixing_date(self) -> datetime:
"""The relevant date of the (first) rate fixing for the *Period*."""
if type(self.fixing_method) in [
FloatFixingMethod.RFRPaymentDelay,
FloatFixingMethod.RFRPaymentDelayAverage,
FloatFixingMethod.RFRLockout,
FloatFixingMethod.RFRLockoutAverage,
]:
return self.accrual_start
else:
return self.fixing_calendar.lag_bus_days(
date=self.accrual_start, days=-self.fixing_series.lag, settlement=False
)
@property
def fixing_convention(self) -> Convention:
"""The day count :class:`~rateslib.scheduling.Convention` of the
:class:`~rateslib.enums.parameters.FloatRateIndex`."""
return self.fixing_index.convention
@property
def fixing_modifier(self) -> Adjuster:
"""The date :class:`~rateslib.scheduling.Adjuster` of the
:class:`~rateslib.enums.parameters.FloatRateIndex`."""
return self.fixing_index.modifier
@property
def fixing_frequency(self) -> Frequency:
"""The date :class:`~rateslib.scheduling.Frequency` of the
:class:`~rateslib.enums.parameters.FloatRateIndex`."""
return self.fixing_index.frequency
@property
def fixing_identifier(self) -> str_:
"""The string identifier provided to ``rate_fixings`` to construct a *Fixings* object."""
if isinstance(self.rate_fixing, RFRFixing):
if isinstance(self.rate_fixing.identifier, str):
return self.rate_fixing.identifier[:-3] # strip out "_1B"
return NoInput(0)
elif isinstance(self.rate_fixing, IBORFixing):
if isinstance(self.rate_fixing.identifier, str):
if self.rate_fixing.identifier[-3] == "_":
return self.rate_fixing.identifier[:-3]
else: # [-4] == "_"
return self.rate_fixing.identifier[:-4]
else:
return NoInput(0)
else: # IBORStubFixing
if isinstance(self.rate_fixing.identifier, str):
return self.rate_fixing.identifier # no suffix
return NoInput(0)
@property
def accrual_start(self) -> datetime:
"""
The accrual start date for the *Period*.
Fixing dates will be measured relative to this date under appropriate calendars and
``fixing_method``
"""
return self.rate_fixing.accrual_start
@property
def accrual_end(self) -> datetime:
"""The accrual end date for the *Period*.
Final fixing dates (or IBOR stub weights) will be measured relative to this date under
appropriate calendars and ``fixing_method``.
"""
return self.rate_fixing.accrual_end
@property
def fixing_calendar(self) -> CalTypes:
"""The calendar of the :class:`~rateslib.enums.parameters.FloatRateIndex`."""
return self.fixing_index.calendar
@property
def fixing_method(self) -> FloatFixingMethod:
"""The :class:`~rateslib.enums.parameters.FloatFixingMethod` defining the determination of
the floating rate for the period."""
return self._fixing_method
@property
def float_spread(self) -> DualTypes:
"""The amount (in bps) added to the rate in the period rate determination."""
return self._float_spread
@float_spread.setter
def float_spread(self, value: DualTypes) -> None:
self._float_spread = value
self.rate_fixing.reset()
@property
def spread_compound_method(self) -> SpreadCompoundMethod:
"""The :class:`~rateslib.enums.parameters.SpreadCompoundMethod` used in the calculation."""
return self._spread_compound_method
@property
def rate_fixing(self) -> IBORFixing | IBORStubFixing | RFRFixing:
"""The :class:`~rateslib.data.fixings.IBORFixing`,
:class:`~rateslib.data.fixings.IBORStubFixing`, or :class:`~rateslib.data.fixings.RFRFixing`
appropriate for the *Period*."""
return self._rate_fixing
def _validate_combinations_args(self) -> None:
"""
Validate the argument input to float periods.
Returns
-------
tuple
"""
if (
type(self.fixing_method)
in [
FloatFixingMethod.RFRLockout,
FloatFixingMethod.RFRLockoutAverage,
]
and self.fixing_method.method_param() < 1
):
raise ValueError(
f'`method_param` must be >0 for "RFRLockout" type `fixing_method`, '
f"got {self.fixing_method.method_param()}",
)
================================================
FILE: python/rateslib/periods/parameters/settlement.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from functools import cached_property
from typing import TYPE_CHECKING
from pandas import Series
import rateslib.errors as err
from rateslib import defaults
from rateslib.data.fixings import FXFixing, _FXFixingMajor, _get_fx_index
from rateslib.enums.generics import (
NoInput,
_drb,
)
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
DualTypes,
DualTypes_,
FXIndex,
FXIndex_,
datetime,
datetime_,
str_,
)
class _SettlementParams:
"""
Parameters for settlement of *Period* cashflows.
Parameters
----------
_currency: str
The physical *settlement currency* of the *Period*.
_notional: float, Dual, Dual2, Variable
The notional amount of the *Period* expressed in ``notional currency``.
_notional_currency: str
The currency for the expresion of ``notional`` amount.
_payment: datetime
The payment date of the *Period* cashflow.
_ex_dividend: datetime, optional
The ex-dividend date of the *Period*. Settlements occurring **after** this date
are assumed to be non-receivable. If not given is assumed to be equal to ``payment``
"""
_currency: str
_notional: DualTypes
_notional_currency: str
_payment: datetime
_ex_dividend: datetime
def __init__(
self,
_currency: str,
_notional: DualTypes,
_notional_currency: str,
_payment: datetime,
_ex_dividend: datetime_ = NoInput(0),
) -> None:
self._currency = _currency.lower()
self._notional = _notional
self._notional_currency = _notional_currency.lower()
self._payment = _payment
self._ex_dividend = _drb(self.payment, _ex_dividend)
@property
def currency(self) -> str:
"""The local settlement currency of the *Period* cashflow."""
return self._currency
@property
def notional(self) -> DualTypes:
"""The notional amount of the *Period* expressed in units of ``notional_currency``."""
return self._notional
@property
def notional_currency(self) -> str:
"""The currency for the expression of ``notional`` amount."""
return self._notional_currency
@property
def payment(self) -> datetime:
"""The payment date of the *Period* cashflow."""
return self._payment
@property
def ex_dividend(self) -> datetime:
"""The ex-dividend date for settlement of the *Period* cashflow."""
return self._ex_dividend
class _NonDeliverableParams:
"""
Parameters for determination of non-deliverable *Period* cashflows.
Parameters
----------
_currency: str
The physical *settlement currency* of the *Period*.
_fx_index: FXIndex,
The :class:`~rateslib.fixings.data.FXIndex` defining conventions of the currency pair
of the *FX* rate fixing that determines settlement, including its
settlement and quotation conventions. The
*reference currency* is implied from ``pair`` when it is not equal to ``currency``.
_delivery: datetime
The settlement delivery date of the *FX* rate fixing.
_fx_fixings: float, Dual, Dual2, Variable, Series, str, optional
The value of the :class:`~rateslib.data.fixings.FXFixing`. If a scalar is used directly.
If a string identifier will link to the central ``fixings`` object and data loader.
"""
def __init__(
self,
_currency: str,
_fx_index: FXIndex,
_delivery: datetime,
_fx_fixings: DualTypes | Series[DualTypes] | str_ = NoInput(0), # type: ignore[type-var]
) -> None:
self._currency = _currency.lower()
self._fx_index = _fx_index
self._fx_fixing = _init_fx_fixing(
fx_index=_fx_index,
fixings=_fx_fixings,
delivery=_delivery,
)
@property
def currency(self) -> str:
"""The physical *settlement currency* of the *Period*."""
return self._currency
@property
def reference_currency(self) -> str:
"""The *reference currency* of underlying *Period* cashflows."""
ccy1, ccy2 = self.pair[0:3], self.pair[3:6]
return ccy1 if ccy1 != self.currency else ccy2
@property
def fx_index(self) -> FXIndex:
"""
The :class:`~rateslib.fixings.data.FXIndex` defining conventions of the FX fixing.
"""
return self._fx_index
@property
def pair(self) -> str:
"""The currency pair associated with the :class:`~rateslib.data.fixings.FXFixing`."""
return self.fx_index.pair
@property
def fx_fixing(self) -> FXFixing:
"""The :class:`~rateslib.data.fixings.FXFixing` associated with the *Period* cashflow."""
return self._fx_fixing
@fx_fixing.setter
def fx_fixing(self, val: Any) -> None:
raise ValueError(err.VE_ATTRIBUTE_IS_IMMUTABLE.format("fx_fixing"))
@property
def delivery(self) -> datetime:
"""The settlement delivery date of the :class:`~rateslib.data.fixings.FXFixing`."""
return self.fx_fixing.delivery
@property
def publication(self) -> datetime:
"""The publication date of the :class:`~rateslib.data.fixings.FXFixing`."""
return self.fx_fixing.publication
@cached_property
def fx_reversed(self) -> bool:
"""Is *True* if the ``referency_currency`` is the RHS of ``pair``."""
return self.pair[3:6] == self.reference_currency
def _init_or_none_NonDeliverableParams(
_currency: str,
_fx_index: str | FXIndex_,
_delivery: datetime,
_fx_fixings: DualTypes | Series[DualTypes] | str_, # type: ignore[type-var]
) -> _NonDeliverableParams | None:
if isinstance(_fx_index, NoInput):
return None
else:
return _NonDeliverableParams(
_currency=_currency,
_fx_index=_get_fx_index(_fx_index),
_delivery=_delivery,
_fx_fixings=_fx_fixings,
)
def _init_SettlementParams_with_fx_pair(
_currency: str_,
_payment: datetime,
_notional: DualTypes_,
_ex_dividend: datetime,
_fx_pair: FXIndex_,
) -> _SettlementParams:
notional = _drb(defaults.notional, _notional)
ccy = _drb(defaults.base_currency, _currency).lower()
if isinstance(_fx_pair, NoInput):
return _SettlementParams(
_currency=ccy,
_notional_currency=ccy,
_payment=_payment,
_notional=notional,
_ex_dividend=_ex_dividend,
)
else:
c1, c2 = _fx_pair.pair[:3], _fx_pair.pair[3:]
# other parameters will also be determined.
if ccy != c1 and ccy != c2:
raise ValueError(err.VE_MISMATCHED_ND_PAIR.format(ccy, _fx_pair.pair))
return _SettlementParams(
_currency=ccy,
_notional_currency=c1 if c1 != ccy else c2,
_payment=_payment,
_notional=notional,
_ex_dividend=_ex_dividend,
)
def _init_fx_fixing(
delivery: datetime,
fx_index: FXIndex,
fixings: DualTypes | Series[DualTypes] | str_, # type: ignore[type-var]
) -> FXFixing:
# physical FX fixings do not set versus a screen therefore do not require cross methodology
if isinstance(fixings, Series):
publication_: datetime = fx_index.isda_fixing_date(delivery)
value = _FXFixingMajor._lookup(timeseries=fixings, date=publication_)
return FXFixing(delivery=delivery, value=value, fx_index=fx_index)
elif isinstance(fixings, str):
return FXFixing(delivery=delivery, identifier=fixings, fx_index=fx_index)
else:
return FXFixing(delivery=delivery, value=fixings, fx_index=fx_index)
================================================
FILE: python/rateslib/periods/protocols/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
# ruff: noqa: I001
from abc import ABCMeta
from rateslib.periods.protocols.npv import (
_WithIndexingStatic,
_WithNonDeliverableStatic,
_WithNPV,
_WithNPVStatic,
)
from rateslib.periods.protocols.analytic_delta import (
_WithAnalyticDelta,
_WithAnalyticDeltaStatic,
)
from rateslib.periods.protocols.analytic_fixings import (
_WithAnalyticRateFixings,
_WithAnalyticRateFixingsStatic,
)
from rateslib.periods.protocols.analytic_greeks import (
_WithAnalyticFXOptionGreeks,
_WithAnalyticIROptionGreeks,
)
from rateslib.periods.protocols.cashflows import (
_WithCashflows,
_WithCashflowsStatic,
)
from rateslib.periods.protocols.fixings import (
_WithFixings,
)
class _BasePeriod(
_WithCashflows,
_WithAnalyticDelta,
_WithAnalyticRateFixings,
_WithFixings,
metaclass=ABCMeta,
):
"""Abstract base class for *Period* types."""
pass
class _BasePeriodStatic(
_WithCashflowsStatic,
_WithAnalyticDeltaStatic,
_WithAnalyticRateFixingsStatic,
_BasePeriod,
metaclass=ABCMeta,
):
"""Abstract base class for *Static Period* types."""
pass
__all__ = [
"_BasePeriod",
"_BasePeriodStatic",
"_WithNPV",
"_WithCashflows",
"_WithFixings",
"_WithAnalyticDelta",
"_WithAnalyticRateFixings",
"_WithAnalyticFXOptionGreeks",
"_WithAnalyticIROptionGreeks",
"_WithNPVStatic",
"_WithCashflowsStatic",
"_WithAnalyticDeltaStatic",
"_WithAnalyticRateFixingsStatic",
"_WithIndexingStatic",
"_WithNonDeliverableStatic",
]
================================================
FILE: python/rateslib/periods/protocols/analytic_delta.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from rateslib.curves._parsers import (
_try_disc_required_maybe_from_curve,
)
from rateslib.enums.generics import Err, NoInput, Ok
from rateslib.periods.parameters.settlement import _SettlementParams
from rateslib.periods.protocols.npv import (
_screen_ex_div_and_forward,
_WithIndexingStatic,
_WithNonDeliverableStatic,
)
from rateslib.periods.utils import (
_maybe_local,
)
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CurveOption_,
DualTypes,
FXForwards_,
FXRevised_,
Result,
_BaseCurve,
_BaseCurve_,
_FXVolOption_,
datetime_,
str_,
)
class _WithAnalyticDelta(Protocol):
r"""
Protocol to establish analytical sensitivity to rate type metrics.
.. rubric:: Required methods
.. autosummary::
~_WithAnalyticDelta.try_immediate_local_analytic_delta
.. rubric:: Provided methods
.. autosummary::
~_WithAnalyticDelta.try_local_analytic_delta
~_WithAnalyticDelta.analytic_delta
Notes
-----
Since this is *analytical*, each *Period* type must define its unique referenced sensitivity
to interest rates. This protocol ultimately determines the quantity,
.. math::
A^{bas}(m_f, m_s) = \frac{\partial P^{bas}(m_f, m_s)}{\partial \xi}, \quad \text{for some quantity, } \xi
""" # noqa: E501
_settlement_params: _SettlementParams
@property
def settlement_params(self) -> _SettlementParams:
"""The :class:`~rateslib.periods.parameters._SettlementParams` of the
*Period*."""
return self._settlement_params
def try_immediate_local_analytic_delta(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
) -> Result[DualTypes]:
r"""
Calculate the immediate, analytic rate delta of a *Period* expressed in local
settlement currency, with lazy error raising.
This method does **not** adjust for ex-dividend and is an immediate measure according to,
.. math::
A_0 = \frac{\partial P_0}{\partial \xi}, \quad \text{for some, } \xi
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForward` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
Returns
-------
Result[float, Dual, Dual2, Variable]
"""
pass
def try_local_analytic_delta(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> Result[DualTypes]:
r"""
Calculate the analytic rate delta of a *Period* expressed in local settlement currency,
with lazy error raising.
This method adjusts the immediate NPV for ex-dividend and forward projected value,
according to,
.. math::
A(m_s, m_f) = \mathbb{I}(m_s) \frac{1}{v(m_f)} A_0, \qquad \; \mathbb{I}(m_s) = \left \{ \begin{matrix} 0 & m_s > m_{ex} \\ 1 & m_s \leq m_{ex} \end{matrix} \right .
for forward, :math:`m_f`, settlement, :math:`m_s`, and ex-dividend, :math:`m_{ex}`.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForward` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
settlement: datetime, optional
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, optional
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
Result[float, Dual, Dual2, Variable]
""" # noqa: E501
local_immediate_result = self.try_immediate_local_analytic_delta(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
)
return _screen_ex_div_and_forward(
local_value=local_immediate_result,
rate_curve=rate_curve,
disc_curve=disc_curve,
ex_dividend=self.settlement_params.ex_dividend,
settlement=settlement,
forward=forward,
)
def analytic_delta(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
"""
Calculate the analytic rate delta of the *Period* converted to any other
*base* accounting currency.
This method converts a local settlement currency value to a base accounting currency
according to:
.. math::
A^{bas}(m_s, m_f) = f_{loc:bas}(m_f) A(m_s, m_f)
.. hint::
If the cashflows are unspecified or incalculable due to missing information this method
will raise an exception. For a function that returns a `Result` indicating success or
failure use
:meth:`~rateslib.periods._WithAnalyticDelta.try_local_analytic_delta`.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
base: str, optional
The currency to convert the *local settlement* NPV to.
local: bool, optional
An override flag to return a dict of values indexed by string currency.
settlement: datetime, optional, (set as immediate date)
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, optional, (set as ``settlement``)
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
float, Dual, Dual2, Variable or dict
"""
local_delta = self.try_local_analytic_delta(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
settlement=settlement,
forward=forward,
).unwrap()
return _maybe_local(
value=local_delta,
local=local,
currency=self.settlement_params.currency,
fx=fx,
base=base,
forward=forward,
)
class _WithAnalyticDeltaStatic(
_WithAnalyticDelta, _WithIndexingStatic, _WithNonDeliverableStatic, Protocol
):
r"""
Protocol to establish analytical sensitivity to rate type metrics for *Static Period* types.
.. rubric:: Required methods
.. autosummary::
~_WithAnalyticDeltaStatic.try_unindexed_reference_cashflow_analytic_delta
.. rubric:: Provided methods
.. autosummary::
~_WithAnalyticDeltaStatic.try_reference_cashflow_analytic_delta
~_WithAnalyticDeltaStatic.try_unindexed_cashflow_analytic_delta
~_WithAnalyticDeltaStatic.try_cashflow_analytic_delta
~_WithAnalyticDeltaStatic.try_immediate_local_analytic_delta
~_WithAnalyticDeltaStatic.try_local_analytic_delta
~_WithAnalyticDeltaStatic.analytic_delta
Notes
-----
Since this is *analytical*, each *Period* type must define its unique referenced sensitivity
to interest rates. This protocol ultimately determines the quantity,
.. math::
A^{bas}(m_f, m_s) = \frac{\partial P^{bas}(m_f, m_s)}{\partial \xi}, \quad \text{for some quantity, } \xi
""" # noqa: E501
def try_unindexed_reference_cashflow_analytic_delta(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
) -> Result[DualTypes]:
r"""
Calculate the cashflow analytic delta for the *Static Period* before settlement currency
adjustment and indexation, with lazy error raising.
.. math::
\frac{\partial \mathbb{E^Q}[\bar{C}_t]}{\partial \xi}
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
Returns
-------
Result[float, Dual, Dual2, Variable]
"""
raise NotImplementedError(
f"type {type(self).__name__} has not implemented "
f"`try_unindexed_reference_cashflow_analytic_delta`"
)
def try_reference_cashflow_analytic_delta(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
) -> Result[DualTypes]:
r"""
Calculate the cashflow analytic delta for the *Static Period* before settlement currency
adjustment but after indexation, with lazy error raising.
.. math::
I_r \frac{\partial \mathbb{E^Q}[\bar{C}_t]}{\partial \xi}
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
Returns
-------
Result[float, Dual, Dual2, Variable]
"""
rrad = self.try_unindexed_reference_cashflow_analytic_delta(
rate_curve=rate_curve, disc_curve=disc_curve
)
return self.try_index_up(value=rrad, index_curve=index_curve)
def try_unindexed_cashflow_analytic_delta(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
) -> Result[DualTypes]:
r"""
Calculate the cashflow analytic delta for the *Static Period* with settlement currency
adjustment but without indexation, with lazy error raising.
.. math::
f(m_d) \frac{\partial \mathbb{E^Q}[\bar{C}_t]}{\partial \xi}
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
Returns
-------
Result[float, Dual, Dual2, Variable]
"""
rrad = self.try_unindexed_reference_cashflow_analytic_delta(
rate_curve=rate_curve, disc_curve=disc_curve
)
return self.try_convert_deliverable(value=rrad, fx=fx)
def try_cashflow_analytic_delta(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXRevised_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
) -> Result[DualTypes]:
r"""
Calculate the cashflow for the *Period* with settlement currency adjustment
and indexation.
.. math::
I_r f(m_d) \frac{\partial \mathbb{E^Q}[\bar{C}_t]}{\partial \xi}
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
Returns
-------
Result[float, Dual, Dual2, Variable]
"""
rad = self.try_reference_cashflow_analytic_delta(
rate_curve=rate_curve, disc_curve=disc_curve, index_curve=index_curve
)
lad = self.try_convert_deliverable(value=rad, fx=fx) # type: ignore[arg-type]
if lad.is_err:
return lad
return lad
def try_immediate_local_analytic_delta(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXRevised_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
) -> Result[DualTypes]:
dc_res = _try_disc_required_maybe_from_curve(curve=rate_curve, disc_curve=disc_curve)
if isinstance(dc_res, Err):
return dc_res
disc_curve_: _BaseCurve = dc_res.unwrap()
if self.settlement_params.payment < disc_curve_.nodes.initial:
# payment date is in the past
return Ok(0.0)
cad = self.try_cashflow_analytic_delta(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve_,
fx_vol=fx_vol,
fx=fx,
)
if cad.is_err:
return cad
return Ok(cad.unwrap() * disc_curve_[self.settlement_params.payment])
================================================
FILE: python/rateslib/periods/protocols/analytic_fixings.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from pandas import DataFrame, MultiIndex
from rateslib.curves._parsers import (
_try_disc_required_maybe_from_curve,
)
from rateslib.enums.generics import Err, NoInput, Ok
from rateslib.periods.parameters import _SettlementParams
from rateslib.periods.protocols import _WithIndexingStatic, _WithNonDeliverableStatic
from rateslib.periods.protocols.npv import _screen_ex_div_and_forward
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CurveOption_,
FXForwards_,
Result,
_BaseCurve,
_BaseCurve_,
_FXVolOption_,
datetime_,
)
class _WithAnalyticRateFixings(Protocol):
"""
Protocol to derive a rate fixings sensitivity *DataFrame*.
.. rubric:: Required methods
.. autosummary::
~_WithAnalyticRateFixings.try_immediate_analytic_rate_fixings
.. rubric:: Provided methods
.. autosummary::
~_WithAnalyticRateFixings.local_analytic_rate_fixings
"""
@property
def settlement_params(self) -> _SettlementParams: ...
def try_immediate_analytic_rate_fixings(
self,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
) -> Result[DataFrame]:
"""
Return a DataFrame of financial sensitivity to published interest rate fixings,
expressed in local **settlement currency** of the *Period* with immediate value, with
lazy error raising.
If the *Period* has no sensitivity to rates fixings this *DataFrame* is empty.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
Returns
-------
Result[DataFrame]
"""
return Ok(DataFrame())
def local_analytic_rate_fixings(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
"""
Return a DataFrame of financial sensitivity to published interest rate fixings,
expressed in local **settlement currency** of the *Period*.
If the *Period* has no sensitivity to rates fixings this *DataFrame* is empty.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
settlement: datetime, optional
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, optional
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
DataFrame
"""
rfs = self.try_immediate_analytic_rate_fixings(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
)
return _screen_ex_div_and_forward(
local_value=rfs, # type: ignore[arg-type]
rate_curve=rate_curve,
disc_curve=disc_curve,
ex_dividend=self.settlement_params.ex_dividend,
forward=forward,
settlement=settlement,
).unwrap() # type: ignore[return-value]
class _WithAnalyticRateFixingsStatic(
_WithAnalyticRateFixings, _WithIndexingStatic, _WithNonDeliverableStatic, Protocol
):
"""
Protocol to derive an analytic rate fixings sensitivity *DataFrame* from pricing *Curves*.
.. rubric:: Required methods
.. autosummary::
~_WithAnalyticRateFixingsStatic.try_unindexed_reference_cashflow_analytic_rate_fixings
.. rubric:: Provided methods
.. autosummary::
~_WithAnalyticRateFixingsStatic.try_unindexed_cashflow_analytic_rate_fixings
~_WithAnalyticRateFixingsStatic.try_reference_cashflow_analytic_rate_fixings
~_WithAnalyticRateFixingsStatic.try_cashflow_analytic_rate_fixings
~_WithAnalyticRateFixingsStatic.try_immediate_analytic_rate_fixings
~_WithAnalyticRateFixingsStatic.local_analytic_rate_fixings
"""
def try_unindexed_reference_cashflow_analytic_rate_fixings(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
) -> Result[DataFrame]:
"""
Return a DataFrame of financial sensitivity to published interest rate fixings,
expressed in reference currency of the *Period*, unadjusted
by timing of the cashflow and by indexation.
If the *Period* has no sensitivity to rates fixings this *DataFrame* is empty.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
Returns
-------
Result[DataFrame]
"""
raise NotImplementedError(
f"Type: {type(self).__name__} has not implemented "
f"`try_unindexed_reference_cashflow_fixings_sensitivity`."
)
def try_unindexed_cashflow_analytic_rate_fixings(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
) -> Result[DataFrame]:
"""
Return a DataFrame of financial sensitivity to published interest rate fixings,
expressed in settlement currency of the *Period*, unadjusted
by timing of the cashflow and indexation.
If the *Period* has no sensitivity to rates fixings this *DataFrame* is empty.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
Returns
-------
Result[DataFrame]
"""
urcfe = self.try_unindexed_reference_cashflow_analytic_rate_fixings(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
)
if self.non_deliverable_params is None:
return urcfe # no ND modifications required
if urcfe.is_err:
return urcfe
if urcfe.unwrap().empty:
return urcfe # nothing to modify
nd_scalar = self.try_convert_deliverable(value=Ok(1.0), fx=fx)
if nd_scalar.is_err:
return nd_scalar # type: ignore[return-value]
d = urcfe.unwrap() * nd_scalar.unwrap()
c = d.columns
d.columns = MultiIndex.from_tuples(
tuples=[
(c.values[0][0], c.values[0][1], self.settlement_params.currency, c.values[0][3])
],
names=c.names,
)
return Ok(d)
def try_reference_cashflow_analytic_rate_fixings(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
) -> Result[DataFrame]:
"""
Return a DataFrame of financial sensitivity to published interest rate fixings,
expressed in reference currency of the *Period*,adjusted for indexation but unadjusted
by timing of the cashflow.
If the *Period* has no sensitivity to rates fixings this *DataFrame* is empty.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
Returns
-------
Result[DataFrame]
"""
urcfe = self.try_unindexed_reference_cashflow_analytic_rate_fixings(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
)
if urcfe.is_err:
return urcfe
index_scalar = self.try_index_up(value=Ok(1.0), index_curve=index_curve)
if index_scalar.is_err:
return index_scalar # type: ignore[return-value]
return Ok(urcfe.unwrap() * index_scalar.unwrap())
def try_cashflow_analytic_rate_fixings(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
) -> Result[DataFrame]:
"""
Return a DataFrame of financial sensitivity to published interest rate fixings,
expressed in settlement currency of the *Period*, adjusted for indexation but unadjusted
by timing of the cashflow.
If the *Period* has no sensitivity to rates fixings this *DataFrame* is empty.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
Returns
-------
Result[DataFrame]
"""
ucfe = self.try_unindexed_cashflow_analytic_rate_fixings(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
)
if ucfe.is_err:
return ucfe
index_scalar = self.try_index_up(value=Ok(1.0), index_curve=index_curve)
if index_scalar.is_err:
return index_scalar # type: ignore[return-value]
return Ok(ucfe.unwrap() * index_scalar.unwrap())
def try_immediate_analytic_rate_fixings(
self,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
) -> Result[DataFrame]:
dc_res = _try_disc_required_maybe_from_curve(curve=rate_curve, disc_curve=disc_curve)
if isinstance(dc_res, Err):
return dc_res
disc_curve_: _BaseCurve = dc_res.unwrap()
cfe = self.try_cashflow_analytic_rate_fixings(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
)
if cfe.is_err:
return cfe
if self.settlement_params.payment < disc_curve_.nodes.initial:
# payment date is in the past
return Ok(cfe.unwrap() * 0.0)
return Ok(cfe.unwrap() * disc_curve_[self.settlement_params.payment])
================================================
FILE: python/rateslib/periods/protocols/analytic_greeks.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from rateslib.dual import dual_log, dual_norm_cdf, dual_norm_pdf
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import FXDeltaMethod, OptionPricingModel
from rateslib.periods.parameters.fx_volatility import _FXOptionParams
from rateslib.periods.parameters.ir_volatility import _IROptionParams
from rateslib.periods.parameters.settlement import _SettlementParams
from rateslib.periods.utils import _get_ir_vol_value_and_forward_maybe_from_obj
from rateslib.splines import evaluate
from rateslib.volatility import (
FXDeltaVolSmile,
FXDeltaVolSurface,
FXSabrSmile,
FXSabrSurface,
_BaseIRCube,
_BaseIRSmile,
_IRVolPricingParams,
)
from rateslib.volatility.fx.utils import (
_delta_type_constants,
)
from rateslib.volatility.utils import (
_OptionModelBachelier,
_OptionModelBlack76,
)
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
CurveOption,
DualTypes,
DualTypes_,
FXForwards,
FXForwards_,
_BaseCurve,
_FXVolOption,
_FXVolOption_,
_IRVolOption_,
datetime,
datetime_,
)
class _WithAnalyticFXOptionGreeks(Protocol):
"""
Protocol to derive analytic *FXOption* greeks.
"""
@property
def fx_option_params(self) -> _FXOptionParams: ...
@property
def settlement_params(self) -> _SettlementParams: ...
# def try_unindexed_reference_analytic_greeks(
# self,
# *,
# rate_curve: _BaseCurve,
# disc_curve: _BaseCurve,
# fx: FXForwards,
# index_curve: _BaseCurve_ = NoInput(0),
# fx_vol: _FXVolOption_ = NoInput(0),
# ) -> dict[str, Any]:
# return self.__base_analytic_greeks(
# rate_curve=rate_curve,
# disc_curve=disc_curve,
# fx=fx,
# fx_vol=fx_vol,
# premium=NoInput(0),
# _reduced=False,
# )
def analytic_greeks(
self,
rate_curve: _BaseCurve,
disc_curve: _BaseCurve,
fx: FXForwards,
fx_vol: _FXVolOption_ = NoInput(0),
premium: DualTypes_ = NoInput(0), # expressed in the payment currency
premium_payment: datetime_ = NoInput(0),
) -> dict[str, Any]:
r"""
Return the different greeks for the *FX Option*.
Parameters
----------
rate_curve: _BaseCurve
The discount *Curve* for the LHS currency of ``pair``.
disc_curve: _BaseCurve
The discount *Curve* for the RHS currency of ``pair``.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForward` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
premium: float, Dual, Dual2, optional
The premium value of the option paid at the appropriate payment date.
Premium should be expressed in domestic currency.
If not given calculates and assumes a mid-market premium.
premium_payment: datetime, optional
The date that the premium is paid. If not given is assumed to be equal to the
*payment* associated with the option period *settlement_params*.
Returns
-------
dict
Notes
-----
**Delta** :math:`\Delta`
This is the percentage value of the domestic notional in either the *forward* or *spot*
FX rate. The choice of which is defined by the option's ``delta_type``.
Delta is also expressed in nominal domestic currency amount.
**Gamma** :math:`\Gamma`
This defines by how much *delta* will change for a 1.0 increase in either the *forward*
or *spot* FX rate. Which rate is determined by the option's ``delta_type``.
Gamma is also expressed in nominal domestic currency amount for a +1% change in FX rates.
**Vanna** :math:`\Delta_{\nu}`
This defines by how much *delta* will change for a 1.0 increase (i.e. 100 log-vols) in
volatility. The additional
**Vega** :math:`\nu`
This defines by how much the PnL of the option will change for a 1.0 increase in
volatility for a nominal of 1 unit of domestic currency.
Vega is also expressed in foreign currency for a 0.01 (i.e. 1 log-vol) move higher in vol.
**Vomma (Volga)** :math:`\nu_{\nu}`
This defines by how much *vega* will change for a 1.0 increase in volatility.
These values can be used to estimate PnL for a change in the *forward* or
*spot* FX rate and the volatility according to,
.. math::
\delta P \approx v_{deli} N^{dom} \left ( \Delta \delta f + \frac{1}{2} \Gamma \delta f^2 + \Delta_{\nu} \delta f \delta \sigma \right ) + N^{dom} \left ( \nu \delta \sigma + \frac{1}{2} \nu_{\nu} \delta \sigma^2 \right )
where :math:`v_{deli}` is the date of FX settlement for *forward* or *spot* rate.
**Kappa** :math:`\kappa`
This defines by how much the PnL of the option will change for a 1.0 increase in
strike for a nominal of 1 unit of domestic currency.
**Kega** :math:`\left . \frac{dK}{d\sigma} \right|_{\Delta}`
This defines the rate of change of strike with respect to volatility for a constant delta.
Raises
------
ValueError: if the ``strike`` is not set on the *Option*.
""" # noqa: E501
raise NotImplementedError(
"Type {type(self).__name__} has not implmented `anlaytic_greeks`."
)
def _base_analytic_greeks(
self,
rate_curve: _BaseCurve, # w(.)
disc_curve: _BaseCurve, # v(.)
fx: FXForwards,
fx_vol: _FXVolOption_ = NoInput(0),
premium: DualTypes_ = NoInput(0), # expressed in the payment currency
premium_payment: datetime_ = NoInput(0),
_reduced: bool = False,
) -> dict[str, Any]:
"""Calculates `analytic_greeks`, if _reduced only calculates those necessary for
Strange single_vol calculation.
_reduced calculates:
__vol, vega, __bs76, _kappa, _kega, _delta_index, gamma, __strike, __forward, __sqrt_t
"""
premium_payment_ = _drb(self.settlement_params.payment, premium_payment)
if isinstance(self.fx_option_params.strike, NoInput):
raise ValueError("`strike` must be set to value FXOption.")
spot = fx.pairs_settlement[self.fx_option_params.pair]
w_spot = rate_curve[spot]
w_deli = rate_curve[self.fx_option_params.delivery]
if self.fx_option_params.delivery != premium_payment_:
w_payment = rate_curve[premium_payment_]
else:
w_payment = w_deli
v_deli = disc_curve[self.fx_option_params.delivery]
v_spot = disc_curve[spot]
f_d = fx.rate(self.fx_option_params.pair, self.fx_option_params.delivery)
f_t = fx.rate(self.fx_option_params.pair, spot)
u = self.fx_option_params.strike / f_d
sqrt_t = self.fx_option_params.time_to_expiry(rate_curve.nodes.initial) ** 0.5
eta_0, z_w_0, z_u_0 = _delta_type_constants(
self.fx_option_params.delta_type, w_deli / w_spot, u
)
if isinstance(fx_vol, NoInput):
raise ValueError("`fx_vol` must be a number quantity or Smile or Surface.")
elif isinstance(fx_vol, FXDeltaVolSmile | FXDeltaVolSurface):
eta_1, z_w_1, __ = _delta_type_constants(fx_vol.meta.delta_type, w_deli / w_spot, u)
res: tuple[DualTypes, DualTypes, DualTypes] = fx_vol.get_from_strike(
k=self.fx_option_params.strike,
f=f_d,
expiry=self.fx_option_params.expiry,
z_w=w_deli / w_spot,
)
delta_idx: DualTypes | None = res[0]
fx_vol_: DualTypes = res[1]
elif isinstance(fx_vol, FXSabrSmile):
eta_1, z_w_1 = eta_0, z_w_0
res = fx_vol.get_from_strike(
k=self.fx_option_params.strike, f=f_d, expiry=self.fx_option_params.expiry
)
delta_idx = None
fx_vol_ = res[1]
elif isinstance(fx_vol, FXSabrSurface):
eta_1, z_w_1 = eta_0, z_w_0
# SabrSurface uses FXForwards to derive multiple rates
res = fx_vol.get_from_strike(
k=self.fx_option_params.strike, f=fx, expiry=self.fx_option_params.expiry
)
delta_idx = None
fx_vol_ = res[1]
else:
eta_1, z_w_1 = eta_0, z_w_0
delta_idx = None
fx_vol_ = fx_vol
fx_vol_ /= 100.0
vol_sqrt_t = fx_vol_ * sqrt_t
_is_spot = self.fx_option_params.delta_type in [
FXDeltaMethod.SpotPremiumAdjusted,
FXDeltaMethod.Spot,
]
if _is_spot:
z_v_0 = v_deli / v_spot
else:
z_v_0 = 1.0
d_eta_0 = _OptionModelBlack76._d_plus_min_u(u, vol_sqrt_t, eta_0)
d_plus = _OptionModelBlack76._d_plus_min_u(u, vol_sqrt_t, 0.5)
d_min = _OptionModelBlack76._d_plus_min_u(u, vol_sqrt_t, -0.5)
_: dict[str, Any] = dict()
_["gamma"] = self._analytic_gamma(
_is_spot,
v_deli,
v_spot,
z_w_0,
self.fx_option_params.direction,
d_plus,
f_d,
vol_sqrt_t,
)
_["vega"] = self._analytic_vega(
v_deli, f_d, sqrt_t, self.fx_option_params.direction, d_plus
)
_["_kega"] = self._analytic_kega(
z_u_0,
z_w_0,
eta_0,
fx_vol_,
sqrt_t,
f_d,
self.fx_option_params.direction,
self.fx_option_params.strike,
d_eta_0,
)
_["_kappa"] = self._analytic_kappa(v_deli, self.fx_option_params.direction, d_min)
_["_delta_index"] = delta_idx
_["__delta_type"] = self.fx_option_params.delta_type
_["__vol"] = fx_vol_
_["__strike"] = self.fx_option_params.strike
_["__forward"] = f_d
_["__sqrt_t"] = sqrt_t
_["__bs76"] = self._analytic_bs76(
self.fx_option_params.direction,
v_deli,
f_d,
d_plus,
self.fx_option_params.strike,
d_min,
)
_["__notional"] = self.settlement_params.notional
if self.fx_option_params.direction > 0:
_["__class"] = "FXCallPeriod"
else:
_["__class"] = "FXPutPeriod"
if not _reduced:
_["delta"] = self._analytic_delta(
premium,
self.fx_option_params.delta_type
in [FXDeltaMethod.SpotPremiumAdjusted, FXDeltaMethod.ForwardPremiumAdjusted],
z_u_0,
z_w_0,
d_eta_0,
self.fx_option_params.direction,
d_plus,
w_payment,
w_spot,
self.settlement_params.notional,
)
_[f"delta_{self.fx_option_params.pair[:3]}"] = (
abs(self.settlement_params.notional) * _["delta"]
)
_[f"gamma_{self.fx_option_params.pair[:3]}_1%"] = (
_["gamma"]
* abs(self.settlement_params.notional)
* (f_t if _is_spot else f_d)
* 0.01
)
_[f"vega_{self.fx_option_params.pair[3:]}"] = (
_["vega"] * abs(self.settlement_params.notional) * 0.01
)
_["delta_sticky"] = self._analytic_sticky_delta(
_["delta"],
_["vega"],
v_deli,
fx_vol,
sqrt_t,
fx_vol_,
self.fx_option_params.expiry,
f_d,
delta_idx,
u,
z_v_0,
z_w_0,
z_w_1,
eta_1,
d_plus,
self.fx_option_params.strike,
fx,
)
_["vomma"] = self._analytic_vomma(_["vega"], d_plus, d_min, fx_vol_)
_["vanna"] = self._analytic_vanna(
z_w_0, self.fx_option_params.direction, d_plus, d_min, fx_vol_
)
# _["vanna"] = self._analytic_vanna(_["vega"], _is_spot, f_t, f_d, d_plus, vol_sqrt_t)
return _
@staticmethod
def _analytic_vega(
v_deli: DualTypes, f_d: DualTypes, sqrt_t: DualTypes, phi: float, d_plus: DualTypes
) -> DualTypes:
return v_deli * f_d * sqrt_t * dual_norm_pdf(phi * d_plus)
@staticmethod
def _analytic_vomma(
vega: DualTypes,
d_plus: DualTypes,
d_min: DualTypes,
vol: DualTypes,
) -> DualTypes:
return vega * d_plus * d_min / vol
@staticmethod
def _analytic_gamma(
spot: DualTypes,
v_deli: DualTypes,
v_spot: DualTypes,
z_w: DualTypes,
phi: float,
d_plus: DualTypes,
f_d: DualTypes,
vol_sqrt_t: DualTypes,
) -> DualTypes:
ret = z_w * dual_norm_pdf(phi * d_plus) / (f_d * vol_sqrt_t)
if spot:
return ret * z_w * v_spot / v_deli
return ret
@staticmethod
def _analytic_delta(
premium: DualTypes | NoInput,
adjusted: bool,
z_u: DualTypes,
z_w: DualTypes,
d_eta: DualTypes,
phi: float,
d_plus: DualTypes,
w_payment: DualTypes,
w_spot: DualTypes,
N_dom: DualTypes,
) -> DualTypes:
if not adjusted or isinstance(premium, NoInput):
# returns unadjusted delta or mid-market premium adjusted delta
return z_u * z_w * phi * dual_norm_cdf(phi * d_eta)
else:
# returns adjusted delta with set premium in domestic (LHS) currency.
# ASSUMES: if premium adjusted the premium is expressed in LHS currency.
return z_w * phi * dual_norm_cdf(phi * d_plus) - w_payment / w_spot * premium / N_dom
@staticmethod
def _analytic_sticky_delta(
delta: DualTypes,
vega: DualTypes,
v_deli: DualTypes,
vol: _FXVolOption,
sqrt_t: DualTypes,
vol_: DualTypes,
expiry: datetime,
f_d: DualTypes,
delta_idx: DualTypes | None,
u: DualTypes,
z_v_0: DualTypes,
z_w_0: DualTypes,
z_w_1: DualTypes,
eta_1: float,
d_plus: DualTypes,
k: DualTypes,
fxf: FXForwards,
) -> DualTypes:
dvol_df: DualTypes
if isinstance(vol, FXSabrSmile):
_, dvol_df = vol._d_sabr_d_k_or_f( # type: ignore[assignment]
k=k,
f=f_d,
expiry=expiry,
as_float=False,
derivative=2, # with respect to f
)
elif isinstance(vol, FXSabrSurface):
_, dvol_df = vol._d_sabr_d_k_or_f( # type: ignore[assignment]
k=k,
f=fxf, # use FXForwards to derive multiple rates
expiry=expiry,
as_float=False,
derivative=2, # with respect to f
)
elif isinstance(vol, FXDeltaVolSmile | FXDeltaVolSurface):
if isinstance(vol, FXDeltaVolSurface):
smile: FXDeltaVolSmile = vol.get_smile(expiry)
else:
smile = vol
# d sigma / d delta_idx
_B = evaluate(smile.nodes.spline.spline, delta_idx, 1) / 100.0 # type: ignore[arg-type]
if vol.meta.delta_type in [
FXDeltaMethod.ForwardPremiumAdjusted,
FXDeltaMethod.SpotPremiumAdjusted,
]:
# then smile is adjusted:
ddelta_idx_df_d: DualTypes = -delta_idx / f_d # type: ignore[operator]
else:
ddelta_idx_df_d = 0.0
_A = z_w_1 * dual_norm_pdf(-d_plus)
ddelta_idx_df_d -= _A / (f_d * vol_ * sqrt_t)
ddelta_idx_df_d /= 1 + _A * ((dual_log(u) / (vol_**2 * sqrt_t) + eta_1 * sqrt_t) * _B)
dvol_df = _B * z_w_0 / z_v_0 * ddelta_idx_df_d
else:
dvol_df = 0.0
return delta + vega / v_deli * z_v_0 * dvol_df
@staticmethod
def _analytic_vanna(
z_w: DualTypes,
phi: float,
d_plus: DualTypes,
d_min: DualTypes,
vol: DualTypes,
) -> DualTypes:
return -z_w * dual_norm_pdf(phi * d_plus) * d_min / vol
# @staticmethod
# def _analytic_vanna(vega, spot, f_t, f_d, d_plus, vol_sqrt_t): # Alternative monetary def.
# if spot:
# return vega / f_t * (1 - d_plus / vol_sqrt_t)
# else:
# return vega / f_d * (1 - d_plus / vol_sqrt_t)
@staticmethod
def _analytic_kega(
z_u: DualTypes,
z_w: DualTypes,
eta: float,
vol: DualTypes,
sqrt_t: float,
f_d: DualTypes,
phi: float,
k: DualTypes,
d_eta: DualTypes,
) -> DualTypes:
if eta < 0:
# dz_u_du = 1.0
x = vol * phi * dual_norm_cdf(phi * d_eta) / (f_d * z_u * dual_norm_pdf(phi * d_eta))
else:
x = 0.0
ret = (d_eta - 2.0 * eta * sqrt_t * vol) / (-1 / (k * sqrt_t) + x)
return ret
@staticmethod
def _analytic_kappa(v_deli: DualTypes, phi: float, d_min: DualTypes) -> DualTypes:
return -v_deli * phi * dual_norm_cdf(phi * d_min)
@staticmethod
def _analytic_bs76(
phi: float,
v_deli: DualTypes,
f_d: DualTypes,
d_plus: DualTypes,
k: DualTypes,
d_min: DualTypes,
) -> DualTypes:
return phi * v_deli * (f_d * dual_norm_cdf(phi * d_plus) - k * dual_norm_cdf(phi * d_min))
class _WithAnalyticIROptionGreeks(Protocol):
"""
Protocol to derive analytic *IROption* greeks.
"""
@property
def ir_option_params(self) -> _IROptionParams: ...
@property
def settlement_params(self) -> _SettlementParams: ...
def analytic_greeks(
self,
rate_curve: CurveOption,
disc_curve: _BaseCurve,
index_curve: _BaseCurve,
fx: FXForwards,
ir_vol: _IRVolOption_ = NoInput(0),
premium: DualTypes_ = NoInput(0), # expressed in the payment currency
premium_payment: datetime_ = NoInput(0),
) -> dict[str, Any]:
r"""
Return the different greeks for the *IR Option*.
Parameters
----------
rate_curve: _BaseCurve
The discount *Curve* for the LHS currency of ``pair``.
disc_curve: _BaseCurve
The discount *Curve* for the RHS currency of ``pair``.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForward` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary.
ir_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
premium: float, Dual, Dual2, optional
The premium value of the option paid at the appropriate payment date.
Premium should be expressed in domestic currency.
If not given calculates and assumes a mid-market premium.
premium_payment: datetime, optional
The date that the premium is paid. If not given is assumed to be equal to the
*payment* associated with the option period *settlement_params*.
Returns
-------
dict
Notes
-----
**Delta** :math:`\Delta`
This is the percentage value of the domestic notional in either the *forward* or *spot*
FX rate. The choice of which is defined by the option's ``delta_type``.
Delta is also expressed in nominal domestic currency amount.
**Gamma** :math:`\Gamma`
This defines by how much *delta* will change for a 1.0 increase in either the *forward*
or *spot* FX rate. Which rate is determined by the option's ``delta_type``.
Gamma is also expressed in nominal domestic currency amount for a +1% change in FX rates.
**Vanna** :math:`\Delta_{\nu}`
This defines by how much *delta* will change for a 1.0 increase (i.e. 100 log-vols) in
volatility. The additional
**Vega** :math:`\nu`
This defines by how much the PnL of the option will change for a 1.0 increase in
volatility for a nominal of 1 unit of domestic currency.
Vega is also expressed in foreign currency for a 0.01 (i.e. 1 log-vol) move higher in vol.
**Vomma (Volga)** :math:`\nu_{\nu}`
This defines by how much *vega* will change for a 1.0 increase in volatility.
These values can be used to estimate PnL for a change in the *forward* or
*spot* FX rate and the volatility according to,
.. math::
\delta P \approx v_{deli} N^{dom} \left ( \Delta \delta f + \frac{1}{2} \Gamma \delta f^2 + \Delta_{\nu} \delta f \delta \sigma \right ) + N^{dom} \left ( \nu \delta \sigma + \frac{1}{2} \nu_{\nu} \delta \sigma^2 \right )
where :math:`v_{deli}` is the date of FX settlement for *forward* or *spot* rate.
**Kappa** :math:`\kappa`
This defines by how much the PnL of the option will change for a 1.0 increase in
strike for a nominal of 1 unit of domestic currency.
**Kega** :math:`\left . \frac{dK}{d\sigma} \right|_{\Delta}`
This defines the rate of change of strike with respect to volatility for a constant delta.
Raises
------
ValueError: if the ``strike`` is not set on the *Option*.
""" # noqa: E501
raise NotImplementedError(
"Type {type(self).__name__} has not implemented `analytic_greeks`."
)
def _base_analytic_greeks(
self,
rate_curve: CurveOption,
disc_curve: _BaseCurve,
index_curve: _BaseCurve,
fx: FXForwards_ = NoInput(0),
ir_vol: _IRVolOption_ | _IRVolPricingParams = NoInput(0),
premium: DualTypes_ = NoInput(0), # expressed in the payment currency
premium_payment: datetime_ = NoInput(0),
_reduced: bool = False,
) -> dict[str, Any]:
"""Calculates `analytic_greeks`, if _reduced only calculates those necessary for
Strange single_vol calculation.
_reduced calculates:
__vol, vega, __bs76, _kappa, _kega, _delta_index, gamma, __strike, __forward, __sqrt_t
"""
_drb(self.settlement_params.payment, premium_payment)
if isinstance(self.ir_option_params.strike, NoInput):
raise ValueError("`strike` must be set to value IROption.")
# v_deli = rate_curve[self.ir_option_params.option_fixing.effective]
sqrt_t = self.ir_option_params.time_to_expiry(disc_curve.nodes.initial) ** 0.5
pricing_ = _get_ir_vol_value_and_forward_maybe_from_obj(
ir_vol=ir_vol,
index_curve=index_curve,
rate_curve=rate_curve,
strike=self.ir_option_params.strike,
irs=self.ir_option_params.option_fixing.irs,
expiry=self.ir_option_params.expiry,
tenor=self.ir_option_params.option_fixing.termination,
t_e=sqrt_t**2,
)
vol_sqrt_t = pricing_.vol / 100.0 * sqrt_t
a_r = self.ir_option_params.option_fixing.annuity(
settlement_method=self.ir_option_params.settlement_method,
index_curve=index_curve,
rate_curve=rate_curve,
)
v_p = disc_curve[self.settlement_params.payment]
_: dict[str, Any] = dict()
match pricing_.pricing_model:
case OptionPricingModel.Black76:
d_plus = _OptionModelBlack76._d_plus_min_u(
shifted_u=(pricing_.k + pricing_.rate_shift)
/ (pricing_.f + pricing_.rate_shift),
vol_sqrt_t=vol_sqrt_t,
eta=0.5,
)
_["__bs76"] = _OptionModelBlack76._value(
F=pricing_.f,
K=pricing_.k,
rate_shift=pricing_.rate_shift,
t_e=pricing_.t_e,
v2=1.0,
vol=pricing_.vol / 100.0,
phi=self.ir_option_params.direction,
)
# d_min = _OptionModelBlack76._d_plus_min_u(u, vol_sqrt_t, -0.5)
case OptionPricingModel.Bachelier:
d_plus = (pricing_.f - pricing_.k) / vol_sqrt_t
_["__bachelier"] = _OptionModelBachelier._value(
F=pricing_.f,
K=pricing_.k,
t_e=pricing_.t_e,
v2=1.0,
vol=pricing_.vol / 100.0,
phi=self.ir_option_params.direction,
)
_["__forward"] = pricing_.f
_["__sqrt_t"] = sqrt_t
_["__vol"] = pricing_.vol / 100.0
_["__strike"] = pricing_.k
_["delta"] = self._analytic_delta(
self.ir_option_params.direction, d_plus, pricing_.pricing_model
)
_[f"delta_{self.settlement_params.currency}"] = (
abs(self.settlement_params.notional) * _["delta"] * a_r * v_p * 1e-6
)
_["gamma"] = self._analytic_gamma(
self.ir_option_params.direction,
d_plus,
pricing_.pricing_model,
pricing_.f,
vol_sqrt_t,
)
_[f"gamma_{self.settlement_params.currency}"] = (
_["gamma"] * abs(self.settlement_params.notional) * 1e-8 * a_r * v_p
)
_["vanna"] = self._analytic_vanna(
self.ir_option_params.direction,
d_plus,
pricing_.pricing_model,
vol_sqrt_t,
pricing_.vol / 100.0,
)
_[f"vanna_{self.settlement_params.currency}"] = (
_["vanna"] * abs(self.settlement_params.notional) * 1e-8 * a_r * v_p
)
_["vega"] = self._analytic_vega(
self.ir_option_params.direction,
d_plus,
pricing_.pricing_model,
pricing_.f,
_["__sqrt_t"],
)
_[f"vega_{self.settlement_params.currency}"] = (
_["vega"] * abs(self.settlement_params.notional) * 1e-6 * a_r * v_p
)
_["vomma"] = self._analytic_vomma(
self.ir_option_params.direction,
d_plus,
pricing_.pricing_model,
_["vega"],
vol_sqrt_t,
pricing_.vol / 100.0,
)
_[f"vomma_{self.settlement_params.currency}"] = (
_["vomma"] * abs(self.settlement_params.notional) * 1e-8 * a_r * v_p
)
_["delta_sticky"] = self._analytic_sticky_delta(
delta=_["delta"],
vega=_["vega"],
ir_vol=ir_vol,
f=pricing_.f,
k=pricing_.k,
expiry=self.ir_option_params.expiry,
tenor=self.ir_option_params.option_fixing.termination,
)
_[f"delta_sticky_{self.settlement_params.currency}"] = (
abs(self.settlement_params.notional) * _["delta_sticky"] * a_r * v_p * 1e-6
)
_["__notional"] = self.settlement_params.notional
if self.ir_option_params.direction > 0:
_["__class"] = "IRSCallPeriod"
else:
_["__class"] = "IRSPutPeriod"
return _
@staticmethod
def _analytic_vega(
phi: float,
d_plus: DualTypes,
model: OptionPricingModel,
f: DualTypes,
sqrt_t: DualTypes,
) -> DualTypes:
match model:
case OptionPricingModel.Black76:
return f * sqrt_t * dual_norm_pdf(phi * d_plus)
case OptionPricingModel.Bachelier:
return sqrt_t * dual_norm_pdf(d_plus)
@staticmethod
def _analytic_vomma(
phi: float,
d_plus: DualTypes,
model: OptionPricingModel,
vega: DualTypes,
vol_sqrt_t: DualTypes,
vol: DualTypes,
) -> DualTypes:
match model:
case OptionPricingModel.Black76:
return vega * d_plus * (d_plus - vol_sqrt_t) / vol
case OptionPricingModel.Bachelier:
return vega * d_plus * d_plus / vol
@staticmethod
def _analytic_gamma(
phi: float,
d_plus: DualTypes,
model: OptionPricingModel,
f_d: DualTypes,
vol_sqrt_t: DualTypes,
) -> DualTypes:
ret = dual_norm_pdf(phi * d_plus) / vol_sqrt_t
match model:
case OptionPricingModel.Black76:
return ret / f_d
case OptionPricingModel.Bachelier:
return ret
@staticmethod
def _analytic_delta(
phi: float,
d_plus: DualTypes,
model: OptionPricingModel = OptionPricingModel.Black76,
) -> DualTypes:
match model:
case OptionPricingModel.Black76:
return phi * dual_norm_cdf(phi * d_plus)
case OptionPricingModel.Bachelier:
return phi * dual_norm_cdf(phi * d_plus)
@staticmethod
def _analytic_vanna(
phi: float,
d_plus: DualTypes,
model: OptionPricingModel,
vol_sqrt_t: DualTypes,
vol: DualTypes,
) -> DualTypes:
match model:
case OptionPricingModel.Black76:
return -dual_norm_pdf(phi * d_plus) * (d_plus - vol_sqrt_t) / vol
case OptionPricingModel.Bachelier:
return -dual_norm_pdf(phi * d_plus) * d_plus / vol
@staticmethod
def _analytic_sticky_delta(
delta: DualTypes,
vega: DualTypes,
ir_vol: _IRVolOption_ | _IRVolPricingParams,
f: DualTypes,
k: DualTypes,
expiry: str | datetime,
tenor: str | datetime,
) -> DualTypes:
dvol_df: DualTypes
if isinstance(ir_vol, _BaseIRSmile):
dvol_df = ir_vol._d_sigma_d_f(k=k, f=f)
elif isinstance(ir_vol, _BaseIRCube):
smile = ir_vol.get_smile(expiry, tenor)
dvol_df = smile._d_sigma_d_f(k=k, f=f)
elif isinstance(ir_vol, _IRVolPricingParams):
raise NotImplementedError(
"Cannot calculate sticky delta from pricing params without object"
)
else:
dvol_df = 0.0
return delta + vega * dvol_df
================================================
FILE: python/rateslib/periods/protocols/cashflows.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from rateslib import defaults
from rateslib.curves._parsers import (
_try_disc_required_maybe_from_curve,
)
from rateslib.dual.utils import _dual_float, _float_or_none
from rateslib.enums.generics import Err, NoInput
from rateslib.periods.parameters import (
_CreditParams,
_FixedRateParams,
_FloatRateParams,
_IndexParams,
_MtmParams,
_NonDeliverableParams,
_PeriodParams,
)
from rateslib.periods.protocols.npv import _WithNPV, _WithNPVStatic
from rateslib.periods.utils import (
_get_immediate_fx_scalar_and_base,
_try_validate_base_curve,
)
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
CurveOption_,
DualTypes,
FXForwards_,
Result,
_BaseCurve_,
_FXVolOption_,
_IRVolOption_,
datetime_,
str_,
)
class _WithCashflows(_WithNPV, Protocol):
"""
Protocol for parameter and calculation display for the *Period*.
.. warning::
The direct methods of this class are for display convenience.
Calling these to extract certain values should be avoided. It is more efficient to
source relevant parameters or calculations from object attributes or other methods directly.
.. rubric:: Required methods
.. autosummary::
~_WithCashflows.try_cashflow
.. rubric:: Provided methods
.. autosummary::
~_WithCashflows.cashflows
"""
def try_cashflow(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
) -> Result[DualTypes]:
"""
Calculate the cashflow for the *Period* with any non-deliverable currency adjustment
**and** indexation.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
Returns
-------
Result of float, Dual, Dual2, Variable
"""
return Err(
NotImplementedError(
f"`cashflow` is not explicitly implemented for period type: {type(self).__name__}"
)
)
def cashflows(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> dict[str, Any]:
"""
Return aggregated cashflow data for the *Period*.
.. warning::
This method is a convenience method to provide a visual representation of all
associated calculation data. Calling this method to extracting certain values
should be avoided. It is more efficient to source relevant parameters or calculations
from object attributes or other methods directly.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
base: str, optional
The currency to convert the *local settlement* NPV to.
settlement: datetime, optional
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, optional
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
dict[Any]
"""
standard_elements = _standard_elements(self=self)
period_elements = _period_elements(self=self)
cashflow_elements = _cashflow_elements(
self=self,
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
fx=fx,
fx_vol=fx_vol,
base=base,
forward=forward,
settlement=settlement,
)
rate_elements = _rate_elements(self=self, rate_curve=rate_curve)
credit_elements = _credit_elements(self=self, rate_curve=rate_curve)
return {
**standard_elements,
**period_elements,
**rate_elements,
**cashflow_elements,
**credit_elements,
}
class _WithCashflowsStatic(_WithNPVStatic, Protocol):
"""
Protocol for parameter and calculation display for the *Static Period*.
.. warning::
The direct methods of this class are for display convenience.
Calling these to extract certain values should be avoided. It is more efficient to
source relevant parameters or calculations from object attributes or other methods directly.
.. rubric:: Provided methods
.. autosummary::
~_WithCashflowsStatic.cashflows
"""
def _index_elements(
self,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
) -> dict[str, Any]:
# indexing parameters
index_elements: dict[str, Any] = {}
if hasattr(self, "index_params") and isinstance(self.index_params, _IndexParams):
assert isinstance(self.index_params, _IndexParams) # noqa: S101
iv = self.index_params.try_index_value(index_curve=index_curve)
ib = self.index_params.try_index_base(index_curve=index_curve)
if not isinstance(iv, Err) and not isinstance(ib, Err):
ir = iv.unwrap() / ib.unwrap()
else:
ir = None
uc = self.try_unindexed_cashflow(
rate_curve=rate_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
ir_vol=ir_vol,
)
index_elements = {
defaults.headers["index_base"]: _float_or_none(ib),
defaults.headers["index_value"]: _float_or_none(iv),
defaults.headers["index_ratio"]: _float_or_none(ir),
defaults.headers["index_fix_date"]: self.index_params.index_fixing.date,
defaults.headers["unindexed_cashflow"]: _float_or_none(uc),
}
return index_elements
def _non_deliverable_elements(self, fx: FXForwards_) -> dict[str, Any]:
# non-deliverable parameters
non_deliverable_elements: dict[str, Any] = {}
if hasattr(self, "non_deliverable_params") and isinstance(
self.non_deliverable_params, _NonDeliverableParams
):
fx_fixing_res: Result[DualTypes] = (
self.non_deliverable_params.fx_fixing.try_value_or_forecast(fx)
)
non_deliverable_elements.update(
{
defaults.headers["fx_fixing"]: _float_or_none(fx_fixing_res),
defaults.headers["fx_fixing_date"]: self.non_deliverable_params.fx_fixing.date,
defaults.headers[
"reference_currency"
]: self.non_deliverable_params.reference_currency.upper(),
}
)
return non_deliverable_elements
def _mtm_elements(self, fx: FXForwards_) -> dict[str, Any]:
mtm_elements: dict[str, Any] = {}
if hasattr(self, "mtm_params") and isinstance(self.mtm_params, _MtmParams):
# mtm_elements overwrite non_deliverable elements as these are exclusive params.
fx_fixing_res = self.mtm_params.fx_fixing_end.try_value_or_forecast(fx)
mtm_elements = {
defaults.headers["fx_fixing"]: _float_or_none(fx_fixing_res),
defaults.headers["fx_fixing_date"]: self.mtm_params.fx_fixing_end.date,
defaults.headers["reference_currency"]: self.mtm_params.reference_currency.upper(),
}
return mtm_elements
def cashflows(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> dict[str, Any]:
"""
Return aggregated cashflow data for the *Period*.
.. warning::
This method is a convenience method to provide a visual representation of all
associated calculation data. Calling this method to extracting certain values
should be avoided. It is more efficient to source relevant parameters or calculations
from object attributes or other methods directly.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
base: str, optional
The currency to convert the *local settlement* NPV to.
settlement: datetime, optional
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, optional
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
dict[Any]
"""
standard_elements = _standard_elements(self=self)
period_elements = _period_elements(self=self)
cashflow_elements = _cashflow_elements(
self=self,
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
fx=fx,
fx_vol=fx_vol,
ir_vol=ir_vol,
base=base,
forward=forward,
settlement=settlement,
)
rate_elements = _rate_elements(self=self, rate_curve=rate_curve)
credit_elements = _credit_elements(self=self, rate_curve=rate_curve)
index_elements = self._index_elements(index_curve=index_curve)
non_deliverable_elements = self._non_deliverable_elements(fx=fx)
mtm_elements = self._mtm_elements(fx=fx)
return {
**standard_elements,
**period_elements,
**cashflow_elements,
**rate_elements,
**credit_elements,
**index_elements,
**non_deliverable_elements,
**mtm_elements,
}
def _standard_elements(self: _WithCashflows | _WithCashflowsStatic) -> dict[str, Any]:
"""Typical cashflow attributes for any constructed *Period*"""
# standard parameters
standard_elements: dict[str, Any] = {}
standard_elements.update(
{
defaults.headers["type"]: type(self).__name__,
defaults.headers["currency"]: self.settlement_params.currency.upper(),
defaults.headers["payment"]: self.settlement_params.payment,
defaults.headers["notional"]: _dual_float(self.settlement_params.notional),
}
)
return standard_elements
def _period_elements(self: _WithCashflows | _WithCashflowsStatic) -> dict[str, Any]:
"""
Typical date-like attributes for any constructed *Period* with `period_params`.
"""
# period parameters
period_elements: dict[str, Any] = {}
if hasattr(self, "period_params") and isinstance(self.period_params, _PeriodParams):
period_elements.update(
{
defaults.headers["stub_type"]: "Stub" if self.period_params.stub else "Regular",
defaults.headers["convention"]: str(self.period_params.convention),
defaults.headers["dcf"]: self.period_params.dcf,
defaults.headers["a_acc_start"]: self.period_params.start,
defaults.headers["a_acc_end"]: self.period_params.end,
}
)
return period_elements
def _rate_elements(
self: _WithCashflows | _WithCashflowsStatic,
rate_curve: CurveOption_,
) -> dict[str, Any]:
"""
Typical rate-like attributes for any constructed *Period* with `rate_params`.
"""
# rate parameters
rate_elements: dict[str, Any] = {}
if hasattr(self, "rate_params"):
if isinstance(self.rate_params, _FixedRateParams):
rate_elements.update(
{
defaults.headers["rate"]: _float_or_none(self.rate_params.fixed_rate),
defaults.headers["spread"]: None,
}
)
elif isinstance(self.rate_params, _FloatRateParams):
rate_elements.update(
{
# try_rate is guaranteed by having FloatRateParams but this is poor typing.
defaults.headers["rate"]: _float_or_none(self.try_rate(rate_curve=rate_curve)), # type: ignore[attr-defined]
defaults.headers["spread"]: _float_or_none(self.rate_params.float_spread),
}
)
return rate_elements
def _credit_elements(
self: _WithCashflows | _WithCashflowsStatic,
rate_curve: CurveOption_,
) -> dict[str, Any]:
"""
Typical credit-like attributes for any constructed *Period* with `credit_params`.
"""
credit_elements: dict[str, Any] = {}
if hasattr(self, "credit_params") and isinstance(self.credit_params, _CreditParams):
if hasattr(self, "period_params") and isinstance(self.period_params, _PeriodParams):
rc_res = _try_validate_base_curve(rate_curve)
if not isinstance(rc_res, Err):
credit_elements.update(
{
defaults.headers["survival"]: _dual_float(
rc_res.unwrap()[self.period_params.end]
),
defaults.headers["recovery"]: _dual_float(
rc_res.unwrap().meta.credit_recovery_rate
),
}
)
else:
credit_elements.update(
{defaults.headers["survival"]: None, defaults.headers["recovery"]: None}
)
else:
pass
return credit_elements
def _cashflow_elements(
self: _WithCashflows | _WithCashflowsStatic,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
base: str_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> dict[str, Any]:
# cashflow valuation based parameters
c = self.try_cashflow(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
fx=fx,
fx_vol=fx_vol,
ir_vol=ir_vol,
)
disc_curve_result = _try_disc_required_maybe_from_curve(curve=rate_curve, disc_curve=disc_curve)
if disc_curve_result.is_err:
# then NPV is impossible
v, collateral = None, None
else:
v = disc_curve_result.unwrap()[self.settlement_params.payment]
collateral = disc_curve_result.unwrap().meta.collateral
# Since `cashflows` in not a performance critical function this call duplicates
# cashflow calculations. A more efficient calculation is possible but the code branching
# is ugly.
local_npv_result = self.try_local_npv(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
ir_vol=ir_vol,
settlement=settlement,
forward=forward,
)
fx_, base_ = _get_immediate_fx_scalar_and_base(self.settlement_params.currency, fx, base)
if local_npv_result.is_err:
npv_fx = None
else:
npv_fx = local_npv_result.unwrap() * fx_
return {
defaults.headers["df"]: _float_or_none(v),
defaults.headers["cashflow"]: _float_or_none(c),
defaults.headers["npv"]: _float_or_none(local_npv_result),
defaults.headers["fx"]: _dual_float(fx_),
defaults.headers["base"]: base_.upper(),
defaults.headers["npv_fx"]: _float_or_none(npv_fx),
defaults.headers["collateral"]: collateral,
}
================================================
FILE: python/rateslib/periods/protocols/fixings.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import os
from itertools import product
from typing import TYPE_CHECKING, Protocol
from pandas import DataFrame, DatetimeIndex, MultiIndex, Series, isna
from rateslib import fixings
from rateslib.dual import Variable, gradient
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput
from rateslib.periods.parameters import (
_FloatRateParams,
_FXOptionParams,
_IndexParams,
_MtmParams,
_NonDeliverableParams,
)
from rateslib.periods.protocols.npv import _WithNPV
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CurveOption_,
DualTypes,
FXForwards_,
Sequence,
_BaseCurve_,
_FXVolOption_,
datetime_,
int_,
)
class _WithFixings(_WithNPV, Protocol):
"""
Protocol for determining fixing sensitivity for a *Period* with AD.
.. rubric:: Required methods
.. autosummary::
~_WithFixings.reset_fixings
.. rubric:: Provided methods
.. autosummary::
~_WithFixings.reset_fixings
"""
# def local_npv(
# self,
# *,
# rate_curve: CurveOption_ = NoInput(0),
# index_curve: _BaseCurve_ = NoInput(0),
# disc_curve: _BaseCurve_ = NoInput(0),
# fx: FXForwards_ = NoInput(0),
# fx_vol: _FXVolOption_ = NoInput(0),
# settlement: datetime_ = NoInput(0),
# forward: datetime_ = NoInput(0),
# ) -> DualTypes: ...
# @property
# def settlement_param(self) -> _SettlementParams: ...
def reset_fixings(self, state: int_ = NoInput(0)) -> None:
"""
Resets any fixings values of the *Period* derived using the given data state.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import fixings, dt, NoInput, FloatPeriod
from pandas import Series
.. ipython:: python
fp = FloatPeriod(
start=dt(2026, 1, 12),
end=dt(2026, 1, 16),
payment=dt(2026, 1, 16),
frequency="M",
fixing_method="rfr_payment_delay",
rate_fixings="sofr"
)
fixings.add(
name="sofr_1B",
series=Series(
index=[dt(2026, 1, 12), dt(2026, 1, 13), dt(2026, 1, 14), dt(2026, 1, 15)],
data=[3.1, 3.2, 3.3, 3.4]
)
)
# value is populated from given data
assert 3.245 < fp.rate_params.rate_fixing.value < 3.255
fp.reset_fixings()
# private data related to fixing is removed and requires new data lookup
fp.rate_params.rate_fixing._value
fp.rate_params.rate_fixing._populated
.. role:: green
Parameters
----------
state: int, :green:`optional`
The *state id* of the data series that set the fixing. Only fixings determined by this
data will be reset. If not given resets all fixings.
"""
if isinstance(getattr(self, "index_params", None), _IndexParams):
self.index_params.index_base.reset(state) # type: ignore[attr-defined]
self.index_params.index_fixing.reset(state) # type: ignore[attr-defined]
if isinstance(getattr(self, "rate_params", None), _FloatRateParams):
self.rate_params.rate_fixing.reset(state) # type: ignore[attr-defined]
if isinstance(getattr(self, "mtm_params", None), _MtmParams):
self.mtm_params.fx_fixing_start.reset(state) # type: ignore[attr-defined]
self.mtm_params.fx_fixing_end.reset(state) # type: ignore[attr-defined]
if isinstance(getattr(self, "non_deliverable_params", None), _NonDeliverableParams):
self.non_deliverable_params.fx_fixing.reset(state) # type: ignore[attr-defined]
from rateslib.periods.float_period import ZeroFloatPeriod
if isinstance(self, ZeroFloatPeriod):
for float_period in self.float_periods:
float_period.reset_fixings(state)
if isinstance(getattr(self, "fx_option_params", None), _FXOptionParams):
self.fx_option_params.option_fixing.reset(state) # type: ignore[attr-defined]
def local_fixings(
self,
identifiers: Sequence[tuple[str, Series]],
scalars: Sequence[float] | NoInput = NoInput(0),
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DataFrame:
"""
Calculate the sensitivity to fixings of the *Instrument*, expressed in local
settlement currency.
.. role:: red
.. role:: green
Parameters
----------
indentifiers: Sequence of tuple[str, Series], :red:`required`
These are the series string identifiers and the data values that will be used in each
Series to determine the sensitivity against.
scalars: Sequence of floats, :green:`optional (each set as 1.0)`
A sequence of scalars to multiply the sensitivities by for each on of the
``identifiers``.
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
settlement: datetime, optional (set as immediate date)
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, optional (set as ``settlement``)
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
DataFrame
"""
original_data, index, state = _replace_fixings_with_ad_variables(identifiers)
# Extract sensitivity data
pv: dict[str, DualTypes] = {
self.settlement_params.currency: self.local_npv(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
settlement=settlement,
forward=forward,
)
}
df = _structure_sensitivity_data(pv, index, identifiers, scalars)
_reset_fixings_data(self, original_data, state, identifiers)
return df
def _replace_fixings_with_ad_variables(
identifiers: Sequence[tuple[str, Series]],
) -> tuple[dict[str, tuple[int, Series]], DatetimeIndex, int]:
"""
For a set of identifiers (which must already exist in the `fixings` object) extend those
with the given data as new fixings expressed as a Variable which will capture sensitivity.
Parameters
----------
identifiers
Returns
-------
tuple: the original data that will be reset later, the DatetimeIndex of relevant dates
and the state id used for the added series
"""
# for each identifier, replace the existing fixing Series with a new one with AD Variables.
state = hash(os.urandom(64))
original_data: dict[str, tuple[int, Series]] = {}
index = DatetimeIndex(data=[])
for identifier in identifiers:
original_data[identifier[0]] = (fixings[identifier[0]][0], fixings[identifier[0]][1])
ad_series = Series(
index=identifier[1].index,
data=[ # type: ignore[arg-type]
Variable(_dual_float(v), [f"{identifier[0]}_{d.strftime('%Y%m%d')}"]) # type: ignore[attr-defined]
for d, v in identifier[1].items()
],
)
index = index.union(other=ad_series.index, sort=None) # type: ignore[arg-type] # will sort
fixings.pop(name=identifier[0])
fixings.add(
name=identifier[0],
series=ad_series.combine(original_data[identifier[0]][1], _s2_before_s1),
state=state,
)
return original_data, index, state
def _structure_sensitivity_data(
pv: dict[str, DualTypes],
index: DatetimeIndex,
identifiers: Sequence[tuple[str, Series]],
scalars: Sequence[float] | NoInput,
) -> DataFrame:
if isinstance(scalars, NoInput):
scalars_: Sequence[float] = [1.0] * len(identifiers)
elif len(scalars) != len(identifiers):
raise ValueError("If given, ``scalars`` must be same length as ``identifiers``.")
else:
scalars_ = scalars
date_str = [_.strftime("%Y%m%d") for _ in index]
# Construct DataFrame
df = DataFrame(
columns=MultiIndex.from_tuples(
product(pv.keys(), [i[0] for i in identifiers]), names=["local_ccy", "identifier"]
),
# index=date_list,
index=index,
data=[],
dtype=float,
)
for ccy, v in pv.items():
for j, identifier in enumerate(identifiers):
df[(ccy, identifier[0])] = (
gradient(v, vars=[identifier[0] + "_" + date for date in date_str]) * scalars_[j]
)
return df
class _SupportsResetFixings(Protocol):
def reset_fixings(self, state: int_ = NoInput(0)) -> None: ...
def _reset_fixings_data(
obj: _SupportsResetFixings,
original_data: dict[str, tuple[int, Series]],
state: int,
identifiers: Sequence[tuple[str, Series]],
) -> None:
# reset all data to original values.
obj.reset_fixings(state=state)
for identifier in identifiers:
fixings.pop(name=identifier[0])
fixings.add(
name=identifier[0],
series=original_data[identifier[0]][1],
state=original_data[identifier[0]][0],
)
def _s2_before_s1(v1: DualTypes, v2: DualTypes | None) -> DualTypes:
if v2 is None or isna(v2): # type: ignore[arg-type]
return v1
else:
return v2
================================================
FILE: python/rateslib/periods/protocols/npv.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from rateslib.curves import _BaseCurve
from rateslib.curves._parsers import (
_disc_required_maybe_from_curve,
_try_disc_required_maybe_from_curve,
)
from rateslib.enums.generics import Err, NoInput, Ok
from rateslib.periods.parameters import (
_IndexParams,
_SettlementParams,
)
from rateslib.periods.parameters.settlement import _NonDeliverableParams
from rateslib.periods.utils import (
_maybe_local,
)
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
FX_,
CurveOption_,
DualTypes,
FXForwards_,
Result,
_BaseCurve_,
_FXVolOption_,
_IRVolOption_,
_IRVolPricingParams,
datetime,
datetime_,
str_,
)
def _screen_ex_div_and_forward(
local_value: Result[DualTypes],
rate_curve: CurveOption_,
disc_curve: _BaseCurve_,
ex_dividend: datetime,
forward: datetime_ = NoInput(0),
settlement: datetime_ = NoInput(0),
) -> Result[DualTypes]:
"""
Remap an immediate, local currency value to account for a forward valuation and settlement.
Parameters
----------
local_value: Result[float, Dual, Dual2, Variable]
The value measured with immediate effect expressed in local currency.
rate_curve: _BaseCurve or NoInput
The rate curve which might be used in place of the ``disc_curve`` if that not given.
disc_curve: _BaseCurve or NoInput
The discount curve used to discount units of local currency at an appropriate
collateral rate.
ex_dividend: datetime
The ex-dividend date which, combined with ``settlement``, determines if this value
is set to zero.
settlement: datetime
The settlement date to compare against an ex-dividend date to imply a cashflow.
forward: datetime
The projected forward valuation of the PV obtained via the discount curve
Returns
-------
Float, Dual, Dual2, Variable
"""
if local_value.is_err:
return local_value
# determine forward_ and settlement_ if not given
is_settlement = not isinstance(settlement, NoInput)
is_forward = not isinstance(forward, NoInput)
if not is_settlement and not is_forward:
return local_value # immediate value is returned unadjusted
dc_res = _try_disc_required_maybe_from_curve(curve=rate_curve, disc_curve=disc_curve)
if isinstance(dc_res, Err):
return dc_res
disc_curve_: _BaseCurve = dc_res.unwrap()
if not is_settlement:
# ex-div is assumed to always after a blank settlement
return Ok(local_value.unwrap() / disc_curve_[forward]) # type: ignore[index]
else:
if settlement > ex_dividend: # type: ignore[operator]
return Ok(local_value.unwrap() * 0.0) # TODO: profile this multiplication
# in the case of Dualtypes this would be faster to just return 0.0
# but the multiplication is used to handle DataFrame (FixingsSensitivity)
if not is_forward:
# forward is assumed to be immediate value if not given.
# # forward is assumed to be equal to settlement
return local_value # / disc_curve_[settlement]) # type: ignore[index]
else:
return Ok(local_value.unwrap() / disc_curve_[forward]) # type: ignore[index]
class _WithNPV(Protocol):
r"""
Protocol to define value of any *Period* type.
.. rubric:: Required methods
.. autosummary::
~_WithNPV.immediate_local_npv
.. rubric:: Provided methods
.. autosummary::
~_WithNPV.local_npv
~_WithNPV.npv
~_WithNPV.try_immediate_local_npv
~_WithNPV.try_local_npv
Notes
-----
Each *Period* type is required to implement the immediate expectation of value of
its cashflow under the risk neutral measure, expressed in its local settlement currency.
.. math::
P_0 = \mathbb{E^Q}[V(m_T) C_T]
"""
_settlement_params: _SettlementParams
@property
def settlement_params(self) -> _SettlementParams:
"""The :class:`~rateslib.periods.parameters._SettlementParams` of the
*Period*."""
return self._settlement_params
def __repr__(self) -> str:
return f""
def immediate_local_npv(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ | _IRVolPricingParams = NoInput(0),
) -> DualTypes:
r"""
Calculate the immediate NPV of the *Period* in local settlement currency.
This method does **not** adjust for ex-dividend and is an immediate measure according to,
.. math::
P_0 = \mathbb{E^Q} [V(m_T) C(m_T)]
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
ir_vol: IRSabrSmile, optional
The IR volatility *smile* or *Cube* object used for determining Black calendar
day implied volatility values.
Returns
-------
Result[float, Dual, Dual2, Variable]
""" # noqa: E501
raise NotImplementedError( # pragma: no cover
f"Period type '{type(self).__name__}' must implement `immediate_local_npv`"
)
def try_immediate_local_npv(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
) -> Result[DualTypes]:
r"""
Replicate :meth:`~rateslib.periods.protocols._WithNPV.immediate_local_npv` with
lazy exception handling.
Returns
-------
Result[float, Dual, Dual2, Variable]
"""
try:
v = self.immediate_local_npv(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx_vol=fx_vol,
ir_vol=ir_vol,
fx=fx,
)
except Exception as e:
return Err(e)
else:
return Ok(v)
pass
def local_npv(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ | _IRVolPricingParams = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes:
r"""
Calculate the NPV of the *Period* in local settlement currency.
This method adjusts the immediate NPV for ex-dividend, settlement and forward projected value,
according to,
.. math::
P(m_s, m_f) = \mathbb{I}(m_s) \frac{1}{v(m_f)} P_0, \qquad \; \mathbb{I}(m_s) = \left \{ \begin{matrix} 0 & m_s > m_{ex} \\ 1 & m_s \leq m_{ex} \end{matrix} \right .
for forward, :math:`m_f`, settlement, :math:`m_s`, and ex-dividend, :math:`m_{ex}`.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
ir_vol: IRSabrSmile, optional
The IR volatility *Smile* or *Cube* object used for determining Black calendar
day implied volatility values.
settlement: datetime, optional (set as immediate date)
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, optional (set as ``settlement``)
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
float, Dual, Dual2, Variable
""" # noqa: E501
local_immediate_npv = self.immediate_local_npv(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
ir_vol=ir_vol,
)
return _screen_ex_div_and_forward(
local_value=Ok(local_immediate_npv),
rate_curve=rate_curve,
disc_curve=disc_curve,
ex_dividend=self.settlement_params.ex_dividend,
settlement=settlement,
forward=forward,
).unwrap()
def try_local_npv(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> Result[DualTypes]:
r"""
Replicate :meth:`~rateslib.periods.protocols._WithNPV.local_npv` with lazy
exception handling.
Returns
-------
Result[float, Dual, Dual2, Variable]
"""
try:
v = self.local_npv(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
settlement=settlement,
forward=forward,
fx_vol=fx_vol,
ir_vol=ir_vol,
fx=fx,
)
except Exception as e:
return Err(e)
else:
return Ok(v)
def npv(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ | _IRVolPricingParams = NoInput(0),
base: str_ = NoInput(0),
local: bool = False,
settlement: datetime_ = NoInput(0),
forward: datetime_ = NoInput(0),
) -> DualTypes | dict[str, DualTypes]:
"""
Calculate the NPV of the *Period* converted to any other *base* accounting currency.
This method converts a local settlement currency value to a base accounting currency
according to:
.. math::
P^{bas}(m_s, m_f) = f_{loc:bas}(m_f) P(m_s, m_f)
.. hint::
If the cashflows are unspecified or incalculable due to missing information this method
will raise an exception. For a function that returns a `Result` indicating success or
failure use :meth:`~rateslib.periods.protocols._WithNPV.try_local_npv`.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
ir_vol: IRSabrSmile, optional
The IR volatility *smile* or *Cube* object used for determining Black calendar
day implied volatility values.
base: str, optional
The currency to convert the *local settlement* NPV to.
local: bool, optional
An override flag to return a dict of NPV values indexed by string currency.
settlement: datetime, optional, (set as immediate date)
The assumed settlement date of the *PV* determination. Used only to evaluate
*ex-dividend* status.
forward: datetime, optional, (set as ``settlement``)
The future date to project the *PV* to using the ``disc_curve``.
Returns
-------
float, Dual, Dual2, Variable or dict of such indexed by string currency.
Notes
-----
If ``base`` is not provided then this function will return the value obtained from
:meth:`~rateslib.periods.protocols._WithNPV.local_npv`.
If ``base`` is provided this then an :class:`~rateslib.fx.FXForwards` object may be
required to perform conversions. An :class:`~rateslib.fx.FXRates` object is also allowed
for this conversion although best practice does not recommend it due to possible
settlement date conflicts.
"""
local_npv = self.local_npv(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
ir_vol=ir_vol,
settlement=settlement,
forward=forward,
)
return _maybe_local(
value=local_npv,
local=local,
currency=self.settlement_params.currency,
fx=fx,
base=base,
forward=forward,
)
class _WithIndexingStatic(Protocol):
"""
Protocol to provide indexation for *Static Period* types.
"""
_index_params: _IndexParams | None
@property
def index_params(self) -> _IndexParams | None:
"""
The :class:`~rateslib.periods.parameters._IndexParams` of the *Period*,
if any.
"""
return self._index_params
@property
def is_indexed(self) -> bool:
"""
Check whether the *Period* has indexation applied, which means it has ``index_params``.
"""
return self.index_params is not None
def index_up(self, value: DualTypes, index_curve: _BaseCurve_) -> DualTypes:
"""
Apply indexation to a *Static Period* value using its ``index_params``.
Parameters
----------
value: float, Dual, Dual2, Variable
The possible value to apply indexation to.
index_curve: _BaseCurve, optional
The index curve used to forecast index values, if necessary.
Returns
-------
float, Dual, Dual2, Variable
"""
if self.index_params is None:
# then no indexation of the cashflow will occur.
return value
else:
ir = self.index_params.try_index_ratio(index_curve).unwrap()[0]
if self.index_params.index_only:
return value * (ir - 1)
else:
return value * ir
def try_index_up(self, value: Result[DualTypes], index_curve: _BaseCurve_) -> Result[DualTypes]:
r"""
Replicate :meth:`~rateslib.periods.protocols._WithIndexingStatic.index_up`
with lazy exception handling.
Parameters
----------
value: Result[float, Dual, Dual2, Variable]
The possible value to apply indexation to.
index_curve: _BaseCurve, optional
The index curve used to forecast index values, if necessary.
Returns
-------
Result[float, Dual, Dual2, Variable]
"""
try:
v = self.index_up(
value=value.unwrap(),
index_curve=index_curve,
)
except Exception as e:
return Err(e)
else:
return Ok(v)
class _WithNonDeliverableStatic(Protocol):
"""
Protocol to provide non-deliverable conversion for *Static Period* types.
"""
_non_deliverable_params: _NonDeliverableParams | None
@property
def non_deliverable_params(self) -> _NonDeliverableParams | None:
"""The :class:`~rateslib.periods.parameters._NonDeliverableParams` of the
*Period*., if any."""
return self._non_deliverable_params
@property
def is_non_deliverable(self) -> bool:
"""
Check whether the *Period* is non-deliverable,
which means it has ``non_deliverable_params``.
"""
return self.non_deliverable_params is not None
def convert_deliverable(self, value: DualTypes, fx: FXForwards_) -> DualTypes:
"""
Apply settlement currency conversion to a *Static Period* using its
``non_deliverable_params``.
Parameters
----------
value: float, Dual, Dual2, Variable
The possible value to apply settlement currency conversion to.
fx: FXForwards, optional
The object used to forecast forward FX rates, if necessary.
Returns
-------
float, Dual, Dual2, Variable
"""
if self.non_deliverable_params is None:
# then cashflow is directly deliverable
return value
else:
fx_fix = self.non_deliverable_params.fx_fixing.try_value_or_forecast(fx).unwrap()
c = value * (fx_fix if not self.non_deliverable_params.fx_reversed else (1.0 / fx_fix))
return c
def try_convert_deliverable(
self, value: Result[DualTypes], fx: FXForwards_
) -> Result[DualTypes]:
r"""
Replicate :meth:`~rateslib.periods.protocols._WithNonDeliverableStatic.convert_deliverable`
with lazy exception handling.
Parameters
----------
value: Result[float, Dual, Dual2, Variable]
The possible value to apply settlement currency conversion to.
fx: FXForwards, optional
The object used to forecast forward FX rates, if necessary.
Returns
-------
Result[float, Dual, Dual2, Variable]
""" # noqa: E501
try:
v = self.convert_deliverable(
value=value.unwrap(),
fx=fx,
)
except Exception as e:
return Err(e)
else:
return Ok(v)
class _WithNPVStatic(_WithNPV, _WithIndexingStatic, _WithNonDeliverableStatic, Protocol):
r"""
Protocol to establish value of any *Static Period* type.
.. rubric:: Required methods
.. autosummary::
~_WithNPVStatic.unindexed_reference_cashflow
.. rubric:: Provided methods
.. autosummary::
~_WithNPVStatic.reference_cashflow
~_WithNPVStatic.unindexed_cashflow
~_WithNPVStatic.cashflow
~_WithNPVStatic.immediate_local_npv
~_WithNPVStatic.local_npv
~_WithNPVStatic.npv
~_WithNPVStatic.try_unindexed_reference_cashflow
~_WithNPVStatic.try_reference_cashflow
~_WithNPVStatic.try_unindexed_cashflow
~_WithNPVStatic.try_cashflow
~_WithNPVStatic.try_immediate_local_npv
~_WithNPVStatic.try_local_npv
Notes
-----
A *Static Period* type is one with a defined, non-random cashflow date, and for which
indexation and non-deliverability components are independent and can be taken outside of
the expectation of value.
Each *Static Period* is required to implement the expectation of its unindexed reference
currency cashflow under the risk neutral measure, paid at the known payment date,
:math:`m_t`.
.. math::
\mathbb{E^Q}[\bar{C}_t]
"""
# required by each Static Period...
def unindexed_reference_cashflow(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FX_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ | _IRVolPricingParams = NoInput(0),
) -> DualTypes:
r"""
Calculate the cashflow for the *Static Period* before settlement currency and
indexation adjustments.
.. math::
\mathbb{E^Q}[\bar{C}_t]
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows, if necessary.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
ir_vol: IRSabrSmile, optional
The IR volatility *smile* or *Cube* object used for determining Black calendar
day implied volatility values.
Returns
-------
float, Dual, Dual2, Variable
"""
raise NotImplementedError( # pragma: no cover
f"Period type '{type(self).__name__}' must implement `unindexed_reference_cashflow`"
)
# automatically provided for each Static Period...
def try_unindexed_reference_cashflow(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FX_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
) -> Result[DualTypes]:
r"""
Replicate :meth:`~rateslib.periods.protocols._WithNPVStatic.unindexed_reference_cashflow`
with lazy exception handling.
Returns
-------
Result[float, Dual, Dual2, Variable]
""" # noqa: E501
try:
v = self.unindexed_reference_cashflow(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx_vol=fx_vol,
ir_vol=ir_vol,
fx=fx,
)
except Exception as e:
return Err(e)
else:
return Ok(v)
def reference_cashflow(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FX_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ | _IRVolPricingParams = NoInput(0),
) -> DualTypes:
r"""
Calculate the cashflow for the *Static Period* before settlement currency adjustment
but after indexation.
.. math::
I_r\mathbb{E^Q}[\bar{C}_t]
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows, if necessary.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
ir_vol: IRSabrSmile, optional
The IR volatility *smile* or *Cube* object used for determining Black calendar
day implied volatility values.
Returns
-------
float, Dual, Dual2, Variable
"""
urc = self.unindexed_reference_cashflow(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
fx=fx,
fx_vol=fx_vol,
ir_vol=ir_vol,
)
return self.index_up(value=urc, index_curve=index_curve)
def try_reference_cashflow(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FX_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ | _IRVolPricingParams = NoInput(0),
) -> Result[DualTypes]:
r"""
Replicate :meth:`~rateslib.periods.protocols._WithNPVStatic.reference_cashflow`
with lazy exception handling.
Returns
-------
Result[float, Dual, Dual2, Variable]
"""
try:
v = self.reference_cashflow(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx_vol=fx_vol,
ir_vol=ir_vol,
fx=fx,
)
except Exception as e:
return Err(e)
else:
return Ok(v)
def unindexed_cashflow(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
) -> DualTypes:
r"""
Calculate the cashflow for the *Static Period* with settlement currency adjustment
but without indexation.
.. math::
f(m_d)\mathbb{E^Q}[\bar{C}_t]
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows, if necessary.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
ir_vol: IRSabrSmile, optional
The IR volatility *smile* or *Cube* object used for determining Black calendar
day implied volatility values.
Returns
-------
float, Dual, Dual2, Variable
"""
urc = self.unindexed_reference_cashflow(
rate_curve=rate_curve,
disc_curve=disc_curve,
index_curve=index_curve,
fx=fx,
fx_vol=fx_vol,
ir_vol=ir_vol,
)
return self.convert_deliverable(value=urc, fx=fx)
def try_unindexed_cashflow(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
) -> Result[DualTypes]:
r"""
Replicate :meth:`~rateslib.periods.protocols._WithNPVStatic.unindexed_cashflow`
with lazy exception handling.
Returns
-------
Result[float, Dual, Dual2, Variable]
"""
try:
v = self.unindexed_cashflow(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx_vol=fx_vol,
ir_vol=ir_vol,
fx=fx,
)
except Exception as e:
return Err(e)
else:
return Ok(v)
def cashflow(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ | _IRVolPricingParams = NoInput(0),
) -> DualTypes:
r"""
Calculate the cashflow for the *Period* with settlement currency adjustment
and indexation.
.. math::
I_r f(m_d)\mathbb{E^Q}[\bar{C}_t]
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows, if necessary.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
:class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
ir_vol: IRSabrSmile, optional
The IR volatility *smile* or *Cube* object used for determining Black calendar
day implied volatility values.
Returns
-------
float, Dual, Dual2, Variable
"""
rc = self.reference_cashflow(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx=fx,
fx_vol=fx_vol,
ir_vol=ir_vol,
)
return self.convert_deliverable(value=rc, fx=fx)
def try_cashflow(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ = NoInput(0),
) -> Result[DualTypes]:
r"""
Replicate :meth:`~rateslib.periods.protocols._WithNPVStatic.cashflow`
with lazy exception handling.
Returns
-------
Result[float, Dual, Dual2, Variable]
"""
try:
v = self.cashflow(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve,
fx_vol=fx_vol,
ir_vol=ir_vol,
fx=fx,
)
except Exception as e:
return Err(e)
else:
return Ok(v)
def immediate_local_npv(
self,
*,
rate_curve: CurveOption_ = NoInput(0),
index_curve: _BaseCurve_ = NoInput(0),
disc_curve: _BaseCurve_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
fx_vol: _FXVolOption_ = NoInput(0),
ir_vol: _IRVolOption_ | _IRVolPricingParams = NoInput(0),
) -> DualTypes:
r"""
Calculate the NPV of the *Period* in local settlement currency.
This method does **not** adjust for ex-dividend and is an immediate measure according to,
.. math::
P_0 = v(m_t) I_r f(m_d) \mathbb{E^Q} [\bar{C}_t]
for non-deliverable delivery, :math:`m_d`, and index ratio, :math:`I_r`.
Parameters
----------
rate_curve: _BaseCurve or dict of such indexed by string tenor, optional
Used to forecast floating period rates, if necessary.
index_curve: _BaseCurve, optional
Used to forecast index values for indexation, if necessary.
disc_curve: _BaseCurve, optional
Used to discount cashflows.
fx: FXForwards, optional
The :class:`~rateslib.fx.FXForwards` object used for forecasting the
``fx_fixing`` for deliverable cashflows, if necessary. Or, an
class:`~rateslib.fx.FXRates` object purely for immediate currency conversion.
fx_vol: FXDeltaVolSmile, FXSabrSmile, FXDeltaVolSurface, FXSabrSurface, optional
The FX volatility *Smile* or *Surface* object used for determining Black calendar
day implied volatility values.
ir_vol: IRSabrSmile, optional
The IR volatility *smile* or *Cube* object used for determining Black calendar
day implied volatility values.
Returns
-------
float, Dual, Dual2, Variable
"""
# dc_res = _try_disc_required_maybe_from_curve(curve=rate_curve, disc_curve=disc_curve)
# if isinstance(dc_res, Err):
# return dc_res
# disc_curve_: _BaseCurve = dc_res.unwrap()
disc_curve_ = _disc_required_maybe_from_curve(curve=rate_curve, disc_curve=disc_curve)
if self.settlement_params.payment < disc_curve_.nodes.initial:
# payment date is in the past
return 0.0
c = self.cashflow(
rate_curve=rate_curve,
index_curve=index_curve,
disc_curve=disc_curve_,
fx_vol=fx_vol,
ir_vol=ir_vol,
fx=fx,
)
return c * disc_curve_[self.settlement_params.payment]
================================================
FILE: python/rateslib/periods/utils.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import warnings
from datetime import datetime
from typing import TYPE_CHECKING
import rateslib.errors as err
from rateslib.curves._parsers import _validate_obj_not_no_input
from rateslib.curves.curves import _BaseCurve
from rateslib.enums.generics import Err, NoInput, Ok, Result
from rateslib.enums.parameters import FXDeltaMethod, OptionPricingModel
from rateslib.fx import FXForwards, FXRates
from rateslib.instruments.protocols.pricing import _Curves
from rateslib.volatility import (
FXDeltaVolSmile,
FXDeltaVolSurface,
FXSabrSmile,
FXSabrSurface,
_BaseIRCube,
_BaseIRSmile,
)
from rateslib.volatility.ir.utils import _IRVolPricingParams
if TYPE_CHECKING:
from rateslib.local_types import (
FX_,
IRS,
Any,
CurveOption_,
DualTypes,
FXForwards_,
_BaseCurve_,
_FXVolOption_,
_IRVolOption_,
datetime_,
str_,
)
def _maybe_local(
value: DualTypes,
local: bool,
currency: str,
fx: FXForwards_,
base: str_,
forward: datetime_,
) -> dict[str, DualTypes] | DualTypes:
"""
Return NPVs in scalar form or dict form.
"""
if local:
return {currency: value}
else:
return _maybe_fx_converted(
value=value, currency=currency, fx=fx, base=base, forward=forward
)
def _maybe_fx_converted(
value: DualTypes,
currency: str,
fx: FXForwards_,
base: str_,
forward: datetime_,
) -> DualTypes:
"""Take an input Value and maybe FX convert it depending on the inputs"""
fx_, base = _get_immediate_fx_scalar_and_base(currency=currency, fx=fx, base=base)
if isinstance(forward, datetime) and base != currency:
fx_ = fx.rate(f"{currency}{base}", settlement=forward) # type: ignore[union-attr]
return value * fx_
def _get_immediate_fx_scalar_and_base(
currency: str,
fx: FXForwards_,
base: str_,
) -> tuple[DualTypes, str]:
"""
From a local currency and potentially FX Objects determine the conversion rate between
`currency` and `base`. If `base` is not given it is set as `currency` and the returned
FX rate is 1.0
"""
if isinstance(base, NoInput) or base is None:
if isinstance(fx, NoInput | FXRates | FXForwards):
return 1.0, currency
else: # fx is DualTypes
if abs(fx - 1.0) < 1e-10: # type: ignore[operator]
return fx, currency # type: ignore[return-value] # base is assumed
else:
warnings.warn(
"It is not best practice to provide `fx` as numeric since this can "
"cause errors of output when dealing with multi-currency derivatives,\n"
"and it also fails to preserve FX rate sensitivity in calculations.\n"
"Instead, supply a 'base' currency and use an "
"FXRates or, for best practice, an FXForwards object.\n"
f"Reformulate: [fx={fx}, base=None] -> "
f"[fx=FXRates({{'{currency}bas': {fx}}}), base='bas'].",
UserWarning,
)
return fx, "Unspecified" # type: ignore[return-value] # base is unknown
else: # base is str
if isinstance(fx, NoInput):
if base != currency:
raise ValueError(
f"`base` ({base}) cannot be requested without supplying `fx` as a "
"valid FXRates or FXForwards object to convert from "
f"currency ({currency}).\n"
"If you are using a `Solver` with multi-currency instruments have you "
"forgotten to attach the FXForwards in the solver's `fx` argument?",
)
return 1.0, currency
elif isinstance(fx, FXRates | FXForwards):
if base == currency:
return 1.0, currency
else:
return fx.rate(pair=f"{currency}{base}"), base
else: # FX is DualTypes
if abs(fx - 1.0) < 1e-10: # type: ignore[operator]
pass # no warning when fx == 1.0
elif base == currency:
raise ValueError(
"`fx` is given as numeric when `base` and `currency` are the same but the value"
"is not equal to 1.0, which it must be by definition."
)
else:
warnings.warn(
f"Supplying `fx` as numeric is ambiguous, particularly with "
f"multi-currency Instruments, and may lead to forced errors. `base` ({base}) "
f"will also be ignored.\n"
f"Future versions will likely remove this ability altogether.\n"
f"Best practice is to supply `fx` as an FXRates (or FXForwards) object.\n"
f"Reformulate the arguments directly: [fx={fx}, base='{base}'] -> "
f"[fx=FXRates({{'{currency}{base}': {fx}}}), base='{base}'].",
DeprecationWarning,
)
return fx, base # type: ignore[return-value]
def _get_ir_vol_value_and_forward_maybe_from_obj(
ir_vol: _IRVolOption_ | _IRVolPricingParams,
rate_curve: CurveOption_,
index_curve: _BaseCurve_,
strike: DualTypes | str,
irs: IRS,
expiry: datetime,
tenor: datetime,
t_e: DualTypes,
) -> _IRVolPricingParams:
"""
Return the following pring requirements:
Returns
-------
output: tuple[DualTypes, DualTypes, DualTypes]
The forward IRS rate exc. shift, the Black shifted vol, the shift to add to `f` and `k`.
"""
if isinstance(ir_vol, _IRVolPricingParams):
return ir_vol
# IROption can have a `strike` that is NoInput, however this internal function should
# only be performed after a `strike` has been set to number, temporarily or otherwise.
f_ = irs.rate(
curves=_Curves(
disc_curve=index_curve, leg2_rate_curve=rate_curve, leg2_disc_curve=index_curve
)
)
if isinstance(strike, NoInput):
k_: DualTypes = f_
elif isinstance(strike, str):
if strike.lower() == "atm":
k_ = f_
elif "bps" in strike:
k_ = f_ + float(strike[:-3]) / 100.0
else:
raise ValueError("`strike` as string must be either 'atm' or '{}bps'.")
else:
k_ = strike
if isinstance(ir_vol, _BaseIRSmile | _BaseIRCube):
# ir_vol is a Vol object
return ir_vol.get_from_strike(k=k_, f=f_, expiry=expiry, tenor=tenor)
elif isinstance(ir_vol, NoInput):
raise ValueError("`ir_vol` cannot be NoInput when provided to pricing function.")
else:
# vol given as scalar interpolated as Black Vol Zero shifted
return _IRVolPricingParams(
vol=ir_vol, f=f_, k=k_, shift=0.0, pricing_model=OptionPricingModel.Black76, t_e=t_e
)
def _get_fx_vol_value_maybe_from_obj(
fx_vol: _FXVolOption_,
fx: FXForwards,
rate_curve: _BaseCurve_,
strike: DualTypes,
pair: str,
delivery: datetime,
expiry: datetime,
) -> DualTypes:
"""Return a volatility for the option from a given FX Vol object.
``rate_curve`` is used as the curve on the LHS rate_curve to convert between spot and delivery
delta. This is not a 'discount curve' because it is not used to discount cashflows.
"""
# FXOption can have a `strike` that is NoInput, however this internal function should
# only be performed after a `strike` has been set to number, temporarily or otherwise.
if isinstance(fx_vol, FXDeltaVolSmile | FXDeltaVolSurface):
# fx_vol is a Vol object
rate_curve_: _BaseCurve = _validate_base_curve(rate_curve)
spot = fx.pairs_settlement[pair]
f = fx.rate(pair, delivery)
_: tuple[Any, DualTypes, Any] = fx_vol.get_from_strike(
k=strike,
f=f,
z_w=rate_curve_[delivery] / rate_curve_[spot],
expiry=expiry,
)
vol_: DualTypes = _[1]
elif isinstance(fx_vol, FXSabrSmile | FXSabrSurface):
# fx_vol is a Vol object
f = fx.rate(pair, delivery)
_ = fx_vol.get_from_strike(
k=strike,
f=f,
expiry=expiry,
)
vol_ = _[1]
elif isinstance(fx_vol, NoInput):
raise ValueError("`fx_vol` cannot be NoInput when provided to pricing function.")
else:
# fx_vol is a given scalar
vol_ = fx_vol
return vol_
def _get_vol_smile_or_value(vol: _FXVolOption_, expiry: datetime) -> FXDeltaVolSmile | DualTypes:
if isinstance(vol, FXDeltaVolSurface):
return vol.get_smile(expiry)
else:
return _validate_obj_not_no_input(vol, "vol") # type: ignore[return-value]
def _get_vol_smile_or_raise(vol: _FXVolOption_, expiry: datetime) -> FXDeltaVolSmile:
if isinstance(vol, FXDeltaVolSurface):
return vol.get_smile(expiry)
elif isinstance(vol, FXDeltaVolSmile):
return vol
else:
raise ValueError("Must supply FXDeltaVolSmile/Surface as `vol` not numeric value.")
def _get_vol_delta_type(vol: _FXVolOption_, default_delta_type: FXDeltaMethod) -> FXDeltaMethod:
if not isinstance(vol, FXDeltaVolSmile | FXDeltaVolSurface):
return default_delta_type
else:
return vol.meta.delta_type
def _validate_fx_as_forwards(fx: FX_) -> FXForwards:
return _try_validate_fx_as_forwards(fx).unwrap()
def _try_validate_fx_as_forwards(fx: FX_) -> Result[FXForwards]:
if isinstance(fx, NoInput):
return Err(ValueError(err.VE_NEEDS_FX_FORWARDS))
elif not isinstance(fx, FXForwards):
raise ValueError(err.VE_NEEDS_FX_FORWARDS_BAD_TYPE.format(type(fx).__name__))
else:
return Ok(fx)
def _try_validate_base_curve(curve: CurveOption_) -> Result[_BaseCurve]:
if not isinstance(curve, _BaseCurve):
return Err(
TypeError(
"`curves` have not been supplied correctly.\n"
f"A _BaseCurve type object is required. Got: {type(curve).__name__}"
)
)
return Ok(curve)
def _validate_base_curve(curve: CurveOption_) -> _BaseCurve:
if not isinstance(curve, _BaseCurve):
raise TypeError(
"`curves` have not been supplied correctly.\n"
f"A _BaseCurve type object is required. Got: {type(curve).__name__}"
)
return curve
def _validate_credit_curves(
rate_curve: CurveOption_, disc_curve: CurveOption_
) -> Result[tuple[_BaseCurve, _BaseCurve]]:
# used by Credit type Periods to narrow inputs
if not isinstance(rate_curve, _BaseCurve):
return Err(
TypeError(
"`curves` have not been supplied correctly.\n"
"`curve`for a CreditPremiumPeriod must be supplied as a Curve type."
)
)
if not isinstance(disc_curve, _BaseCurve):
return Err(
TypeError(
"`curves` have not been supplied correctly.\n"
"`disc_curve` for a CreditPremiumPeriod must be supplied as a Curve type."
)
)
return Ok((rate_curve, disc_curve))
def _get_rfr_curve_from_dict(d: dict[str, _BaseCurve]) -> _BaseCurve:
for s in ["rfr", "RFR", "Rfr"]:
try:
ret: _BaseCurve = d[s]
except KeyError:
continue
else:
return ret
raise ValueError(
"A `rate_curve` supplied as dict to an RFR based calculation must contain a key "
"entry 'rfr'."
)
================================================
FILE: python/rateslib/py.typed
================================================
================================================
FILE: python/rateslib/rs.pyi
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from collections.abc import Sequence
from datetime import datetime
from typing import TYPE_CHECKING, Any
from typing_extensions import Self
if TYPE_CHECKING:
from rateslib.local_types import (
Arr1dF64,
Arr2dF64,
CalTypes,
CurveInterpolator,
DualTypes,
Number,
)
class ADOrder:
Zero: ADOrder
One: ADOrder
Two: ADOrder
class LegIndexBase:
Initial: LegIndexBase
PeriodOnPeriod: LegIndexBase
class Imm:
Wed3_HMUZ: Imm
Wed3: Imm
Day20_HMUZ: Imm
Day20_HU: Imm
Day20_MZ: Imm
Day20: Imm
Fri2_HMUZ: Imm
Fri2: Imm
Wed1_Post9: Imm
Wed1_Post9_HMUZ: Imm
Eom: Imm
Leap: Imm
Som: Imm
def next(self, date: datetime) -> datetime: ...
def validate(self, date: datetime) -> bool: ...
def get(self, year: int, month: int) -> datetime: ...
def to_json(self) -> str: ...
class _Scheduling:
def unext(self, udate: datetime) -> datetime: ...
def next(self, date: datetime) -> datetime: ...
def uprevious(self, udate: datetime) -> datetime: ...
def previous(self, date: datetime) -> datetime: ...
def uregular(self, ueffective: datetime, utermination: datetime) -> list[datetime]: ...
def infer_ustub(
self, ueffective: datetime, utermination: datetime, short: bool, front: bool
) -> datetime: ...
def periods_per_annum(self) -> float: ...
class _FrequencyMixins:
def string(self) -> str: ...
def is_stub(self, ustart: datetime, uend: datetime, front: bool) -> bool: ...
def is_uregular(self, ueffective: datetime, utermination: datetime) -> bool: ...
class Frequency(_Scheduling, _FrequencyMixins):
class CalDays(Frequency):
number: int
def __init__(self, number: int) -> None: ...
class BusDays(Frequency):
number: int
calendar: CalTypes
def __init__(self, number: int, calendar: CalTypes) -> None: ...
class Months(Frequency):
number: int
roll: RollDay | None
def __init__(self, number: int, roll: RollDay | None) -> None: ...
class Zero(Frequency): ...
def to_json(self) -> str: ...
class StubInference:
ShortFront: StubInference
LongFront: StubInference
ShortBack: StubInference
LongBack: StubInference
NeitherSide: StubInference
def to_json(self) -> str: ...
class Schedule:
ueffective: datetime
utermination: datetime
ufront_stub: datetime | None
uback_stub: datetime | None
frequency: Frequency
calendar: CalTypes
accrual_adjuster: Adjuster
payment_adjuster: Adjuster
payment_adjuster2: Adjuster
payment_adjuster3: Adjuster
uschedule: list[datetime]
aschedule: list[datetime]
pschedule: list[datetime]
pschedule2: list[datetime]
pschedule3: list[datetime]
def __init__(
self,
effective: datetime,
termination: datetime,
frequency: Frequency,
calendar: CalTypes,
accrual_adjuster: Adjuster,
payment_adjuster: Adjuster,
payment_adjuster2: Adjuster,
payment_adjuster3: Adjuster | None,
front_stub: datetime | None,
back_stub: datetime | None,
eom: bool,
stub_inference: StubInference | None,
) -> None: ...
def is_regular(self) -> bool: ...
def to_json(self) -> str: ...
class Convention:
Act365F: Convention
Act360: Convention
Act364: Convention
Act365_25: Convention
Thirty360: Convention
ThirtyU360: Convention
ThirtyE360: Convention
ThirtyE360ISDA: Convention
YearsAct365F: Convention
YearsAct360: Convention
YearsMonths: Convention
One: Convention
ActActISDA: Convention
ActActICMA: Convention
Bus252: Convention
ActActICMAStubAct365F: Convention
def dcf(
self,
start: datetime,
end: datetime,
termination: datetime | None,
frequency: Frequency | None,
stub: bool | None,
calendar: CalTypes | None,
adjuster: Adjuster | None,
) -> float: ...
def to_json(self) -> str: ...
class Modifier:
P: Modifier
F: Modifier
ModP: Modifier
ModF: Modifier
Act: Modifier
class RollDay:
class Day(RollDay):
_0: int
def __init__(self, val: int) -> None: ...
class IMM(RollDay): ...
def to_json(self) -> str: ...
class _Adjustment:
def adjust(self, date: datetime, calendar: CalTypes) -> datetime: ...
def reverse(self, date: datetime, calendar: CalTypes) -> list[datetime]: ...
def adjusts(self, udates: list[datetime], calendars: CalTypes) -> list[datetime]: ...
class Adjuster(_Adjustment):
class Actual(Adjuster): ...
class Following(Adjuster): ...
class ModifiedFollowing(Adjuster): ...
class FollowingSettle(Adjuster): ...
class ModifiedFollowingSettle(Adjuster): ...
class Previous(Adjuster): ...
class ModifiedPrevious(Adjuster): ...
class PreviousSettle(Adjuster): ...
class ModifiedPreviousSettle(Adjuster): ...
class FollowingExLast(Adjuster): ...
class FollowingExLastSettle(Adjuster): ...
class BusDaysLagSettleInAdvance(Adjuster):
number: int
def __init__(self, number: int) -> None: ...
class BusDaysLagSettle(Adjuster):
number: int
def __init__(self, number: int) -> None: ...
class CalDaysLagSettle(Adjuster):
number: int
def __init__(self, number: int) -> None: ...
def to_json(self) -> str: ...
class _MethodParam:
def method_param(self) -> int: ...
class FloatFixingMethod(_MethodParam):
class RFRPaymentDelay(FloatFixingMethod): ...
class RFRObservationShift(FloatFixingMethod):
param: int
def __init__(self, param: int) -> None: ...
class RFRLockout(FloatFixingMethod):
param: int
def __init__(self, param: int) -> None: ...
class RFRLookback(FloatFixingMethod):
param: int
def __init__(self, param: int) -> None: ...
class RFRPaymentDelayAverage(FloatFixingMethod): ...
class RFRObservationShiftAverage(FloatFixingMethod):
param: int
def __init__(self, param: int) -> None: ...
class RFRLockoutAverage(FloatFixingMethod):
param: int
def __init__(self, param: int) -> None: ...
class RFRLookbackAverage(FloatFixingMethod):
param: int
def __init__(self, param: int) -> None: ...
class IBOR(FloatFixingMethod):
param: int
def __init__(self, param: int) -> None: ...
def to_json(self) -> str: ...
class _Shift:
def shift(self) -> int: ...
class IROptionMetric(_Shift):
class NormalVol(IROptionMetric): ...
class PercentNotional(IROptionMetric): ...
class Premium(IROptionMetric): ...
class BlackVolShift(IROptionMetric):
param: int
def __init__(self, param: int) -> None: ...
def to_json(self) -> str: ...
class CalendarManager:
def add(self, name: str, calendar: Cal) -> None: ...
def pop(self, name: str) -> Cal | UnionCal: ...
def get(self, name: str) -> NamedCal: ...
def keys(self) -> list[str]: ...
class _DateRoll:
def add_bus_days(self, date: datetime, days: int, settlement: bool) -> datetime: ...
def add_cal_days(self, date: datetime, days: int, adjuster: Adjuster) -> datetime: ...
def add_months(
self,
date: datetime,
months: int,
adjuster: Adjuster,
roll: RollDay | None,
) -> datetime: ...
def bus_date_range(self, start: datetime, end: datetime) -> list[datetime]: ...
def cal_date_range(self, start: datetime, end: datetime) -> list[datetime]: ...
def is_bus_day(self, date: datetime) -> bool: ...
def is_non_bus_day(self, date: datetime) -> bool: ...
def is_settlement(self, date: datetime) -> bool: ...
def lag_bus_days(self, date: datetime, days: int, settlement: bool) -> datetime: ...
def to_json(self) -> str: ...
def print(self, year: int, month: int | None = None) -> str: ...
def print_compare(self, comparator: Cal | UnionCal | NamedCal, year: int) -> str: ...
class _CalendarAdjustment:
def adjust(self, date: datetime, adjuster: Adjuster) -> datetime: ...
def adjusts(self, dates: list[datetime], adjuster: Adjuster) -> list[datetime]: ...
class Cal(_DateRoll, _CalendarAdjustment):
def __init__(self, rules: list[datetime], week_mask: list[int]) -> None: ...
@classmethod
def from_name(cls, name: str) -> Cal: ...
class UnionCal(_DateRoll, _CalendarAdjustment):
calendars: list[Cal] = ...
settlement_calendars: list[Cal] = ...
@classmethod
def from_name(cls, name: str) -> UnionCal: ...
def __init__(
self,
calendars: list[Cal],
settlement_calendars: list[Cal] | None,
) -> None: ...
class NamedCal(_DateRoll, _CalendarAdjustment):
inner: UnionCal | Cal = ...
name: str = ...
def __init__(self, name: str) -> None: ...
def inner_ptr_eq(self, other: NamedCal) -> bool: ...
class Ccy:
def __init__(self, name: str) -> None: ...
name: str = ...
class FXRate:
def __init__(
self, lhs: str, rhs: str, rate: DualTypes, settlement: datetime | None
) -> None: ...
rate: DualTypes = ...
ad: int = ...
settlement: datetime = ...
pair: str = ...
def __repr__(self) -> str: ...
def __eq__(self, other: FXRate) -> bool: ... # type: ignore[override]
class FXRates:
def __init__(self, fx_rates: list[FXRate], base: Ccy | None) -> None: ...
def __copy__(self) -> FXRates: ...
fx_rates: list[FXRate] = ...
currencies: list[Ccy] = ...
ad: int = ...
base: Ccy = ...
fx_vector: list[DualTypes] = ...
fx_array: list[list[DualTypes]] = ...
def get_ccy_index(self, currency: Ccy) -> int | None: ...
def rate(self, lhs: Ccy, rhs: Ccy) -> DualTypes | None: ...
def update(self, fx_rates: list[FXRate]) -> None: ...
def set_ad_order(self, ad: ADOrder) -> None: ...
def to_json(self) -> str: ...
class _DualOps:
def __eq__(self, other: Number) -> bool: ... # type: ignore[override]
def __lt__(self, other: Number) -> bool: ...
def __le__(self, other: Number) -> bool: ...
def __gt__(self, other: Number) -> bool: ...
def __ge__(self, other: Number) -> bool: ...
def __neg__(self) -> Self: ...
def __add__(self, other: Number) -> Self: ...
def __radd__(self, other: Number) -> Self: ...
def __sub__(self, other: Number) -> Self: ...
def __rsub__(self, other: Number) -> Self: ...
def __mul__(self, other: Number) -> Self: ...
def __rmul__(self, other: Number) -> Self: ...
def __truediv__(self, other: Number) -> Self: ...
def __rtruediv__(self, other: Number) -> Self: ...
def __pow__(self, power: Number, modulo: int | None = None) -> Self: ...
def __exp__(self) -> Self: ...
def __abs__(self) -> float: ...
def __log__(self) -> Self: ...
def __norm_cdf__(self) -> Self: ...
def __norm_inv_cdf__(self) -> Self: ...
def __float__(self) -> float: ...
def to_json(self) -> str: ...
def ptr_eq(self, other: Self) -> bool: ...
def __repr__(self) -> str: ...
def grad1(self, vars: Sequence[str]) -> Arr1dF64: ... # noqa: A002
def grad2(self, vars: Sequence[str]) -> Arr2dF64: ... # noqa: A002
class Dual(_DualOps):
def __init__(self, real: float, vars: Sequence[str], dual: Sequence[float] | Arr1dF64): ... # noqa: A002
real: float = ...
vars: list[str] = ...
dual: Arr1dF64 = ...
@classmethod
def vars_from(
cls,
other: Dual,
real: float,
vars: Sequence[str], # noqa: A002
dual: Sequence[float] | Arr1dF64,
) -> Dual: ...
def to_dual2(self) -> Dual2: ...
class Dual2(_DualOps):
def __init__(
self,
real: float,
vars: Sequence[str], # noqa: A002
dual: Sequence[float] | Arr1dF64,
dual2: Sequence[float],
): ...
real: float = ...
vars: list[str] = ...
dual: Arr1dF64 = ...
dual2: Arr2dF64 = ...
@classmethod
def vars_from(
cls,
other: Dual2,
real: float,
vars: list[str], # noqa: A002
dual: list[float] | Arr1dF64,
dual2: list[float] | Arr1dF64,
) -> Dual2: ...
def grad1_manifold(self, vars: Sequence[str]) -> list[Dual2]: ... # noqa: A002
def to_dual(self) -> Dual: ...
def _dsolve1(a: list[Any], b: list[Any], allow_lsq: bool) -> list[Dual]: ...
def _dsolve2(a: list[Any], b: list[Any], allow_lsq: bool) -> list[Dual2]: ...
def _fdsolve1(a: Arr2dF64, b: list[Any], allow_lsq: bool) -> list[Dual]: ...
def _fdsolve2(a: Arr2dF64, b: list[Any], allow_lsq: bool) -> list[Dual2]: ...
class PPSplineF64:
n: int = ...
k: int = ...
t: list[float] = ...
c: list[float] | None = ...
def __init__(self, k: int, t: list[float], c: list[float] | None) -> None: ...
def csolve(
self, tau: list[float], y: list[float], left_n: int, right_n: int, allow_lsq: bool
) -> None: ...
def ppev_single(self, x: Number) -> float: ...
def ppev_single_dual(self, x: Number) -> Dual: ...
def ppev_single_dual2(self, x: Number) -> Dual2: ...
def ppev(self, x: list[float]) -> list[float]: ...
def ppdnev_single(self, x: Number, m: int) -> float: ...
def ppdnev_single_dual(self, x: Number, m: int) -> Dual: ...
def ppdnev_single_dual2(self, x: Number, m: int) -> Dual2: ...
def ppdnev(self, x: list[float], m: int) -> list[float]: ...
def bsplev(self, x: list[float], i: int) -> list[float]: ...
def bspldnev(self, x: list[float], i: int, m: int) -> list[float]: ...
def bsplmatrix(self, tau: list[float], left_n: int, right_n: int) -> Arr2dF64: ...
def __eq__(self, other: PPSplineF64) -> bool: ... # type: ignore[override]
def __copy__(self) -> PPSplineF64: ...
def to_json(self) -> str: ...
class PPSplineDual:
n: int = ...
k: int = ...
t: list[float] = ...
c: list[Dual] | None = ...
def __init__(self, k: int, t: list[float], c: list[Dual] | None) -> None: ...
def csolve(
self, tau: list[float], y: list[Dual], left_n: int, right_n: int, allow_lsq: bool
) -> None: ...
def ppev_single(self, x: Number) -> Dual: ...
def ppev_single_dual(self, x: Number) -> Dual: ...
def ppev_single_dual2(self, x: Number) -> Dual2: ...
def ppev(self, x: list[float]) -> list[Dual]: ...
def ppdnev_single(self, x: Number, m: int) -> Dual: ...
def ppdnev_single_dual(self, x: Number, m: int) -> Dual: ...
def ppdnev_single_dual2(self, x: Number, m: int) -> Dual2: ...
def ppdnev(self, x: list[float], m: int) -> list[Dual]: ...
def bsplev(self, x: list[float], i: int) -> list[Dual]: ...
def bspldnev(self, x: list[float], i: int, m: int) -> list[Dual]: ...
def bsplmatrix(self, tau: list[float], left_n: int, right_n: int) -> Arr2dF64: ...
def __eq__(self, other: PPSplineDual) -> bool: ... # type: ignore[override]
def __copy__(self) -> PPSplineDual: ...
def to_json(self) -> str: ...
class PPSplineDual2:
n: int = ...
k: int = ...
t: list[float] = ...
c: list[Dual2] | None = ...
def __init__(self, k: int, t: list[float], c: list[Dual2] | None) -> None: ...
def csolve(
self, tau: list[float], y: list[Dual2], left_n: int, right_n: int, allow_lsq: bool
) -> None: ...
def ppev_single(self, x: Number) -> Dual2: ...
def ppev_single_dual(self, x: Number) -> Dual: ...
def ppev_single_dual2(self, x: Number) -> Dual2: ...
def ppev(self, x: list[float]) -> list[Dual2]: ...
def ppdnev_single(self, x: Number, m: int) -> Dual2: ...
def ppdnev_single_dual(self, x: Number, m: int) -> Dual: ...
def ppdnev_single_dual2(self, x: Number, m: int) -> Dual2: ...
def ppdnev(self, x: list[float], m: int) -> list[Dual2]: ...
def bsplev(self, x: list[float], i: int) -> list[Dual2]: ...
def bspldnev(self, x: list[float], i: int, m: int) -> list[Dual2]: ...
def bsplmatrix(self, tau: list[float], left_n: int, right_n: int) -> Arr2dF64: ...
def __eq__(self, other: PPSplineDual2) -> bool: ... # type: ignore[override]
def __copy__(self) -> PPSplineDual2: ...
def to_json(self) -> str: ...
def bsplev_single(x: float, i: int, k: int, t: list[float], org_k: int | None) -> float: ...
def bspldnev_single(
x: float, i: int, k: int, t: list[float], m: int, org_k: int | None
) -> float: ...
def from_json(json: str) -> Any: ...
class FlatBackwardInterpolator:
def __init__(self) -> None: ...
class FlatForwardInterpolator:
def __init__(self) -> None: ...
class LinearInterpolator:
def __init__(self) -> None: ...
class LogLinearInterpolator:
def __init__(self) -> None: ...
class LinearZeroRateInterpolator:
def __init__(self) -> None: ...
class NullInterpolator:
def __init__(self) -> None: ...
class Curve:
modifier: Modifier = ...
convention: Convention = ...
interpolation: str = ...
ad: ADOrder = ...
id: str = ...
nodes: dict[datetime, Number] = ...
def __init__(
self,
nodes: dict[datetime, Number],
interpolator: CurveInterpolator,
ad: ADOrder,
id: str, # noqa: A002
convention: Convention,
modifier: Modifier,
calendar: CalTypes,
index_base: float | None,
) -> None: ...
def to_json(self) -> str: ...
def __eq__(self, other: Curve) -> bool: ... # type: ignore[override]
def __getitem__(self, date: datetime) -> Number: ...
def set_ad_order(self, ad: ADOrder) -> None: ...
def index_value(self, date: datetime) -> Number: ...
def _get_convention_str(convention: Convention) -> str: ...
def _get_modifier_str(modifier: Modifier) -> str: ...
def index_left_f64(list_input: list[float], value: float, left_count: int | None = None) -> int: ...
def _sabr_x0(
k: Number,
f: Number,
t: Number,
a: Number,
b: Number,
p: Number,
v: Number,
derivative: int = 0,
) -> tuple[Number, Number | None]: ...
def _sabr_x1(
k: Number,
f: Number,
t: Number,
a: Number,
b: Number,
p: Number,
v: Number,
derivative: int = 0,
) -> tuple[Number, Number | None]: ...
def _sabr_x2(
k: Number,
f: Number,
t: Number,
a: Number,
b: Number,
p: Number,
v: Number,
derivative: int = 0,
) -> tuple[Number, Number | None]: ...
================================================
FILE: python/rateslib/scheduling/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from rateslib.rs import (
Adjuster,
Cal,
CalendarManager,
Frequency,
Imm,
NamedCal,
RollDay,
StubInference,
UnionCal,
)
from rateslib.scheduling.calendars import get_calendar
from rateslib.scheduling.convention import Convention
from rateslib.scheduling.dcfs import dcf
from rateslib.scheduling.frequency import add_tenor
from rateslib.scheduling.imm import get_imm, next_imm
from rateslib.scheduling.schedule import Schedule
Imm.__doc__ = """
Enumerable type for International Money-Market (IMM) date definitions.
For further information on these descriptors see the Rust low level docs
for :rust:`Imm `.
"""
StubInference.__doc__ = """
Enumerable type for :class:`~rateslib.scheduling.Schedule` stub inference.
"""
Adjuster.__doc__ = """
Enumerable type for date adjustment rules.
.. rubric:: Variants
.. ipython:: python
:suppress:
from rateslib.rs import Adjuster
variants = [item for item in Adjuster.__dict__ if \\
"__" != item[:2] and \\
item not in ['adjust', 'adjusts', 'to_json', 'reverse'] \
]
.. ipython:: python
variants
"""
RollDay.__doc__ = """
Enumerable type for roll days.
.. rubric:: Variants
.. ipython:: python
:suppress:
from rateslib.rs import RollDay
variants = ["Day(int)", "IMM()"]
.. ipython:: python
variants
"""
Frequency.__doc__ = """
Enumerable type for a scheduling frequency.
.. rubric:: Variants
.. ipython:: python
:suppress:
from rateslib.rs import Frequency
variants = ["BusDays(int, calendar)", "CalDays(int)", "Months(int, rollday | None)", "Zero()"]
.. ipython:: python
variants
"""
Cal.__doc__ = """
A business day calendar defined by weekends and a holiday list.
Parameters
----------
holidays: list[datetime]
A list of specific non-business days.
week_mask: list[int]
A list of days defined as weekends, e.g. [5,6] for Saturday and Sunday.
"""
UnionCal.__doc__ = """
A calendar defined by a business day intersection of multiple :class:`~rateslib.scheduling.Cal`
objects.
Parameters
----------
calendars: list[Cal]
A list of :class:`~rateslib.scheduling.Cal` objects whose combination will define the
business and non-business days.
settlement_calendars: list[Cal]
A list of :class:`~rateslib.scheduling.Cal` objects whose combination will define the
settleable and non-settleable days.
"""
NamedCal.__doc__ = """
A wrapped :class:`~rateslib.scheduling.Cal` or
:class:`~rateslib.scheduling.UnionCal` constructed with a string parsing syntax.
This instance can only be constructed from named :class:`~rateslib.scheduling.Cal` objects that
have already been populated to the ``calendars`` :class:`~rateslib.scheduling.CalendarManager`.
Each *NamedCal* uses data shared in memory and does **not** reconstruct or copy the entire
list of holidays for every instantiation of this class.
Parameters
----------
name: str
The names of the calendars to populate the ``calendars`` and ``settlement_calendars``
arguments of a :class:`~rateslib.scheduling.UnionCal`. The individual calendar names must
pre-exist in the :class:`~rateslib.scheduling.CalendarManager`. The pipe operator
separates the two fields.
Examples
--------
.. ipython:: python
:suppress:
from rateslib.scheduling import NamedCal, UnionCal
.. ipython:: python
named_cal = NamedCal("ldn,tgt|fed")
assert isinstance(named_cal.inner, UnionCal)
assert len(named_cal.inner.calendars) == 2
assert len(named_cal.inner.settlement_calendars) == 1
"""
__all__ = (
"Schedule",
"Cal",
"NamedCal",
"UnionCal",
"CalendarManager",
"Adjuster",
"Convention",
"Frequency",
"Imm",
"RollDay",
"StubInference",
"add_tenor",
"get_calendar",
"get_imm",
"next_imm",
"dcf",
)
================================================
FILE: python/rateslib/scheduling/adjuster.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib.enums.generics import NoInput
from rateslib.rs import Adjuster
if TYPE_CHECKING:
from rateslib.local_types import str_
_A = { # Provides the map of all available string to Adjuster conversions.
"NONESETTLE": Adjuster.Actual(),
"NONE": Adjuster.Actual(),
"F": Adjuster.Following(),
"P": Adjuster.Previous(),
"MF": Adjuster.ModifiedFollowing(),
"MP": Adjuster.ModifiedPrevious(),
"FSETTLE": Adjuster.FollowingSettle(),
"PSETTLE": Adjuster.PreviousSettle(),
"MFSETTLE": Adjuster.ModifiedFollowingSettle(),
"MPSETTLE": Adjuster.ModifiedPreviousSettle(),
"FEX": Adjuster.FollowingExLast(),
"FEXSETTLE": Adjuster.FollowingExLastSettle(),
}
def _get_adjuster_none(adjuster: Adjuster | int | str_) -> Adjuster | None:
if isinstance(adjuster, NoInput):
return None
else:
return _get_adjuster(adjuster)
def _get_adjuster(adjuster: int | str | Adjuster) -> Adjuster:
"""Convert a str such as 'F', 'MF' or '2B' or '5D' to an Adjuster."""
if isinstance(adjuster, Adjuster):
return adjuster
elif isinstance(adjuster, int):
# convert to business days
adjuster = f"{adjuster}B"
adjuster = adjuster.upper()
if adjuster[-1] == "B":
return Adjuster.BusDaysLagSettle(int(adjuster[:-1]))
elif adjuster[-1] == "D":
return Adjuster.CalDaysLagSettle(int(adjuster[:-1]))
else:
return _A[adjuster]
def _convert_to_adjuster(modifier: str | Adjuster, settlement: bool, mod_days: bool) -> Adjuster:
"""Convert a legacy `modifier` to an Adjuster with additional options.
If `modify days` is disallowed then MF -> F
"""
if isinstance(modifier, Adjuster):
return modifier
modifier = modifier.upper()
if not mod_days and modifier[0] == "M":
modifier = modifier[1:]
if settlement:
modifier = modifier + "SETTLE"
return _get_adjuster(modifier)
================================================
FILE: python/rateslib/scheduling/calendars.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import calendars
from rateslib.enums.generics import NoInput
from rateslib.scheduling.adjuster import _convert_to_adjuster
if TYPE_CHECKING:
from rateslib.local_types import CalInput, CalTypes, datetime
def get_calendar(
calendar: CalInput,
) -> CalTypes:
"""
Returns a calendar object, possible constructed by the
:class:`~rateslib.scheduling.CalendarManager`.
.. role:: red
.. role:: green
Parameters
----------
calendar : str, Cal, UnionCal, NamedCal, :red:`required`
If `str`, then the calendar is returned from pre-calculated values.
If a specific user defined calendar this is returned without modification.
Returns
-------
NamedCal, Cal, UnionCal
Notes
-----
Please see the :ref:`defaults ` section of the documentation to discover
which named calendars are base implemented to *rateslib*.
Combined calendars can be created with comma separated input, e.g. *"tgt,nyc"*. This would
be the typical calendar assigned to a cross-currency derivative such as a EUR/USD
cross-currency swap.
For short-dated, FX instrument date calculations a concept known as an
**associated settlement calendars** is introduced. This uses a secondary calendar to determine
if a calculated date is a valid settlement day, but it is not used in the determination
of tenor dates. For a EURUSD FX instrument the appropriate calendar combination is *"tgt|nyc"*.
For a GBPEUR FX instrument the appropriate calendar combination is *"ldn,tgt|nyc"*.
Examples
--------
.. ipython:: python
:suppress:
from rateslib import get_calendar, dt
.. ipython:: python
tgt_cal = get_calendar("tgt")
print(tgt_cal.print(2023, 5))
tgt_cal.add_bus_days(dt(2023, 1, 3), 5, True)
type(tgt_cal)
Calendars can be combined from the pre-existing names using comma separation.
.. ipython:: python
tgt_and_nyc_cal = get_calendar("tgt,nyc")
print(tgt_and_nyc_cal.print(2023, 5))
type(tgt_and_nyc_cal)
"""
if isinstance(calendar, str):
return calendars.get(calendar)
elif isinstance(calendar, NoInput):
return calendars.get("all")
else: # calendar is a Calendar object type
return calendar
def _get_years_and_months(d1: datetime, d2: datetime) -> tuple[int, int]:
"""
Get the whole number of years and months between two dates
"""
years: int = d2.year - d1.year
if (d2.month == d1.month and d2.day < d1.day) or (d2.month < d1.month):
years -= 1
months: int = (d2.month - d1.month) % 12
return years, months
def _adjust_date(
date: datetime,
modifier: str,
calendar: CalInput,
settlement: bool = True,
) -> datetime:
"""
Modify a date under specific rule.
Parameters
----------
date : datetime
The date to be adjusted.
modifier : str
The modification rule, in {"NONE", "F", "MF", "P", "MP"}. If *'NONE'* returns date.
calendar : calendar, optional
The holiday calendar object to use. Required only if `modifier` is not *'NONE'*.
If not given a calendar is created where every day including weekends is valid.
settlement : bool
Whether to also enforce the associated settlement calendar.
Returns
-------
datetime
"""
cal_ = get_calendar(calendar)
return _convert_to_adjuster(modifier, settlement, True).adjust(date, cal_)
def _is_day_type_tenor(tenor: str) -> bool:
tenor_ = tenor.upper()
return "D" in tenor_ or "B" in tenor_ or "W" in tenor_
================================================
FILE: python/rateslib/scheduling/convention.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib.rs import Convention
if TYPE_CHECKING:
pass
_CONVENTIONS_MAP: dict[str, Convention] = {
"ACT365F": Convention.Act365F,
"ACT365": Convention.Act365F,
"ACT360": Convention.Act360,
"ACT365_25": Convention.Act365_25,
"ACT364": Convention.Act364,
###
"30360": Convention.Thirty360,
"THIRTY360": Convention.Thirty360,
"360360": Convention.Thirty360,
"BONDBASIS": Convention.Thirty360,
"30E360": Convention.ThirtyE360,
"THIRTYE360": Convention.ThirtyE360,
"EUROBONDBASIS": Convention.ThirtyE360,
"30E360ISDA": Convention.ThirtyE360ISDA,
"THIRTYE360ISDA": Convention.ThirtyE360ISDA,
"30U360": Convention.ThirtyU360,
"THIRTYU360": Convention.ThirtyU360,
###
"ACT365F+": Convention.YearsAct365F,
"YEARSACT365F": Convention.YearsAct365F,
"ACT360+": Convention.YearsAct360,
"YEARSACT360": Convention.YearsAct360,
"1+": Convention.YearsMonths,
"YEARSMONTHS": Convention.YearsMonths,
###
"1": Convention.One,
"ONE": Convention.One,
###
"ACTACTISDA": Convention.ActActISDA,
"ACTACTICMA": Convention.ActActICMA,
"ACTACTISMA": Convention.ActActICMA,
"ACTACTBOND": Convention.ActActICMA,
###
"BUS252": Convention.Bus252,
###
"ACTACTICMA_STUB365F": Convention.ActActICMAStubAct365F,
"ACTACTICMASTUBACT365F": Convention.ActActICMAStubAct365F,
}
def _get_convention(convention: Convention | str) -> Convention:
"""Convert a user str input into a Convention enum."""
if isinstance(convention, Convention):
return convention
else:
try:
return _CONVENTIONS_MAP[convention.upper()]
except KeyError:
if convention.upper() == "ACTACT":
raise ValueError(
"`ActAct` must be directly specified as `ActActICMA` (most common for bonds) "
"or `ActActISDA` (rarely used)."
)
raise ValueError(f"`convention`: {convention}, is not valid.")
__all__ = ["Convention"]
================================================
FILE: python/rateslib/scheduling/dcfs.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import warnings
from datetime import datetime
from functools import partial
from typing import TYPE_CHECKING
from rateslib.enums.generics import NoInput, _drb
from rateslib.scheduling import Adjuster, Convention, Frequency, RollDay
from rateslib.scheduling.adjuster import _get_adjuster
from rateslib.scheduling.calendars import get_calendar
from rateslib.scheduling.convention import _get_convention
from rateslib.scheduling.frequency import _get_frequency_none
if TYPE_CHECKING:
from rateslib.local_types import Any, CalInput, Callable, bool_, datetime_, int_, str_
def dcf(
start: datetime,
end: datetime,
convention: Convention | str,
termination: datetime_ = NoInput(0), # required for 30E360ISDA and ActActICMA
frequency: Frequency | str_ = NoInput(0), # req. ActActICMA = ActActISMA = ActActBond
stub: bool_ = NoInput(0), # required for ActActICMA = ActActISMA = ActActBond
roll: RollDay | str | int_ = NoInput(0), # required also for ActACtICMA = ...
calendar: CalInput = NoInput(0), # required for ActACtICMA = ActActISMA = ActActBond
adjuster: Adjuster | str_ = NoInput(0),
) -> float:
"""
Calculate the day count fraction of a period.
Parameters
----------
start : datetime
The adjusted start date of the calculation period.
end : datetime
The adjusted end date of the calculation period.
convention : Convention, str
The day count convention of the calculation period accrual. See notes.
termination : datetime, optional
The adjusted termination date of the leg. Required only for some ``convention``.
frequency : Frequency, str, optional
The frequency of the period. Required only for some ``convention``.
stub : bool, optional
Indicates whether the period is a stub or not. Required only for some ``convention``.
roll : str, int, optional
Used only if ``frequency`` is given in string form. Required only for some ``convention``.
calendar: str, Calendar, optional
Used only of ``frequency`` is given in string form. Required only for some ``convention``.
adjuster: Adjuster, str, optional
The :class:`~rateslib.scheduling.Adjuster` used to convert unadjusted dates to
adjusted accrual dates on the period. Required only for some ``convention``.
Returns
--------
float
Notes
-----
See :class:`~rateslib.scheduling.Convention` for permissible values and for argument
related specifics.
Further information can be found in the
:download:`2006 ISDA definitions ` and
:download:`2006 ISDA 30360 example <_static/30360isda_2006_example.xls>`, and also in the lower
level Rust documentation at :rust:`rateslib-rs: Scheduling `.
Examples
--------
.. ipython:: python
:suppress:
from rateslib import dcf
.. ipython:: python
dcf(dt(2000, 1, 1), dt(2000, 4, 3), "Act360")
dcf(dt(2000, 1, 1), dt(2000, 4, 3), "Act365f")
dcf(dt(2000, 1, 1), dt(2000, 4, 3), "ActActICMA", dt(2010, 1, 1), "Q", False)
dcf(dt(2000, 1, 1), dt(2000, 4, 3), "ActActICMA", dt(2010, 1, 1), "Q", True)
""" # noqa: E501
convention_ = _get_convention(convention)
if isinstance(adjuster, NoInput):
adjuster = Adjuster.Actual()
frequency_: Frequency | None = _get_frequency_none(frequency, roll, calendar)
if isinstance(frequency_, Frequency.Zero) and convention_ in [
Convention.ActActICMA,
Convention.ActActICMAStubAct365F,
]:
warnings.warn(
"`frequency` cannot be 'Zero' variant in combination with 'ActActICMA' type"
"conventions. Internally this will be converted to 'Frequency.Months(12, ...)'",
UserWarning,
)
# delegate simple calculations to Python only for performance gains, otherwise use Rust.
if convention_ in PERFORMANCE:
try:
return PERFORMANCE[convention_](start, end, frequency=frequency_, stub=stub)
except NotImplementedError:
pass
return convention_.dcf(
start=start,
end=end,
termination=_drb(None, termination),
frequency=frequency_,
stub=_drb(None, stub),
calendar=get_calendar(calendar),
adjuster=_get_adjuster(_drb(Adjuster.Actual(), adjuster)),
)
def _dcf_numeric(start: datetime, end: datetime, denominator: float, **kwargs: Any) -> float:
"""Calculate the day count fraction of a period using the fixed denominator rule."""
return (end - start).days / denominator
def _dcf_actacticma_nonstub(
start: datetime, end: datetime, frequency: Frequency, stub: bool, **kwargs: Any
) -> float:
"""Calculate just the regular frequency part of the dcf for ActActICMA."""
if not stub:
return 1.0 / frequency.periods_per_annum()
else:
raise NotImplementedError("`stub` must be `False` for `ActActICMA` performance short cut.")
PERFORMANCE: dict[Convention, Callable[..., float]] = {
Convention.Act365F: partial(_dcf_numeric, denominator=365.0),
Convention.Act360: partial(_dcf_numeric, denominator=360.0),
Convention.ActActICMA: _dcf_actacticma_nonstub,
Convention.ActActICMAStubAct365F: _dcf_actacticma_nonstub,
}
# Licence: Creative Commons - Attribution-NonCommercial-NoDerivatives 4.0 International
# Commercial use of this code, and/or copying and redistribution is prohibited.
# Contact rateslib at gmail.com if this code is observed outside its intended sphere.
================================================
FILE: python/rateslib/scheduling/frequency.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from datetime import datetime
from typing import TYPE_CHECKING
import rateslib.errors as err
from rateslib.enums.generics import NoInput
from rateslib.rs import Adjuster, Frequency, Imm, RollDay
from rateslib.scheduling.adjuster import _convert_to_adjuster
from rateslib.scheduling.calendars import get_calendar
from rateslib.scheduling.rollday import _get_rollday
from rateslib.utils.calendars import _get_first_bus_day
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CalInput,
datetime_,
int_,
str_,
)
def _get_frequency(
frequency: str_ | Frequency, roll: str | RollDay | int_, calendar: CalInput
) -> Frequency:
"""
Get a :class:`~rateslib.scheduling.Frequency` object from legacy UI inputs.
Parameters
----------
frequency: str or Frequency
If string, is combined with the ``roll`` and ``calendar`` parameters to derive the
output.
roll: str, int or RollDay, optional
The roll-day to be associated with a *Frequency.Months* variant, if given.
calendar: calendar, str, optional
The calendar to be associated with a *Frequency.BusDay* variant, if given.
Returns
-------
Frequency
"""
if isinstance(frequency, Frequency):
if getattr(frequency, "roll", "no default") is None:
return Frequency.Months(frequency.number, _get_rollday(roll)) # type: ignore[attr-defined]
return frequency
if isinstance(frequency, NoInput):
raise ValueError(err.VE_NEEDS_FREQUENCY)
frequency_: str = frequency.upper()[-1]
if frequency_ == "D":
n_: int = int(frequency[:-1])
return Frequency.CalDays(n_)
elif frequency_ == "B":
n_ = int(frequency[:-1])
return Frequency.BusDays(n_, get_calendar(calendar))
elif frequency_ == "W":
n_ = int(frequency[:-1])
return Frequency.CalDays(n_ * 7)
elif frequency_ == "M":
# handles the dual case of 'xM' for x-months or 'M' or monthly, i.e. 1-month
if len(frequency) == 1:
return Frequency.Months(1, _get_rollday(roll))
else:
n_ = int(frequency[:-1])
return Frequency.Months(n_, _get_rollday(roll))
elif frequency_ == "Q":
return Frequency.Months(3, _get_rollday(roll))
elif frequency_ == "S":
return Frequency.Months(6, _get_rollday(roll))
elif frequency_ == "A":
return Frequency.Months(12, _get_rollday(roll))
elif frequency_ == "Y":
n_ = int(frequency[:-1])
return Frequency.Months(12 * n_, _get_rollday(roll))
elif frequency_ == "Z":
return Frequency.Zero()
else:
raise ValueError(f"Frequency can not be determined from `frequency` input: '{frequency}'.")
def _get_frequency_none(
frequency: str | Frequency | NoInput, roll: str | RollDay | int_, calendar: CalInput
) -> Frequency | None:
if isinstance(frequency, NoInput):
return None
else:
return _get_frequency(frequency, roll, calendar)
def _get_tenor_from_frequency(frequency: Frequency) -> str:
if isinstance(frequency, Frequency.Months):
return f"{frequency.number}M"
elif isinstance(frequency, Frequency.CalDays):
if frequency.number % 7 == 0:
return f"{int(frequency.number / 7)}W"
else:
return f"{frequency.number}D"
elif isinstance(frequency, Frequency.BusDays):
return f"{frequency.number}B"
elif isinstance(frequency, Frequency.Zero):
raise ValueError("Cannot determine regular tenor from Frequency.Zero")
raise ValueError("Cannot determine regular tenor from Frequency")
def add_tenor(
start: datetime,
tenor: str | Frequency,
modifier: str | Adjuster,
calendar: CalInput = NoInput(0),
roll: str | int_ | RollDay = NoInput(0),
settlement: bool = False,
mod_days: bool = False,
) -> datetime:
r"""
Add a tenor to a given date under specific modification rules and holiday calendar.
.. warning::
Note this function does not validate the ``roll`` input, but expects it to be correct.
That is this function acts on ``start`` as an *unchecked* date. See notes.
Parameters
----------
start : datetime
The date to which to add the tenor.
tenor : str | Frequency
The tenor to add, identified by calendar days, `"D"`, months, `"M"`,
years, `"Y"` or business days, `"B"`, for example `"10Y"` or `"5B"`.
modifier : str, optional in {"NONE", "MF", "F", "MP", "P"} | Adjuster
The modification rule to apply if the tenor is calendar days, months or years.
calendar : CustomBusinessDay or str, optional
The calendar for use with business day adjustment and modification.
roll : str, int, RollDay, optional
This is only required if the tenor is given in months or years. Ensures the tenor period
associates with a schedule's roll day.
settlement : bool, optional
If ``modifier`` is string this determines whether to enforce the settlement
with an associated settlement calendar, if provided.
mod_days : bool, optional
If ``modifier`` is string and ``tenor`` is a day variant setting this to *False*
will convert "MF" to "F" and "MP" to "P".
Returns
-------
datetime
Notes
------
.. ipython:: python
:suppress:
from rateslib import add_tenor, get_calendar
from datetime import datetime as dt
import pandas as pd
from pandas import date_range, Series, DataFrame
pd.set_option("display.float_format", lambda x: '%.2f' % x)
pd.set_option("display.max_columns", None)
pd.set_option("display.width", 500)
This method is a convenience function for coordinating a :class:`~rateslib.scheduling.Frequency`
date manipulation and an :class:`~rateslib.scheduling.Adjuster`, from simple UI inputs.
For example the following:
.. ipython:: python
add_tenor(dt(2023, 9, 29), "-6m", "MF", NamedCal("bus"), 31)
is internally mapped to the following, where :meth:`~rateslib.scheduling.Frequency.next`
performs an *unchecked* date period determination:
.. ipython:: python
unadjusted_date = Frequency.Months(-6, RollDay.Day(31)).next(dt(2023, 9, 29))
Adjuster.ModifiedFollowing().adjust(unadjusted_date, NamedCal("bus"))
The allowed string inputs *{'B', 'D', 'W', 'M', 'Y'}* for **b**\ usiness days, calendar
**d**\ ays, **w**\ eeks, **m**\ onths and **y**\ ears are mapped to an appropriate
:class:`~rateslib.scheduling.Frequency` variant (potentially also mapping a
:class:`~rateslib.scheduling.RollDay`), and an appropriate
:class:`~rateslib.scheduling.Adjuster` is derived from the combination of the ``modifier``,
``settlement`` and ``mod_days`` arguments.
Read more about the ``settlement`` argument in the :ref:`calendar user guide `.
The ``mod_days`` argument is provided to avoid having to reconfigure *Instrument*
specifications when a *termination* may differ between months or years, and days or weeks.
Most *Instruments* will be defined by the typical modified following (*"MF"*) ``modifier``,
but this would prefer not to always apply.
.. ipython:: python
add_tenor(dt(2021, 1, 29), "1M", "MF", "bus")
while, the following will by default roll into a new month,
.. ipython:: python
add_tenor(dt(2021, 1, 22), "8d", "MF", "bus")
unless day type frequencies are also specifically modified,
.. ipython:: python
add_tenor(dt(2021, 1, 22), "8d", "MF", "bus", mod_days=True)
Examples
--------
.. ipython:: python
add_tenor(dt(2022, 2, 28), "3M", "NONE")
add_tenor(dt(2022, 12, 28), "4b", "F", get_calendar("ldn"))
add_tenor(dt(2022, 12, 28), "4d", "F", get_calendar("ldn"))
""" # noqa: E501
cal_ = get_calendar(calendar)
if isinstance(tenor, Frequency):
frequency: Frequency = tenor
else:
tenor = tenor.upper()
if "D" in tenor:
frequency = Frequency.CalDays(int(tenor[:-1]))
elif "W" in tenor:
frequency = Frequency.CalDays(int(tenor[:-1]) * 7)
elif "B" in tenor:
frequency = Frequency.BusDays(int(tenor[:-1]), cal_)
elif "Y" in tenor:
roll_ = _get_rollday(roll)
roll__ = RollDay.Day(start.day) if roll_ is None else roll_
frequency = Frequency.Months(int(float(tenor[:-1]) * 12), roll__)
elif "M" in tenor:
roll_ = _get_rollday(roll)
roll__ = RollDay.Day(start.day) if roll_ is None else roll_
frequency = Frequency.Months(int(float(tenor[:-1])), roll__)
else:
raise ValueError(
"`tenor` must identify frequency in {'B', 'D', 'W', 'M', 'Y'} e.g. '1Y'"
)
if isinstance(frequency, Frequency.Months | Frequency.Zero):
mod_days = True
next_date = frequency.next(start)
adjuster = _convert_to_adjuster(modifier, settlement, mod_days)
return adjuster.adjust(next_date, cal_)
def _get_fx_expiry_and_delivery_and_payment(
eval_date: datetime_,
expiry: str | datetime,
delivery_lag: Adjuster | int | datetime,
calendar: CalInput,
modifier: str,
eom: bool,
payment_lag: int | datetime,
) -> tuple[datetime, datetime, datetime]:
"""
Determines the expiry and delivery date of an FX option using the following rules:
See Foreign Exchange Option Pricing by Iain Clark
Parameters
----------
eval_date: datetime
The evaluation date, which is today (if required)
expiry: str, datetime
The expiry date
delivery_lag: Adjuster, int, datetime
Number of days, e.g. spot = 2, or a specified datetime for FX settlement after expiry.
calendar: CalInput
The calendar used for date rolling. This function makes use of the `settlement` option
within calendars.
modifier: str
Date rule, expected to be "MF" for most FX rate tenors.
eom: bool
Whether end-of-month is preserved in tenor date determination.
payment_lag: Adjuster, int, datetime
Number of business days to lag payment by after expiry.
Returns
-------
tuple of datetime
"""
calendar_ = get_calendar(calendar)
del calendar
if isinstance(delivery_lag, int):
delivery_lag_: datetime | Adjuster = Adjuster.BusDaysLagSettle(delivery_lag)
else:
delivery_lag_ = delivery_lag
del delivery_lag
if isinstance(payment_lag, int):
payment_lag_: datetime | Adjuster = Adjuster.BusDaysLagSettle(payment_lag)
else:
payment_lag_ = payment_lag
del payment_lag
if isinstance(expiry, str):
# then use the objects to derive the expiry
if isinstance(eval_date, NoInput):
raise ValueError("`expiry` as string tenor requires `eval_date`.")
# then the expiry will be implied
e = expiry.upper()
if "M" in e or "Y" in e:
# method
if isinstance(delivery_lag_, datetime):
raise ValueError(
"Cannot determine FXOption expiry and delivery with given parameters.\n"
"Supply a `delivery_lag` as integer business days and not a datetime, when "
"using a string tenor `expiry`.",
)
else:
spot = delivery_lag_.adjust(eval_date, calendar_)
roll = "eom" if (eom and Imm.Eom.validate(spot)) else spot.day
delivery_: datetime = add_tenor(spot, expiry, modifier, calendar_, roll, True)
expiry_ = _get_first_bus_day(delivery_lag_.reverse(delivery_, calendar_), calendar_)
# else:
# spot = calendar_.lag_bus_days(eval_date, delivery_lag, True)
# roll = "eom" if (eom and Imm.Eom.validate(spot)) else spot.day
# delivery_: datetime = add_tenor(spot, expiry, modifier, calendar_, roll, True)
# expiry_ = calendar_.add_bus_days(delivery_, -delivery_lag, False)
else:
expiry_ = add_tenor(eval_date, expiry, "F", calendar_, NoInput(0), False)
else:
expiry_ = expiry
if isinstance(delivery_lag_, datetime):
delivery_ = delivery_lag_
else:
delivery_ = delivery_lag_.adjust(expiry_, calendar_)
# delivery_ = calendar_.lag_bus_days(expiry_, delivery_lag, True)
if isinstance(payment_lag_, datetime):
payment_ = payment_lag_
else:
payment_ = payment_lag_.adjust(expiry_, calendar_)
# payment_ = calendar_.lag_bus_days(expiry_, payment_lag, True)
return expiry_, delivery_, payment_
================================================
FILE: python/rateslib/scheduling/imm.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import warnings
from datetime import datetime
from typing import TYPE_CHECKING
from rateslib.enums.generics import NoInput
from rateslib.rs import Imm
if TYPE_CHECKING:
from rateslib.local_types import int_, str_
_Imm: dict[str, Imm] = {
"imm": Imm.Wed3_HMUZ,
"serial_imm": Imm.Wed3,
"credit_imm": Imm.Day20_HMUZ,
"credit_imm_hu": Imm.Day20_HU,
"credit_imm_mz": Imm.Day20_MZ,
"wed3_hmuz": Imm.Wed3_HMUZ,
"wed3": Imm.Wed3,
"day20_hmuz": Imm.Day20_HMUZ,
"day20": Imm.Day20,
"day20_mz": Imm.Day20_MZ,
"day20_hu": Imm.Day20_HU,
"fri2_hmuz": Imm.Fri2_HMUZ,
"fri2": Imm.Fri2,
"wed1_post9": Imm.Wed1_Post9,
"wed1_post9_hmuz": Imm.Wed1_Post9_HMUZ,
"eom": Imm.Eom,
"leap": Imm.Leap,
"som": Imm.Som,
}
def next_imm(start: datetime, definition: str | Imm = Imm.Wed3_HMUZ) -> datetime:
"""Return the next IMM date *after* the given start date.
Parameters
----------
start : datetime
The date from which to determine the next IMM.
definition : Imm, str
The IMM definition to return the date for. This is entered as either an
:class:`~rateslib.scheduling.Imm` enum, or that enum variant name as sting, e.g. *"Wed3"*.
Returns
-------
datetime
Examples
--------
.. ipython:: python
:suppress:
from rateslib import next_imm, Imm, dt
Get the next quarterly SOFR or ESTR futures date, defined by CME, EUREX, or ICE:
.. ipython:: python
next_imm(dt(2000, 1, 1), Imm.Wed3_HMUZ)
Get the next serial futures contract for a NZD bank bill defined by ASX:
.. ipython:: python
next_imm(dt(2000, 1, 1), "Wed1_Post9")
"""
if isinstance(definition, str):
d_ = definition.lower()
if d_ in ["imm", "serial_imm", "credit_imm", "credit_imm_hu", "credit_imm_mz"]:
warnings.warn(
f"The given string entry '{d_}' is deprecated and will be removed in "
f"future releases. Please change the equivalent version in {{'Wed3', 'Wed3_HMUZ', "
f"'Day20', 'Day20_HMUZ', 'Day20_HU', 'Day20_MZ'}}",
DeprecationWarning,
)
imm_: Imm = _Imm[d_]
else:
imm_ = definition
return imm_.next(start)
MONTHS = {
"F": 1,
"G": 2,
"H": 3,
"J": 4,
"K": 5,
"M": 6,
"N": 7,
"Q": 8,
"U": 9,
"V": 10,
"X": 11,
"Z": 12,
}
def get_imm(
month: int_ = NoInput(0),
year: int_ = NoInput(0),
code: str_ = NoInput(0),
definition: str | Imm = Imm.Wed3,
) -> datetime:
"""
Return an IMM date for a specified month.
Parameters
----------
month: int
The month of the year in which the IMM date falls.
year: int
The year in which the IMM date falls.
code: str
Identifier in the form of a one digit month code and 21st century year, e.g. "U29".
If code is given ``month`` and ``year`` are unused.
definition: Imm, str
The IMM definition to return the date for. This is entered as either an
:class:`~rateslib.scheduling.Imm` enum, or that enum variant name as sting, e.g. *"Wed3"*.
Returns
-------
datetime
Examples
--------
.. ipython:: python
:suppress:
from rateslib import get_imm, Imm, dt
Get the quarterly SOFR or ESTR futures date, defined by CME, EUREX, or ICE:
.. ipython:: python
get_imm(3, 2022, definition=Imm.Wed3_HMUZ)
get_imm(code="H22", definition="Wed3")
Get a serial futures contract for a NZD bank bill defined by ASX:
.. ipython:: python
get_imm(1, 2023, definition="Wed1_Post9")
"""
if isinstance(code, str):
year = int(code[1:]) + 2000
month = MONTHS[code[0].upper()]
elif isinstance(month, NoInput) or isinstance(year, NoInput):
raise ValueError("`month` and `year` must each be valid integers if `code`not given.")
if isinstance(definition, str):
d_ = definition.lower()
if d_ in ["imm", "serial_imm", "credit_imm", "credit_imm_hu", "credit_imm_mz"]:
warnings.warn(
f"The given string entry '{d_}' is deprecated and will be removed in "
f"future releases. Please change the equivalent version in {{'Wed3', 'Wed3_HMUZ', "
f"'Day20', 'Day20_HMUZ', 'Day20_HU', 'Day20_MZ'}}",
DeprecationWarning,
)
imm_: Imm = _Imm[d_]
else:
imm_ = definition
return imm_.get(year, month)
================================================
FILE: python/rateslib/scheduling/rollday.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from datetime import datetime
from typing import TYPE_CHECKING
from rateslib.rs import Adjuster, Imm, RollDay
if TYPE_CHECKING:
from rateslib.local_types import CalTypes, int_
def _get_rollday(roll: RollDay | str | int_) -> RollDay | None:
"""Convert a user str or int into a RollDay enum object."""
if isinstance(roll, RollDay):
return roll
elif isinstance(roll, str):
return {
"EOM": RollDay.Day(31),
"SOM": RollDay.Day(1),
"IMM": RollDay.IMM(),
}[roll.upper()]
elif isinstance(roll, int):
return RollDay.Day(roll)
return None
def _is_eom_cal(date: datetime, cal: CalTypes) -> bool:
"""Test whether a given date is end of month under a specific calendar"""
eom_unadjusted = Imm.Eom.get(date.year, date.month)
eom = Adjuster.Previous().adjust(eom_unadjusted, cal)
return date == eom
# end_day = calendar_mod.monthrange(date.year, date.month)[1]
# eom = datetime(date.year, date.month, end_day)
# adj_eom = _adjust_date(eom, "P", cal)
# return date == adj_eom
================================================
FILE: python/rateslib/scheduling/schedule.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from datetime import datetime
from functools import cached_property
from typing import TYPE_CHECKING
from pandas import DataFrame
from rateslib import defaults
from rateslib.default import _make_py_json
from rateslib.enums.generics import NoInput, _drb
from rateslib.rs import Adjuster, Frequency, RollDay, StubInference
from rateslib.rs import Schedule as Schedule_rs
from rateslib.scheduling.adjuster import _convert_to_adjuster, _get_adjuster, _get_adjuster_none
from rateslib.scheduling.calendars import _is_day_type_tenor, get_calendar
from rateslib.scheduling.frequency import _get_frequency, add_tenor
from rateslib.scheduling.rollday import _is_eom_cal
if TYPE_CHECKING:
from rateslib.local_types import (
Adjuster_,
Any,
CalInput,
CalTypes,
bool_,
datetime_,
int_,
str_,
)
def _get_stub_inference(
stub: str | StubInference, front_stub: datetime_, back_stub: datetime_
) -> StubInference:
"""
Perform two tasks:
- Convert `stub` as string to a `StubInference` enum.
- Convert a StubInference to NeitherSide if a specific stud date has been provided that
cannot be inferred.
Parameters
----------
stub: str
The intention of the schedule for inferred stubs
front_stub: datetime, optional
If given StubInference will never contain any front elements.
back_stub: datetime, optional
If given StubInference will never contain any back elements.
Returns
-------
StubInference
"""
if isinstance(stub, StubInference):
if stub is StubInference.NeitherSide:
stub_: str = "NEITHER_SIDE"
elif stub is StubInference.ShortFront:
stub_ = "SHORT_FRONT"
elif stub is StubInference.LongFront:
stub_ = "LONG_FRONT"
elif stub is StubInference.ShortBack:
stub_ = "SHORT_BACK"
else: # StubInference.LongBack:
stub_ = "LONG_BACK"
elif stub is None:
stub_ = "NONE"
else:
stub_ = stub.upper()
del stub
_map: dict[str, StubInference] = {
"SHORTFRONT": StubInference.ShortFront,
"LONGFRONT": StubInference.LongFront,
"SHORTBACK": StubInference.ShortBack,
"LONGBACK": StubInference.LongBack,
"NONE": StubInference.NeitherSide,
"NEITHERSIDE": StubInference.NeitherSide,
"SHORT_FRONT": StubInference.ShortFront,
"LONG_FRONT": StubInference.LongFront,
"SHORT_BACK": StubInference.ShortBack,
"LONG_BACK": StubInference.LongBack,
"NEITHER_SIDE": StubInference.NeitherSide,
}
possibles: dict[str, StubInference] = {v: _map[v] for v in _map if v in stub_}
if not isinstance(front_stub, NoInput):
# cannot infer front stubs, since it is explicitly provided
possibles.pop("SHORTFRONT", None)
possibles.pop("SHORT_FRONT", None)
possibles.pop("LONGFRONT", None)
possibles.pop("LONG_FRONT", None)
if not isinstance(back_stub, NoInput):
# cannot infer back stubs, since it is explicitly provided
possibles.pop("SHORTBACK", None)
possibles.pop("SHORT_BACK", None)
possibles.pop("LONGBACK", None)
possibles.pop("LONG_BACK", None)
if len(possibles) == 0:
return StubInference.NeitherSide # the stub inference is negated by a provided value
elif len(possibles) > 1:
raise ValueError(
"Must supply at least one stub date for dual sided inference.\n"
f"You have likely supplied to many sides to be inferred for `stub`. Got '{stub_}'."
)
else:
return list(possibles.values())[0]
def _get_adjuster_from_modifier(modifier: Adjuster | str_, mod_days: bool) -> Adjuster:
if isinstance(modifier, Adjuster):
return modifier # use the adjuster as provided
modifier_: str = _drb(defaults.modifier, modifier).upper()
return _convert_to_adjuster(modifier_, settlement=False, mod_days=mod_days)
def _should_mod_days(tenor: datetime | str) -> bool:
"""Return whether a specified tenor should be subject to a `modifier`'s modification rule."""
if isinstance(tenor, str):
return not _is_day_type_tenor(tenor)
else:
# cannot infer any data to issue an overwrite
return True
def _get_adjuster_from_lag_drb(lag: Adjuster | int_, default: str) -> Adjuster:
if isinstance(lag, Adjuster):
return lag
else:
lag_: int = _drb(getattr(defaults, default), lag)
return _get_adjuster(f"{lag_}B")
class Schedule:
"""
Generate a schedule of dates according to a regular pattern and calendar inference.
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import Schedule, RollDay, Frequency, StubInference, Adjuster, NamedCal, dt
.. tabs::
.. tab:: Original Inputs
The **original inputs** allow for a more UI friendly input for the most common schedules.
.. ipython:: python
s = Schedule(
effective=dt(2024, 1, 3),
termination=dt(2024, 11, 29),
frequency="Q",
stub="ShortFront",
modifier="MF",
payment_lag=2,
calendar="tgt",
eom=True,
)
print(s)
.. tab:: Core Inputs
The **core inputs** utilise the Rust objects directly and may provide more flexibility.
.. ipython:: python
s = Schedule(
effective=dt(2024, 1, 3),
termination=dt(2024, 11, 29),
frequency=Frequency.Months(3, None),
stub=StubInference.ShortFront,
modifier=Adjuster.ModifiedFollowing(),
payment_lag=Adjuster.BusDaysLagSettle(2),
calendar=NamedCal("tgt"),
eom=True,
)
print(s)
.. role:: red
.. role:: green
Parameters
----------
effective : datetime, str, :red:`required`
The unadjusted effective date. If given as adjusted, unadjusted alternatives may be
inferred. If given as string tenor will be calculated from ``eval_date`` and ``eval_mode``.
termination : datetime, str, :red:`required`
The unadjusted termination date. If given as adjusted, unadjusted alternatives may be
inferred. If given as string tenor will be calculated from ``effective``.
frequency : Frequency, str in {"M", "Q", "S", "A", "Z", "_D", "_B", "_W", "_M", "_Y"}, :red:`required`
The frequency of the schedule.
If given as string will derive a :class:`~rateslib.scheduling.Frequency` aligning with:
monthly ("M"), quarterly ("Q"), semi-annually ("S"), annually("A") or zero-coupon ("Z"), or
a set number of calendar or business days ("_D", "_B"), weeks ("_W"), months ("_M") or
years ("_Y").
Where required, the :class:`~rateslib.scheduling.RollDay` is derived as per ``roll``
and business day calendar as per ``calendar``.
stub : StubInference, str in {"ShortFront", "LongFront", "ShortBack", "LongBack"}, :green:`optional (set by defaults)`
The stub type used if stub inference is required. If given as string will derive a
:class:`~rateslib.scheduling.StubInference`.
front_stub : datetime, :green:`optional`
The unadjusted date for the start stub period. If given as adjusted, unadjusted
alternatives may be inferred.
back_stub : datetime, :green:`optional`
The unadjusted date for the back stub period. If given as adjusted, unadjusted
alternatives may be inferred.
See notes for combining ``stub``, ``front_stub`` and ``back_stub``
and any automatic stub inference.
roll : RollDay, int in [1, 31], str in {"eom", "imm", "som"}, :green:`optional`
The roll day of the schedule. If not given or not available in ``frequency`` will be
inferred for monthly frequency variants.
eom : bool, :green:`optional (set by defaults)`
Use an end of month preference rather than regular rolls for ``roll`` inference. Set by
default. Not required if ``roll`` is defined.
modifier : Adjuster, str in {"NONE", "F", "MF", "P", "MP"}, :green:`optional (set by defaults)`
The :class:`~rateslib.scheduling.Adjuster` used for adjusting unadjusted schedule dates
into adjusted dates. If given as string must define simple date rolling rules.
calendar : calendar, str, :green:`optional (set as 'all')`
The business day calendar object to use. If string will call
:meth:`~rateslib.scheduling.get_calendar`.
payment_lag: Adjuster, int, :green:`optional (set by defaults)`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
a payment date. If given as integer will define the number of business days to
lag payments by.
payment_lag_exchange: Adjuster, int, :green:`optional (set by defaults)`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional payment date. If given as integer will define the number of business days to
lag payments by.
extra_lag: Adjuster, int, :green:`optional`
The :class:`~rateslib.scheduling.Adjuster` to use to map adjusted schedule dates into
additional dates, which may be used, for example by fixings schedules. If given as integer
will define the number of business days to lag dates by.
eval_date: datetime, :green:`optional`
Only required if ``effective`` is given as a string tenor, to provide a point of reference.
eval_mode: str in {"swaps_align", "swaptions_align"}, :green:`optional (set by defaults)`
The method for determining the ``effective`` and ``termination`` dates if both are provided
as string tenors. See notes.
Notes
-----
Detailed information is provided within :ref:`the scheduling user guide `.
""" # noqa: E501
_obj: Schedule_rs
@property
def obj(self) -> Schedule_rs:
"""A wrapped instance of the Rust implemented :rust:`Schedule `."""
return self._obj
def __init__(
self,
effective: datetime | str,
termination: datetime | str,
frequency: str | Frequency,
*,
stub: StubInference | str_ = NoInput(0),
front_stub: datetime_ = NoInput(0),
back_stub: datetime_ = NoInput(0),
roll: str | RollDay | int_ = NoInput(0),
eom: bool_ = NoInput(0),
modifier: Adjuster | str_ = NoInput(0),
calendar: CalInput = NoInput(0),
payment_lag: Adjuster | int_ = NoInput(0),
payment_lag_exchange: Adjuster | int_ = NoInput(0),
extra_lag: Adjuster | int | str_ = NoInput(0),
eval_date: datetime_ = NoInput(0),
eval_mode: str_ = NoInput(0),
) -> None:
eom_: bool = _drb(defaults.eom, eom)
stub_: str | StubInference = _drb(defaults.stub, stub)
eval_mode_: str = _drb(defaults.eval_mode, eval_mode).lower()
calendar_: CalTypes = get_calendar(calendar)
frequency_: Frequency = _get_frequency(frequency, roll, calendar_)
accrual_adjuster = _get_adjuster_from_modifier(modifier, _should_mod_days(termination))
payment_adjuster = _get_adjuster_from_lag_drb(payment_lag, "payment_lag")
payment_adjuster2 = _get_adjuster_from_lag_drb(payment_lag_exchange, "payment_lag_exchange")
payment_adjuster3 = _get_adjuster_none(extra_lag)
effective_: datetime = _validate_effective(
effective,
eval_mode_,
eval_date,
accrual_adjuster,
calendar_,
roll,
)
termination_: datetime = _validate_termination(
termination,
effective_,
accrual_adjuster,
calendar_,
roll,
eom_,
)
stub_inference_ = _get_stub_inference(stub_, front_stub, back_stub)
try:
self._obj = Schedule_rs(
effective=effective_,
termination=termination_,
frequency=frequency_,
calendar=calendar_,
accrual_adjuster=accrual_adjuster,
payment_adjuster=payment_adjuster,
payment_adjuster2=payment_adjuster2,
payment_adjuster3=payment_adjuster3,
front_stub=_drb(None, front_stub),
back_stub=_drb(None, back_stub),
eom=eom_,
stub_inference=stub_inference_,
)
except ValueError:
raise ValueError(
"A Schedule could not be generated from the parameter combinations:\n"
f"effective: {effective}\n"
f"front stub: {front_stub}\n"
f"back stub: {back_stub}\n"
f"termination: {termination}\n"
f"frequency: {frequency_}\n"
f"stub inference: {stub_inference_}\n"
f"accrual adjuster: {accrual_adjuster}\n"
f"calendar: {calendar_}\n"
)
@classmethod
def __init_from_obj__(cls, obj: Schedule_rs) -> Schedule:
"""Construct the class instance from a given rust object which is wrapped."""
# create a default instance and overwrite it
new = cls(datetime(2000, 1, 1), datetime(2000, 2, 1), "M")
new._obj = obj
return new
def __getnewargs__(
self,
) -> tuple[
datetime,
datetime,
Frequency,
StubInference,
datetime_,
datetime_,
NoInput,
NoInput,
Adjuster,
CalInput,
Adjuster,
Adjuster_,
Adjuster_,
NoInput,
NoInput,
]:
return (
self.ueffective,
self.utermination,
self.frequency_obj,
StubInference.NeitherSide,
NoInput(0) if self.ufront_stub is None else self.ufront_stub,
NoInput(0) if self.uback_stub is None else self.uback_stub,
NoInput(0),
NoInput(0),
self.accrual_adjuster,
self.calendar,
self.payment_adjuster,
NoInput(0) if self.payment_adjuster2 is None else self.payment_adjuster2,
NoInput(0) if self.payment_adjuster3 is None else self.payment_adjuster3,
NoInput(0),
NoInput(0),
)
def __eq__(self, other: Any) -> bool:
if isinstance(other, self.__class__):
return self._obj == other._obj
else:
return False
@cached_property
def uschedule(self) -> list[datetime]:
"""A list of the *unadjusted* schedule dates."""
return self.obj.uschedule
@cached_property
def aschedule(self) -> list[datetime]:
"""
A list of the *adjusted accrual* dates.
These are determined by applying the ``accrual_adjuster`` to ``uschedule``.
"""
return self.obj.aschedule
@cached_property
def pschedule(self) -> list[datetime]:
"""
A list of the cashflow *payment* dates.
These are determined by applying the ``payment_adjuster`` to ``aschedule``.
"""
return self.obj.pschedule
@cached_property
def pschedule2(self) -> list[datetime]:
"""
A list of accrual adjusted dates.
These are determined by applying the ``payment_adjuster2`` to ``aschedule``.
"""
return self.obj.pschedule2
@cached_property
def pschedule3(self) -> list[datetime]:
"""
A list of accrual adjusted dates.
These are determined by applying the ``payment_adjuster3`` to ``aschedule``.
"""
return self.obj.pschedule3
@cached_property
def frequency(self) -> str:
"""Original string representation of the :class:`~rateslib.scheduling.Frequency`."""
return self.obj.frequency.string()
@cached_property
def periods_per_annum(self) -> float:
"""
Average number of coupons per annum. See
:meth:`~rateslib.scheduling.Frequency.periods_per_annum`.
"""
return self.obj.frequency.periods_per_annum()
@cached_property
def frequency_obj(self) -> Frequency:
"""The :class:`~rateslib.scheduling.Frequency` object determining the periods."""
return self.obj.frequency
@property
def modifier(self) -> Adjuster:
"""Alias for the ``accrual_adjuster``."""
return self.accrual_adjuster
@cached_property
def calendar(self) -> CalTypes:
"""
The calendar used for date adjustment by the ``accrual_adjuster`` and
``payment_adjuster``.
"""
return self.obj.calendar
@cached_property
def accrual_adjuster(self) -> Adjuster:
"""The :class:`~rateslib.scheduling.Adjuster` object used for accrual date adjustment."""
return self.obj.accrual_adjuster
@cached_property
def payment_adjuster(self) -> Adjuster:
"""The :class:`~rateslib.scheduling.Adjuster` object used for payment date adjustment."""
return self.obj.payment_adjuster
@cached_property
def payment_adjuster2(self) -> Adjuster:
"""The :class:`~rateslib.scheduling.Adjuster` object used for additional date adjustment."""
return self.obj.payment_adjuster2
@cached_property
def payment_adjuster3(self) -> Adjuster | None:
"""The :class:`~rateslib.scheduling.Adjuster` object used for additional date adjustment."""
return self.obj.payment_adjuster3
@cached_property
def termination(self) -> datetime:
"""The *adjusted* termination date of the schedule."""
return self.obj.aschedule[-1]
@cached_property
def effective(self) -> datetime:
"""The *adjusted* effective date of the schedule."""
return self.obj.aschedule[0]
@cached_property
def utermination(self) -> datetime:
"""The *unadjusted* termination date of the schedule."""
return self.obj.uschedule[-1]
@cached_property
def ueffective(self) -> datetime:
"""The *unadjusted* effective date of the schedule."""
return self.obj.uschedule[0]
@cached_property
def ufront_stub(self) -> datetime | None:
"""The *unadjusted* front stub date of the schedule."""
return self.obj.ufront_stub
@cached_property
def uback_stub(self) -> datetime | None:
"""The *unadjusted* back stub date of the schedule."""
return self.obj.uback_stub
@cached_property
def roll(self) -> str | int | NoInput:
"""
The :class:`~rateslib.scheduling.RollDay` object associated
with :class:`~rateslib.scheduling.Frequency`, if available.
"""
if isinstance(self.obj.frequency, Frequency.Months):
# Frequency.Months on a valid Schedule will always have Some(RollDay).
if isinstance(self.obj.frequency.roll, RollDay.Day):
return self.obj.frequency.roll._0
else:
return self.obj.frequency.roll.__str__()
else:
return NoInput(0)
@cached_property
def table(self) -> DataFrame:
"""
A `DataFrame` of schedule dates and classification.
"""
df = DataFrame(
{
defaults.headers["stub_type"]: [
"Stub" if stub else "Regular" for stub in self._stubs
],
defaults.headers["u_acc_start"]: self.uschedule[:-1],
defaults.headers["u_acc_end"]: self.uschedule[1:],
defaults.headers["a_acc_start"]: self.aschedule[:-1],
defaults.headers["a_acc_end"]: self.aschedule[1:],
defaults.headers["payment"]: self.pschedule[1:],
},
)
return df
@cached_property
def _stubs(self) -> list[bool]:
"""A list of boolean flags indication whether periods are stubs (True) or regular (False)"""
front_stub = self.obj.frequency.is_stub(self.uschedule[0], self.uschedule[1], True)
back_stub = self.obj.frequency.is_stub(self.uschedule[-2], self.uschedule[-1], False)
if len(self.uschedule) == 2: # single period
return [front_stub or back_stub]
else:
return [front_stub] + [False] * (len(self.uschedule) - 3) + [back_stub]
@cached_property
def n_periods(self) -> int:
"""The number of periods contained in the schedule."""
return len(self.obj.uschedule) - 1
def __repr__(self) -> str:
return f""
def __str__(self) -> str:
f: str = self.frequency_obj.__str__()
a: str = self.accrual_adjuster.__str__()
p: str = self.payment_adjuster.__str__()
str_: str = f"freq: {f}, accrual adjuster: {a}, payment adjuster: {p},\n"
ret: str = str_ + self.table.__repr__()
return ret
def is_regular(self) -> bool:
"""Returns whether the schedule is composed only of regular periods (no stubs)."""
return self.obj.is_regular()
def to_json(self) -> str:
"""Return a JSON representation of the object.
Returns
-------
str
"""
return _make_py_json(self._obj.to_json(), "Schedule")
def _validate_effective(
effective: datetime | str,
eval_mode: str,
eval_date: datetime | NoInput,
modifier: str | Adjuster,
calendar: CalTypes,
roll: int | str | RollDay | NoInput,
) -> datetime:
"""
Determine the effective date of a schedule if it is given in string form from
other parameters such as the eval date and the eval mode.
"""
if isinstance(effective, str):
if isinstance(eval_date, NoInput):
raise ValueError(
"For `effective` given as string tenor, must also supply a base `eval_date`.",
)
if eval_mode == "swaps_align":
# effective date is calculated as unadjusted
return add_tenor(
eval_date,
effective,
"NONE",
NoInput(0),
roll,
)
else: # eval_mode == "swaptions_align":
return add_tenor(
eval_date,
effective,
modifier,
calendar,
roll,
)
else:
return effective
def _validate_termination(
termination: datetime | str,
effective: datetime,
modifier: str | Adjuster,
calendar: CalTypes,
roll: int | str | NoInput | RollDay,
eom: bool,
) -> datetime:
"""
Determine the termination date of a schedule if it is given in string form from
"""
if isinstance(termination, str):
if _is_day_type_tenor(termination):
termination_: datetime = add_tenor(
start=effective,
tenor=termination,
modifier=modifier,
calendar=calendar,
roll=NoInput(0),
settlement=False,
mod_days=False,
)
else:
# if termination is string the end date is calculated as unadjusted, which will
# be used later according to roll inference rules, for monthly and yearly tenors.
if eom and isinstance(roll, NoInput) and _is_eom_cal(effective, calendar):
roll_: str | int | NoInput | RollDay = 31
else:
roll_ = roll
termination_ = add_tenor(
effective,
termination,
"NONE",
calendar, # calendar is unused for NONE type modifier
roll_,
)
else:
termination_ = termination
if termination_ <= effective:
raise ValueError("Schedule `termination` must be after `effective`.")
return termination_
================================================
FILE: python/rateslib/serialization/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from rateslib.serialization.json import from_json
__all__ = ["from_json"]
================================================
FILE: python/rateslib/serialization/json.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from json import dumps, loads
# globals namespace
from typing import TYPE_CHECKING, Any
from rateslib.curves import Curve, LineCurve
from rateslib.curves.rs import CurveRs
from rateslib.curves.utils import _CurveInterpolator, _CurveMeta, _CurveNodes, _CurveSpline
from rateslib.dual import Variable
from rateslib.enums.generics import NoInput
from rateslib.fx import FXRates
from rateslib.rs import from_json as from_json_rs
from rateslib.scheduling import Schedule
if TYPE_CHECKING:
pass # pragma: no cover
NAMES_RsPy: dict[str, Any] = { # this is a mapping of native Rust obj names to Py obj names
"FXRates": FXRates,
"Curve": CurveRs,
"Schedule": Schedule,
}
NAMES_Py: dict[str, Any] = { # a mapping of native Python classes with a _from_json() method
"_CurveMeta": _CurveMeta,
"_CurveSpline": _CurveSpline,
"_CurveInterpolator": _CurveInterpolator,
"_CurveNodes": _CurveNodes,
"Curve": Curve,
"LineCurve": LineCurve,
"Variable": Variable,
}
ENUMS_Py: dict[str, Any] = {
"NoInput": NoInput,
}
def _pynative_from_json(name: str, json: dict[str, Any] | str) -> Any:
if name in NAMES_Py:
return NAMES_Py[name]._from_json(json)
else:
# is an Enum
return ENUMS_Py[name](json)
def from_json(json: str) -> Any:
"""
Create an object from JSON string.
Parameters
----------
json: str
JSON string in appropriate format to construct the class.
Returns
-------
Object
"""
obj = loads(json)
if isinstance(obj, dict):
if "PyWrapped" in obj:
# then object is a Rust struct wrapped by a Python class.
# determine the Python class name and reconstruct the Python class from the Rust struct.
class_name = next(iter(obj["PyWrapped"].keys()))
restructured_json = dumps(obj["PyWrapped"])
# objs = globals()
class_obj = NAMES_RsPy[class_name]
return class_obj.__init_from_obj__(obj=from_json_rs(restructured_json))
elif "PyNative" in obj:
# PyNative are objects that are constructed only in Python but do not serialize directly
# and so are tagged with a serialization flag.
class_name = next(iter(obj["PyNative"].keys()))
return _pynative_from_json(name=class_name, json=obj["PyNative"][class_name])
else:
# the dict may have been a native Rust object, try loading directly
# this will raise if all combination exhausted
return from_json_rs(json)
else:
# object is a native Python element
return obj
================================================
FILE: python/rateslib/serialization/utils.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from enum import Enum
from json import dumps
from typing import TYPE_CHECKING
from rateslib.dual import Dual, Dual2
from rateslib.dual.utils import _to_number
from rateslib.enums.generics import NoInput
if TYPE_CHECKING:
from rateslib.local_types import Any, DualTypes, Number # pragma: no cover
# Dualtypes handles case of rust wrapped Dual/Dual2 datatype intermixed with float.
def _dualtypes_to_json(val: DualTypes) -> str:
val_: Number = _to_number(val)
if isinstance(val_, Dual | Dual2):
return val_.to_json()
else:
return dumps(val_)
def _enum_to_json(val: Enum) -> str:
return f'{{"PyNative":{{"{type(val).__name__}":{val.value}}}}}'
def _obj_to_json(val: Any) -> str:
if isinstance(val, NoInput):
return _enum_to_json(val)
else:
try:
return val.to_json() # type: ignore[no-any-return]
except AttributeError:
return dumps(val)
================================================
FILE: python/rateslib/solver.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import warnings
from itertools import combinations
from math import log
from time import time
from typing import TYPE_CHECKING, ParamSpec
from uuid import uuid4
import numpy as np
from pandas import DataFrame, MultiIndex, Series, concat
from pandas.errors import PerformanceWarning
from rateslib import defaults
from rateslib.curves import (
CompositeCurve,
Curve,
MultiCsaCurve,
ProxyCurve,
RolledCurve,
ShiftedCurve,
TranslatedCurve,
_BaseCurve,
)
from rateslib.dual import Dual, Dual2, dual_solve, gradient
from rateslib.dual.newton import _solver_result
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.fx import FXForwards, FXRates
from rateslib.mutability import (
_new_state_post,
_no_interior_validation,
_validate_states,
_WithState,
)
from rateslib.volatility.fx import FXVols
from rateslib.volatility.ir import IRVols, _BaseIRCube, _BaseIRSmile
P = ParamSpec("P")
if TYPE_CHECKING:
from numpy import float64 as Nf64 # noqa: N812
from numpy import object_ as Nobject # noqa: N812
from numpy.typing import NDArray
from rateslib.local_types import (
FX_,
Any,
Callable,
DualTypes,
FXForwards_,
Sequence,
SupportsRate,
SupportsSolverMutability,
Variable,
str_,
)
class Gradients:
"""
A catalogue of all the gradients used in optimisation routines and risk
sensitivties.
"""
_grad_s_vT_method: str = "_grad_s_vT_final_iteration_analytical"
_grad_s_vT_final_iteration_algo: str = "gauss_newton_final"
_J: NDArray[Nf64] | None
_J_pre: NDArray[Nf64] | None
_J2: NDArray[Nf64] | None
_J2_pre: NDArray[Nf64] | None
_grad_v_g: NDArray[Nf64] | None
_grad_s_vT: NDArray[Nf64] | None
_grad_s_vT_pre: NDArray[Nf64] | None
_grad_s_s_vT: NDArray[Nf64] | None
_grad_s_s_vT_pre: NDArray[Nf64] | None
_reset_properties_: Callable[..., None]
_update_step_: Callable[[str], NDArray[Nobject]]
_set_ad_order: Callable[[int], None]
iterate: Callable[..., None]
func_tol: float
conv_tol: float
pre_solvers: tuple[Solver, ...]
r: NDArray[Nobject] # instrument rates at iterate
r_pre: NDArray[Nobject] # instrument rates at iterate including pre_
s: NDArray[Nobject] # target instrument rates
m: int # number of instruments
pre_m: int # number of instruments including pre_
n: int # number of parameters/variables
pre_n: int # number of parameters/variables in all solvers including pre_
g: Dual | Dual2 # solver objective function value
variables: tuple[str, ...] # string tags for AD coordination
pre_variables: tuple[str, ...] # string tags for AD coordination
pre_rate_scalars: list[float] # scalars for the rate attribute of instruments
_ad: int # ad order
instruments: tuple[tuple[SupportsRate, dict[str, Any]], ...] # calibrators
@property
def J(self) -> NDArray[Nf64]:
"""
2d Jacobian array of calibrating instrument rates with respect to curve
variables, of size (n, m);
.. math::
[J]_{i,j} = [\\nabla_\\mathbf{v} \\mathbf{r^T}]_{i,j} = \\frac{\\partial r_j}{\\partial v_i}
Depends on ``self.r``.
""" # noqa: E501
if self._J is None:
self._J = np.array([gradient(rate, self.variables) for rate in self.r]).T
return self._J
@property
def grad_v_rT(self) -> NDArray[Nf64]:
"""
Alias of ``J``.
"""
return self.J
@property
def J2(self) -> NDArray[Nf64]:
"""
3d array of second derivatives of calibrating instrument rates with
respect to curve variables, of size (n, n, m);
.. math::
[J2]_{i,j,k} = [\\nabla_\\mathbf{v} \\nabla_\\mathbf{v} \\mathbf{r^T}]_{i,j,k} = \\frac{\\partial^2 r_k}{\\partial v_i \\partial v_j}
Depends on ``self.r``.
""" # noqa: E501
if self._J2 is None:
if self._ad != 2:
raise ValueError(
f"Cannot perform second derivative calculations when ad mode is {self._ad}.",
)
rates = np.array([_[0].rate(**_[1]) for _ in self.instruments])
# solver is passed in order to extract curves as string
_ = np.array([gradient(rate, self.variables, order=2) for rate in rates])
self._J2 = np.transpose(_, (1, 2, 0))
return self._J2
@property
def grad_v_v_rT(self) -> NDArray[Nf64]:
"""
Alias of ``J2``.
"""
return self.J2 # pragma: no cover
@property
def grad_v_g(self) -> NDArray[Nf64]:
"""
1d array of objective function value with respect to curve variables,
of size (n,);
.. math::
[\\nabla_\\mathbf{v} g(\\mathbf{v}; \\mathbf{s}) = \\frac{\\partial g}{\\partial v_i}
""" # noqa: E501
if self._grad_v_g is None:
self._grad_v_g = gradient(self.g, self.variables)
return self._grad_v_g
@property
def grad_s_vT(self) -> NDArray[Nf64]:
"""
2d Jacobian array of curve variables with respect to calibrating instruments,
of size (m, n);
.. math::
[\\nabla_\\mathbf{s}\\mathbf{v^T}]_{i,j} = \\frac{\\partial v_j}{\\partial s_i} = \\mathbf{J^+}
""" # noqa: E501
if self._grad_s_vT is None:
self._grad_s_vT = getattr(self, self._grad_s_vT_method)()
return self._grad_s_vT
def _grad_s_vT_final_iteration_dual(self, algorithm: str | None = None) -> NDArray[Nf64]:
"""
This is not the ideal method since it requires reset_properties to reassess.
"""
algorithm = algorithm or self._grad_s_vT_final_iteration_algo
_s = self.s
self.s = np.array([Dual(v, [f"s{i}"], []) for i, v in enumerate(self.s)])
self._reset_properties_()
v_1 = self._update_step_(algorithm)
s_vars = [f"s{i}" for i in range(self.m)]
grad_s_vT = np.array([gradient(v, s_vars) for v in v_1]).T
self.s = _s
return grad_s_vT
def _grad_s_vT_final_iteration_analytical(self) -> NDArray[Nf64]:
"""Uses a pseudoinverse algorithm on floats"""
if self.n == 0:
# then there are no instruments: self is only a Solver container of `pre_solvers`
grad_s_vT: NDArray[Nf64] = np.array([[]], dtype=float)
else:
grad_s_vT = np.linalg.pinv(self.J) # type: ignore[assignment]
return grad_s_vT
def _grad_s_vT_fixed_point_iteration(self) -> NDArray[Nf64]:
"""
This is not the ideal method because it requires second order and reset props.
"""
self._set_ad_order(2)
self._reset_properties_()
_s = self.s
self.s = np.array([Dual2(v, [f"s{i}"], [], []) for i, v in enumerate(self.s)])
s_vars = tuple(f"s{i}" for i in range(self.m))
grad2 = gradient(self.g, self.variables + s_vars, order=2)
grad_v_vT_f = grad2[: self.n, : self.n]
grad_s_vT_f = grad2[self.n :, : self.n]
grad_s_vT: NDArray[Nf64] = np.linalg.solve(grad_v_vT_f, -grad_s_vT_f.T).T # type: ignore[assignment]
# The following are alternative representations. Actually faster to calculate and
# do not require sensitivity against S variables to be measured.
# See 'coding interest rates' equation 12.38
# _1 = np.einsum("iy, yz, jz", self.J, self.W, self.J)
# _2 = np.einsum("z, zy, ijy", self.x.astype(float), self.W, self.J2)
# _3 = 2* (_1 + _2)
# _11 = -2 * np.einsum("iz,zj->ji", self.J, self.W)
self.s = _s
self._set_ad_order(1)
self._reset_properties_()
return grad_s_vT
@property
def grad_s_s_vT(self) -> NDArray[Nf64]:
"""
3d array of second derivatives of curve variables with respect to
calibrating instruments, of size (m, m, n);
.. math::
[\\nabla_\\mathbf{s} \\nabla_\\mathbf{s} \\mathbf{v^T}]_{i,j,k} = \\frac{\\partial^2 v_k}{\\partial s_i \\partial s_j}
""" # noqa: E501
if self._grad_s_s_vT is None:
self._grad_s_s_vT = self._grad_s_s_vT_final_iteration_analytical()
return self._grad_s_s_vT
def _grad_s_s_vT_fwd_difference_method(self) -> NDArray[Nf64]:
"""Use a numerical method, iterating through changes in s to calculate."""
ds = 10 ** (int(log(self.func_tol, 10) / 2))
grad_s_vT_0 = np.copy(self.grad_s_vT)
grad_s_s_vT = np.zeros(shape=(self.m, self.m, self.n))
for i in range(self.m):
self.s[i] += ds
self.iterate()
grad_s_s_vT[:, i, :] = (self.grad_s_vT - grad_s_vT_0) / ds
self.s[i] -= ds
# ensure exact symmetry (maybe redundant)
grad_s_s_vT = (grad_s_s_vT + np.swapaxes(grad_s_s_vT, 0, 1)) / 2
self.iterate()
return grad_s_s_vT
def _grad_s_s_vT_final_iteration_analytical(self, use_pre: bool = False) -> NDArray[Nf64]:
"""
Use an analytical formula and second order AD to calculate.
Not: must have 2nd order AD set to function, and valid properties set to
function
"""
if use_pre:
J2, grad_s_vT = self.J2_pre, self.grad_s_vT_pre
else:
J2, grad_s_vT = self.J2, self.grad_s_vT
# dv/dr_l * d2r_l / dvdv
_: NDArray[Nf64] = np.tensordot(J2, grad_s_vT, (2, 0))
# dv_z /ds * d2v / dv_zdv
_ = np.tensordot(grad_s_vT, _, (1, 0))
# dv_h /ds * d2v /dvdv_h
_ = -np.tensordot(grad_s_vT, _, (1, 1))
grad_s_s_vT = _
return grad_s_s_vT
# _ = np.matmul(grad_s_vT, np.matmul(J2, grad_s_vT))
# grad_s_s_vT = -np.tensordot(grad_s_vT, _, (1, 0))
# return grad_s_s_vT
# _pre versions incorporate all variables of solver and pre_solvers
def grad_f_rT_pre(self, fx_vars: Sequence[str]) -> NDArray[Nf64]:
"""
2d Jacobian array of calibrating instrument rates with respect to FX rate
variables, of size (len(fx_vars), pre_m);
.. math::
[\\nabla_\\mathbf{f}\\mathbf{r^T}]_{i,j} = \\frac{\\partial r_j}{\\partial f_i}
Parameters
----------
fx_vars : list or tuple of str
The variable name tags for the FX rate sensitivities.
"""
grad_f_rT = np.array([gradient(rate, fx_vars) for rate in self.r_pre]).T
return grad_f_rT
@property
def J2_pre(self) -> NDArray[Nf64]:
"""
3d array of second derivatives of calibrating instrument rates with
respect to curve variables for all ``Solvers`` including ``pre_solvers``,
of size (pre_n, pre_n, pre_m);
.. math::
[J2]_{i,j,k} = [\\nabla_\\mathbf{v} \\nabla_\\mathbf{v} \\mathbf{r^T}]_{i,j,k} = \\frac{\\partial^2 r_k}{\\partial v_i \\partial v_j}
Depends on ``self.r`` and ``pre_solvers.J2``.
""" # noqa: E501
if len(self.pre_solvers) == 0:
return self.J2
if self._J2_pre is None:
if self._ad != 2:
raise ValueError(
f"Cannot perform second derivative calculations when ad mode is {self._ad}.",
)
J2 = np.zeros(shape=(self.pre_n, self.pre_n, self.pre_m))
i, j = 0, 0
for pre_slvr in self.pre_solvers:
J2[
i : i + pre_slvr.pre_n,
i : i + pre_slvr.pre_n,
j : j + pre_slvr.pre_m,
] = pre_slvr.J2_pre
i, j = i + pre_slvr.pre_n, j + pre_slvr.pre_m
if self.m > 0:
# then self is not only a container for `pre_solvers`
rates = np.array([_[0].rate(**_[1]) for _ in self.instruments])
# solver is passed in order to extract curves as string
_ = np.array([gradient(r, self.pre_variables, order=2) for r in rates])
J2[:, :, -self.m :] = np.transpose(_, (1, 2, 0))
self._J2_pre = J2
return self._J2_pre
def grad_f_v_rT_pre(self, fx_vars: Sequence[str]) -> NDArray[Nf64]:
"""
3d array of second derivatives of calibrating instrument rates with respect to
FX rates and curve variables, of size (len(fx_vars), pre_n, pre_m);
.. math::
[\\nabla_\\mathbf{f} \\nabla_\\mathbf{v} \\mathbf{r^T}]_{i,j,k} = \\frac{\\partial^2 r_k}{\\partial f_i \\partial v_j}
Parameters
----------
fx_vars : list or tuple of str
The variable name tags for the FX rate sensitivities.
""" # noqa: E501
# FX sensitivity requires reverting through all pre-solvers rates.
all_gradients = np.array(
[gradient(rate, self.pre_variables + tuple(fx_vars), order=2) for rate in self.r_pre],
).swapaxes(0, 2)
grad_f_v_rT = all_gradients[self.pre_n :, : self.pre_n, :]
return grad_f_v_rT
def grad_f_f_rT_pre(self, fx_vars: Sequence[str]) -> NDArray[Nf64]:
"""
3d array of second derivatives of calibrating instrument rates with respect to
FX rates, of size (len(fx_vars), len(fx_vars), pre_m);
.. math::
[\\nabla_\\mathbf{f} \\nabla_\\mathbf{f} \\mathbf{r^T}]_{i,j,k} = \\frac{\\partial^2 r_k}{\\partial f_i \\partial f_j}
Parameters
----------
fx_vars : list or tuple of str
The variable name tags for the FX rate sensitivities.
""" # noqa: E501
# FX sensitivity requires reverting through all pre-solvers rates.
grad_f_f_rT = np.array([gradient(rate, fx_vars, order=2) for rate in self.r_pre]).swapaxes(
0,
2,
)
return grad_f_f_rT
@property
def grad_s_s_vT_pre(self) -> NDArray[Nf64]:
"""
3d array of second derivatives of curve variables with respect to
calibrating instruments, of size (pre_m, pre_m, pre_n);
.. math::
[\\nabla_\\mathbf{s} \\nabla_\\mathbf{s} \\mathbf{v^T}]_{i,j,k} = \\frac{\\partial^2 v_k}{\\partial s_i \\partial s_j}
""" # noqa: E501
if len(self.pre_solvers) == 0:
return self.grad_s_s_vT
if self._grad_s_s_vT_pre is None:
self._grad_s_s_vT_pre = self._grad_s_s_vT_final_iteration_analytical(use_pre=True)
return self._grad_s_s_vT_pre
@property
def grad_v_v_rT_pre(self) -> NDArray[Nf64]:
"""
Alias of ``J2_pre``.
"""
return self.J2_pre # pragma: no cover
def grad_f_s_vT_pre(self, fx_vars: Sequence[str]) -> NDArray[Nf64]:
"""
3d array of second derivatives of curve variables with respect to
FX rates and calibrating instrument rates, of size (len(fx_vars), pre_m, pre_n);
.. math::
[\\nabla_\\mathbf{f} \\nabla_\\mathbf{s} \\mathbf{v^T}]_{i,j,k} = \\frac{\\partial^2 v_k}{\\partial f_i \\partial s_j}
Parameters
----------
fx_vars : list or tuple of str
The variable name tags for the FX rate sensitivities.
""" # noqa: E501
# FX sensitivity requires reverting through all pre-solvers rates.
_ = -np.tensordot(self.grad_f_v_rT_pre(fx_vars), self.grad_s_vT_pre, (1, 1)).swapaxes(1, 2)
_ = np.tensordot(_, self.grad_s_vT_pre, (2, 0))
grad_f_s_vT: NDArray[Nf64] = _
return grad_f_s_vT
def grad_f_f_vT_pre(self, fx_vars: Sequence[str]) -> NDArray[Nf64]:
"""
3d array of second derivatives of curve variables with respect to
FX rates, of size (len(fx_vars), len(fx_vars), pre_n);
.. math::
[\\nabla_\\mathbf{f} \\nabla_\\mathbf{f} \\mathbf{v^T}]_{i,j,k} = \\frac{\\partial^2 v_k}{\\partial f_i \\partial f_j}
Parameters
----------
fx_vars : list or tuple of str
The variable name tags for the FX rate sensitivities.
""" # noqa: E501
# FX sensitivity requires reverting through all pre-solvers rates.
_ = -np.tensordot(self.grad_f_f_rT_pre(fx_vars), self.grad_s_vT_pre, (2, 0))
_ -= np.tensordot(self.grad_f_rT_pre(fx_vars), self.grad_f_s_vT_pre(fx_vars), (1, 1))
grad_f_f_vT: NDArray[Nf64] = _
return grad_f_f_vT
def grad_f_vT_pre(self, fx_vars: Sequence[str]) -> NDArray[Nf64]:
"""
2d array of the derivatives of curve variables with respect to FX rates, of
size (len(fx_vars), pre_n).
.. math::
[\\nabla_\\mathbf{f}\\mathbf{v^T}]_{i,j} = \\frac{\\partial v_j}{\\partial f_i} = -\\frac{\\partial r_z}{\\partial f_i} \\frac{\\partial v_j}{\\partial s_z}
Parameters
----------
fx_vars : list or tuple of str
The variable name tags for the FX rate sensitivities
""" # noqa: E501
# FX sensitivity requires reverting through all pre-solvers rates.
grad_f_rT = np.array([gradient(rate, fx_vars) for rate in self.r_pre]).T
_: NDArray[Nf64] = -np.matmul(grad_f_rT, self.grad_s_vT_pre)
return _
def grad_f_f(self, f: Dual | Dual2 | Variable, fx_vars: Sequence[str]) -> NDArray[Nf64]:
"""
1d array of total derivatives of FX conversion rate with respect to
FX rate variables, of size (len(fx_vars));
.. math::
[\\nabla_\\mathbf{f} f_{loc:bas}]_{i} = \\frac{d f}{d f_i}
Parameters
----------
f : Dual or Dual2
The value of the local to base FX conversion rate.
fx_vars : list or tuple of str
The variable name tags for the FX rate sensitivities
"""
grad_f_f = gradient(f, fx_vars)
grad_f_f += np.matmul(self.grad_f_vT_pre(fx_vars), gradient(f, self.pre_variables))
ret: NDArray[Nf64] = grad_f_f
return ret
@property
def grad_s_vT_pre(self) -> NDArray[Nf64]:
"""
2d Jacobian array of curve variables with respect to calibrating instruments
including all pre solvers attached to the Solver, of size (pre_m, pre_n).
.. math::
[\\nabla_\\mathbf{s}\\mathbf{v^T}]_{i,j} = \\frac{\\partial v_j}{\\partial s_i} = \\mathbf{J^+}
""" # noqa: E501
if len(self.pre_solvers) == 0:
return self.grad_s_vT
if self._grad_s_vT_pre is None:
grad_s_vT = np.zeros(shape=(self.pre_m, self.pre_n))
i, j = 0, 0
for pre_solver in self.pre_solvers:
# create the left side block matrix
m, n = pre_solver.pre_m, pre_solver.pre_n
grad_s_vT[i : i + m, j : j + n] = pre_solver.grad_s_vT_pre
# create the right column dependencies, only if self contains some instruments
# and variable of its own and is not only a container of `pre_solvers`
if self.n > 0:
grad_v_r = np.array([gradient(r, pre_solver.pre_variables) for r in self.r]).T
block = np.matmul(grad_v_r, self.grad_s_vT)
block = -1 * np.matmul(pre_solver.grad_s_vT_pre, block)
grad_s_vT[i : i + m, -self.n :] = block
i, j = i + m, j + n
if self.n > 0:
# create bottom right block, only if self contains some instruments
# and variables of its own and is not only a container of `pre_solvers`
grad_s_vT[-self.m :, -self.n :] = self.grad_s_vT
self._grad_s_vT_pre = grad_s_vT
return self._grad_s_vT_pre
def grad_s_f_pre(self, f: Dual | Dual2 | Variable) -> NDArray[Nf64]:
"""
1d array of FX conversion rate with respect to calibrating instruments,
of size (pre_m);
.. math::
[\\nabla_\\mathbf{s} f_{loc:bas}]_{i} = \\frac{\\partial f}{\\partial s_i}
Parameters
----------
f : Dual or Dual2
The value of the local to base FX conversion rate.
"""
grad_s_f: NDArray[Nf64] = np.tensordot(
self.grad_s_vT_pre, gradient(f, self.pre_variables), (1, 0)
)
return grad_s_f
def grad_s_sT_f_pre(self, f: Dual | Dual2 | Variable) -> NDArray[Nf64]:
"""
2d array of derivatives of FX conversion rate with respect to
calibrating instruments, of size (pre_m, pre_m);
.. math::
[\\nabla_\\mathbf{s} \\nabla_\\mathbf{s}^\\mathbf{T} f_{loc:bas}]_{i,j} = \\frac{\\partial^2 f}{\\partial s_i \\partial s_j}
Parameters
----------
f : Dual or Dual2
The value of the local to base FX conversion rate.
""" # noqa: E501
grad_s_vT = self.grad_s_vT_pre
grad_v_vT_f = gradient(f, self.pre_variables, order=2)
_: NDArray[Nf64] = np.tensordot(grad_s_vT, grad_v_vT_f, (1, 0))
_ = np.tensordot(_, grad_s_vT, (1, 1))
grad_s_sT_f = _
return grad_s_sT_f
def grad_f_sT_f_pre(self, f: Dual | Dual2 | Variable, fx_vars: Sequence[str]) -> NDArray[Nf64]:
"""
2d array of derivatives of FX conversion rate with respect to
calibrating instruments, of size (pre_m, pre_m);
.. math::
[\\nabla_\\mathbf{f} \\nabla_\\mathbf{s}^\\mathbf{T} f_{loc:bas}(\\mathbf{v(s, f), f)})]_{i,j} = \\frac{d^2 f}{d f_i \\partial s_j}
Parameters
----------
f : Dual or Dual2
The value of the local to base FX conversion rate.
fx_vars : list or tuple of str
The variable name tags for the FX rate sensitivities
""" # noqa: E501
grad_s_vT = self.grad_s_vT_pre
grad_v_f = gradient(f, self.pre_variables)
grad_f_sT_v = self.grad_f_s_vT_pre(fx_vars)
_ = gradient(f, self.pre_variables + tuple(fx_vars), order=2)
grad_v_vT_f = _[: self.pre_n, : self.pre_n]
grad_f_vT_f = _[self.pre_n :, : self.pre_n]
# grad_f_fT_f = _[self.pre_n :, self.pre_n :]
grad_f_vT = self.grad_f_vT_pre(fx_vars)
_ = np.tensordot(grad_f_sT_v, grad_v_f, (2, 0))
_ += np.tensordot(grad_f_vT_f, grad_s_vT, (1, 1))
__ = np.tensordot(grad_f_vT, grad_v_vT_f, (1, 0))
__ = np.tensordot(__, grad_s_vT, (1, 1))
grad_f_sT_f: NDArray[Nf64] = _ + __
return grad_f_sT_f
def grad_f_fT_f_pre(self, f: Dual | Dual2 | Variable, fx_vars: Sequence[str]) -> NDArray[Nf64]:
"""
2d array of derivatives of FX conversion rate with respect to
calibrating instruments, of size (pre_m, pre_m);
.. math::
[\\nabla_\\mathbf{f} \\nabla_\\mathbf{f}^\\mathbf{T} f_{loc:bas}(\\mathbf{v(s, f), f)})]_{i,j} = \\frac{d^2 f}{d f_i d f_j}
Parameters
----------
f : Dual or Dual2
The value of the local to base FX conversion rate.
fx_vars : list or tuple of str
The variable name tags for the FX rate sensitivities
""" # noqa: E501
# grad_s_vT = self.grad_s_vT_pre
grad_v_f = gradient(f, self.pre_variables)
# grad_f_sT_v = self.grad_f_s_vT_pre(fx_vars)
_ = gradient(f, self.pre_variables + tuple(fx_vars), order=2)
grad_v_vT_f = _[: self.pre_n, : self.pre_n]
grad_f_vT_f = _[self.pre_n :, : self.pre_n]
grad_f_fT_f = _[self.pre_n :, self.pre_n :]
grad_f_vT = self.grad_f_vT_pre(fx_vars)
grad_f_fT_v = self.grad_f_f_vT_pre(fx_vars)
_ = grad_f_fT_f
_ += 2.0 * np.tensordot(grad_f_vT_f, grad_f_vT, (1, 1))
_ += np.tensordot(grad_f_fT_v, grad_v_f, (2, 0))
__ = np.tensordot(grad_f_vT, grad_v_vT_f, (1, 0))
__ = np.tensordot(__, grad_f_vT, (1, 1))
grad_f_fT_f = _ + __
return grad_f_fT_f
# grad_v_v_f: calculated within grad_s_vT_fixed_point_iteration
# delta and gamma calculations require all solver and pre_solver variables
def grad_s_Ploc(self, npv: Dual | Dual2 | Variable) -> NDArray[Nf64]:
"""
1d array of derivatives of local currency PV with respect to calibrating
instruments, of size (pre_m).
.. math::
\\nabla_\\mathbf{s} P^{loc} = \\frac{\\partial P^{loc}}{\\partial s_i}
Parameters:
npv : Dual or Dual2
A local currency NPV of a period of a leg.
"""
grad_s_P: NDArray[Nf64] = np.matmul(self.grad_s_vT_pre, gradient(npv, self.pre_variables))
return grad_s_P
def grad_f_Ploc(self, npv: Dual | Dual2 | Variable, fx_vars: Sequence[str]) -> NDArray[Nf64]:
r"""
1d array of derivatives of local currency PV with respect to FX rate variable,
of size (len(fx_vars)).
.. math::
\\nabla_\\mathbf{f} P^{loc}(\\mathbf{v(s, f), f}) = \\frac{\\partial P^{loc}}{\\partial f_i}+ \\frac{\partial v_z}{\\partial f_i} \\frac{\\partial P^{loc}}{\\partial v_z}
Parameters:
npv : Dual or Dual2
A local currency NPV of a period of a leg.
fx_vars : list or tuple of str
The variable tags for automatic differentiation of FX rate sensitivity
""" # noqa: E501
grad_f_P = gradient(npv, fx_vars)
grad_f_P += np.matmul(self.grad_f_vT_pre(fx_vars), gradient(npv, self.pre_variables))
ret: NDArray[Nf64] = grad_f_P
return ret
def grad_s_Pbase(
self, npv: Dual | Dual2 | Variable, grad_s_P: NDArray[Nf64], f: Dual | Dual2 | Variable
) -> NDArray[Nf64]:
"""
1d array of derivatives of base currency PV with respect to calibrating
instruments, of size (pre_m).
.. math::
\\nabla_\\mathbf{s} P^{bas}(\\mathbf{v(s, f)}) = \\nabla_\\mathbf{s} P^{loc}(\\mathbf{v(s, f)}) f_{loc:bas} + P^{loc} \\nabla_\\mathbf{s} f_{loc:bas}
Parameters:
npv : Dual or Dual2
A local currency NPV of a period of a leg.
grad_s_P : ndarray
The local currency delta risks w.r.t. calibrating instruments.
f : Dual or Dual2
The local:base FX rate.
""" # noqa: E501
grad_s_Pbas: NDArray[Nf64] = _dual_float(npv) * np.matmul(
self.grad_s_vT_pre, gradient(f, self.pre_variables)
)
grad_s_Pbas += grad_s_P * _dual_float(f) # <- use float to cast float array not Dual
return grad_s_Pbas
def grad_f_Pbase(
self,
npv: Dual | Dual2 | Variable,
grad_f_P: NDArray[Nf64],
f: Dual | Dual2 | Variable,
fx_vars: Sequence[str],
) -> NDArray[Nf64]:
"""
1d array of derivatives of base currency PV with respect to FX rate variables,
of size (len(fx_vars)).
.. math::
\\nabla_\\mathbf{s} P^{bas}(\\mathbf{v(s, f)}) = \\nabla_\\mathbf{s} P^{loc}(\\mathbf{v(s, f)}) f_{loc:bas} + P^{loc} \\nabla_\\mathbf{s} f_{loc:bas}
Parameters:
npv : Dual or Dual2
A local currency NPV of a period of a leg.
grad_f_P : ndarray
The local currency delta risks w.r.t. FX pair variables.
f : Dual or Dual2
The local:base FX rate.
fx_vars : list or tuple of str
The variable tags for automatic differentiation of FX rate sensitivity
""" # noqa: E501
# use float here to cast float array not Dual
ret: NDArray[Nf64] = grad_f_P * _dual_float(f)
ret += _dual_float(npv) * self.grad_f_f(f, fx_vars)
return ret
def grad_s_sT_Ploc(self, npv: Dual2 | Variable) -> NDArray[Nf64]:
"""
2d array of derivatives of local currency PV with respect to calibrating
instruments, of size (pre_m, pre_m).
.. math::
\\nabla_\\mathbf{s} \\nabla_\\mathbf{s}^\\mathbf{T} P^{loc}(\\mathbf{v, f}) = \\frac{ \\partial^2 P^{loc}(\\mathbf{v(s, f)}) }{\\partial s_i \\partial s_j}
Parameters:
npv : Dual2
A local currency NPV of a period of a leg.
""" # noqa: E501
# instrument-instrument cross gamma:
_ = np.tensordot(gradient(npv, self.pre_variables, order=2), self.grad_s_vT_pre, (1, 1))
_ = np.tensordot(self.grad_s_vT_pre, _, (1, 0))
_ += np.tensordot(self.grad_s_s_vT_pre, gradient(npv, self.pre_variables), (2, 0))
grad_s_sT_P: NDArray[Nf64] = _
return grad_s_sT_P
# grad_s_sT_P = np.matmul(
# self.grad_s_vT_pre,
# np.matmul(
# npv.gradient(self.pre_variables, order=2), self.grad_s_vT_pre.T
# ),
# )
# grad_s_sT_P += np.matmul(
# self.grad_s_s_vT_pre, npv.gradient(self.pre_variables)[:, None]
# )[:, :, 0]
def gradp_f_vT_Ploc(
self, npv: Dual | Dual2 | Variable, fx_vars: Sequence[str]
) -> NDArray[Nf64]:
"""
2d array of (partial) derivatives of local currency PV with respect to
FX rate variables and curve variables, of size (len(fx_vars), pre_n).
.. math::
\\nabla_\\mathbf{f} \\nabla_\\mathbf{v}^\\mathbf{T} P^{loc}(\\mathbf{v, f}) = \\frac{ \\partial ^2 P^{loc}(\\mathbf{v, f)}) }{\\partial f_i \\partial v_j}
Parameters:
npv : Dual2
A local currency NPV of a period of a leg.
fx_vars : list or tuple of str
The variable tags for automatic differentiation of FX rate sensitivity
""" # noqa: E501
grad_x_xT_Ploc = gradient(npv, self.pre_variables + tuple(fx_vars), order=2)
grad_f_vT_Ploc = grad_x_xT_Ploc[self.pre_n :, : self.pre_n]
return grad_f_vT_Ploc
def grad_f_sT_Ploc(self, npv: Dual | Dual2 | Variable, fx_vars: Sequence[str]) -> NDArray[Nf64]:
"""
2d array of derivatives of local currency PV with respect to calibrating
instruments, of size (pre_m, pre_m).
.. math::
\\nabla_\\mathbf{f} \\nabla_\\mathbf{s}^\\mathbf{T} P^{loc}(\\mathbf{v(s, f), f}) = \\frac{ d^2 P^{loc}(\\mathbf{v(s, f), f)}) }{d f_i \\partial s_j}
Parameters:
npv : Dual2
A local currency NPV of a period of a leg.
fx_vars : list or tuple of str
The variable tags for automatic differentiation of FX rate sensitivity
""" # noqa: E501
# fx_rate-instrument cross gamma:
_ = np.tensordot(
self.grad_f_vT_pre(fx_vars),
gradient(npv, self.pre_variables, order=2),
(1, 0),
)
_ += self.gradp_f_vT_Ploc(npv, fx_vars)
_ = np.tensordot(_, self.grad_s_vT_pre, (1, 1))
_ += np.tensordot(self.grad_f_s_vT_pre(fx_vars), gradient(npv, self.pre_variables), (2, 0))
grad_f_sT_Ploc: NDArray[Nf64] = _
return grad_f_sT_Ploc
def grad_f_fT_Ploc(self, npv: Dual | Dual2 | Variable, fx_vars: Sequence[str]) -> NDArray[Nf64]:
"""
2d array of derivatives of local currency PV with respect to FX rate variables,
of size (len(fx_vars), len(fx_vars)).
.. math::
\\nabla_\\mathbf{f} \\nabla_\\mathbf{s}^\\mathbf{T} P^{loc}(\\mathbf{v(s, f), f}) = \\frac{ d^2 P^{loc}(\\mathbf{v(s, f), f)}) }{d f_i d f_j}
Parameters:
npv : Dual2
A local currency NPV of a period of a leg.
fx_vars : list or tuple of str
The variable tags for automatic differentiation of FX rate sensitivity
""" # noqa: E501
# fx_rate-instrument cross gamma:
gradp_f_vT_Ploc = self.gradp_f_vT_Ploc(npv, fx_vars)
grad_f_vT_pre = self.grad_f_vT_pre(fx_vars)
grad_v_Ploc = gradient(npv, self.pre_variables)
grad_v_vT_Ploc = gradient(npv, self.pre_variables, order=2)
_ = gradient(npv, fx_vars, order=2)
_ += np.tensordot(self.grad_f_f_vT_pre(fx_vars), grad_v_Ploc, (2, 0))
_ += np.tensordot(grad_f_vT_pre, gradp_f_vT_Ploc, (1, 1))
_ += np.tensordot(gradp_f_vT_Ploc, grad_f_vT_pre, (1, 1))
__ = np.tensordot(grad_f_vT_pre, grad_v_vT_Ploc, (1, 0))
__ = np.tensordot(__, grad_f_vT_pre, (1, 1))
grad_f_f_Ploc: NDArray[Nf64] = _ + __
return grad_f_f_Ploc
def grad_s_sT_Pbase(
self,
npv: Dual | Dual2 | Variable,
grad_s_sT_P: NDArray[Nf64],
f: Dual | Dual2 | Variable,
) -> NDArray[Nf64]:
"""
2d array of derivatives of base currency PV with respect to calibrating
instrument rate variables, of size (pre_m, pre_m).
.. math::
\\nabla_\\mathbf{s} \\nabla_\\mathbf{s}^\\mathbf{T} P^{bas}(\\mathbf{v(s, f), f})
Parameters:
npv : Dual or Dual2
A local currency NPV of a period of a leg.
grad_s_sT_P : ndarray
The local currency gamma risks w.r.t. calibrating instrument variables.
f : Dual or Dual2
The local:base FX rate.
"""
grad_s_f = self.grad_s_f_pre(f)
grad_s_sT_f = self.grad_s_sT_f_pre(f)
grad_s_P = self.grad_s_Ploc(npv)
_ = _dual_float(f) * grad_s_sT_P
_ += np.tensordot(grad_s_f[:, None], grad_s_P[None, :], (1, 0))
_ += np.tensordot(grad_s_P[:, None], grad_s_f[None, :], (1, 0))
_ += _dual_float(npv) * grad_s_sT_f # <- use float to cast float array not Dual
grad_s_sT_Pbas: NDArray[Nf64] = _
return grad_s_sT_Pbas
def grad_f_sT_Pbase(
self,
npv: Dual | Dual2 | Variable,
grad_f_sT_P: NDArray[Nf64],
f: Dual | Dual2 | Variable,
fx_vars: Sequence[str],
) -> NDArray[Nf64]:
"""
2d array of derivatives of base currency PV with respect to FX variables and
calibrating instrument rate variables, of size (len(fx_vars), pre_m).
.. math::
\\nabla_\\mathbf{f} \\nabla_\\mathbf{s}^\\mathbf{T} P^{bas}(\\mathbf{v(s, f), f})
Parameters:
npv : Dual or Dual2
A local currency NPV of a period of a leg.
grad_f_sT_P : ndarray
The local currency gamma risks w.r.t. FX rate variables and
calibrating instrument variables.
f : Dual or Dual2
The local:base FX rate.
fx_vars : list or tuple of str
The variable tags for automatic differentiation of FX rate sensitivity
"""
grad_s_f = self.grad_s_f_pre(f)
grad_f_f = self.grad_f_f(f, fx_vars)
grad_s_P = self.grad_s_Ploc(npv)
grad_f_P = self.grad_f_Ploc(npv, fx_vars)
grad_f_sT_f = self.grad_f_sT_f_pre(f, fx_vars)
_ = _dual_float(f) * grad_f_sT_P
_ += np.tensordot(grad_f_f[:, None], grad_s_P[None, :], (1, 0))
_ += np.tensordot(grad_f_P[:, None], grad_s_f[None, :], (1, 0))
_ += _dual_float(npv) * grad_f_sT_f # <- use float to cast float array not Dual
grad_s_sT_Pbas: NDArray[Nf64] = _
return grad_s_sT_Pbas
def grad_f_fT_Pbase(
self,
npv: Dual | Dual2 | Variable,
grad_f_fT_P: NDArray[Nf64],
f: Dual | Dual2 | Variable,
fx_vars: Sequence[str],
) -> NDArray[Nf64]:
"""
2d array of derivatives of base currency PV with respect to calibrating
instrument rate variables, of size (pre_m, pre_m).
.. math::
\\nabla_\\mathbf{s} \\nabla_\\mathbf{s}^\\mathbf{T} P^{bas}(\\mathbf{v(s, f), f})
Parameters:
npv : Dual or Dual2
A local currency NPV of a period of a leg.
grad_f_fT_P : ndarray
The local currency gamma risks w.r.t. FX rate variables.
f : Dual or Dual2
The local:base FX rate.
fx_vars : list or tuple of str
The variable tags for automatic differentiation of FX rate sensitivity
"""
# grad_s_f = self.grad_s_f_pre(f)
grad_f_f = self.grad_f_f(f, fx_vars)
# grad_s_P = self.grad_s_Ploc(npv)
grad_f_P = self.grad_f_Ploc(npv, fx_vars)
grad_f_fT_f = self.grad_f_fT_f_pre(f, fx_vars)
_ = _dual_float(f) * grad_f_fT_P
_ += np.tensordot(grad_f_f[:, None], grad_f_P[None, :], (1, 0))
_ += np.tensordot(grad_f_P[:, None], grad_f_f[None, :], (1, 0))
_ += _dual_float(npv) * grad_f_fT_f # <- use float to cast float array not Dual
grad_s_sT_Pbas: NDArray[Nf64] = _
return grad_s_sT_Pbas
NO_PARAMETER_CURVES = [
ProxyCurve,
CompositeCurve,
MultiCsaCurve,
RolledCurve,
ShiftedCurve,
TranslatedCurve,
]
class Solver(Gradients, _WithState):
r"""
A numerical solver to determine parameter values on multiple pricing objects simultaneously.
.. ipython:: python
:suppress:
from rateslib import Solver, Curve, IRS, dt
Parameters
----------
curves : sequence
Sequence of :class:`Curve` or :class:`Smile` objects where each one
has been individually configured for its node dates and interpolation structures,
and has a unique ``id``. Each object will be dynamically updated/mutated by the Solver.
surfaces : sequence
Sequence of :class:`Surface` or :class:`Cube` objects where each has been configured
with a unique ``id``. Each object will be dynamically updated/mutated.
Internally, ``surfaces`` and ``curves`` are joined and provide nothing more than
organisational distinction.
instruments : sequence
Sequence of calibrating instrument specifications that will be used by
the solver to determine the solved curves. See notes.
s : sequence
Sequence of objective rates that each solved calibrating instrument will solve
to. Must have the same length and order as ``instruments``.
weights : sequence, optional
The weights that should be used within the objective function when determining
the loss function associated with each calibrating instrument. Should be of
same length as ``instruments``. If not given defaults to all ones.
algorithm : str in {"levenberg_marquardt", "gauss_newton", "gradient_descent"}
The optimisation algorithm to use when solving curves via :meth:`iterate`.
fx : FXForwards, FXRates, optional
The fx object used in FX rate calculations for ``instruments`` rates or sensitivities.
instrument_labels : list of str, optional
The names of the calibrating instruments which will be used in delta risk
outputs.
id : str, optional
The identifier used to denote the instance and attribute risk factors.
pre_solvers : list,
A collection of ``Solver`` s that have already determined curves to which this
instance has a dependency. Used for aggregation of risk sensitivities.
max_iter : int
The maximum number of iterations to perform.
func_tol : float
The tolerance to determine convergence if the objective function is lower
than a specific value. Defaults to 1e-11.
conv_tol : float
The tolerance to determine convergence if successive objective function
values are similar. Defaults to 1e-14.
step_tol : float
The tolerance for the norm of the difference between successive parameter iterates.
Defaults to 1e-14.
grad_tol : float
The tolerance for the norm of the objective function gradient at an iterate. Defaults
to 1e-11.
ini_lambda : 3-tuple of float, optional
Parameters to control the Levenberg-Marquardt algorithm, defined as the
initial lambda value, the scaling factor for a successful iteration and the
scaling factor for an unsuccessful iteration. Defaults to (1000, 0.25, 2).
callback : callable, optional
Is called after each iteration. Used for debugging or optimization.
Notes
-------
**Purpose**
Once initialized, the *Solver* will numerically determine and set, via mutation, all the
relevant node values on each *Curve*, *Smile*, *Surface* or *Cube* simultaneously by
calling :meth:`iterate`. This mutation of those pricing objects will override any local AD
variables pre-configured by a user and use the *Solver's* own variable tags, for proper
*delta* and *gamma* management. The objective function of the *Solver* which it seeks to
minimize over all parameters, :math:`\mathbf{v}`, is:
.. math::
g(\mathbf{v}; \mathbf{s}) = \mathbf{(r(v) - s)^{T} W (r(v) - s)}
**Instrument Specification**
Thus, the *Solver* naturally attempts to match the corresponding value in ``s`` with the
result of the :meth:`~rateslib.instruments.protocols._WithRate.rate` method called on each
of the successive ``instruments``.
Each *Instrument* provided may set its pricing objects (i.e. ``curves``
and ``vol``) and ``metric`` preset at its initialization, so that the
:meth:`~rateslib.instruments.Metrics.rate` method for each *Instrument* in scope is
well defined. Best practice refers to these with string mappings that the *Solver*
records. As an example,
.. code-block:: python
instruments=[
...
FXCall([args], curves=["eur", "usd"], vol="eurusd_smile", metric="vol"),
...
]
The ``fx`` argument used in the :meth:`~rateslib.instruments.protocols._WithRate.rate` call will
be passed directly to each *Instrument* from the *Solver's* ``fx`` argument, being
representative of a consistent *FXForwards* object for all *Instruments*.
If the pricing objects and/or *metric* are not preset then the *Solver* ``instruments`` can be
given as a tuple where the second item is a dict representing keyword arguments passed
directly to the :meth:`~rateslib.instruments.protocols._WithRate.rate`
method. An example is:
.. code-block:: python
instruments=[
...
(FixedRateBond([args]), {"curves": bond_curve, "metric": "ytm"}),
...
]
**Stopping Criteria**
- ``func_tol``: :math:`g(\mathbf{v}_{i+1}; \mathbf{s}) < \epsilon_{func}`. This criteria is
only useful for the cases when the number of parameters and number of instruments are
sufficiently chosen that an objective function value close to zero is obtainable.
- ``conv_tol``: :math:`|g(\mathbf{v}_{i+1}) - g(\mathbf{v}_{i})| < \epsilon_{conv}` and the
iterate is an improvement.
- ``grad_tol``: :math:`|| \nabla_{\mathbf{v}} g || < \epsilon_{grad}`. This is often the
most robust indicator of having reached a stationary point in the optimisation and is a
necessary condition of optimality.
- ``step_tol``: :math:`|| \mathbf{v}_{i+1} - \mathbf{v}_{i} || < \epsilon_{step}`. This
criteria is used mostly to detect 'stalled' or 'stuck' solutions. Even though the *Solver*
reports a success stopping under these conditions may be sub-optimal.
**Analysing**
The ``callback`` argument can be used to display results or perform tasks during iterations.
The signature of such a method is `callback(solver, i, v_i)` giving access to the *Solver*
object itself, the iteration number and the current parameter vector.
.. ipython:: python
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 1.0})
solver = Solver(
curves=[curve],
instruments=[IRS(dt(2022, 1, 1), "6m", spec="usd_irs", curves=curve)],
s=[3.0],
callback=lambda solver, i, v_i: print(f"iteration {i}: {v_i}"),
)
Examples
--------
See the documentation user guide :ref:`here `.
"""
def __init__(
self,
curves: Sequence[Any] = (),
surfaces: Sequence[Any] = (),
instruments: Sequence[SupportsRate] = (),
s: Sequence[DualTypes] = (),
weights: Sequence[float] | NoInput = NoInput(0),
algorithm: str_ = NoInput(0),
fx: FXForwards_ = NoInput(0),
instrument_labels: Sequence[str] | NoInput = NoInput(0),
id: str_ = NoInput(0), # noqa: A002
pre_solvers: Sequence[Solver] = (),
max_iter: int = 100,
func_tol: float = 1e-11,
conv_tol: float = 1e-14,
step_tol: float = 1e-14,
grad_tol: float = 1e-11,
ini_lambda: tuple[float, float, float] | NoInput = NoInput(0),
callback: Callable[[Solver, int, NDArray[Nobject]], None] | NoInput = NoInput(0),
) -> None:
self._do_not_validate_ = False
self.callback = callback
self.algorithm = _drb(defaults.algorithm, algorithm).lower()
self.ini_lambda = _drb(defaults.ini_lambda, ini_lambda)
self.id: str = _drb(uuid4().hex[:5] + "_", id) # 1 in a million clash
self.m = len(instruments)
self.func_tol, self.conv_tol, self.max_iter = func_tol, conv_tol, max_iter
self.step_tol, self.grad_tol = step_tol, grad_tol
self.pre_solvers = tuple(pre_solvers)
# validate `id`s so that DataFrame indexing does not share duplicated keys.
if len(set([self.id] + [p.id for p in self.pre_solvers])) < 1 + len(self.pre_solvers):
raise ValueError(
"Solver `id`s must be unique when supplying `pre_solvers`, "
f"got ids: {[self.id] + [p.id for p in self.pre_solvers]}",
)
# validate `s` and `instruments` with a naive length comparison
if len(s) != len(instruments):
raise ValueError(
f"`s: {len(s)}` (rates) must be same length as `instruments: {len(instruments)}`."
)
self.s = np.asarray(s)
# validate `instrument_labels` if given is same length as `m`
if not isinstance(instrument_labels, NoInput):
if self.m != len(instrument_labels):
raise ValueError(
f"`instrument_labels: {len(instrument_labels)}` must be same length as "
f"`instruments: {len(instruments)}`."
)
else:
self.instrument_labels = tuple(instrument_labels)
else:
self.instrument_labels = tuple(f"{self.id}{i}" for i in range(self.m))
if isinstance(weights, NoInput):
self.weights: NDArray[Nf64] = np.ones(len(instruments), dtype=np.float64)
else:
if len(weights) != self.m:
raise ValueError(
f"`weights: {len(weights)}` must be same length as "
f"`instruments: {len(instruments)}`."
)
self.weights = np.asarray(weights)
self.W = np.diag(self.weights)
# `surfaces` are treated identically to `curves`. Introduced in PR
self.curves: dict[str, SupportsSolverMutability] = {
curve.id: curve
for curve in list(curves) + list(surfaces)
if type(curve) not in NO_PARAMETER_CURVES
}
self.variables = ()
for curve in self.curves.values():
curve._set_ad_order(1) # solver uses gradients in optimisation
self.variables += curve._get_node_vars()
self.n = len(self.variables)
# aggregate and organise variables and labels including pre_solvers
self.pre_curves: dict[str, Any] = {}
self.pre_variables: tuple[str, ...] = ()
self.pre_instrument_labels: tuple[tuple[str, str], ...] = ()
self.pre_instruments: tuple[tuple[SupportsRate, dict[str, Any]], ...] = ()
self.pre_rate_scalars = []
self.pre_m, self.pre_n = self.m, self.n
curve_collection: list[Any] = []
for pre_solver in self.pre_solvers:
self.pre_variables += pre_solver.pre_variables
self.pre_instrument_labels += pre_solver.pre_instrument_labels
self.pre_instruments += pre_solver.pre_instruments
self.pre_rate_scalars.extend(pre_solver.pre_rate_scalars)
self.pre_m += pre_solver.pre_m
self.pre_n += pre_solver.pre_n
self.pre_curves.update(pre_solver.pre_curves)
curve_collection.extend(pre_solver.pre_curves.values())
self.pre_curves.update(self.curves)
self.pre_curves.update(
{
curve.id: curve
for curve in curves
if type(curve) in NO_PARAMETER_CURVES
# no parameter curves added to the collection without variables
},
)
curve_collection.extend(curves)
for curve1, curve2 in combinations(curve_collection, 2):
if curve1.id == curve2.id:
raise ValueError(
"`curves` must each have their own unique `id`. If using "
"pre-solvers as part of a dependency chain a curve can only be "
"specified as a variable in one solver.",
)
self.pre_variables += self.variables
self.pre_instrument_labels += tuple((self.id, lbl) for lbl in self.instrument_labels)
# Final elements
self._ad = 1
self.fx: FXForwards_ = fx
if isinstance(self.fx, FXRates | FXForwards):
self.fx._set_ad_order(1)
elif not isinstance(self.fx, NoInput):
raise ValueError("`fx` argument to Solver must be either FXForwards or NoInput(0).")
self.instruments: tuple[tuple[SupportsRate, dict[str, Any]], ...] = tuple(
self._parse_instrument(inst) for inst in instruments
)
self.pre_instruments += self.instruments
self.rate_scalars = tuple(inst[0].rate_scalar for inst in self.instruments)
self.pre_rate_scalars += self.rate_scalars
# TODO need to check curves associated with fx object and set order.
# self._reset_properties_() performed in iterate
self._result = {
"status": "INITIALISED",
"state": 0,
"g": None,
"iterations": 0,
"time": None,
}
self.iterate()
def __repr__(self) -> str:
return f""
# State management and mutation
def _set_new_state(self) -> None:
self._states = self._associated_states()
self._state = hash(sum(v for v in self._states.values()))
@property
def _do_not_validate(self) -> bool:
return self._do_not_validate_
@_do_not_validate.setter
def _do_not_validate(self, value: bool) -> None:
self._do_not_validate_ = value
for solver in self.pre_solvers:
solver._do_not_validate = value
def _validate_state(self) -> None:
if self._do_not_validate:
return None # do not perform state validation during iterations
if self._state != self._get_composited_state():
# then something has been mutated
states_ = self._associated_states()
fx_state_ = states_.pop("fx")
for k, v in states_.items():
if self._states[k] != v:
raise ValueError(
"The `curves` associated with `solver` have been updated without the "
"`solver` performing additional iterations.\n"
f"In particular the object with id: '{k}' contained in solver with id: "
f"'{self.id}' is detected to have been mutated.\n"
"Calculations are prevented in this "
"state because they will likely be erroneous or a consequence of a bad "
"design pattern."
)
if not isinstance(self.fx, NoInput) and fx_state_ != self._states["fx"]:
warnings.warn(
f"The `fx` object associated with `solver` having id '{self.id}' "
"has been updated without "
"the `solver` performing additional iterations.\nCalculations can still be "
"performed but, dependent upon those updates, errors may be negligible "
"or significant.",
UserWarning,
)
@staticmethod
def _validate_and_get_state(obj: Any) -> int:
obj._validate_state()
return obj._state # type: ignore[no-any-return]
def _associated_states(self) -> dict[str, int]:
states_: dict[str, int] = {
k: self._validate_and_get_state(v) for k, v in self.pre_curves.items()
}
if not isinstance(self.fx, NoInput):
states_["fx"] = self._validate_and_get_state(self.fx)
else:
states_["fx"] = 0
return states_
def _get_composited_state(self) -> int:
_: int = hash(sum(v for v in self._associated_states().values()))
return _
def _parse_instrument(
self, value: SupportsRate | tuple[SupportsRate, dict[str, Any]]
) -> tuple[SupportsRate, dict[str, Any]]:
"""
Parses different input formats for an instrument given to the ``Solver``.
Parameters
----------
value : Instrument or 3-tuple.
If a 3-tuple then it must have the following items:
- The ``Instrument``.
- Positional args supplied to the ``rate`` method as a tuple, or None.
- Keyword args supplied to the ``rate`` method as a dict, or None.
Returns
-------
tuple :
A 3-tuple attaching the self solver and self fx object as pricing params.
Examples
--------
``value=Instrument()``
``value=(Instrument(), (curve, None, fx), {"other_arg": 10.0})``
``value=(Instrument(), None, {"other_arg": 10.0})``
``value=(Instrument(), (curve, None, fx), None)``
``value=(Instrument(), (curve,), {})``
"""
if not isinstance(value, tuple):
# is a direct Instrument so convert to tuple with pricing params
_: tuple[SupportsRate, dict[str, Any]] = (
value,
{"solver": self, "fx": self.fx},
)
return _
else:
# object is tuple
if len(value) != 2:
raise ValueError(
"`Instrument` supplied to `Solver` as tuple must be a 2-tuple of "
"signature: (Instrument, keyword args[dict]).",
)
ret0 = value[0]
ret1: dict[str, Any] = {"solver": self, "fx": self.fx}
if not (value[1] is None or value[1] == {}):
ret1 = {**ret1, **value[1]}
return ret0, ret1
def _reset_properties_(self, dual2_only: bool = False) -> None:
"""
Set all calculated attributes to `None` requiring re-evaluation.
Parameters
----------
dual2_only : bool
Choose whether to reset properties only for the calculation of the
properties whose derivation **requires** Dual2 datatypes. Since the
``Solver`` iterates ``Curve`` s by default it necessarily uses Dual
datatypes and first order derivatives. For the calculation of:
- ``J2`` and ``J2_pre``:
:math:`\frac{\\partial^2 r_i}{\\partial v_j \\partial v_k}`
- ``grad_s_s_vT`` and ``grad_s_s_vT_pre``:
:math:`\frac{\\partial^2 v_i}{\\partial s_j \\partial s_k}`
Returns
-------
None
"""
if not dual2_only:
self._v: NDArray[Nobject] | None = None # depends on self.curves
self._r: NDArray[Nobject] | None = (
None # depends on self.pre_curves and self.instruments
)
self._r_pre: NDArray[Nobject] | None = None # depends on pre_solvers and self.r
self._x: NDArray[Nobject] | None = None # depends on self.r, self.s
self._g: Dual | Dual2 | None = None # depends on self.x, self.weights
self._grad_v_g: NDArray[Nf64] | None = None # depends on self.g,
self._J: NDArray[Nf64] | None = None # depends on self.r
self._grad_s_vT: NDArray[Nf64] | None = (
None # final_iter_dual: depends on self.s and iteration
)
# fixed_point_iter: depends on self.f
# final_iter_anal: depends on self.J
self._grad_s_vT_pre: NDArray[Nf64] | None = (
None # depends on self.grad_s_vT and pre_solvers.
)
self._J2 = None # defines its own self.r under dual2
self._J2_pre = None # depends on self.r and pre_solvers
self._grad_s_s_vT = None # final_iter: depends on self.J2 and self.grad_s_vT
# finite_diff: TODO update comment
self._grad_s_s_vT_pre = None # final_iter: depends on pre versions of above
# finite_diff: TODO update comment
# self._grad_v_v_f = None
# self._Jkm = None # keep manifold originally used for exploring J2 calc method
# Pricing object ID mapping
@_validate_states
def _get_pre_curve(self, obj: str) -> Curve:
ret: Curve | FXVols | IRVols = self.pre_curves[obj]
if isinstance(ret, _BaseCurve):
return ret
else:
raise ValueError(
f"A _BaseCurve object was sought with id:'{obj}' from Solver but another "
f"type object was returned:'{type(ret)}'."
)
@_validate_states
def _get_pre_fxvol(self, obj: str) -> FXVols:
_: Curve | FXVols | IRVols = self.pre_curves[obj]
if isinstance(_, FXVols):
return _
else:
raise ValueError(
f"A type of `FXVol` object was sought with id:'{obj}' from Solver but another "
f"type object was returned:'{type(_)}'."
)
@_validate_states
def _get_pre_irvol(self, obj: str) -> _BaseIRSmile | _BaseIRCube[Any]:
_: Curve | FXVols | _BaseIRSmile | _BaseIRCube[Any] = self.pre_curves[obj]
if isinstance(_, _BaseIRSmile | _BaseIRCube):
return _
else:
raise ValueError(
f"A type of `IRVol` object was sought with id:'{obj}' from Solver but another "
f"type object was returned:'{type(_)}'."
)
@_validate_states
def _get_fx(self) -> FXForwards_:
return self.fx
# Attributes
@property
def result(self) -> dict[str, Any]:
"""
Show statistics relevant to the last *Solver* iteration.
Valid *Solver* states are:
- 1: Success within tolerance of objective function close to zero.
- 2: Success within tolerance of successive iteration function values.
- 4: Success within tolerance of norm of difference of successive iteration parameter values.
- 5: Success within tolerance of function gradient norm close to zero.
- -1: Failed to satisfy tolerance after maximal allowed iteration.
""" # noqa: E501
return self._result
@property
def v(self) -> NDArray[Nobject]:
"""
1d array of curve node variables for each ordered curve, size (n,).
Depends on ``self.curves``.
"""
if self._v is None:
self._v = np.block([_._get_node_vector() for _ in self.curves.values()])
return self._v
@property
def r(self) -> NDArray[Nobject]: # type: ignore[override]
"""
1d array of mid-market rates of each calibrating instrument with given curves,
size (m,).
Depends on ``self.pre_curves`` and ``self.instruments``.
"""
if self._r is None:
self._r = np.array([_[0].rate(**_[1]) for _ in self.instruments])
# solver and fx are passed by default via parse_args to get string curves
return self._r
@property
def r_pre(self) -> NDArray[Nobject]: # type: ignore[override]
if len(self.pre_solvers) == 0:
return self.r
if self._r_pre is None:
r_pre = np.empty(self.pre_m, dtype="object")
i = 0
for pre_solver in self.pre_solvers:
m = pre_solver.pre_m
r_pre[i : i + m] = pre_solver.r_pre
i = i + m
if self.m > 0:
# create bottom right block if solver contains its own instruments and self
# is not just a container of `pre_solvers`
r_pre[-self.m :] = self.r
self._r_pre = r_pre
return self._r_pre
@property
def x(self) -> NDArray[Nobject]:
"""
1d array of error in each calibrating instrument rate, of size (m,).
.. math::
\\mathbf{x} = \\mathbf{r-S}
Depends on ``self.r`` and ``self.s``.
"""
if self._x is None:
self._x = self.r - self.s
return self._x
@property
def error(self) -> Series[float]:
"""
Return the error in calibrating instruments, including ``pre_solvers``, scaled
to the risk representation factor.
Returns
-------
Series
"""
pre_s: Series[float] = Series()
for pre_solver in self.pre_solvers:
if not pre_s.empty:
pre_s = concat([ser for ser in [pre_solver.error, pre_s] if not ser.empty])
else:
pre_s = pre_solver.error
if self.m > 0:
_: Series[float] = Series(
self.x.astype(float) * 100 / self.rate_scalars,
index=MultiIndex.from_tuples([(self.id, inst) for inst in self.instrument_labels]),
)
if not pre_s.empty:
s: Series[float] = concat([pre_s, _])
else:
s = _
else:
s = pre_s
return s
@property
def g(self) -> Dual | Dual2: # type: ignore[override]
"""
Objective function scalar value of the solver;
.. math::
g = \\mathbf{(r-S)^{T}W(r-S)}
Depends on ``self.x`` and ``self.weights``.
"""
if self._g is None:
self._g = np.dot(self.x, self.weights * self.x)
return self._g
# def Jkm(self, extra_vars=[]):
# """
# 2d Jacobian array of rates with respect to discount factors, of size (n, m); :math:`[J]_{i,j} = \\frac{\\partial r_j}{\\partial v_i}`. # noqa: E501
# """
# _Jkm = np.array([rate.gradient(self.variables + extra_vars, keep_manifold=True) for rate in self.r]).T # noqa: E501
# return _Jkm
def _update_step_(self, algorithm: str) -> NDArray[Nobject]:
if algorithm == "gradient_descent":
y = np.matmul(self.J.transpose(), self.grad_v_g[:, np.newaxis])[:, 0]
alpha = np.dot(y, self.weights * self.x) / np.dot(y, self.weights * y)
v_1: NDArray[Nobject] = self.v - self.grad_v_g * alpha.real
elif algorithm == "gauss_newton":
if self.J.shape[0] == self.J.shape[1]: # square system
A = self.J.transpose()
b = -np.array([x.real for x in self.x])[:, np.newaxis]
else:
A = np.matmul(self.J, np.matmul(self.W, self.J.transpose()))
b = -0.5 * self.grad_v_g[:, np.newaxis]
delta: NDArray[Nobject] = np.linalg.solve(A, b)[:, 0]
v_1 = self.v + delta
elif algorithm == "levenberg_marquardt":
if self.g_list[-2] < self.g.real:
# reject previous iteration and rescale lambda:
self.lambd *= self.ini_lambda[2]
# self._update_curves_with_parameters(self.v_prev)
else:
self.lambd *= self.ini_lambda[1]
# self.lambd *= self.ini_lambda[2] if self.g_prev < self.g.real else self.ini_lambda[1]
A = np.matmul(self.J, np.matmul(self.W, self.J.transpose()))
A += self.lambd * np.eye(self.n)
b = -0.5 * self.grad_v_g[:, np.newaxis]
delta = np.linalg.solve(A, b)[:, 0]
v_1 = self.v + delta
# elif algorithm == "gradient_descent_final":
# _ = np.matmul(self.Jkm, np.matmul(self.W, self.x[:, np.newaxis]))
# y = 2 * np.matmul(self.Jkm.transpose(), _)[:, 0]
# alpha = np.dot(y, self.weights * self.x) / np.dot(y, self.weights * y)
# v_1 = self.v - 2 * alpha * _[:, 0]
elif algorithm == "gauss_newton_final":
if self.J.shape[0] == self.J.shape[1]: # square system
A = self.J.transpose()
b = -self.x[:, np.newaxis]
else:
A = np.matmul(self.J, np.matmul(self.W, self.J.transpose()))
b = -np.matmul(np.matmul(self.J, self.W), self.x[:, np.newaxis])
delta = dual_solve(A, b)[:, 0] # type: ignore[assignment]
v_1 = self.v + delta
else:
raise NotImplementedError(f"`algorithm`: {algorithm} (spelled correctly?)")
return v_1
@_new_state_post
def _update_fx(self) -> None:
if not isinstance(self.fx, NoInput):
self.fx.update() # note: with no variables this only updates states
for solver in self.pre_solvers:
solver._update_fx()
@_no_interior_validation
def iterate(self) -> None:
r"""
Solve the DF node values and update all the ``curves``.
This method uses a gradient based optimisation routine, to solve for all
the curve variables, :math:`\mathbf{v}`, as follows,
.. math::
\mathbf{v} = \underset{\mathbf{v}}{\mathrm{argmin}} \;\; f(\mathbf{v}) = \underset{\mathbf{v}}{\mathrm{argmin}} \;\; (\mathbf{r(v)} - \mathbf{S})\mathbf{W}(\mathbf{r(v)} - \mathbf{S})^\mathbf{T}
where :math:`\mathbf{r}` are the mid-market rates of the calibrating
instruments, :math:`\mathbf{S}` are the observed and target rates, and
:math:`\mathbf{W}` is the diagonal array of weights.
Returns
-------
None
""" # noqa: E501
# Initialise data and clear and caches
self.g_list: list[float] = [1e10]
self.lambd: float = self.ini_lambda[0]
self._reset_properties_()
# self._update_fx()
t0 = time()
# Begin iteration
for i in range(self.max_iter):
self.g_list.append(self.g.real)
if self.g.real < self.g_list[i] and (self.g_list[i] - self.g.real) < self.conv_tol:
# Converge tolerance: |g(x_i+1) - g(x_i)| < conv_tol AND a better iterate.
# Condition enforces a better iterate to avoid the case where a null update
# results in the same solution and this is erroneously stopped due to this criteria.
return self._solver_result(1, i, time() - t0)
elif self.g.real < self.func_tol:
# Function tolerance: 0 <= g(x_i+1) < func_tol.
return self._solver_result(2, i, time() - t0)
elif np.sqrt(np.dot(self.grad_v_g, self.grad_v_g)) < self.grad_tol:
# Gradient tolerance: |d_v_g(x_i+1)| < grad_tol.
return self._solver_result(5, i, time() - t0)
if i != 0:
eps = v_1.astype(float, copy=True) - v_0 # type: ignore[has-type] # noqa: F821
if np.sqrt(np.dot(eps, eps)) < self.step_tol:
# Step tolerance: |x_i+1 - x_i| < step_tol.
return self._solver_result(4, i, time() - t0)
v_0 = self.v.astype(float, copy=True) # noqa: F841
v_1 = self._update_step_(self.algorithm)
# self.v_prev = v_0
self._update_curves_with_parameters(v_1)
if not isinstance(self.callback, NoInput):
self.callback(self, i, v_1)
return self._solver_result(-1, self.max_iter, time() - t0)
def _solver_result(self, state: int, i: int, time: float) -> None:
self._result = _solver_result(state, i, self.g.real, time, True, self.algorithm)
self._set_new_state()
@_new_state_post
def _update_curves_with_parameters(self, v_new: NDArray[Nobject]) -> None:
"""Populate the variable curves with the new values"""
var_counter = 0
for curve in self.curves.values():
# this was amended in PR126 as performance improvement to keep consistent `vars`
# and was restructured in PR## to decouple methods to accomodate vol surfaces
n_vars = curve._n - curve._ini_solve
curve._set_node_vector(v_new[var_counter : var_counter + n_vars], self._ad)
var_counter += n_vars
self._update_fx()
self._reset_properties_()
def _set_ad_order(self, order: int) -> None:
"""Defines the node DF in terms of float, Dual or Dual2 for AD order calcs."""
for pre_solver in self.pre_solvers:
pre_solver._set_ad_order(order=order)
self._ad = order
for _, curve in self.pre_curves.items():
curve._set_ad_order(order)
if not isinstance(self.fx, NoInput):
self.fx._set_ad_order(order)
self._reset_properties_()
@_validate_states
@_no_interior_validation
def delta(
self, npv: dict[str, Dual], base: str_ = NoInput(0), fx: FX_ = NoInput(0)
) -> DataFrame:
"""
Calculate the delta risk sensitivity of an instrument's NPV to the
calibrating instruments of the :class:`~rateslib.solver.Solver`, and to
FX rates.
Parameters
----------
npv : dict,
The NPV (Dual) of the instrument or portfolio of instruments to risk.
Must be indexed by 3-digit currency
to discriminate between values expressed in different currencies.
base : str, optional
The currency (3-digit code) to report risk metrics in. If not given will
default to the local currency of the cashflows.
fx : FXRates, FXForwards, optional
The FX object to use to convert risk metrics. If needed but not given
will default to the ``fx`` object associated with the
:class:`~rateslib.solver.Solver`. It is not recommended to use this
argument with multi-currency instruments, see notes.
Returns
-------
DataFrame
Notes
-----
**Output Structure**
.. note::
*Instrument* values are scaled to 1bp (1/10000th of a unit) when they are
rate based. *FX* values are scaled to pips (1/10000th of an FX rate unit).
The output ``DataFrame`` has the following structure:
- A 3-level index by *'type'*, *'solver'*, and *'label'*;
- **type** is either *'instruments'* or *'fx'*, and fx exposures are only
calculated and displayed in some cases where genuine FX exposure arises.
- **solver** lists the different solver ``id`` s to identify between
different instruments in dependency chains from ``pre_solvers``.
- **label** lists the given instrument names in each solver using the
``instrument_labels``.
- A 2-level column header index by *'local_ccy'* and *'display_ccy'*;
- **local_ccy** displays the currency for which cashflows are payable, and
therefore the local currency risk sensitivity amount.
- **display_ccy** displays the currency which the local currency risk
sensitivity has been converted to via an FX transformation.
Converting a delta from a local currency to another ``base`` currency also
introduces FX risk to the NPV of the instrument, which is included in the
output.
**Best Practice**
The ``fx`` option is provided to allow tactical and fast conversion of
delta risks to ``Instruments``. When constructing and pricing multi-currency
instruments it is likely that the :class:`~rateslib.solver.Solver` used is
associated with an :class:`~rateslib.fx.FXForwards` object to consistently
produce FX forward rates within an aribitrage free framework. In that case
it is more consistent to re-use those FX associations. If such an
association exists and a direct ``fx`` object is supplied a warning may be
emitted if they are not the same object.
"""
# self._do_not_validate = True # state is validated prior to the call
base, fx = self._get_base_and_fx(base, fx)
if isinstance(fx, FXRates | FXForwards):
fx_vars: tuple[str, ...] = fx.variables
else:
fx_vars = tuple()
inst_scalar = np.array(self.pre_rate_scalars) / 100 # instruments scalar
fx_scalar = 0.0001
container = {}
for ccy in npv:
container[("instruments", ccy, ccy)] = self.grad_s_Ploc(npv[ccy]) * inst_scalar
container[("fx", ccy, ccy)] = self.grad_f_Ploc(npv[ccy], fx_vars) * fx_scalar
if not isinstance(base, NoInput) and base != ccy:
# is validated by `_get_base_and _fx`
assert isinstance(fx, FXForwards | FXRates) # noqa: S101
# extend the derivatives
f: Dual | Dual2 = fx.rate(f"{ccy}{base}") # type: ignore[assignment]
container[("instruments", ccy, base)] = (
self.grad_s_Pbase(
npv[ccy],
container[("instruments", ccy, ccy)] / inst_scalar,
f,
)
* inst_scalar
)
container[("fx", ccy, base)] = (
self.grad_f_Pbase(npv[ccy], container[("fx", ccy, ccy)] / fx_scalar, f, fx_vars)
* fx_scalar
)
# construct the DataFrame from container with hierarchical indexes
inst_idx = MultiIndex.from_tuples(
[("instruments",) + label for label in self.pre_instrument_labels],
names=["type", "solver", "label"],
)
fx_idx = MultiIndex.from_tuples(
[("fx", "fx", f[3:]) for f in fx_vars],
names=["type", "solver", "label"],
)
indexes = {"instruments": inst_idx, "fx": fx_idx}
r_idx = inst_idx.append(fx_idx)
c_idx = MultiIndex.from_tuples([], names=["local_ccy", "display_ccy"])
df = DataFrame(None, index=r_idx, columns=c_idx)
for key, array in container.items():
df.loc[indexes[key[0]], (key[1], key[2])] = array
if not isinstance(base, NoInput):
df.loc[r_idx, ("all", base)] = df.loc[r_idx, (slice(None), base)].sum(axis=1)
sorted_cols = df.columns.sort_values()
ret: DataFrame = df.loc[:, sorted_cols].astype("float64")
# self._do_not_validate = False
return ret
def _get_base_and_fx(self, base: str_, fx: FX_) -> tuple[str_, FX_]:
# method is used by delta, gamma, and exo_delta. prohibit fx as scalar because it cannot
# convert from arbitrary currencies.
if not isinstance(fx, NoInput | FXRates | FXForwards):
raise ValueError(
"`fx` used in sensitivity calculations cannot be a scalar. An FXRates or "
"FXForwards object is required, or the input left as NoInput(0), in which case "
"the `fx` object associated with a Solver is used in place."
)
if not isinstance(base, NoInput):
base = base.lower()
# then a valid fx object that can convert is required.
if not isinstance(fx, FXRates | FXForwards) and isinstance(self.fx, NoInput):
raise ValueError(
f"`base` is given as '{base}', but `fx` is not available.\n"
"Either provide an FXForwards object directly as `fx` or ensure that Solver.fx "
"is a valid object.\n"
"Alternatively, omit the `base` argument altogether and get results displayed "
"in local currency without base currency conversion."
)
if isinstance(fx, NoInput):
fx = self.fx
elif not isinstance(self.fx, NoInput) and id(fx) != id(self.fx):
warnings.warn(
"Solver contains an `fx` object but an `fx` argument has been "
"supplied as object which is not the same. This can lead to risk sensitivity "
"inconsistencies, mathematically.",
UserWarning,
)
return base, fx
@_validate_states
@_no_interior_validation
def gamma(
self, npv: dict[str, Dual2], base: str_ = NoInput(0), fx: FX_ = NoInput(0)
) -> DataFrame:
"""
Calculate the cross-gamma risk sensitivity of an instrument's NPV to the
calibrating instruments of the :class:`~rateslib.solver.Solver`.
Parameters
----------
npv : Dual2,
The NPV of the instrument or composition of instruments to risk.
base : str, optional
The currency (3-digit code) to report risk metrics in. If not given will
default to the local currency of the cashflows.
fx : FXRates, FXForwards, optional
The FX object to use to convert risk metrics. If needed but not given
will default to the ``fx`` object associated with the
:class:`~rateslib.solver.Solver`. It is not recommended to use this
argument with multi-currency instruments, see
:meth:`Solver.delta `.
Returns
-------
DataFrame
Notes
-----
.. note::
*Instrument* values are scaled to 1bp (1/10000th of a unit) when they are
rate based.
*FX* values are scaled to pips (1/10000th of an FX unit).
The output ``DataFrame`` has the following structure:
- A 5-level index by *'local_ccy'*, *'display_ccy'*, *'type'*, *'solver'*,
and *'label'*;
- **local_ccy** displays the currency for which cashflows are payable, and
therefore the local currency risk sensitivity amount.
- **display_ccy** displays the currency which the local currency risk
sensitivity has been converted to via an FX transformation.
- **type** is either *'instruments'* or *'fx'*, and fx exposures are only
calculated and displayed in some cases where genuine FX exposure arises.
- **solver** lists the different solver ``id`` s to identify between
different instruments in dependency chains from ``pre_solvers``.
- **label** lists the given instrument names in each solver using the
``instrument_labels``.
- A 3-level column header index using the last three levels of the above;
Converting a gamma/delta from a local currency to another ``base`` currency also
introduces FX risk to the NPV of the instrument, which is included in the
output.
Examples
--------
This example replicates the analytical calculations demonstrated in
*Pricing and Trading Interest Rate Derivatives (2022)*, derived from
first principles.
The results are stated in the cross-gamma grid in figure 22.1.
.. ipython:: python
:suppress:
from rateslib import Solver, Curve, SBS, IRS, dt
.. ipython:: python
curve_r = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.99,
dt(2024, 1, 1): 0.98,
dt(2025, 1, 1): 0.97,
dt(2026, 1, 1): 0.96,
dt(2027, 1, 1): 0.95,
},
id="r"
)
curve_z = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.99,
dt(2024, 1, 1): 0.98,
dt(2025, 1, 1): 0.97,
dt(2026, 1, 1): 0.96,
dt(2027, 1, 1): 0.95,
},
id="z"
)
curve_s = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.99,
dt(2024, 1, 1): 0.98,
dt(2025, 1, 1): 0.97,
dt(2026, 1, 1): 0.96,
dt(2027, 1, 1): 0.95,
},
id="s"
)
args = dict(termination="1Y", frequency="A", fixing_method="ibor(0)", leg2_fixing_method="ibor(0)")
instruments = [
SBS(dt(2022, 1, 1), curves=["r", "s", "s", "s"], **args),
SBS(dt(2023, 1, 1), curves=["r", "s", "s", "s"], **args),
SBS(dt(2024, 1, 1), curves=["r", "s", "s", "s"], **args),
SBS(dt(2025, 1, 1), curves=["r", "s", "s", "s"], **args),
SBS(dt(2026, 1, 1), curves=["r", "s", "s", "s"], **args),
SBS(dt(2022, 1, 1), curves=["r", "s", "z", "s"], **args),
SBS(dt(2023, 1, 1), curves=["r", "s", "z", "s"], **args),
SBS(dt(2024, 1, 1), curves=["r", "s", "z", "s"], **args),
SBS(dt(2025, 1, 1), curves=["r", "s", "z", "s"], **args),
SBS(dt(2026, 1, 1), curves=["r", "s", "z", "s"], **args),
IRS(dt(2022, 1, 1), "1Y", "A", curves=["r", "s"], leg2_fixing_method="ibor(0)"),
IRS(dt(2023, 1, 1), "1Y", "A", curves=["r", "s"], leg2_fixing_method="ibor(0)"),
IRS(dt(2024, 1, 1), "1Y", "A", curves=["r", "s"], leg2_fixing_method="ibor(0)"),
IRS(dt(2025, 1, 1), "1Y", "A", curves=["r", "s"], leg2_fixing_method="ibor(0)"),
IRS(dt(2026, 1, 1), "1Y", "A", curves=["r", "s"], leg2_fixing_method="ibor(0)"),
]
solver = Solver(
curves=[curve_r, curve_s, curve_z],
instruments=instruments,
s=[0.]*5 + [0.]*5 + [1.5]*5,
id="sonia",
instrument_labels=[
"s1", "s2", "s3", "s4", "s5",
"z1", "z2", "z3", "z4", "z5",
"r1", "r2", "r3", "r4", "r5",
],
)
irs = IRS(dt(2022, 1, 1), "5Y", "A", notional=-8.3e8, curves=["z", "s"], leg2_fixing_method="ibor(0)", fixed_rate=25.0)
irs.delta(solver=solver)
irs.gamma(solver=solver)
""" # noqa: E501
if self._ad != 2:
raise ValueError("`Solver` must be in ad order 2 to use `gamma` method.")
# new
base, fx = self._get_base_and_fx(base, fx)
if isinstance(fx, FXRates | FXForwards):
fx_vars: tuple[str, ...] = fx.variables
else:
fx_vars = tuple()
inst_scalar = np.array(self.pre_rate_scalars) / 100 # instruments scalar
fx_scalar = np.ones(len(fx_vars)) * 0.0001
container: dict[tuple[str, str], dict[tuple[str, ...], Any]] = {}
for ccy in npv:
container[(ccy, ccy)] = {}
container[(ccy, ccy)]["instruments", "instruments"] = self.grad_s_sT_Ploc(
npv[ccy],
) * np.matmul(inst_scalar[:, None], inst_scalar[None, :])
container[(ccy, ccy)]["fx", "instruments"] = self.grad_f_sT_Ploc(
npv[ccy],
fx_vars,
) * np.matmul(fx_scalar[:, None], inst_scalar[None, :])
container[(ccy, ccy)]["instruments", "fx"] = container[(ccy, ccy)][
("fx", "instruments")
].T
container[(ccy, ccy)]["fx", "fx"] = self.grad_f_fT_Ploc(npv[ccy], fx_vars) * np.matmul(
fx_scalar[:, None],
fx_scalar[None, :],
)
if not isinstance(base, NoInput) and base != ccy:
# validated by `_get_base_and_fx`
assert isinstance(fx, FXRates | FXForwards) # noqa: S101
# extend the derivatives
f: Dual | Dual2 = fx.rate(f"{ccy}{base}") # type: ignore[assignment]
container[(ccy, base)] = {}
container[(ccy, base)]["instruments", "instruments"] = self.grad_s_sT_Pbase(
npv[ccy],
container[(ccy, ccy)]["instruments", "instruments"]
/ np.matmul(inst_scalar[:, None], inst_scalar[None, :]),
f,
) * np.matmul(inst_scalar[:, None], inst_scalar[None, :])
container[(ccy, base)]["fx", "instruments"] = self.grad_f_sT_Pbase(
npv[ccy],
container[(ccy, ccy)]["fx", "instruments"]
/ np.matmul(fx_scalar[:, None], inst_scalar[None, :]),
f,
fx_vars,
) * np.matmul(fx_scalar[:, None], inst_scalar[None, :])
container[(ccy, base)]["instruments", "fx"] = container[(ccy, base)][
("fx", "instruments")
].T
container[(ccy, base)]["fx", "fx"] = self.grad_f_fT_Pbase(
npv[ccy],
container[(ccy, ccy)]["fx", "fx"]
/ np.matmul(fx_scalar[:, None], fx_scalar[None, :]),
f,
fx_vars,
) * np.matmul(fx_scalar[:, None], fx_scalar[None, :])
# construct the DataFrame from container with hierarchical indexes
currencies = list(npv.keys())
local_keys = [(ccy, ccy) for ccy in currencies]
base_keys = [] if base is NoInput.blank else [(ccy, base) for ccy in currencies]
all_keys = sorted(set(local_keys + base_keys))
inst_keys = [("instruments",) + label for label in self.pre_instrument_labels]
fx_keys = [("fx", "fx", f[3:]) for f in fx_vars]
idx_tuples = [c + _ for c in all_keys for _ in inst_keys + fx_keys]
ridx = MultiIndex.from_tuples(
list(idx_tuples),
names=["local_ccy", "display_ccy", "type", "solver", "label"],
)
if base is not NoInput.blank:
ridx = ridx.append(
MultiIndex.from_tuples(
[("all", base) + _ for _ in inst_keys + fx_keys],
names=["local_ccy", "display_ccy", "type", "solver", "label"],
),
)
cidx = MultiIndex.from_tuples(list(inst_keys + fx_keys), names=["type", "solver", "label"])
df = DataFrame(None, index=ridx, columns=cidx)
for key, d in container.items():
array = np.block(
[
[d[("instruments", "instruments")], d[("instruments", "fx")]],
[d[("fx", "instruments")], d[("fx", "fx")]],
],
)
locator = key + (slice(None), slice(None), slice(None))
with warnings.catch_warnings():
# TODO: pandas 3.0.0 can optionally turn off these PerformanceWarnings
warnings.simplefilter(action="ignore", category=PerformanceWarning)
df.loc[locator, :] = array
if not isinstance(base, NoInput):
# sum over all the base rows to aggregate
gdf = (
df.loc[(currencies, base, slice(None), slice(None), slice(None)), :]
.groupby(level=[2, 3, 4])
.sum()
)
gdf.index = MultiIndex.from_tuples([("all", base) + _ for _ in gdf.index])
df.loc[("all", base, slice(None), slice(None), slice(None))] = gdf
return df.astype("float64")
def _pnl_explain(
self,
npv: Dual | Dual2,
ds: Sequence[float],
dfx: Sequence[float] | None = None,
base: str_ = NoInput(0),
fx: FX_ = NoInput(0),
order: int = 1,
) -> DataFrame:
"""
Calculate PnL from market movements over delta and, optionally, gamma.
Parameters
----------
npv : Dual or Dual2,
The initial NPV of the instrument or composition of instruments to value.
ds : sequence of float
The projected market movements of calibrating instruments of the solver,
scaled to the appropriate value amount matching the delta representation.
dfx : sequence of float
The projected market movements of FX rates,
scaled to the appropriate value amount matching the delta representation.
base : str, optional
The currency (3-digit code) to report risk metrics in. If not given will
default to the local currency of the cashflows.
fx : FXRates, FXForwards, optional
The FX object to use to convert risk metrics. If needed but not given
will default to the ``fx`` object associated with the
:class:`~rateslib.solver.Solver`.
order : int in {1, 2}
Whether to return a first order delta PnL explain or a second order one
including gamma contribution.
Returns
-------
DataFrame
"""
raise NotImplementedError()
@_validate_states
@_no_interior_validation
def market_movements(self, solver: Solver) -> DataFrame:
"""
Determine market movements between the *Solver's* instrument rates and those rates priced
from a second *Solver*.
Parameters
----------
solver: Solver
The other *Solver* whose *Curves* are to be used for measuring the final instrument
rates of the existing *Solver's* instruments.
Returns
-------
DataFrame
Notes
-----
.. warning::
Market movement calculations are only possible between *Solvers* whose ``instruments``
are associated with *Curves* with string ID mappings (which is best practice and
demonstrated in :ref:`Mechanisms `). This allows two different
*Solvers* to contain their own *Curves* (which may or may not be equivalent models),
and for the instrument rates of one *Solver* to be evaluated by the *Curves* present
in another *Solver*.
"""
r_0 = self.r_pre
r_1 = np.array(
[
_[0].rate(**{**_[1], "solver": solver, "fx": solver.fx})
for _ in self.pre_instruments
],
)
return DataFrame(
(r_1 - r_0) * 100 / np.array(self.pre_rate_scalars),
index=self.pre_instrument_labels,
)
@_validate_states
@_no_interior_validation
def jacobian(self, solver: Solver) -> DataFrame:
"""
Calculate the Jacobian with respect to another *Solver's* instruments.
Parameters
----------
solver : Solver
The other ``Solver`` for which the Jacobian is to be determined.
Returns
-------
DataFrame
Notes
-----
This Jacobian converts risk sensitivities expressed in the underlying *Solver's*
instruments to the instruments in the other ``solver``.
.. warning::
A Jacobian transformation is only possible between *Solvers* whose ``instruments``
are associated with *Curves* with string ID mappings (which is best practice and
demonstrated in :ref:`Mechanisms `). This allows two different
*Solvers* to contain their own *Curves* (which may or may not be equivalent models),
and for the instrument rates of one *Solver* to be evaluated by the *Curves* present
in another *Solver*
Examples
--------
This example creates a Jacobian transformation between par tenor IRS and forward tenor
IRS. These models are completely consistent and lossless.
.. ipython:: python
par_curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
dt(2025, 1, 1): 1.0,
},
id="curve",
)
par_instruments = [
IRS(dt(2022, 1, 1), "1Y", "A", curves="curve"),
IRS(dt(2022, 1, 1), "2Y", "A", curves="curve"),
IRS(dt(2022, 1, 1), "3Y", "A", curves="curve"),
]
par_solver = Solver(
curves=[par_curve],
instruments=par_instruments,
s=[1.21, 1.635, 1.99],
id="par_solver",
instrument_labels=["1Y", "2Y", "3Y"],
)
fwd_curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
dt(2025, 1, 1): 1.0,
},
id="curve"
)
fwd_instruments = [
IRS(dt(2022, 1, 1), "1Y", "A", curves="curve"),
IRS(dt(2023, 1, 1), "1Y", "A", curves="curve"),
IRS(dt(2024, 1, 1), "1Y", "A", curves="curve"),
]
s_fwd = [float(_.rate(solver=par_solver)) for _ in fwd_instruments]
fwd_solver = Solver(
curves=[fwd_curve],
instruments=fwd_instruments,
s=s_fwd,
id="fwd_solver",
instrument_labels=["1Y", "1Y1Y", "2Y1Y"],
)
par_solver.jacobian(fwd_solver)
"""
# Get the instrument rates for self solver evaluated using the curves and links of other
r = np.array(
[
_[0].rate(**{**_[1], "solver": solver, "fx": solver.fx})
for _ in self.pre_instruments
],
)
# Get the gradient of these rates with respect to the variable in other
grad_v_rT = np.array([gradient(_, solver.pre_variables) for _ in r]).T
return DataFrame(
np.matmul(solver.grad_s_vT_pre, grad_v_rT),
columns=self.pre_instrument_labels,
index=solver.pre_instrument_labels,
)
@_validate_states
@_no_interior_validation
def exo_delta(
self,
npv: dict[str, Dual | Dual2],
vars: Sequence[str], # noqa: A002
vars_scalar: Sequence[float] | NoInput = NoInput(0),
vars_labels: Sequence[str] | NoInput = NoInput(0),
base: str_ = NoInput(0),
fx: FX_ = NoInput(0),
) -> DataFrame:
"""
Calculate risk sensitivity to user defined, exogenous variables in the
*Solver Instruments* and the ``npv``.
See :ref:`What are exogenous variables? ` in the cookbook.
Parameters
-----------
npv : dict,
The NPV (Dual) of the instrument or portfolio of instruments to risk.
Must be indexed by 3-digit currency
to discriminate between values expressed in different currencies.
vars : list[str]
The variable tags which to determine sensitivities for.
vars_scalar : list[float], optional
Scaling factors for each variable, for example converting rates to basis point etc.
Defaults to ones.
vars_labels : list[str], optional
Alternative names to relabel variables in DataFrames.
base : str, optional
The currency (3-digit code) to report risk metrics in. If not given will
default to the local currency of the cashflows.
fx : FXRates, FXForwards, optional
The FX object to use to convert risk metrics. If needed but not given
will default to the ``fx`` object associated with the
:class:`~rateslib.solver.Solver`. It is not recommended to use this
argument with multi-currency instruments, see notes.
Returns
-------
DataFrame
"""
base, fx = self._get_base_and_fx(base, fx)
if isinstance(vars_scalar, NoInput):
vars_scalar = [1.0] * len(vars)
if isinstance(vars_labels, NoInput):
vars_labels = vars
container = {}
for ccy in npv:
container[("exogenous", ccy, ccy)] = self.grad_f_Ploc(npv[ccy], vars) * vars_scalar
if not isinstance(base, NoInput) and base != ccy:
assert isinstance(fx, FXRates | FXForwards) # noqa S101
# extend the derivatives
f: Dual | Dual2 = fx.rate(f"{ccy}{base}") # type: ignore[assignment]
container[("exogenous", ccy, base)] = (
self.grad_f_Pbase(
npv[ccy],
container[("exogenous", ccy, ccy)] / vars_scalar,
f,
vars,
)
* vars_scalar
)
# construct the DataFrame from container with hierarchical indexes
exo_idx = MultiIndex.from_tuples(
[("exogenous",) + (self.id, label) for label in vars_labels],
names=["type", "solver", "label"],
)
indexes = {"exogenous": exo_idx}
r_idx = exo_idx
c_idx = MultiIndex.from_tuples([], names=["local_ccy", "display_ccy"])
df = DataFrame(None, index=r_idx, columns=c_idx)
for key, array in container.items():
df.loc[indexes[key[0]], (key[1], key[2])] = array
if not isinstance(base, NoInput):
df.loc[r_idx, ("all", base)] = df.loc[r_idx, (slice(None), base)].sum(axis=1)
sorted_cols = df.columns.sort_values()
_: DataFrame = df.loc[:, sorted_cols].astype("float64")
return _
@classmethod
def from_other(
cls,
*,
pricing_solver: Solver,
instruments: Sequence[SupportsRate],
curves: Sequence[Any] = (),
surfaces: Sequence[Any] = (),
pre_solvers: Sequence[Solver] = (),
fx: FXForwards_ = NoInput(0),
instrument_labels: Sequence[str] | NoInput = NoInput(0),
id: str_ = NoInput(0), # noqa: A002
**kwargs: Any,
) -> Solver:
"""
Create a :class:`~rateslib.solver.Solver` whose rates, ``s``, are automatically
generated from a ``pricing_solver``.
Parameters
----------
pricing_solver: Solver
The :class:`~rateslib.solver.Solver` containing pricing object mappings and an ``fx``
object that can determine all of the instruments rates, ``s``, for the provided
``instruments``.
**kwargs: Any
All other arguments expected by a :class:`~rateslib.solver.Solver`, except for ``s``,
which are generated from ``pricing_solver``.
Notes
-----
This method is designed for ease of implementation of a *'Pricing-Model-Risk-Model'*
framework.
Generating a :class:`~rateslib.solver.Solver` from another **only works** automatically
when ``instruments`` pricing objects have been mapped with the same string ids. For
example, suppose we desire to build a *'pricing curve'* with market instruments and
then, afterward, build a *'risk curve'* with different instruments.
First build the *'pricing curve'*:
.. ipython:: python
pricing_curve = Curve(
nodes={dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0, dt(2002, 1, 10): 1.0},
interpolation="spline",
id="sofr",
)
pricing_solver = Solver(
curves=[pricing_curve],
instruments=[
IRS(dt(2000, 1, 1), "1y", spec="usd_irs", curves=["sofr"]),
IRS(dt(2000, 1, 1), "2y", spec="usd_irs", curves=["sofr"]),
],
s=[4.10, 4.25],
instrument_labels=["1y", "2y"],
id="price_sv",
)
Now we build the *'risk curve'* with instruments whose prices are automatically generated
from the *'pricing curve'*. This provides a more granular, localised set of risks.
Note that the ``id`` of both the *'pricing curve'* and the *'risk curve'* are the **same**
so that these can be dynamically mapped to the same instruments by different *Solvers*.
.. ipython:: python
risk_curve = Curve(
nodes={
dt(2000, 1, 1): 1.0,
dt(2000, 4, 1): 1.0,
dt(2000, 7, 1): 1.0,
dt(2000, 10, 1): 1.0,
dt(2001, 1, 1): 1.0,
dt(2001, 4, 1): 1.0,
dt(2001, 7, 1): 1.0,
dt(2001, 10, 1): 1.0,
dt(2002, 1, 10): 1.0,
},
interpolation="log_linear",
id="sofr",
)
risk_solver = Solver.from_other(
pricing_solver=pricing_solver,
curves=[risk_curve],
instruments=[
IRS(dt(2000, 1, 1), "3m", spec="usd_irs", curves=["sofr"]),
IRS(dt(2000, 4, 1), "3m", spec="usd_irs", curves=["sofr"]),
IRS(dt(2000, 7, 1), "3m", spec="usd_irs", curves=["sofr"]),
IRS(dt(2000, 10, 1), "3m", spec="usd_irs", curves=["sofr"]),
IRS(dt(2001, 1, 1), "3m", spec="usd_irs", curves=["sofr"]),
IRS(dt(2001, 4, 1), "3m", spec="usd_irs", curves=["sofr"]),
IRS(dt(2001, 7, 1), "3m", spec="usd_irs", curves=["sofr"]),
IRS(dt(2001, 10, 1), "3m", spec="usd_irs", curves=["sofr"]),
],
instrument_labels=["0m3m", "3m3m", "6m3m", "9m3m", "1y3m", "15m3m", "18m3m", "21m3m"],
id="risk_sv",
)
We can then extract delta or cross-gamma risks in different representations using either
of our *Solver* objects.
.. ipython:: python
irs = IRS(dt(2000, 3, 24), "14m", fixed_rate=3.95, spec="usd_irs", curves=["sofr"])
irs.delta(solver=pricing_solver)
.. ipython:: python
irs.delta(solver=risk_solver)
""" # noqa: E501
return Solver(
pre_solvers=pre_solvers,
curves=curves,
surfaces=surfaces,
instruments=instruments,
s=[_dual_float(_.rate(solver=pricing_solver)) for _ in instruments],
fx=fx,
instrument_labels=instrument_labels,
id=id,
**kwargs,
)
__all__ = ["Gradients", "Solver"]
================================================
FILE: python/rateslib/splines/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from rateslib.rs import PPSplineDual, PPSplineDual2, PPSplineF64, bspldnev_single, bsplev_single
from rateslib.splines.evaluate import evaluate
PPSplineF64.__doc__ = """
Piecewise polynomial spline composed of float-64 values on the x-axis and
float-64 values on the y-axis.
Parameters
----------
k: int
The order of the spline.
t: sequence of float
The knot sequence of the spline.
c: sequence of float, optional
The coefficients of the spline.
See Also
--------
.. seealso::
:class:`~rateslib.splines.PPSplineDual`: Spline where the y-axis contains :class:`~rateslib.dual.Dual` data types.
:class:`~rateslib.splines.PPSplineDual2`: Spline where the y-axis contains :class:`~rateslib.dual.Dual2` data types.
""" # noqa: E501
PPSplineDual.__doc__ = """
Piecewise polynomial spline composed of float-64 values on the x-axis and
:class:`~rateslib.dual.Dual` values on the y-axis.
Parameters
----------
k: int
The order of the spline.
t: sequence of float
The knot sequence of the spline.
c: sequence of Dual, optional
The coefficients of the spline.
See Also
--------
.. seealso::
:class:`~rateslib.splines.PPSplineF64`: Spline where the y-axis contains float-64 data types.
:class:`~rateslib.splines.PPSplineDual2`: Spline where the y-axis contains :class:`~rateslib.dual.Dual2` data types.
""" # noqa: E501
PPSplineDual2.__doc__ = """
Piecewise polynomial spline composed of float-64 values on the x-axis and
:class:`~rateslib.dual.Dual2` values on the y-axis.
Parameters
----------
k: int
The order of the spline.
t: sequence of float
The knot sequence of the spline.
c: sequence of Dual2, optional
The coefficients of the spline.
.. seealso::
:class:`~rateslib.splines.PPSplineF64`: Spline where the y-axis contains float-64 data types.
:class:`~rateslib.splines.PPSplineDual`: Spline where the y-axis contains :class:`~rateslib.dual.Dual` data types.
""" # noqa: E501
__all__ = (
"PPSplineDual",
"PPSplineDual2",
"PPSplineF64",
"bspldnev_single",
"bsplev_single",
"evaluate",
)
================================================
FILE: python/rateslib/splines/evaluate.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
from rateslib import defaults
from rateslib.dual import Dual, Dual2, Variable
from rateslib.rs import PPSplineDual, PPSplineDual2, PPSplineF64
if TYPE_CHECKING:
from rateslib.local_types import DualTypes, Number
def evaluate(
spline: PPSplineF64 | PPSplineDual | PPSplineDual2,
x: DualTypes,
m: int = 0,
) -> Number:
"""
Evaluate a single x-axis data point, or a derivative value, on a *Spline*.
This method automatically calls :meth:`~rateslib.splines.PPSplineF64.ppdnev_single`,
:meth:`~rateslib.splines.PPSplineF64.ppdnev_single_dual` or
:meth:`~rateslib.splines.PPSplineF64.ppdnev_single_dual2` based on the input form of ``x``.
This method is AD safe.
Parameters
----------
spline: PPSplineF64, PPSplineDual, PPSplineDual2
The *Spline* on which to evaluate the data point.
x: float, Dual, Dual2
The x-axis data point to evaluate.
m: int, optional
The order of derivative to evaluate. If seeking value only use *m=0*.
Returns
-------
float, Dual, Dual2
"""
if isinstance(x, Variable):
if isinstance(spline, PPSplineDual):
x_: float | Dual | Dual2 = x._to_dual_type(order=1)
elif isinstance(spline, PPSplineDual2):
x_ = x._to_dual_type(order=2)
else:
x_ = x._to_dual_type(order=defaults._global_ad_order)
else:
x_ = x
if isinstance(x_, Dual):
return spline.ppdnev_single_dual(x_, m)
elif isinstance(x_, Dual2):
return spline.ppdnev_single_dual2(x_, m)
else:
return spline.ppdnev_single(x_, m)
================================================
FILE: python/rateslib/utils/calendars.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CalTypes,
datetime,
)
def _get_first_bus_day(dates: list[datetime], calendar: CalTypes) -> datetime:
if len(dates) == 0:
raise ValueError("The list of `dates` from which to select a business day is empty.")
for date in dates:
if calendar.is_bus_day(date):
return date
raise ValueError("No valid business days were found in `dates`.")
================================================
FILE: python/rateslib/verify.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations
import hashlib
import json
import logging
import os
import sys
import warnings
from datetime import datetime, timedelta
from enum import Enum
from json import JSONDecodeError
from pathlib import Path
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
)
VERSION = "2.7.1"
class LicenceNotice(UserWarning):
_no_licence_warning = (
"\nRateslib is source-available (not open-source) software distributed under a "
"dual-licence model."
"\nNo commercial licence is registered for this installation. Use is therefore permitted "
"for non-commercial purposes only (at-home or university based academic use)."
"\nAny use in commercial, professional, or for-profit environments, including evaluation "
"or trial use, requires a valid commercial licence or an approved evaluation licence."
"\nCertain features may require a registered commercial or evaluation licence in current "
"or future versions."
"\nFor licensing information or to register a licence, please visit: "
"https://rateslib.com/licence"
)
_invalid_warning = (
"\nRateslib is source-available (not open-source) software distributed under a "
"dual-licence model."
"\nAn invalid licence file is detected for this installation. Use is therefore permitted "
"for non-commercial purposes only (at-home or university based academic use)."
"\nAny use in commercial, professional, or for-profit environments, including evaluation "
"or trial use, requires a valid commercial licence or an approved evaluation licence."
"\nCertain features may require a registered commercial or evaluation licence in current "
"or future versions."
"\nFor licensing information or to register a licence, please visit: "
"https://rateslib.com/licence"
)
_expired_warning = (
"\nYour existing licence for rateslib {0} expired on {1}.\n"
"If you wish to extend your licence, please visit https://rateslib.com/licence for further "
"details.\n"
"Otherwise, please uninstall rateslib.\n"
"Expired licence details:\n{2}\n"
)
class _LicenceStatus(Enum):
VALID = 0
EXPIRED_GRACE = 1
EXPIRED = 2
INVALID = 3
NO_LICENCE = 4
class Licence:
"""
A licence coordinator to control warnings and functionality.
"""
def __init__(self) -> None:
# search for licences in relevant paths
value = os.getenv("RATESLIB_LICENCE") or _get_licence()
if value is None:
# then no licence data was found either in environment vars or in the standard path.
self._status = _LicenceStatus.NO_LICENCE
else:
verified_expiry = _verify_licence(value)
if verified_expiry is None: # i.e. invalid signature key
self._status = _LicenceStatus.INVALID
else:
# measure the expiry relative to today
self._expiry = datetime.strptime(verified_expiry, "%Y-%m-%d")
if self.expiry > datetime.now():
self._status = _LicenceStatus.VALID
elif self.expiry > datetime.now() - timedelta(days=14):
self._status = _LicenceStatus.EXPIRED_GRACE
else:
self._status = _LicenceStatus.EXPIRED
if self.status == _LicenceStatus.NO_LICENCE:
self._output(LicenceNotice._no_licence_warning, VERSION)
elif self.status == _LicenceStatus.INVALID:
self._output(LicenceNotice._invalid_warning, VERSION)
elif self.status == _LicenceStatus.EXPIRED:
self._output(
LicenceNotice._expired_warning,
VERSION,
self.expiry.strftime("%Y-%m-%d"),
self.print_licence(),
)
def _output(self, text: str, *args: Any) -> None:
warnings.warn(message=text.format(*args), category=LicenceNotice, stacklevel=4)
logger = logging.getLogger(__name__)
logger.info(text.format(*args))
@property
def status(self) -> _LicenceStatus:
return self._status
@property
def expiry(self) -> datetime:
return self._expiry
@classmethod
def add_licence(cls, licence_text: str) -> None:
"""
Store the provided licence as a file on the local disk.
Will create or overwrite any existing licence file as necessary. Will raise
PermissionError if writing to disk fails due to restrictions.
Parameters
----------
licence_text: str
The full JSON format str of the provided licence.
Returns
-------
None
"""
licence_file = _get_licence_path()
try:
if licence_file.exists():
current = licence_file.read_text()
if current != licence_text:
print(f"Warning: Existing licence differs. Overwriting {licence_file}")
# only add if a valid licence string:
try:
valid = _verify_licence(licence_text)
except JSONDecodeError:
raise ValueError(
"The provided licence text does not appear to be valid JSON format or cannot "
f"be decoded as such.\n{licence_text}"
)
if not valid:
raise ValueError(
f"The licence key is invalid and has not been saved to disk.\n{licence_text}"
)
licence_file.write_text(licence_text)
print(f"License saved at {licence_file}")
except PermissionError:
raise PermissionError(
f"Cannot save licence file to {licence_file}.\n "
f"Check your admin or corporate file permissions."
)
@classmethod
def remove_licence(cls) -> bool:
"""
Remove the stored licence file.
Raises PermissionError if the file cannot be deleted from disk due to restrictions.
Returns
-------
bool
*True* on successful removal and *False* if no licence file exists.
"""
licence_file = _get_licence_path()
try:
if licence_file.exists():
licence_file.unlink()
print(f"License removed from {licence_file}")
return True
else:
print("No licence file found to remove.")
return False
except PermissionError:
raise PermissionError(
f"Cannot remove licence file at {licence_file}. Check your permissions."
)
@classmethod
def print_licence(cls) -> str:
"""
Output the licence data to string.
Returns
-------
str
The JSON format of the licence.
"""
value = os.getenv("RATESLIB_LICENCE") or _get_licence()
if value is None:
raise ValueError("No rateslib licence data was found on this machine")
else:
return value
APP_NAME = "rateslib"
LICENSE_FILENAME = "rateslib_licence.txt"
PUBLIC_KEY: tuple[int, int] = (
65537,
86222696103896966718103037502072442336246185093318724988310224539490986842962518392592510336894335238460512594559929385462044884137775548353223089347652775415882082908041940084967476300969806363550378972881577687292674787317782507726743027399965228306794174501671206473081788525064813988527838836758351217651,
)
def _rsa_encrypt(message_int: int, public_key: tuple[int, int]) -> int:
e, n = public_key
if not 0 <= message_int < n:
raise ValueError("Message too large for key")
return pow(message_int, e, n)
def _get_licence_path() -> Path:
"""
Returns the path where the licence file should be stored.
Cross-platform user-specific location.
"""
if os.name == "nt": # Windows
base = Path(os.getenv("APPDATA", Path.home() / "AppData" / "Roaming"))
elif sys.platform == "darwin": # macOS
base = Path.home() / "Library" / "Application Support"
else: # Linux / Unix
base = Path.home() / ".local" / "share"
data_dir = base / APP_NAME
data_dir.mkdir(parents=True, exist_ok=True) # Ensure directory exists
return data_dir / LICENSE_FILENAME
def _get_licence() -> str | None:
"""
Retrieve the stored licence text, or None if not found.
"""
licence_file = _get_licence_path()
if licence_file.exists():
return licence_file.read_text()
return None
def _verify_licence(licence_plaintext: str) -> str | None:
loaded_dict = json.loads(licence_plaintext)
licence_dict = dict(sorted(loaded_dict.items()))
hex_s = licence_dict.pop("xkey", None)
if hex_s is None:
return None
s = int(hex_s, 16)
m = json.dumps(licence_dict, sort_keys=True)
hex_h = hashlib.sha256(m.encode()).hexdigest()
h = int(hex_h, 16) # h = int.from_bytes(hashlib.sha256(m.encode()).digest())
h_ = _rsa_encrypt(s, PUBLIC_KEY)
if h != h_:
return None
else:
try:
return loaded_dict["expiry"] # type: ignore[no-any-return]
except KeyError:
return None
__all__ = ["LicenceNotice", "Licence"]
================================================
FILE: python/rateslib/volatility/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from rateslib.volatility.fx import (
FXDeltaVolSmile,
FXDeltaVolSurface,
FXSabrSmile,
FXSabrSurface,
_BaseFXSmile,
_FXDeltaVolSmileNodes,
_FXDeltaVolSpline,
_FXDeltaVolSurfaceMeta,
_FXSabrSurfaceMeta,
_FXSmileMeta,
_SabrSmileNodes,
)
from rateslib.volatility.ir import (
IRSabrCube,
IRSabrSmile,
IRSplineCube,
IRSplineSmile,
_BaseIRCube,
_BaseIRSmile,
_IRCubeMeta,
_IRSmileMeta,
_IRSplineSmileNodes,
_IRVolPricingParams,
_IRVolSpline,
)
__all__ = [
"FXSabrSmile",
"FXSabrSurface",
"FXDeltaVolSurface",
"FXDeltaVolSmile",
"IRSabrSmile",
"IRSabrCube",
"IRSplineSmile",
"IRSplineCube",
"_BaseFXSmile",
"_BaseIRSmile",
"_BaseIRCube",
"_FXDeltaVolSurfaceMeta",
"_FXSmileMeta",
"_FXDeltaVolSpline",
"_FXDeltaVolSmileNodes",
"_FXSabrSurfaceMeta",
"_SabrSmileNodes",
"_IRSplineSmileNodes",
"_IRCubeMeta",
"_IRSmileMeta",
"_IRVolPricingParams",
"_IRVolSpline",
]
================================================
FILE: python/rateslib/volatility/fx/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from rateslib.volatility.fx.base import _BaseFXSmile
from rateslib.volatility.fx.delta_vol import FXDeltaVolSmile, FXDeltaVolSurface
from rateslib.volatility.fx.sabr import FXSabrSmile, FXSabrSurface
from rateslib.volatility.fx.utils import (
_FXDeltaVolSmileNodes,
_FXDeltaVolSpline,
_FXDeltaVolSurfaceMeta,
_FXSabrSurfaceMeta,
_FXSmileMeta,
)
from rateslib.volatility.utils import (
_SabrSmileNodes,
)
__all__ = [
"FXSabrSmile",
"FXSabrSurface",
"FXDeltaVolSurface",
"FXDeltaVolSmile",
"_BaseFXSmile",
"_FXDeltaVolSurfaceMeta",
"_FXSmileMeta",
"_FXDeltaVolSpline",
"_FXDeltaVolSmileNodes",
"_FXSabrSurfaceMeta",
"_SabrSmileNodes",
]
FXVols = FXDeltaVolSmile | FXDeltaVolSurface | FXSabrSmile | FXSabrSurface
FXVolObj = (FXDeltaVolSmile, FXDeltaVolSurface, FXSabrSmile, FXSabrSurface)
================================================
FILE: python/rateslib/volatility/fx/base.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations # type hinting
from typing import TYPE_CHECKING, NoReturn, TypeAlias
from rateslib.default import PlotOutput, plot
from rateslib.dual import Dual, Dual2, Variable
from rateslib.enums.generics import NoInput, _drb
from rateslib.mutability import _WithCache, _WithState
from rateslib.volatility.fx.utils import _FXSmileMeta
if TYPE_CHECKING:
from rateslib.local_types import FXForwards # pragma: no cover
DualTypes: TypeAlias = "float | Dual | Dual2 | Variable" # if not defined causes _WithCache failure
class _BaseFXSmile(_WithState, _WithCache[float, DualTypes]):
"""Abstract base class for implementing *FX Smiles*."""
_ad: int
_default_plot_x_axis: str
meta: _FXSmileMeta
@property
def ad(self) -> int:
"""Int in {0,1,2} describing the AD order associated with the *Smile*."""
return self._ad
def __iter__(self) -> NoReturn:
raise TypeError("`Smile` types are not iterable.")
def plot(
self,
comparators: list[_BaseFXSmile] | NoInput = NoInput(0),
labels: list[str] | NoInput = NoInput(0),
x_axis: str | NoInput = NoInput(0),
f: DualTypes | FXForwards | NoInput = NoInput(0),
) -> PlotOutput:
"""
Plot volatilities associated with the *Smile*.
.. warning::
The *'delta'* ``x_axis`` type for a *SabrSmile* is calculated based on a
**forward, unadjusted** delta and is expressed as a negated put option delta
consistent with the definition for a :class:`~rateslib.volatility.FXDeltaVolSmile`.
Parameters
----------
comparators: list[Smile]
A list of Smiles which to include on the same plot as comparators.
Note the comments on
:meth:`FXDeltaVolSmile.plot `.
labels : list[str]
A list of strings associated with the plot and comparators. Must be same
length as number of plots.
x_axis : str in {"strike", "moneyness", "delta"}
*'strike'* is the natural option for this *SabrSmile* types while *'delta'* is the
natural choice for *DeltaVolSmile* types.
If *'delta'* see the warning. If *'moneyness'* the strikes are converted using ``f``.
f: DualTypes
The FX forward rate at delivery.
Returns
-------
(fig, ax, line) : Matplotlib.Figure, Matplotplib.Axes, Matplotlib.Lines2D
"""
# reversed for intuitive strike direction
comparators = _drb([], comparators)
labels = _drb([], labels)
x_axis_: str = _drb(self.meta.plot_x_axis, x_axis)
x_, y_ = self._plot(x_axis_, f) # type: ignore[attr-defined]
x = [x_]
y = [y_]
if not isinstance(comparators, NoInput):
for smile in comparators:
if not isinstance(smile, _BaseFXSmile):
raise ValueError("A `comparator` must be a valid FX Smile type.")
x_, y_ = smile._plot(x_axis_, f) # type: ignore[attr-defined]
x.append(x_)
y.append(y_)
return plot(x, y, labels)
================================================
FILE: python/rateslib/volatility/fx/delta_vol.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations # type hinting
import warnings
from datetime import datetime, timezone
from typing import TYPE_CHECKING
from uuid import uuid4
import numpy as np
from pandas import Series
from rateslib import defaults
from rateslib.default import (
PlotOutput,
plot3d,
)
from rateslib.dual import (
Dual,
Dual2,
Variable,
dual_exp,
dual_inv_norm_cdf,
dual_log,
dual_norm_cdf,
dual_norm_pdf,
newton_1dim,
newton_ndim,
set_order_convert,
)
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import FXDeltaMethod, _get_fx_delta_type
from rateslib.mutability import (
_clear_cache_post,
_new_state_post,
_validate_states,
_WithCache,
_WithState,
)
from rateslib.scheduling import get_calendar
from rateslib.splines.evaluate import evaluate
from rateslib.volatility.fx.base import _BaseFXSmile
from rateslib.volatility.fx.utils import (
_delta_type_constants,
_FXDeltaVolSmileNodes,
_FXDeltaVolSurfaceMeta,
_FXSmileMeta,
_moneyness_from_delta_closed_form,
)
from rateslib.volatility.utils import (
_OptionModelBlack76,
_surface_index_left,
_t_var_interp,
_validate_weights,
)
if TYPE_CHECKING:
from rateslib.local_types import DualTypes, DualTypes_, Sequence # pragma: no cover
UTC = timezone.utc
class FXDeltaVolSmile(_BaseFXSmile):
r"""
Create an *FX Volatility Smile* at a given expiry indexed by delta percent.
See also the :ref:`FX Vol Surfaces section in the user guide `.
Parameters
----------
nodes: dict[float, DualTypes]
Key-value pairs for a delta index amount and associated volatility. See examples.
eval_date: datetime
Acts as the initial node of a *Curve*. Should be assigned today's immediate date.
expiry: datetime
The expiry date of the options associated with this *Smile*
delta_type: FXDeltaMethod or str
The type of delta calculation that is used on the options to attain a delta which
is referenced by the node keys.
id: str, optional
The unique identifier to distinguish between *Smiles* in a multicurrency framework
and/or *Surface*.
ad: int, optional
Sets the automatic differentiation order. Defines whether to convert node
values to float, :class:`~rateslib.dual.Dual` or
:class:`~rateslib.dual.Dual2`. It is advised against
using this setting directly. It is mainly used internally.
Notes
-----
The *delta* axis of this *Smile* is a **negated put delta**, i.e. 0.25 corresponds to a put
delta of -0.25. This permits increasing strike for increasing delta index.
For a 'forward' delta type 0.25 corresponds to a call delta of 0.75 via
put-call delta parity. For a 'spot' delta type it would not because under a 'spot' delta
type put-call delta parity is not 1.0, but related to the spot versus forward interest rates.
The **interpolation function** between nodes is a **cubic spline**.
- For an *unadjusted* ``delta_type`` the range of the delta index is set to [0,1], and the
cubic spline is **natural** with second order derivatives set to zero at the endpoints.
- For *premium adjusted* ``delta_types`` the range of the delta index is in [0, *d*] where *d*
is set large enough to encompass 99.99% of all possible values. The right endpoint is clamped
with a first derivative of zero to avoid uncontrolled behaviour. The value of *d* is derived
using :math:`d = e^{\sigma \sqrt{t} (3.75 + \frac{1}{2} \sigma \sqrt{t})}`
"""
_ini_solve = 0 # All node values are solvable
_default_plot_x_axis = "delta"
_nodes: _FXDeltaVolSmileNodes
_id: str
_ad: int
@_new_state_post
def __init__(
self,
nodes: dict[float, DualTypes],
eval_date: datetime,
expiry: datetime,
delta_type: FXDeltaMethod | str,
id: str | NoInput = NoInput(0), # noqa: A002
ad: int = 0,
):
self._id: str = (
uuid4().hex[:5] + "_" if isinstance(id, NoInput) else id
) # 1 in a million clash
self._nodes = _FXDeltaVolSmileNodes(
meta=_FXSmileMeta(
_expiry=expiry,
_eval_date=eval_date,
_delta_type=_get_fx_delta_type(delta_type),
_plot_x_axis="delta",
_pair=None,
_delivery=get_calendar(NoInput(0)).lag_bus_days(
expiry, defaults.fx_delivery_lag, True
),
_delivery_lag=defaults.fx_delivery_lag,
_calendar=get_calendar(NoInput(0)),
),
nodes=nodes,
)
self._set_ad_order(ad) # includes _csolve()
@property
def id(self) -> str:
"""A str identifier to name the *Smile* used in
:class:`~rateslib.solver.Solver` mappings."""
return self._id
@property
def meta(self) -> _FXSmileMeta: # type: ignore[override]
"""An instance of :class:`~rateslib.volatility.fx._FXSmileMeta`."""
return self.nodes.meta
@property
def nodes(self) -> _FXDeltaVolSmileNodes:
"""An instance of :class:`~rateslib.volatility.fx._FXDeltaVolSmileNodes`."""
return self._nodes
@property
def _n(self) -> int:
"""The number of parameters of the *Surface*."""
return self.nodes.n
def __getitem__(self, item: DualTypes) -> DualTypes:
"""
Get a value from the DeltaVolSmile given an item which is a delta_index.
"""
if item > self.nodes.spline.t[-1]:
# raise ValueError(
# "Cannot index the FXDeltaVolSmile for a delta index out of bounds.\n"
# f"Got: {item}, valid range: [{self.t[0]}, {self.t[-1]}]"
# )
return self.nodes.spline.spline.ppev_single(self.nodes.spline.t[-1])
elif item < self.nodes.spline.t[0]:
# raise ValueError(
# "Cannot index the FXDeltaVolSmile for a delta index out of bounds.\n"
# f"Got: {item}, valid range: [{self.t[0]}, {self.t[-1]}]"
# )
return self.nodes.spline.spline.ppev_single(self.nodes.spline.t[0])
else:
return evaluate(self.nodes.spline.spline, item, 0)
def _get_index(
self, delta_index: DualTypes, expiry: datetime | NoInput = NoInput(0)
) -> DualTypes:
"""
Return a volatility from a given delta index
Used internally alongside Surface, where a surface also requires an expiry.
"""
return self[delta_index]
def get(
self,
delta: DualTypes,
delta_type: FXDeltaMethod | str,
phi: float,
z_w: DualTypes,
) -> DualTypes:
"""
Return a volatility for a provided real option delta.
This function is more explicit than the `__getitem__` method of the *Smile* because it
permits forward/spot, adjusted/unadjusted and put/call option delta conversions,
by deriving an appropriate delta index relevant to that of the *Smile* ``delta_type``.
Parameters
----------
delta: float
The delta to obtain a volatility for.
delta_type: FXDeltaMethod or str
The delta type the given delta is expressed in.
phi: float
Whether the given delta is assigned to a put or call option.
z_w: DualTypes
Required only for spot delta types. This is a scaling factor between spot and
forward rate, equal to :math:`w_(m_{delivery})/w_(m_{spot})`, where *w* is curve
for the domestic currency collateralised in the foreign currency. If not required
enter 1.0.
Returns
-------
DualTypes
"""
delta_type_ = _get_fx_delta_type(delta_type)
eta_0, z_w_0, _ = _delta_type_constants(delta_type_, z_w, 0.0) # u: unused
eta_1, z_w_1, _ = _delta_type_constants(self.meta.delta_type, z_w, 0.0) # u: unused
# then delta types are both unadjusted, used closed form.
if eta_0 == eta_1 and eta_0 == 0.5:
d_i: DualTypes = (-z_w_1 / z_w_0) * (delta - 0.5 * z_w_0 * (phi + 1.0))
return self[d_i]
# then delta types are both adjusted, use 1-d solver.
elif eta_0 == eta_1 and eta_0 == -0.5:
u = _moneyness_from_delta_one_dimensional(
delta,
delta_type_,
self.meta.delta_type,
self,
self.meta.t_expiry,
z_w,
phi,
)
delta_idx = (-z_w_1 / z_w_0) * (delta - z_w_0 * u * (phi + 1.0) * 0.5)
return self[delta_idx]
else: # delta adjustment types are different, use 2-d solver.
u, delta_idx = _moneyness_from_delta_two_dimensional(
delta, delta_type_, self, self.meta.t_expiry, z_w, phi
)
return self[delta_idx]
def get_from_strike(
self,
k: DualTypes,
f: DualTypes,
expiry: datetime | NoInput = NoInput(0),
z_w: DualTypes | NoInput = NoInput(0),
) -> tuple[DualTypes, DualTypes, DualTypes]:
"""
Given an option strike return associated delta and vol values.
Parameters
-----------
k: float, Dual, Dual2
The strike of the option.
f: float, Dual, Dual2
The forward rate at delivery of the option.
expiry: datetime, optional
Typically used with *Surfaces*.
If given, performs a check to ensure consistency of valuations. Raises if expiry
requested and expiry of the *Smile* do not match. Used internally.
z_w: float, Dual, Dual2, Variable, optional
:math:`z_w` is the factor used to convert between spot and forward type delta values.
It is calculated for a specific option from the *Curve* for discounting cashflows in the
domestic (i.e. LHS side or notional) currency using the appropriate collateral rate for
the option, taking the DF at delivery divided by the DF at spot. If spot type delta
is not used this value is not required.
Returns
-------
delta_index: float, Dual, Dual2, Variable
The delta index that can be used as lookup value on the *Smile*
vol: float, Dual, Dual2, Variable
The volatility value attained from lookup of the index on the *Smile*.
k: float, Dual, Dual2, Variable
The strike value associated with the option of the delta index.
Notes
-----
This function will return a delta index associated with the *FXDeltaVolSmile* and the
volatility attributed to the delta at that point. Recall that the delta index is the
negated put option delta for the given strike ``k``.
"""
expiry = _drb(self.meta.expiry, expiry)
if self.meta.expiry != expiry:
raise ValueError(
"`expiry` of VolSmile and OptionPeriod do not match: calculation aborted "
"due to potential pricing errors.",
)
u: DualTypes = k / f # moneyness
eta, z_w, z_u = _delta_type_constants(self.meta.delta_type, z_w, u)
# Variables are passed to these functions so that iteration can take place using float
# which is faster and then a final iteration at the fixed point can be included with Dual
# variables to capture fixed point sensitivity.
def root(
delta: DualTypes,
u: DualTypes,
sqrt_t: DualTypes,
z_u: DualTypes,
z_w: DualTypes,
ad: int,
) -> tuple[DualTypes, DualTypes]:
# Function value
delta_index = -delta
vol_ = self[delta_index] / 100.0
vol_ = _dual_float(vol_) if ad == 0 else vol_
vol_sqrt_t = sqrt_t * vol_
d_plus_min = -dual_log(u) / vol_sqrt_t + eta * vol_sqrt_t
f0 = delta + z_w * z_u * dual_norm_cdf(-d_plus_min)
# Derivative
dvol_ddelta = -1.0 * evaluate(self.nodes.spline.spline, delta_index, 1) / 100.0
dvol_ddelta = _dual_float(dvol_ddelta) if ad == 0 else dvol_ddelta
dd_ddelta = dvol_ddelta * (dual_log(u) * sqrt_t / vol_sqrt_t**2 + eta * sqrt_t)
f1 = 1 - z_w * z_u * dual_norm_pdf(-d_plus_min) * dd_ddelta
return f0, f1
# Initial approximation is obtained through the closed form solution of the delta given
# an approximated delta at close to the base of the smile.
avg_vol = _dual_float(self.nodes.values[int(self.nodes.n / 2)]) / 100.0
d_plus_min = -dual_log(_dual_float(u)) / (
avg_vol * _dual_float(self.meta.t_expiry_sqrt)
) + eta * avg_vol * _dual_float(self.meta.t_expiry_sqrt)
delta_0 = -_dual_float(z_u) * _dual_float(z_w) * dual_norm_cdf(-d_plus_min)
solver_result = newton_1dim(
root,
delta_0,
args=(u, self.meta.t_expiry_sqrt, z_u, z_w),
pre_args=(0,),
final_args=(1,),
conv_tol=1e-13,
)
delta = solver_result["g"]
delta_index = -delta
return delta_index, self[delta_index], k
def _get_node_vector(self) -> np.ndarray[tuple[int, ...], np.dtype[np.object_]]:
"""Get a 1d array of variables associated with nodes of this object updated by Solver"""
return np.array(self.nodes.values)
def _get_node_vars(self) -> tuple[str, ...]:
"""Get the variable names of elements updated by a Solver"""
return tuple(f"{self.id}{i}" for i in range(self.nodes.n))
# Plotting
def _plot(
self,
x_axis: str,
f: DualTypes | NoInput,
) -> tuple[list[float], list[DualTypes]]:
x: list[float] = list(
np.linspace(_dual_float(self.nodes.plot_upper_bound), self.nodes.spline.t[0], 301)
)
vols: list[float] | list[Dual] | list[Dual2] = self.nodes.spline.spline.ppev(x)
if x_axis in ["moneyness", "strike"]:
if self.meta.delta_type != FXDeltaMethod.Forward:
warnings.warn(
"FXDeltaVolSmile.plot() approximates 'moneyness' and 'strike' using the "
"convention that the Smile has a `delta_type` of 'forward'.\nThe Smile "
f"has type: '{self.meta.delta_type}' so this is likely to lead to inexact "
f"plots.",
UserWarning,
)
x = x[40:-40]
vols = vols[40:-40]
sq_t = self.meta.t_expiry_sqrt
x_as_u: list[DualTypes] = [
dual_exp(_s / 100.0 * sq_t * (dual_inv_norm_cdf(_D) + 0.5 * _s / 100.0 * sq_t)) # type: ignore[operator]
for (_D, _s) in zip(x, vols, strict=True)
]
if x_axis == "strike":
if isinstance(f, NoInput):
raise ValueError(
"`f` (ATM-forward FX rate) is required by `FXDeltaVolSmile.plot` "
"to convert 'moneyness' to 'strike'."
)
return ([_ * _dual_float(f) for _ in x_as_u], vols) # type: ignore[misc, return-value]
return (x_as_u, vols) # type: ignore[return-value]
return (x, vols) # type: ignore[return-value]
# Mutation
@_new_state_post
@_clear_cache_post
def _set_node_vector(
self, vector: np.ndarray[tuple[int, ...], np.dtype[np.object_]], ad: int
) -> None:
"""
Update the node values in a Solver. ``ad`` in {1, 2}.
Only the real values in vector are used, dual components are dropped and restructured.
"""
DualType: type[Dual] | type[Dual2] = Dual if ad == 1 else Dual2
DualArgs: tuple[list[float]] | tuple[list[float], list[float]] = (
([],) if ad == 1 else ([], [])
)
base_obj = DualType(0.0, [f"{self.id}{i}" for i in range(self.nodes.n)], *DualArgs)
ident = np.eye(self.nodes.n)
nodes_: dict[float, DualTypes] = {}
for i, k in enumerate(self.nodes.keys):
nodes_[k] = DualType.vars_from(
base_obj, # type: ignore[arg-type]
vector[i].real,
base_obj.vars,
ident[i, :].tolist(),
*DualArgs[1:],
)
self._nodes = _FXDeltaVolSmileNodes(nodes=nodes_, meta=self.meta)
self.nodes.spline.csolve(self.nodes, self.ad)
@_clear_cache_post
def _set_ad_order(self, order: int) -> None:
if order == getattr(self, "ad", None):
return None
elif order not in [0, 1, 2]:
raise ValueError("`order` can only be in {0, 1, 2} for auto diff calcs.")
else:
self._ad = order
nodes: dict[float, DualTypes] = {
k: set_order_convert(v, order, [f"{self.id}{i}"])
for i, (k, v) in enumerate(self.nodes.nodes.items())
}
self._update_nodes_and_csolve(nodes)
# the caller must handle cache and state
def _update_nodes_and_csolve(self, nodes: dict[float, DualTypes]) -> None:
self._nodes = _FXDeltaVolSmileNodes(nodes=nodes, meta=self.meta)
self.nodes.spline.csolve(self.nodes, self.ad)
@_new_state_post
@_clear_cache_post
def update(
self,
nodes: dict[float, DualTypes],
) -> None:
"""
Update a *Smile* with new, manually passed nodes.
For arguments see :class:`~rateslib.volatility.FXDeltaVolSmile`
Returns
-------
None
Notes
-----
.. warning::
*Rateslib* is an object-oriented library that uses complex associations. Although
Python may not object to directly mutating attributes of a *Smile* instance, this
should be avoided in *rateslib*. Only use official ``update`` methods to mutate the
values of an existing *Smile* instance.
This class is labelled as a **mutable on update** object.
"""
if any(isinstance(_, Dual2) for _ in nodes.values()):
ad_: int = 2
elif any(isinstance(_, Dual) for _ in nodes.values()):
ad_ = 1
elif any(isinstance(_, Variable) for _ in nodes.values()):
ad_ = defaults._global_ad_order
else:
ad_ = 0
nodes = {
k: set_order_convert(v, ad_, [f"{self.id}{i}"])
for i, (k, v) in enumerate(nodes.items())
}
self._ad = ad_
self._update_nodes_and_csolve(nodes)
@_new_state_post
@_clear_cache_post
def update_node(self, key: float, value: DualTypes) -> None:
"""
Update a single node value on the *Smile*.
Parameters
----------
key: float
The node date to update. Must exist in ``nodes``.
value: float, Dual, Dual2, Variable
Value to update on the *Curve*.
Returns
-------
None
Notes
-----
.. warning::
*Rateslib* is an object-oriented library that uses complex associations. Although
Python may not object to directly mutating attributes of a *Curve* instance, this
should be avoided in *rateslib*. Only use official ``update`` methods to mutate the
values of an existing *Curve* instance.
This class is labelled as a **mutable on update** object.
.. warning::
This method does not validate the AD order of the input value. Ensure that any
supplied values are consistent with the AD order of the object.
"""
nodes: dict[float, DualTypes] = self.nodes.nodes.copy()
if key not in nodes:
raise KeyError(f"`key`: '{key}' is not in Curve ``nodes``.")
nodes[key] = value
self._update_nodes_and_csolve(nodes)
# Serialization
class FXDeltaVolSurface(_WithState, _WithCache[datetime, FXDeltaVolSmile]):
r"""
Create an *FX Volatility Surface* parametrised by cross-sectional *Smiles* at different
expiries.
See also the :ref:`FX Vol Surfaces section in the user guide `.
Parameters
----------
delta_indexes: list[float]
Axis values representing the delta indexes on each cross-sectional *Smile*.
expiries: list[datetime]
Datetimes representing the expiries of each cross-sectional *Smile*, in ascending order.
node_values: 2d-shape of float, Dual, Dual2
An array of values representing each node value on each cross-sectional *Smile*. Should be
an array of size: (length of ``expiries``, length of ``delta_indexes``).
eval_date: datetime
Acts as the initial node of a *Curve*. Should be assigned today's immediate date.
delta_type: FXDeltaMethod or str
The type of delta calculation that is used as the *Smiles* definition to obtain a delta
index which is referenced by the node keys.
weights: Series, optional
Weights used for temporal volatility interpolation. See notes.
id: str, optional
The unique identifier to label the *Surface* and its variables.
ad: int, optional
Sets the automatic differentiation order. Defines whether to convert node
values to float, :class:`~rateslib.dual.Dual` or
:class:`~rateslib.dual.Dual2`. It is advised against
using this setting directly. It is mainly used internally.
Notes
-----
See :class:`~rateslib.volatility.FXDeltaVolSmile` for a description of delta indexes and
*Smile* construction.
**Temporal Interpolation**
Interpolation along the expiry axis occurs by performing total linear variance interpolation
for each *delta index* and then dynamically constructing a *Smile* with the usual cubic
interpolation.
If ``weights`` are given this uses the scaling approach of forward volatility (as demonstrated
in Clark's *FX Option Pricing*) for calendar days (different options 'cuts' and timezone are
not implemented). A datetime indexed `Series` must be provided, where any calendar date that
is not included will be assigned the default weight of 1.0.
See :ref:`constructing FX volatility surfaces ` for more details.
**Calibration**
*Instruments* that do not match the ``delta_type`` of this *Surface* can still be used within
a :class:`~rateslib.solver.Solver` to calibrate the surface. This is quite common, when
*Options* less than or equal to one year expiry might use a *'spot'* delta type whilst longer
expiries use *'forward'* delta type.
Internally this is all handled appropriately with necessary conversions, but it is the users
responsibility to label the *Surface* and *Instrument* with the correct types. Failing to
take correct delta types into account often introduces a mismatch -
large enough to be relevant for calibration and pricing, but small enough that it may not be
noticed at first. Parametrising the *Surface* with a *'forward'* delta type is the
**recommended**
choice because it is more standardised and the configuration of which *delta types* to use for
the *Instruments* can be a separate consideration.
For performance reasons it is recommended to match unadjusted delta type *Surfaces* with
calibrating *Instruments* that also have unadjusted delta types. And vice versa with
premium adjusted
delta types. However, *rateslib* has internal root solvers which can handle these
cross-delta type
specifications, although it degrades the performance of the *Solver* because the calculations
are made more difficult. Mixing 'spot' and 'forward' is not a difficult distinction to
refactor and that does not cause performance degradation.
"""
_ini_solve = 0
_mutable_by_association = True
_id: str
_meta: _FXDeltaVolSurfaceMeta
_smiles: list[FXDeltaVolSmile]
def __init__(
self,
delta_indexes: list[float],
expiries: list[datetime],
node_values: list[DualTypes],
eval_date: datetime,
delta_type: FXDeltaMethod | str,
weights: Series[float] | NoInput = NoInput(0),
id: str | NoInput = NoInput(0), # noqa: A002
ad: int = 0,
):
self._id: str = (
uuid4().hex[:5] + "_" if isinstance(id, NoInput) else id
) # 1 in a million clash
self._meta = _FXDeltaVolSurfaceMeta(
_eval_date=eval_date,
_delta_type=_get_fx_delta_type(delta_type),
_plot_x_axis="delta",
_weights=_validate_weights(weights, eval_date, expiries),
_delta_indexes=delta_indexes,
_expiries=expiries,
)
node_values_: np.ndarray[tuple[int, ...], np.dtype[np.object_]] = np.asarray(node_values)
self._smiles = [
FXDeltaVolSmile(
nodes=dict(zip(self.meta.delta_indexes, node_values_[i, :], strict=False)),
expiry=expiry,
eval_date=self.meta.eval_date,
delta_type=self.meta.delta_type,
id=f"{self.id}_{i}_",
)
for i, expiry in enumerate(self.meta.expiries)
]
self._set_ad_order(ad) # includes csolve on each smile
self._set_new_state()
@property
def _n(self) -> int:
"""The number of pricing parameters of the *Surface*."""
return len(self.meta.expiries) * len(self.meta.delta_indexes)
@property
def id(self) -> str:
"""A str identifier to name the *Surface* used in
:class:`~rateslib.solver.Solver` mappings."""
return self._id
@property
def meta(self) -> _FXDeltaVolSurfaceMeta:
"""An instance of :class:`~rateslib.volatility.fx._FXDeltaVolSurfaceMeta`."""
return self._meta
@property
def ad(self) -> int:
"""Int in {0,1,2} describing the AD order associated with the *Surface*."""
return self._ad
@property
def smiles(self) -> list[FXDeltaVolSmile]:
"""A list of cross-sectional :class:`FXDeltaVolSmile` instances."""
return self._smiles
def _get_composited_state(self) -> int:
return hash(sum(smile._state for smile in self.smiles))
def _validate_state(self) -> None:
if self._state != self._get_composited_state():
# If any of the associated curves have been mutated then the cache is invalidated
self._clear_cache()
self._set_new_state()
@_clear_cache_post
def _set_ad_order(self, order: int) -> None:
self._ad = order
for smile in self.smiles:
smile._set_ad_order(order)
@_new_state_post
@_clear_cache_post
def _set_node_vector(
self, vector: np.ndarray[tuple[int, ...], np.dtype[np.object_]], ad: int
) -> None:
m = len(self.meta.delta_indexes)
for i in range(int(len(vector) / m)):
# smiles are indexed by expiry, shortest first
self.smiles[i]._set_node_vector(vector[i * m : i * m + m], ad)
def _get_node_vector(self) -> np.ndarray[tuple[int, ...], np.dtype[np.object_]]:
"""Get a 1d array of variables associated with nodes of this object updated by Solver"""
return np.array([_.nodes.values for _ in self.smiles]).ravel()
def _get_node_vars(self) -> tuple[str, ...]:
"""Get the variable names of elements updated by a Solver"""
vars_: tuple[str, ...] = ()
for smile in self.smiles:
vars_ += tuple(f"{smile.id}{i}" for i in range(smile._n))
return vars_
@_validate_states
def get_smile(self, expiry: datetime) -> FXDeltaVolSmile:
"""
Construct a *DeltaVolSmile* with linear total variance interpolation over delta indexes.
Parameters
----------
expiry: datetime
The expiry for the *Smile* as cross-section of *Surface*.
Returns
-------
FXDeltaVolSmile
"""
if defaults.curve_caching and expiry in self._cache:
return self._cache[expiry]
expiry_posix = expiry.replace(tzinfo=UTC).timestamp()
e_idx, e_next_idx = _surface_index_left(self.meta.expiries_posix, expiry_posix)
if expiry == self.meta.expiries[0]:
smile = self.smiles[0]
elif abs(expiry_posix - self.meta.expiries_posix[e_next_idx]) < 1e-10:
# expiry aligns with a known smile
smile = self.smiles[e_idx + 1]
elif expiry_posix > self.meta.expiries_posix[-1]:
# use the data from the last smile
smile = FXDeltaVolSmile(
nodes={
k: _t_var_interp(
expiries=self.meta.expiries,
expiries_posix=self.meta.expiries_posix,
expiry=expiry,
expiry_posix=expiry_posix,
expiry_index=e_idx,
expiry_next_index=e_next_idx,
eval_posix=self.meta.eval_posix,
weights_cum=self.meta.weights_cum,
vol1=vol1,
vol2=vol1,
bounds_flag=1,
)
for k, vol1 in zip(
self.meta.delta_indexes, self.smiles[e_next_idx].nodes.values, strict=False
)
},
eval_date=self.meta.eval_date,
expiry=expiry,
ad=self.ad,
delta_type=self.meta.delta_type,
id=self.smiles[e_next_idx].id + "_ext",
)
elif expiry <= self.meta.eval_date:
raise ValueError("`expiry` before the `eval_date` of the Surface is invalid.")
elif expiry_posix < self.meta.expiries_posix[0]:
# use the data from the first smile
smile = FXDeltaVolSmile(
nodes={
k: _t_var_interp(
expiries=self.meta.expiries,
expiries_posix=self.meta.expiries_posix,
expiry=expiry,
expiry_posix=expiry_posix,
expiry_index=e_idx,
expiry_next_index=e_next_idx,
eval_posix=self.meta.eval_posix,
weights_cum=self.meta.weights_cum,
vol1=vol1,
vol2=vol1,
bounds_flag=-1,
)
for k, vol1 in zip(
self.meta.delta_indexes, self.smiles[0].nodes.values, strict=False
)
},
eval_date=self.meta.eval_date,
expiry=expiry,
ad=self.ad,
delta_type=self.meta.delta_type,
id=self.smiles[0].id + "_ext",
)
else:
ls, rs = self.smiles[e_idx], self.smiles[e_next_idx] # left_smile, right_smile
smile = FXDeltaVolSmile(
nodes={
k: _t_var_interp(
expiries=self.meta.expiries,
expiries_posix=self.meta.expiries_posix,
expiry=expiry,
expiry_posix=expiry_posix,
expiry_index=e_idx,
expiry_next_index=e_next_idx,
eval_posix=self.meta.eval_posix,
weights_cum=self.meta.weights_cum,
vol1=vol1,
vol2=vol2,
bounds_flag=0,
)
for k, vol1, vol2 in zip(
self.meta.delta_indexes,
ls.nodes.values,
rs.nodes.values,
strict=False,
)
},
eval_date=self.meta.eval_date,
expiry=expiry,
ad=self.ad,
delta_type=self.meta.delta_type,
id=ls.id + "_" + rs.id + "_intp",
)
return self._cached_value(expiry, smile)
# _validate_states not required since called by `get_smile` internally
def get_from_strike(
self,
k: DualTypes,
f: DualTypes,
expiry: datetime | NoInput = NoInput(0),
z_w: DualTypes_ = NoInput(0),
) -> tuple[DualTypes, DualTypes, DualTypes]:
"""
Given an option strike and expiry return associated delta and vol values.
Parameters
-----------
k: float, Dual, Dual2
The strike of the option.
f: float, Dual, Dual2
The forward rate at delivery of the option.
expiry: datetime
Required to produce the cross-sectional *Smile* on the *Surface*.
z_w: float, Dual, Dual2, Variable, optional
:math:`z_w` is the factor used to convert between spot and forward type delta values.
It is calculated for a specific option from the *Curve* for discounting cashflows in the
domestic (i.e. LHS side or notional) currency using the appropriate collateral rate for
the option, taking the DF at delivery divided by the DF at spot. If spot type delta
is not used this value is not required.
Returns
-------
delta_index: float, Dual, Dual2, Variable
The delta index that can be used as lookup value on the *Smile*
vol: float, Dual, Dual2, Variable
The volatility value attained from lookup of the index on the *Smile*.
k: float, Dual, Dual2, Variable
The strike value associated with the option of the delta index.
Notes
-----
This function will return a delta index associated with the *FXDeltaVolSmile* and the
volatility attributed to the delta at that point. Recall that the delta index is the
negated put option delta for the given strike ``k``.
"""
if isinstance(expiry, NoInput):
raise ValueError("`expiry` required to get cross-section of FXDeltaVolSurface.")
smile = self.get_smile(expiry)
return smile.get_from_strike(k, f, expiry, z_w)
# _validate_states not required since called by `get_smile` internally
def _get_index(self, delta_index: DualTypes, expiry: datetime) -> DualTypes:
"""
Return a volatility from a given delta index.
Used internally alongside Surface, where a surface also requires an expiry.
"""
return self.get_smile(expiry)[delta_index]
def plot(self) -> PlotOutput:
plot_upper_bound = max([_.nodes.plot_upper_bound for _ in self.smiles])
deltas = np.linspace(0.0, plot_upper_bound, 20)
vols = np.array([[_._get_index(d, NoInput(0)) for d in deltas] for _ in self.smiles])
expiries = [
(_ - self.meta.eval_posix) / (365 * 24 * 60 * 60.0) for _ in self.meta.expiries_posix
]
return plot3d(deltas, expiries, vols) # type: ignore[arg-type, return-value]
def _moneyness_from_atm_delta_one_dimensional(
delta_type: FXDeltaMethod,
vol_delta_type: FXDeltaMethod,
vol: DualTypes | FXDeltaVolSmile,
t_e: DualTypes,
z_w: DualTypes,
phi: float,
) -> DualTypes:
def root1d(
g: DualTypes,
delta_type: FXDeltaMethod,
vol_delta_type: FXDeltaMethod,
phi: float,
sqrt_t_e: float,
z_w: DualTypes,
ad: int,
) -> tuple[DualTypes, DualTypes]:
u = g
eta_0, z_w_0, z_u_0 = _delta_type_constants(delta_type, z_w, u)
eta_1, z_w_1, z_u_1 = _delta_type_constants(vol_delta_type, z_w, u)
dz_u_0_du = 0.5 - eta_0
delta_idx = z_w_1 * z_u_0 / 2.0
if isinstance(vol, FXDeltaVolSmile):
vol_: DualTypes = vol[delta_idx] / 100.0
dvol_ddeltaidx = evaluate(vol.nodes.spline.spline, delta_idx, 1) / 100.0
else:
vol_ = vol / 100.0
dvol_ddeltaidx = 0.0
vol_ = _dual_float(vol_) if ad == 0 else vol_
dvol_ddeltaidx = _dual_float(dvol_ddeltaidx) if ad == 0 else dvol_ddeltaidx
vol_sqrt_t = vol_ * sqrt_t_e
# Calculate function values
d0 = _OptionModelBlack76._d_plus_min_u(u, vol_sqrt_t, eta_0)
_phi0 = dual_norm_cdf(phi * d0)
f0 = phi * z_w_0 * z_u_0 * (0.5 - _phi0)
# Calculate derivative values
ddelta_idx_du = dz_u_0_du * z_w_1 * 0.5
lnu = dual_log(u) / (vol_**2 * sqrt_t_e)
dd_du = -1 / (u * vol_sqrt_t) + dvol_ddeltaidx * (lnu + eta_0 * sqrt_t_e) * ddelta_idx_du
nd0 = dual_norm_pdf(phi * d0)
f1 = -dz_u_0_du * z_w_0 * phi * _phi0 - z_u_0 * z_w_0 * nd0 * dd_du
return f0, f1
if isinstance(vol, FXDeltaVolSmile):
avg_vol: DualTypes = _dual_float(vol.nodes.values[int(vol.nodes.n / 2)])
else:
avg_vol = vol
g01 = (
phi
* 0.5
* (z_w if delta_type in [FXDeltaMethod.Spot, FXDeltaMethod.SpotPremiumAdjusted] else 1.0)
)
g00 = _moneyness_from_delta_closed_form(g01, avg_vol, t_e, 1.0, phi)
root_solver = newton_1dim(
root1d,
g00,
args=(delta_type, vol_delta_type, phi, t_e**0.5, z_w),
pre_args=(0,),
final_args=(1,),
raise_on_fail=True,
)
u: DualTypes = root_solver["g"]
return u
def _moneyness_from_delta_one_dimensional(
delta: DualTypes,
delta_type: FXDeltaMethod,
vol_delta_type: FXDeltaMethod,
vol: FXDeltaVolSmile | DualTypes,
t_e: DualTypes,
z_w: DualTypes,
phi: float,
) -> DualTypes:
def root1d(
g: DualTypes,
delta: DualTypes,
delta_type: FXDeltaMethod,
vol_delta_type: FXDeltaMethod,
phi: float,
sqrt_t_e: DualTypes,
z_w: DualTypes,
ad: int,
) -> tuple[DualTypes, DualTypes]:
u = g
eta_0, z_w_0, z_u_0 = _delta_type_constants(delta_type, z_w, u)
eta_1, z_w_1, z_u_1 = _delta_type_constants(vol_delta_type, z_w, u)
dz_u_0_du = 0.5 - eta_0
delta_idx = (-z_w_1 / z_w_0) * (delta - z_w_0 * z_u_0 * (phi + 1.0) * 0.5)
if isinstance(vol, FXDeltaVolSmile):
vol_: DualTypes = vol[delta_idx] / 100.0
dvol_ddeltaidx = evaluate(vol.nodes.spline.spline, delta_idx, 1) / 100.0
else:
vol_ = vol / 100.0
dvol_ddeltaidx = 0.0
vol_ = _dual_float(vol_) if ad == 0 else vol_
dvol_ddeltaidx = _dual_float(dvol_ddeltaidx) if ad == 0 else dvol_ddeltaidx
vol_sqrt_t = vol_ * sqrt_t_e
# Calculate function values
d0 = _OptionModelBlack76._d_plus_min_u(u, vol_sqrt_t, eta_0)
_phi0 = dual_norm_cdf(phi * d0)
f0 = delta - z_w_0 * z_u_0 * phi * _phi0
# Calculate derivative values
ddelta_idx_du = dz_u_0_du * z_w_1 * (phi + 1.0) * 0.5
lnu = dual_log(u) / (vol_**2 * sqrt_t_e)
dd_du = -1 / (u * vol_sqrt_t) + dvol_ddeltaidx * (lnu + eta_0 * sqrt_t_e) * ddelta_idx_du
nd0 = dual_norm_pdf(phi * d0)
f1 = -dz_u_0_du * z_w_0 * phi * _phi0 - z_u_0 * z_w_0 * nd0 * dd_du
return f0, f1
if isinstance(vol, FXDeltaVolSmile):
avg_vol: DualTypes = _dual_float(vol.nodes.values[int(vol.nodes.n / 2)])
else:
avg_vol = vol
g01 = delta if phi > 0 else max(delta, -0.75)
g00 = _moneyness_from_delta_closed_form(g01, avg_vol, t_e, 1.0, phi)
msg = (
f"If the delta, {delta:.1f}, is premium adjusted for a call option is it infeasible?"
if phi > 0
else ""
)
try:
root_solver = newton_1dim(
root1d,
g00,
args=(delta, delta_type, vol_delta_type, phi, t_e**0.5, z_w),
pre_args=(0,),
final_args=(1,),
)
except ValueError as e:
raise ValueError(f"Newton root solver failed, with error: {e.__str__()}.\n{msg}")
if root_solver["state"] == -1:
raise ValueError(
f"Newton root solver failed, after {root_solver['iterations']} iterations.\n{msg}",
)
u: DualTypes = root_solver["g"]
return u
def _moneyness_from_atm_delta_two_dimensional(
delta_type: FXDeltaMethod,
vol: FXDeltaVolSmile,
t_e: DualTypes,
z_w: DualTypes,
phi: float,
) -> tuple[DualTypes, DualTypes]:
def root2d(
g: list[DualTypes],
delta_type: FXDeltaMethod,
vol_delta_type: FXDeltaMethod,
phi: float,
sqrt_t_e: DualTypes,
z_w: DualTypes,
ad: int,
) -> tuple[list[DualTypes], list[list[DualTypes]]]:
u, delta_idx = g[0], g[1]
eta_0, z_w_0, z_u_0 = _delta_type_constants(delta_type, z_w, u)
eta_1, z_w_1, z_u_1 = _delta_type_constants(vol_delta_type, z_w, u)
dz_u_0_du = 0.5 - eta_0
dz_u_1_du = 0.5 - eta_1
vol_ = vol[delta_idx] / 100.0
vol_ = _dual_float(vol_) if ad == 0 else vol_
vol_sqrt_t = vol_ * sqrt_t_e
# Calculate function values
d0 = _OptionModelBlack76._d_plus_min_u(u, vol_sqrt_t, eta_0)
_phi0 = dual_norm_cdf(phi * d0)
f0_0 = phi * z_w_0 * z_u_0 * (0.5 - _phi0)
d1 = _OptionModelBlack76._d_plus_min_u(u, vol_sqrt_t, eta_1)
_phi1 = dual_norm_cdf(-d1)
f0_1 = delta_idx - z_w_1 * z_u_1 * _phi1
# Calculate Jacobian values
dvol_ddeltaidx = evaluate(vol.nodes.spline.spline, delta_idx, 1) / 100.0
dvol_ddeltaidx = _dual_float(dvol_ddeltaidx) if ad == 0 else dvol_ddeltaidx
dd_du = -1 / (u * vol_sqrt_t) # this is the same for 0 or 1 variety
nd0 = dual_norm_pdf(phi * d0)
nd1 = dual_norm_pdf(-d1)
lnu = dual_log(u) / (vol_**2 * sqrt_t_e)
dd0_ddeltaidx = (lnu + eta_0 * sqrt_t_e) * dvol_ddeltaidx
dd1_ddeltaidx = (lnu + eta_1 * sqrt_t_e) * dvol_ddeltaidx
f1_00 = phi * z_w_0 * dz_u_0_du * (0.5 - _phi0) - z_w_0 * z_u_0 * nd0 * dd_du
f1_10 = -z_w_1 * dz_u_1_du * _phi1 + z_w_1 * z_u_1 * nd1 * dd_du
f1_01 = -z_w_0 * z_u_0 * nd0 * dd0_ddeltaidx
f1_11 = 1.0 + z_w_1 * z_u_1 * nd1 * dd1_ddeltaidx
return [f0_0, f0_1], [[f1_00, f1_01], [f1_10, f1_11]]
avg_vol = _dual_float(vol.nodes.values[int(vol.nodes.n / 2)])
g01 = (
phi
* 0.5
* (z_w if delta_type in [FXDeltaMethod.Spot, FXDeltaMethod.SpotPremiumAdjusted] else 1.0)
)
g00 = _moneyness_from_delta_closed_form(g01, avg_vol, t_e, 1.0, phi)
root_solver = newton_ndim(
root2d,
[g00, abs(g01)],
args=(delta_type, vol.meta.delta_type, phi, t_e**0.5, z_w),
pre_args=(0,),
final_args=(1,),
raise_on_fail=True,
)
u, delta_idx = root_solver["g"][0], root_solver["g"][1]
return u, delta_idx
def _moneyness_from_delta_two_dimensional(
delta: DualTypes,
delta_type: FXDeltaMethod,
vol: FXDeltaVolSmile,
t_e: DualTypes,
z_w: DualTypes,
phi: float,
) -> tuple[DualTypes, DualTypes]:
def root2d(
g: Sequence[DualTypes],
delta: DualTypes,
delta_type: FXDeltaMethod,
vol_delta_type: FXDeltaMethod,
phi: float,
sqrt_t_e: float,
z_w: DualTypes,
ad: int,
) -> tuple[list[DualTypes], list[list[DualTypes]]]:
u, delta_idx = g[0], g[1]
eta_0, z_w_0, z_u_0 = _delta_type_constants(delta_type, z_w, u)
eta_1, z_w_1, z_u_1 = _delta_type_constants(vol_delta_type, z_w, u)
dz_u_0_du = 0.5 - eta_0
dz_u_1_du = 0.5 - eta_1
vol_ = vol[delta_idx] / 100.0
vol_ = _dual_float(vol_) if ad == 0 else vol_
vol_sqrt_t = vol_ * sqrt_t_e
# Calculate function values
d0 = _OptionModelBlack76._d_plus_min_u(u, vol_sqrt_t, eta_0)
_phi0 = dual_norm_cdf(phi * d0)
f0_0: DualTypes = delta - z_w_0 * z_u_0 * phi * _phi0
d1 = _OptionModelBlack76._d_plus_min_u(u, vol_sqrt_t, eta_1)
_phi1 = dual_norm_cdf(-d1)
f0_1: DualTypes = delta_idx - z_w_1 * z_u_1 * _phi1
# Calculate Jacobian values
dvol_ddeltaidx = evaluate(vol.nodes.spline.spline, delta_idx, 1) / 100.0
dvol_ddeltaidx = _dual_float(dvol_ddeltaidx) if ad == 0 else dvol_ddeltaidx
dd_du = -1 / (u * vol_sqrt_t)
nd0 = dual_norm_pdf(phi * d0)
nd1 = dual_norm_pdf(-d1)
lnu = dual_log(u) / (vol_**2 * sqrt_t_e)
dd0_ddeltaidx = (lnu + eta_0 * sqrt_t_e) * dvol_ddeltaidx
dd1_ddeltaidx = (lnu + eta_1 * sqrt_t_e) * dvol_ddeltaidx
f1_00: DualTypes = -z_w_0 * dz_u_0_du * phi * _phi0 - z_w_0 * z_u_0 * nd0 * dd_du
f1_10: DualTypes = -z_w_1 * dz_u_1_du * _phi1 + z_w_1 * z_u_1 * nd1 * dd_du
f1_01: DualTypes = -z_w_0 * z_u_0 * nd0 * dd0_ddeltaidx
f1_11: DualTypes = 1.0 + z_w_1 * z_u_1 * nd1 * dd1_ddeltaidx
return [f0_0, f0_1], [[f1_00, f1_01], [f1_10, f1_11]]
avg_vol = _dual_float(vol.nodes.values[int(vol.nodes.n / 2)])
g01 = delta if phi > 0 else max(delta, -0.75)
g00 = _moneyness_from_delta_closed_form(g01, avg_vol, t_e, 1.0, phi)
msg = (
f"If the delta, {_dual_float(delta):.1f}, is premium adjusted for a "
"call option is it infeasible?"
if phi > 0
else ""
)
try:
root_solver = newton_ndim(
root2d,
[g00, abs(g01)],
args=(delta, delta_type, vol.meta.delta_type, phi, t_e**0.5, z_w),
pre_args=(0,),
final_args=(1,),
raise_on_fail=False,
)
except ValueError as e:
raise ValueError(f"Newton root solver failed, with error: {e.__str__()}.\n{msg}")
if root_solver["state"] == -1:
raise ValueError(
f"Newton root solver failed, after {root_solver['iterations']} iterations.\n{msg}",
)
u, delta_idx = root_solver["g"][0], root_solver["g"][1]
return u, delta_idx
def _moneyness_from_delta_three_dimensional(
delta_type: FXDeltaMethod,
vol: DualTypes | FXDeltaVolSmile,
t_e: DualTypes,
z_w: DualTypes,
phi: float,
) -> tuple[DualTypes, DualTypes, DualTypes]:
"""
Solve the ATM delta problem where delta is not explicit.
Book2: section "Strike and Volatility implied from ATM delta" (FXDeltaVolSMile)
"""
def root3d(
g: list[DualTypes],
delta_type: FXDeltaMethod,
vol_delta_type: FXDeltaMethod,
phi: float,
sqrt_t_e: DualTypes,
z_w: DualTypes,
ad: int,
) -> tuple[list[DualTypes], list[list[DualTypes]]]:
u, delta_idx, delta = g[0], g[1], g[2]
eta_0, z_w_0, z_u_0 = _delta_type_constants(delta_type, z_w, u)
eta_1, z_w_1, z_u_1 = _delta_type_constants(vol_delta_type, z_w, u)
dz_u_0_du = 0.5 - eta_0
dz_u_1_du = 0.5 - eta_1
if isinstance(vol, FXDeltaVolSmile):
vol_: DualTypes = vol[delta_idx] / 100.0
dvol_ddeltaidx = evaluate(vol.nodes.spline.spline, delta_idx, 1) / 100.0
else:
vol_ = vol / 100.0
dvol_ddeltaidx = 0.0
vol_ = _dual_float(vol_) if ad == 0 else vol_
vol_sqrt_t = vol_ * sqrt_t_e
# Calculate function values
d0 = _OptionModelBlack76._d_plus_min_u(u, vol_sqrt_t, eta_0)
_phi0 = dual_norm_cdf(phi * d0)
f0_0 = delta - z_w_0 * z_u_0 * phi * _phi0
d1 = _OptionModelBlack76._d_plus_min_u(u, vol_sqrt_t, eta_1)
_phi1 = dual_norm_cdf(-d1)
f0_1 = delta_idx - z_w_1 * z_u_1 * _phi1
f0_2 = delta - phi * z_u_0 * z_w_0 / 2.0
# Calculate Jacobian values
dvol_ddeltaidx = _dual_float(dvol_ddeltaidx) if ad == 0 else dvol_ddeltaidx
dd_du = -1 / (u * vol_sqrt_t)
nd0 = dual_norm_pdf(phi * d0)
nd1 = dual_norm_pdf(-d1)
lnu = dual_log(u) / (vol_**2 * sqrt_t_e)
dd0_ddeltaidx = (lnu + eta_0 * sqrt_t_e) * dvol_ddeltaidx
dd1_ddeltaidx = (lnu + eta_1 * sqrt_t_e) * dvol_ddeltaidx
f1_00 = -z_w_0 * dz_u_0_du * phi * _phi0 - z_w_0 * z_u_0 * nd0 * dd_du # dh0/du
f1_10 = -z_w_1 * dz_u_1_du * _phi1 + z_w_1 * z_u_1 * nd1 * dd_du # dh1/du
f1_20 = -phi * z_w_0 * dz_u_0_du / 2.0 # dh2/du
f1_01 = -z_w_0 * z_u_0 * nd0 * dd0_ddeltaidx # dh0/ddidx
f1_11 = 1.0 + z_w_1 * z_u_1 * nd1 * dd1_ddeltaidx # dh1/ddidx
f1_21 = 0.0 # dh2/ddidx
f1_02 = 1.0 # dh0/ddelta
f1_12 = 0.0 # dh1/ddelta
f1_22 = 1.0 # dh2/ddelta
return [f0_0, f0_1, f0_2], [
[f1_00, f1_01, f1_02],
[f1_10, f1_11, f1_12],
[f1_20, f1_21, f1_22],
]
if isinstance(vol, FXDeltaVolSmile):
avg_vol: DualTypes = _dual_float(vol.nodes.values[int(vol.nodes.n / 2)])
vol_delta_type = vol.meta.delta_type
else:
avg_vol = vol
vol_delta_type = delta_type
g02 = (
0.5
* phi
* (z_w if delta_type in [FXDeltaMethod.Spot, FXDeltaMethod.SpotPremiumAdjusted] else 1.0)
)
g01 = g02 if phi > 0 else max(g02, -0.75)
g00 = _moneyness_from_delta_closed_form(g01, avg_vol, t_e, 1.0, phi)
root_solver = newton_ndim(
root3d,
[g00, abs(g01), g02],
args=(delta_type, vol_delta_type, phi, t_e**0.5, z_w),
pre_args=(0,),
final_args=(1,),
raise_on_fail=True,
)
u, delta_idx, delta = root_solver["g"][0], root_solver["g"][1], root_solver["g"][1]
return u, delta_idx, delta
================================================
FILE: python/rateslib/volatility/fx/sabr.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations # type hinting
from datetime import datetime, timezone
from typing import TYPE_CHECKING
from uuid import uuid4
import numpy as np
from pandas import Series
from rateslib import defaults
from rateslib.dual import (
Dual,
Dual2,
Variable,
dual_exp,
dual_inv_norm_cdf,
dual_log,
dual_norm_cdf,
set_order_convert,
)
from rateslib.dual.utils import _dual_float, _to_number
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import FXDeltaMethod
from rateslib.fx import FXForwards
from rateslib.mutability import (
_clear_cache_post,
_new_state_post,
_validate_states,
_WithCache,
_WithState,
)
from rateslib.scheduling import get_calendar
from rateslib.volatility.fx.base import _BaseFXSmile
from rateslib.volatility.fx.utils import (
_FXSabrSurfaceMeta,
_FXSmileMeta,
)
from rateslib.volatility.utils import (
_SabrModel,
_SabrSmileNodes,
_surface_index_left,
_t_var_interp_d_sabr_d_k_or_f,
_validate_weights,
)
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
CalInput,
DualTypes,
DualTypes_,
Number,
Sequence,
datetime_,
int_,
str_,
)
UTC = timezone.utc
class FXSabrSmile(_BaseFXSmile):
r"""
Create an *FX Volatility Smile* at a given expiry indexed by strike using SABR parameters.
Parameters
----------
nodes: dict[str, float]
The parameters for the SABR model. Keys must be *'alpha', 'beta', 'rho', 'nu'*. See below.
eval_date: datetime
Acts as the initial node of a *Curve*. Should be assigned today's immediate date.
expiry: datetime
The expiry date of the options associated with this *Smile*
id: str, optional
The unique identifier to distinguish between *Smiles* in a multicurrency framework
and/or *Surface*.
delivery_lag: int, optional
The number of business days after expiry that the physical settlement of the FX
exchange occurs. Uses ``defaults.fx_delivery_lag``. Used in determination of ATM forward
rates.
calendar : Cal, UnionCal, NamedCal, str, optional
The holiday calendar object to use for FX delivery day determination. If str, looks up
named calendar from static data.
pair : str, optional
The FX currency pair used to determine ATM forward rates.
ad: int, optional
Sets the automatic differentiation order. Defines whether to convert node
values to float, :class:`~rateslib.dual.Dual` or
:class:`~rateslib.dual.Dual2`. It is advised against
using this setting directly. It is mainly used internally.
Notes
-----
The keys for ``nodes`` are described as the following:
- ``alpha``: The initial volatility parameter (e.g. 0.10 for 10%) of the SABR model,
in (0, inf).
- ``beta``: The scaling parameter between normal (0) and lognormal (1)
of the SABR model in [0, 1].
- ``rho``: The correlation between spot and volatility of the SABR model,
e.g. -0.10, in [-1.0, 1.0)
- ``nu``: The volatility of volatility parameter of the SABR model, e.g. 0.80.
The parameters :math:`\alpha, \rho, \nu` will be calibrated/mutated by
a :class:`~rateslib.solver.Solver` object. These should be entered as *float* and the argument
``ad`` can be used to automatically tag these as variables.
The parameter :math:`\beta` will **not** be calibrated/mutated by a
:class:`~rateslib.solver.Solver`. This value can be entered either as a *float*, or a
:class:`~rateslib.dual.Variable` to capture exogenous sensivities.
The arguments ``delivery_lag``, ``calendar`` and ``pair`` are only required if using an
:class:`~rateslib.fx.FXForwards` object to forecast ATM-forward FX rates for pricing. If
the forward rates are supplied directly as numeric values these arguments are not required.
Examples
--------
See :ref:`Constructing a Smile `.
"""
_ini_solve = 1
_meta: _FXSmileMeta
_id: str
_nodes: _SabrSmileNodes
@_new_state_post
def __init__(
self,
nodes: dict[str, DualTypes],
eval_date: datetime,
expiry: datetime,
delivery_lag: int_ = NoInput(0),
calendar: CalInput = NoInput(0),
pair: str_ = NoInput(0),
id: str | NoInput = NoInput(0), # noqa: A002
ad: int = 0,
):
self._id: str = (
uuid4().hex[:5] + "_" if isinstance(id, NoInput) else id
) # 1 in a million clash
delivery_lag_ = _drb(defaults.fx_delivery_lag, delivery_lag)
cal_ = get_calendar(calendar)
self._meta = _FXSmileMeta(
_eval_date=eval_date,
_expiry=expiry,
_plot_x_axis="strike",
_calendar=cal_,
_delivery_lag=delivery_lag_,
_delivery=cal_.lag_bus_days(expiry, delivery_lag_, True),
_pair=_drb(None, pair),
_delta_type=FXDeltaMethod.Forward, # unused for SABR Model
)
for _ in ["alpha", "beta", "rho", "nu"]:
if _ not in nodes:
raise ValueError(
f"'{_}' is a required SABR parameter that must be included in ``nodes``"
)
self._nodes: _SabrSmileNodes = _SabrSmileNodes(
_alpha=_to_number(nodes["alpha"]),
_beta=nodes["beta"], # type: ignore[arg-type]
_rho=_to_number(nodes["rho"]),
_nu=_to_number(nodes["nu"]),
)
self._set_ad_order(ad)
@property
def _n(self) -> int:
"""The number of pricing parameters in ``nodes``."""
return self.nodes.n
@property
def id(self) -> str:
"""A str identifier to name the *Smile* used in
:class:`~rateslib.solver.Solver` mappings."""
return self._id
@property
def meta(self) -> _FXSmileMeta: # type: ignore[override]
"""An instance of :class:`~rateslib.volatility.fx._FXSmileMeta`."""
return self._meta
@property
def nodes(self) -> _SabrSmileNodes:
"""An instance of :class:`~rateslib.volatility.fx._FXSabrSmileNodes`."""
return self._nodes
def get_from_strike(
self,
k: DualTypes,
f: DualTypes | FXForwards,
expiry: datetime_ = NoInput(0),
z_w: DualTypes_ = NoInput(0),
) -> tuple[DualTypes, DualTypes, DualTypes]:
"""
Given an option strike return the volatility.
Parameters
-----------
k: float, Dual, Dual2
The strike of the option.
f: float, Dual, Dual2
The forward rate at delivery of the option.
expiry: datetime, optional
Typically uses with *Surfaces*.
If given, performs a check to ensure consistency of valuations. Raises if expiry
requested and expiry of the *Smile* do not match. Used internally.
z_w: DualTypes, optional
Not used by *SabrSmile*
Returns
-------
null: float, Dual, Dual2, Variable
A *SabrSmile* has no requirement for a delta index.
vol: float, Dual, Dual2, Variable
The volatility value attained from lookup of the index on the *Smile*.
k: float, Dual, Dual2, Variable
The strike value associated with the option of the delta index.
Notes
-----
This function returns a tuple consistent with an
:class:`~rateslib.volatility.FXDeltaVolSmile`, however since the *FXSabrSmile* has no
concept of a `delta index` the first element returned is always zero and can be
effectively ignored.
"""
expiry = _drb(self._meta.expiry, expiry)
if self._meta.expiry != expiry:
raise ValueError(
"`expiry` of VolSmile and OptionPeriod do not match: calculation aborted "
"due to potential pricing errors.",
)
if isinstance(f, FXForwards):
if self._meta.pair is None:
raise ValueError(
"`FXSabrSmile` must be specified with a `pair` argument to use "
"`FXForwards` objects for forecasting ATM-forward FX rates."
)
f_: DualTypes = f.rate(self._meta.pair, self._meta.delivery)
elif isinstance(f, float | Dual | Dual2 | Variable):
f_ = f
else:
raise ValueError("`f` (ATM-forward FX rate) must be a value or FXForwards object.")
vol_ = _SabrModel._d_sabr_d_k_or_f(
_to_number(k),
_to_number(f_),
self._meta.t_expiry,
self.nodes.alpha,
self.nodes.beta,
self.nodes.rho,
self.nodes.nu,
derivative=0,
)[0]
return 0.0, vol_ * 100.0, k
def _d_sabr_d_k_or_f(
self,
k: DualTypes,
f: DualTypes | FXForwards,
expiry: datetime,
as_float: bool,
derivative: int,
) -> tuple[DualTypes, DualTypes | None]:
"""Get the derivative of sabr vol with respect to strike
as_float: bool
Allow expedited calculation by avoiding dual numbers. Useful during the root solving
phase of Newton iterations.
derivative: int
For with respect to `k` use 1, or `f` use 2.
"""
t_e = (expiry - self._meta.eval_date).days / 365.0
if isinstance(f, FXForwards):
f__: DualTypes = f.rate(self._meta.pair, self._meta.delivery)
else:
f__ = f # type: ignore[assignment]
if as_float:
k_: Number = _dual_float(k)
f_: Number = _dual_float(f__)
a_: Number = _dual_float(self.nodes.alpha)
b_: float | Variable = _dual_float(self.nodes.beta)
p_: Number = _dual_float(self.nodes.rho)
v_: Number = _dual_float(self.nodes.nu)
else:
k_ = _to_number(k)
f_ = _to_number(f__)
a_ = self.nodes.alpha #
b_ = self.nodes.beta
p_ = self.nodes.rho
v_ = self.nodes.nu
return _SabrModel._d_sabr_d_k_or_f(k_, f_, t_e, a_, b_, p_, v_, derivative)
def _get_node_vector(self) -> np.ndarray[tuple[int, ...], np.dtype[np.object_]]:
"""Get a 1d array of variables associated with nodes of this object updated by Solver"""
return np.array([self.nodes.alpha, self.nodes.rho, self.nodes.nu])
def _get_node_vars(self) -> tuple[str, ...]:
"""Get the variable names of elements updated by a Solver"""
return tuple(f"{self.id}{i}" for i in range(3))
@_new_state_post
@_clear_cache_post
def _set_node_vector(
self, vector: np.ndarray[tuple[int, ...], np.dtype[np.object_]], ad: int
) -> None:
"""
Update the node values in a Solver. ``ad`` in {1, 2}.
Only the real values in vector are used, dual components are dropped and restructured.
"""
DualType: type[Dual] | type[Dual2] = Dual if ad == 1 else Dual2
DualArgs: tuple[list[float]] | tuple[list[float], list[float]] = (
([],) if ad == 1 else ([], [])
)
base_obj = DualType(0.0, [f"{self.id}{i}" for i in range(3)], *DualArgs)
ident = np.eye(3)
self._nodes = _SabrSmileNodes(
_beta=self.nodes.beta,
_alpha=DualType.vars_from(
base_obj, # type: ignore[arg-type]
vector[0].real,
base_obj.vars,
ident[0, :].tolist(),
*DualArgs[1:],
),
_rho=DualType.vars_from(
base_obj, # type: ignore[arg-type]
vector[1].real,
base_obj.vars,
ident[1, :].tolist(),
*DualArgs[1:],
),
_nu=DualType.vars_from(
base_obj, # type: ignore[arg-type]
vector[2].real,
base_obj.vars,
ident[2, :].tolist(),
*DualArgs[1:],
),
)
@_clear_cache_post
def _set_ad_order(self, order: int) -> None:
"""This does not alter the beta node, since that is not varied by a Solver.
beta values that are AD sensitive should be given as a Variable and not Dual/Dual2.
"""
if order == getattr(self, "_ad", None):
return None
elif order not in [0, 1, 2]:
raise ValueError("`order` can only be in {0, 1, 2} for auto diff calcs.")
self._ad = order
self._nodes = _SabrSmileNodes(
_beta=self.nodes.beta,
_alpha=set_order_convert(self.nodes.alpha, order, [f"{self.id}0"]),
_rho=set_order_convert(self.nodes.rho, order, [f"{self.id}1"]),
_nu=set_order_convert(self.nodes.nu, order, [f"{self.id}2"]),
)
@_new_state_post
@_clear_cache_post
def update_node(self, key: str, value: DualTypes) -> None:
"""
Update a single node value on the *SABRSmile*.
Parameters
----------
key: str in {"alpha", "beta", "rho", "nu"}
The node value to update.
value: float, Dual, Dual2, Variable
Value to update on the *Smile*.
Returns
-------
None
Notes
-----
.. warning::
*Rateslib* is an object-oriented library that uses complex associations. Although
Python may not object to directly mutating attributes of a *Curve* instance, this
should be avoided in *rateslib*. Only use official ``update`` methods to mutate the
values of an existing *Curve* instance.
This class is labelled as a **mutable on update** object.
"""
params = ["alpha", "beta", "rho", "nu"]
if key not in params:
raise KeyError("`key` is not in ``nodes``.")
kwargs = {f"_{_}": getattr(self.nodes, _) for _ in params if _ != key}
kwargs.update({f"_{key}": value})
self._nodes = _SabrSmileNodes(**kwargs)
self._set_ad_order(self.ad)
# Plotting
def _plot(
self,
x_axis: str,
f: DualTypes | FXForwards | NoInput,
) -> tuple[list[float], list[DualTypes]]:
if isinstance(f, NoInput):
raise ValueError("`f` (ATM-forward FX rate) is required by `FXSabrSmile.plot`.")
elif isinstance(f, FXForwards):
if self._meta.pair is None:
raise ValueError(
"`FXSabrSmile` must be specified with a `pair` argument to use "
"`FXForwards` objects for forecasting ATM-forward FX rates."
)
f_: float = _dual_float(f.rate(self._meta.pair, self._meta.delivery))
elif isinstance(f, float | Dual | Dual2 | Variable):
f_ = _dual_float(f)
else:
raise ValueError("`f` (ATM-forward FX rate) must be a value or FXForwards object.")
v_ = _dual_float(self.get_from_strike(f_, f_)[1]) / 100.0
sq_t = self._meta.t_expiry_sqrt
x_low = _dual_float(
dual_exp(0.5 * v_**2 * sq_t**2 - dual_inv_norm_cdf(0.95) * v_ * sq_t) * f_
)
x_top = _dual_float(
dual_exp(0.5 * v_**2 * sq_t**2 - dual_inv_norm_cdf(0.05) * v_ * sq_t) * f_
)
x = np.linspace(x_low, x_top, 301, dtype=np.float64)
u: Sequence[float] = x / f_ # type: ignore[assignment]
y: list[DualTypes] = [self.get_from_strike(_, f_)[1] for _ in x]
if x_axis == "moneyness":
return list(u), y
elif x_axis == "delta":
# z_w = 1.0 # delta type is assumed to be 'forward' for SabrSmile
# z_u = 1.0 # delta type is assumed to be 'unadjusted' for SabrSmile
eta_1 = 0.5 # for same reason
sq_t = self._meta.t_expiry_sqrt
dn = [
-dual_log(u_) * 100.0 / (s_ * sq_t) + eta_1 * s_ * sq_t / 100.0
for u_, s_ in zip(u, y, strict=True)
]
delta_index = [dual_norm_cdf(-d_) for d_ in dn]
return delta_index, y # type: ignore[return-value]
else: # x_axis = "strike"
return list(x), y
class FXSabrSurface(_WithState, _WithCache[datetime, FXSabrSmile]):
r"""
Create an *FX Volatility Surface* parametrised by cross-sectional *Smiles* at different
expiries.
See also the :ref:`FX Vol Surfaces section in the user guide `.
Parameters
----------
expiries: list[datetime]
Datetimes representing the expiries of each cross-sectional *Smile*, in ascending order.
node_values: 2d-shape of float, Dual, Dual2
An array of values representing each *alpha, beta, rho, nu* node value on each
cross-sectional *Smile*. Should be an array of size: (length of ``expiries``, 4).
eval_date: datetime
Acts as the initial node of a *Curve*. Should be assigned today's immediate date.
weights: Series, optional
Weights used for temporal volatility interpolation. See notes.
delivery_lag: int, optional
The number of business days after expiry that the physical settlement of the FX
exchange occurs. Uses ``defaults.fx_delivery_lag``. Used in determination of ATM forward
rates for different expiries.
calendar : Cal, UnionCal, NamedCal, str, optional
The holiday calendar object to use for FX delivery day determination. If str, looks up
named calendar from static data.
pair : str, optional
The FX currency pair used to determine ATM forward rates.
id: str, optional
The unique identifier to label the *Surface* and its variables.
ad: int, optional
Sets the automatic differentiation order. Defines whether to convert node
values to float, :class:`~rateslib.dual.Dual` or
:class:`~rateslib.dual.Dual2`. It is advised against
using this setting directly. It is mainly used internally.
Notes
-----
See :class:`~rateslib.volatility.FXSabrSmile` for a description of SABR parameters for
*Smile* construction.
**Temporal Interpolation**
Interpolation along the expiry axis occurs by performing total linear variance interpolation
for a given *strike* measured on neighboring *Smiles*.
If ``weights`` are given this uses the scaling approach of forward volatility (as demonstrated
in Clark's *FX Option Pricing*) for calendar days (different options 'cuts' and timezone are
not implemented). A datetime indexed `Series` must be provided, where any calendar date that
is not included will be assigned the default weight of 1.0.
See :ref:`constructing FX volatility surfaces ` for more details.
**Extrapolation**
When an ``expiry`` is sought that is prior to the first parametrised *Smile expiry* or after the
final parametrised *Smile expiry* extrapolation is required. This is not recommended,
however. It would be wiser to create parameterised *Smiles* at *expiries* which suit those
one wishes to obtian values for.
When seeking an ``expiry`` beyond the final expiry, a new
:class:`~rateslib.volatility.SabrSmile` is created at that specific *expiry* using the
same SABR parameters as matching the final parametrised *Smile*. This will capture the
evolution of ATM-forward rates through time.
When seeking an ``expiry`` prior to the first expiry, the volatility found on the first *Smile*
will be used an interpolated, using total linear variance accooridng to the given ``weights``.
If ``weights`` are not used then this will return the same value as obtained from that
first parametrised *Smile*. This does not account any evolution of ATM-forward rates.
"""
_ini_solve = 0
_mutable_by_association = True
_meta: _FXSabrSurfaceMeta
_id: str
_smiles: list[FXSabrSmile]
def __init__(
self,
expiries: list[datetime],
node_values: list[DualTypes],
eval_date: datetime,
weights: Series[float] | NoInput = NoInput(0),
delivery_lag: int_ = NoInput(0),
calendar: CalInput = NoInput(0),
pair: str_ = NoInput(0),
id: str | NoInput = NoInput(0), # noqa: A002
ad: int = 0,
):
self._id: str = (
uuid4().hex[:5] + "_" if isinstance(id, NoInput) else id
) # 1 in a million clash
self._meta = _FXSabrSurfaceMeta(
_eval_date=eval_date,
_pair=_drb(None, pair),
_calendar=get_calendar(calendar),
_delivery_lag=_drb(defaults.fx_delivery_lag, delivery_lag),
_weights=_validate_weights(weights, eval_date, expiries),
_expiries=expiries,
)
node_values_: np.ndarray[tuple[int, ...], np.dtype[np.object_]] = np.asarray(node_values)
self._smiles = [
FXSabrSmile(
nodes=dict(zip(["alpha", "beta", "rho", "nu"], node_values_[i, :], strict=True)),
expiry=expiry,
eval_date=self._meta.eval_date,
delivery_lag=delivery_lag,
calendar=calendar,
pair=pair,
id=f"{self.id}_{i}_",
)
for i, expiry in enumerate(self.meta.expiries)
]
self._set_ad_order(ad) # includes csolve on each smile
self._set_new_state()
@property
def _n(self) -> int:
"""Number of pricing parameters of the *Surface*."""
return len(self.meta.expiries) * 3 # alpha, beta, rho
@property
def id(self) -> str:
"""A str identifier to name the *Surface* used in
:class:`~rateslib.solver.Solver` mappings."""
return self._id
@property
def meta(self) -> _FXSabrSurfaceMeta:
"""An instance of :class:`~rateslib.volatility.fx._FXSabrSurfaceMeta`."""
return self._meta
@property
def ad(self) -> int:
"""Int in {0,1,2} describing the AD order associated with the *Surface*."""
return self._ad
@property
def smiles(self) -> list[FXSabrSmile]:
"""A list of cross-sectional :class:`FXSabrSmile` instances."""
return self._smiles
def _get_composited_state(self) -> int:
return hash(sum(smile._state for smile in self.smiles))
def _validate_state(self) -> None:
if self._state != self._get_composited_state():
# If any of the associated curves have been mutated then the cache is invalidated
self._clear_cache()
self._set_new_state()
@_clear_cache_post
def _set_ad_order(self, order: int) -> None:
self._ad = order
for smile in self.smiles:
smile._set_ad_order(order)
@_new_state_post
@_clear_cache_post
def _set_node_vector(
self, vector: np.ndarray[tuple[int, ...], np.dtype[np.object_]], ad: int
) -> None:
m = 3
for i in range(int(len(vector) / m)):
# smiles are indexed by expiry, shortest first
self.smiles[i]._set_node_vector(vector[i * m : i * m + m], ad)
def _get_node_vector(self) -> np.ndarray[tuple[int, ...], np.dtype[np.object_]]:
"""Get a 1d array of variables associated with nodes of this object updated by Solver"""
return np.array([list(_._get_node_vector()) for _ in self.smiles]).ravel()
def _get_node_vars(self) -> tuple[str, ...]:
"""Get the variable names of elements updated by a Solver"""
vars_: tuple[str, ...] = ()
for smile in self.smiles:
vars_ += tuple(f"{smile.id}{i}" for i in range(3))
return vars_
# @_validate_states: not required because state is validated by interior function
def get_from_strike(
self,
k: DualTypes,
f: DualTypes | FXForwards,
expiry: datetime,
z_w: DualTypes | NoInput = NoInput(0),
) -> tuple[DualTypes, DualTypes, DualTypes]:
"""
Given an option strike return the volatility.
Parameters
-----------
k: float, Dual, Dual2
The strike of the option.
f: float, Dual, Dual2
The forward rate at delivery of the option.
expiry: datetime, optional
The expiry of the option. Required for temporal interpolation between
cross-sectional *Smiles*.
z_w: DualTypes, optional
Not used by *SabrSurface*
Returns
-------
null: float, Dual, Dual2, Variable
A *SabrSurface* has no requirement for a delta index.
vol: float, Dual, Dual2, Variable
The volatility value attained from lookup of the index on the *Smile*.
k: float, Dual, Dual2, Variable
The strike value associated with the option of the delta index.
Notes
-----
This function returns a tuple consistent with an
:class:`~rateslib.volatility.FXDeltaVolSmile`, however since the *FXSabrSmile* has no
concept of a `delta index` the first element returned is always zero and can be
effectively ignored.
"""
vol_ = self._d_sabr_d_k_or_f(k, f, expiry, as_float=False, derivative=0)[0]
return 0.0, vol_ * 100.0, k
@_validate_states
def _d_sabr_d_k_or_f(
self,
k: DualTypes,
f: DualTypes | FXForwards,
expiry: datetime,
as_float: bool,
derivative: int,
) -> tuple[DualTypes, DualTypes | None]:
expiry_posix = expiry.replace(tzinfo=UTC).timestamp()
e_idx, e_next_idx = _surface_index_left(self.meta.expiries_posix, expiry_posix)
if expiry == self.meta.expiries[0]:
# expiry matches the expiry on the first Smile, call that method directly.
return self.smiles[0]._d_sabr_d_k_or_f(k, f, expiry, as_float, derivative)
elif abs(expiry_posix - self.meta.expiries_posix[e_next_idx]) < 1e-10:
# expiry matches an expiry of a known Smile (not the first), call method directly.
return self.smiles[e_next_idx]._d_sabr_d_k_or_f(k, f, expiry, as_float, derivative)
elif expiry_posix > self.meta.expiries_posix[-1]:
# expiry is beyond that of the last known Smile. Construct a new Smile at the expiry
# by using the SABR parameters of the final Smile. (allows for ATM-forward calculation)
smile = FXSabrSmile(
nodes={
"alpha": self.smiles[e_next_idx].nodes.alpha,
"beta": self.smiles[e_next_idx].nodes.beta,
"rho": self.smiles[e_next_idx].nodes.rho,
"nu": self.smiles[e_next_idx].nodes.nu,
},
eval_date=self._meta.eval_date,
expiry=expiry,
ad=self.ad,
pair=NoInput(0) if self._meta.pair is None else self._meta.pair,
delivery_lag=self._meta.delivery_lag,
calendar=self._meta.calendar,
id=self.smiles[e_next_idx].id + "_ext",
)
return smile._d_sabr_d_k_or_f(k, f, expiry, as_float, derivative)
elif expiry <= self._meta.eval_date:
raise ValueError("`expiry` before the `eval_date` of the Surface is invalid.")
elif expiry_posix < self.meta.expiries_posix[0]:
# expiry is before the expiry of the first known Smile.
# calculate the vol as if it were for expiry on the first Smile and then use
# temporal interpolation (including weights) to obtain an adjusted volatility.
vol_, dvol_k_or_f = self.smiles[0]._d_sabr_d_k_or_f(
k=k,
f=f,
expiry=self.smiles[0]._meta.expiry,
as_float=as_float,
derivative=derivative,
)
return _t_var_interp_d_sabr_d_k_or_f(
expiries=self.meta.expiries,
expiries_posix=self.meta.expiries_posix,
expiry=expiry,
expiry_posix=expiry_posix,
expiry_index=e_idx,
expiry_next_index=e_next_idx,
eval_posix=self._meta.eval_posix,
weights_cum=self.meta.weights_cum,
vol1=vol_,
dvol1_dk=dvol_k_or_f, # type: ignore[arg-type]
vol2=vol_,
dvol2_dk=dvol_k_or_f, # type: ignore[arg-type]
bounds_flag=-1,
derivative=derivative > 0,
)
else:
# expiry is sandwiched between two known Smile expiries.
# Calculate the vol for strike on either of these Smiles and then interpolate
# for the correct expiry, including weights.
ls, rs = self.smiles[e_idx], self.smiles[e_next_idx] # left_smile, right_smile
if not isinstance(f, FXForwards):
raise ValueError(
"`f` must be supplied as `FXForwards` in order to calculate"
"dynamic ATM-forward rates for temporally-interpolated SABR volatility."
)
lvol, d_lvol_dk_or_f = ls._d_sabr_d_k_or_f(
k=k, f=f, expiry=ls._meta.expiry, as_float=as_float, derivative=derivative
)
rvol, d_rvol_dk_or_f = rs._d_sabr_d_k_or_f(
k=k, f=f, expiry=rs._meta.expiry, as_float=as_float, derivative=derivative
)
return _t_var_interp_d_sabr_d_k_or_f(
expiries=self.meta.expiries,
expiries_posix=self.meta.expiries_posix,
expiry=expiry,
expiry_posix=expiry_posix,
expiry_index=e_idx,
expiry_next_index=e_next_idx,
eval_posix=self._meta.eval_posix,
weights_cum=self.meta.weights_cum,
vol1=lvol,
dvol1_dk=d_lvol_dk_or_f, # type: ignore[arg-type]
vol2=rvol,
dvol2_dk=d_rvol_dk_or_f, # type: ignore[arg-type]
bounds_flag=0,
derivative=derivative > 0,
)
================================================
FILE: python/rateslib/volatility/fx/utils.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations # type hinting
import json
from dataclasses import dataclass
from datetime import datetime, timezone
from functools import cached_property
from typing import TYPE_CHECKING, TypeAlias
from pandas import Series
from rateslib.dual import (
Dual,
Dual2,
Variable,
dual_exp,
dual_inv_norm_cdf,
set_order_convert,
)
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import (
NoInput,
)
from rateslib.enums.parameters import FXDeltaMethod
from rateslib.splines import PPSplineDual, PPSplineDual2, PPSplineF64
if TYPE_CHECKING:
from rateslib.local_types import Any, CalTypes
DualTypes: TypeAlias = "float | Dual | Dual2 | Variable" # if not defined causes _WithCache failure
UTC = timezone.utc
TERMINAL_DATE = datetime(2100, 1, 1)
@dataclass
class _FXSmileMeta:
"""A container of meta data associated with a :class:`~rateslib.volatility._BaseFXSmile`
used to make calculations."""
_eval_date: datetime
_expiry: datetime
_plot_x_axis: str
_delta_type: FXDeltaMethod
_pair: str | None
_calendar: CalTypes
_delivery: datetime
_delivery_lag: int
@property
def eval_date(self) -> datetime:
"""Evaluation date of the *Smile*."""
return self._eval_date
@property
def expiry(self) -> datetime:
"""Expiry date of the options priced by this *Smile*"""
return self._expiry
@property
def plot_x_axis(self) -> str:
"""The default ``x_axis`` parameter passed to
:meth:`~rateslib.volatility._BaseSmile.plot`"""
return self._plot_x_axis
@property
def delta_type(self) -> FXDeltaMethod:
"""The delta type of the delta indexes associated with the ``nodes`` of the *Smile*."""
return self._delta_type
@property
def calendar(self) -> CalTypes:
"""Settlement calendar used to determine ``delivery`` from ``expiry``."""
return self._calendar
@property
def pair(self) -> str | None:
"""FX pair against which options priced by this *Smile* settle against."""
return self._pair
@cached_property
def t_expiry(self) -> float:
"""Calendar days from eval to expiry divided by 365."""
return (self._expiry - self._eval_date).days / 365.0
@cached_property
def t_expiry_sqrt(self) -> float:
"""Square root of ``t_expiry``."""
ret: float = self.t_expiry**0.5
return ret
@property
def delivery(self) -> datetime:
"""Delivery date of the forward FX rate applicable to options priced by this *Smile*"""
return self._delivery
@property
def delivery_lag(self) -> int:
"""Business day settlement lag between ``expiry`` and ``delivery``."""
return self._delivery_lag
class _FXDeltaVolSmileNodes:
"""
A container for data relating to interpolating the `nodes` of a
:class:`~rateslib.volatility.FXDeltaVolSmile`.
"""
_nodes: dict[float, DualTypes]
_meta: _FXSmileMeta
_spline: _FXDeltaVolSpline
def __init__(self, nodes: dict[float, DualTypes], meta: _FXSmileMeta) -> None:
self._nodes = nodes
self._meta = meta
if self.meta.delta_type in [
FXDeltaMethod.SpotPremiumAdjusted,
FXDeltaMethod.ForwardPremiumAdjusted,
]:
vol: DualTypes = self.values[-1] / 100.0
upper_bound: float = _dual_float(
dual_exp(
vol * self.meta.t_expiry_sqrt * (3.75 - 0.5 * vol * self.meta.t_expiry_sqrt),
)
)
else:
upper_bound = 1.0
if self.n in [1, 2]:
t = [0.0] * 4 + [upper_bound] * 4
else:
t = [0.0] * 4 + self.keys[1:-1] + [upper_bound] * 4
self._spline = _FXDeltaVolSpline(t=t)
def __eq__(self, other: Any) -> bool:
if not isinstance(other, _FXDeltaVolSmileNodes):
return False
return self._nodes == other._nodes and self._meta == other._meta
@property
def plot_upper_bound(self) -> float:
"""The right side delta index bound used in a *'delta' x-axis* plot."""
if self.meta.delta_type in [
FXDeltaMethod.SpotPremiumAdjusted,
FXDeltaMethod.ForwardPremiumAdjusted,
]:
# upper_bound = exp(vol * t_expiry_sqrt * (3.75 - 0.5 * vol * t_expiry_sqrt)
# plot_upper_bound = exp(vol * t_expiry_sqrt * (3.25 - 0.5 * vol * t_expiry_sqrt)
return (
self.spline.t[-1] - _dual_float(self.values[-1]) * self.meta.t_expiry_sqrt / 200.0
)
else:
return 1.0
@property
def meta(self) -> _FXSmileMeta:
"""An instance of :class:`~rateslib.volatility.fx._FXSmileMeta`."""
return self._meta
@property
def nodes(self) -> dict[float, DualTypes]:
"""The initial nodes dict passed for construction of this class."""
return self._nodes
@cached_property
def keys(self) -> list[float]:
"""A list of the delta index keys in ``nodes``."""
return list(self.nodes.keys())
@cached_property
def values(self) -> list[DualTypes]:
"""A list of the delta index values in ``nodes``."""
return list(self.nodes.values())
@property
def n(self) -> int:
"""The number of pricing parameters in ``nodes``."""
return len(self.keys)
@property
def spline(self) -> _FXDeltaVolSpline:
"""An instance of :class:`~rateslib.volatility.fx._FXDeltaVolSpline`."""
return self._spline
class _FXDeltaVolSpline:
"""
A container for data relating to interpolating the `nodes` of
a :class:`~rateslib.volatility.FXDeltaVolSmile` using a cubic PPSpline.
"""
_t: list[float]
_spline: PPSplineF64 | PPSplineDual | PPSplineDual2
def __init__(self, t: list[float]) -> None:
self._t = t
self._spline = PPSplineF64(4, [0.0] * 5, None) # placeholder: csolve will reengineer
@property
def t(self) -> list[float]:
"""The knot sequence of the PPSpline."""
return self._t
@property
def spline(self) -> PPSplineF64 | PPSplineDual | PPSplineDual2:
"""An instance of :class:`~rateslib.splines.PPSplineF64`,
:class:`~rateslib.splines.PPSplineDual` or :class:`~rateslib.splines.PPSplineDual2`"""
return self._spline
def _csolve_n_other(
self, nodes: _FXDeltaVolSmileNodes, ad: int
) -> tuple[list[float], list[DualTypes], int, int]:
"""
Solve a spline with more than one node value.
Premium adjusted delta types have an unbounded right side delta index so a derivative of
0 is applied to the spline as a boundary condition.
Premium unadjusted delta types have a right side delta index approximately equal to 1.0.
Use a natural spline boundary condition here.
"""
tau = nodes.keys.copy()
y = nodes.values.copy()
# left side constraint
tau.insert(0, self.t[0])
y.insert(0, set_order_convert(0.0, ad, None))
left_n = 2 # natural spline
# right side constraint
tau.append(self.t[-1])
y.append(set_order_convert(0.0, ad, None))
if nodes.meta.delta_type in [
FXDeltaMethod.SpotPremiumAdjusted,
FXDeltaMethod.ForwardPremiumAdjusted,
]:
right_n = 1 # 1st derivative at zero
else:
right_n = 2 # natural spline
return tau, y, left_n, right_n
def csolve(self, nodes: _FXDeltaVolSmileNodes, ad: int) -> None:
"""
Construct a spline of appropriate AD order and solve the spline coefficients for the
given ``nodes``.
Parameters
----------
nodes: _FXDeltaVolSmileNodes
Required information for constructing a PPSpline.
ad: int
The AD order of the constructed PPSPline.
Returns
-------
None
"""
if ad == 0:
Spline: type[PPSplineF64] | type[PPSplineDual] | type[PPSplineDual2] = PPSplineF64
elif ad == 1:
Spline = PPSplineDual
else:
Spline = PPSplineDual2
if nodes.n == 1:
# one node defines a flat line, all spline coefficients are the equivalent value.
self._spline = Spline(4, self.t, nodes.values * 4) # type: ignore[arg-type]
else:
tau, y, left_n, right_n = self._csolve_n_other(nodes, ad)
self._spline = Spline(4, self.t, None)
self._spline.csolve(tau, y, left_n, right_n, False) # type: ignore[arg-type]
def to_json(self) -> str:
"""
Serialize this object to JSON format.
The object can be deserialized using the :meth:`~rateslib.serialization.from_json` method.
Returns
-------
str
"""
obj = dict(
PyNative=dict(
_FXDeltaVolSpline=dict(
t=self.t,
)
)
)
return json.dumps(obj)
@classmethod
def _from_json(cls, loaded_json: dict[str, Any]) -> _FXDeltaVolSpline:
return _FXDeltaVolSpline(
t=loaded_json["t"],
)
def __eq__(self, other: Any) -> bool:
"""CurveSplines are considered equal if their knot sequence and endpoints are equivalent.
For the same nodes this will resolve to give the same spline coefficients.
"""
if not isinstance(other, _FXDeltaVolSpline):
return False
else:
return self.t == other.t
@dataclass(frozen=True)
class _FXDeltaVolSurfaceMeta:
"""
An immutable container of meta data associated with a
:class:`~rateslib.volatility.FXDeltaVolSurface` used to make calculations.
"""
_eval_date: datetime
_delta_type: FXDeltaMethod
_plot_x_axis: str
_weights: Series[float] | None
_delta_indexes: list[float]
_expiries: list[datetime]
def __post_init__(self) -> None:
for idx in range(1, len(self.expiries)):
if self.expiries[idx - 1] >= self.expiries[idx]:
raise ValueError("Surface `expiries` are not sorted or contain duplicates.\n")
@property
def delta_indexes(self) -> list[float]:
"""A list of delta indexes associated with each cross-sectional
:class:`~rateslib.volatility.FXDeltaVolSmile`."""
return self._delta_indexes
@property
def expiries(self) -> list[datetime]:
"""A list of the expiries of each cross-sectional
:class:`~rateslib.volatility.FXDeltaVolSmile`."""
return self._expiries
@cached_property
def expiries_posix(self) -> list[float]:
"""A list of the unix timestamps of each date in ``expiries``."""
return [_.replace(tzinfo=UTC).timestamp() for _ in self.expiries]
@property
def weights(self) -> Series[float] | None:
"""Weights used for temporal volatility interpolation."""
return self._weights
@cached_property
def weights_cum(self) -> Series[float] | None:
"""Weight adjusted time to expiry (in calendar days) per date for temporal volatility
interpolation."""
if self.weights is None:
return None
else:
return self.weights.cumsum()
@property
def eval_date(self) -> datetime:
"""Evaluation date of the *Surface*."""
return self._eval_date
@property
def eval_posix(self) -> float:
"""The unix timestamp of the ``eval_date``."""
return self.eval_date.replace(tzinfo=UTC).timestamp()
@property
def delta_type(self) -> FXDeltaMethod:
"""The delta type of the delta indexes associated with the ``nodes`` of each
cross-sectional *Smile*."""
return self._delta_type
@property
def plot_x_axis(self) -> str:
"""The default ``x_axis`` parameter passed to
:meth:`~rateslib.volatility._BaseSmile.plot`"""
return self._plot_x_axis
@dataclass(frozen=True)
class _FXSabrSurfaceMeta:
"""
An immutable container of meta data associated with a
:class:`~rateslib.volatility.FXSabrSurface` used to make calculations.
"""
_eval_date: datetime
_pair: str | None
_calendar: CalTypes
_delivery_lag: int
_weights: Series[float] | None
_expiries: list[datetime]
def __post_init__(self) -> None:
for idx in range(1, len(self.expiries)):
if self.expiries[idx - 1] >= self.expiries[idx]:
raise ValueError("Surface `expiries` are not sorted or contain duplicates.\n")
@property
def weights(self) -> Series[float] | None:
"""Weights used for temporal volatility interpolation."""
return self._weights
@cached_property
def weights_cum(self) -> Series[float] | None:
"""Weight adjusted time to expiry (in calendar days) per date for temporal volatility
interpolation."""
if self.weights is None:
return None
else:
return self.weights.cumsum()
@property
def expiries(self) -> list[datetime]:
"""A list of the expiries of each cross-sectional
:class:`~rateslib.volatility.FXSabrSmile`."""
return self._expiries
@cached_property
def expiries_posix(self) -> list[float]:
"""A list of the unix timestamps of each date in ``expiries``."""
return [_.replace(tzinfo=UTC).timestamp() for _ in self.expiries]
@cached_property
def eval_posix(self) -> float:
"""The unix timestamp of the ``eval_date``."""
return self.eval_date.replace(tzinfo=UTC).timestamp()
@property
def delivery_lag(self) -> int:
"""Business day settlement lag between ``expiry`` and ``delivery``."""
return self._delivery_lag
@property
def eval_date(self) -> datetime:
"""Evaluation date of the *Surface*."""
return self._eval_date
@property
def pair(self) -> str | None:
"""FX pair against which options priced by this *Surface* settle against."""
return self._pair
@property
def calendar(self) -> CalTypes:
"""Settlement calendar used to determine ``delivery`` from ``expiry``."""
return self._calendar
def _delta_type_constants(
delta_type: FXDeltaMethod, w: DualTypes | NoInput, u: DualTypes | NoInput
) -> tuple[float, DualTypes, DualTypes]:
"""
Get the values: (eta, z_w, z_u) for the type of expressed delta
w: should be input as w_deli / w_spot
u: should be input as K / f_d
"""
if delta_type == FXDeltaMethod.Forward:
return 0.5, 1.0, 1.0
elif delta_type == FXDeltaMethod.Spot:
return 0.5, w, 1.0 # type: ignore[return-value]
elif delta_type == FXDeltaMethod.ForwardPremiumAdjusted:
return -0.5, 1.0, u # type: ignore[return-value]
else: # "spot_pa"
return -0.5, w, u # type: ignore[return-value]
def _moneyness_from_atm_delta_closed_form(vol: DualTypes, t_e: DualTypes) -> DualTypes:
"""
Return `u` given premium unadjusted `delta`, of either 'spot' or 'forward' type.
This function preserves AD.
Book2: section "Strike and Volatility implied from ATM delta" (FXDeltaVolSMile)
Parameters
-----------
vol: float, Dual, Dual2
The volatility (in %, e.g. 10.0) to use in calculations.
t_e: float,
The time to expiry.
Returns
-------
float, Dual or Dual2
"""
return dual_exp((vol / 100.0) ** 2 * t_e / 2.0)
def _moneyness_from_delta_closed_form(
delta: DualTypes,
vol: DualTypes,
t_e: DualTypes,
z_w_0: DualTypes,
phi: float,
) -> DualTypes:
"""
Return `u` given premium unadjusted `delta`, of either 'spot' or 'forward' type.
This function preserves AD.
Book2: section "Strike and Volatility implied from a given option's delta" (FXDeltaVolSmile)
Parameters
-----------
delta: float
The input unadjusted delta for which to determine the moneyness for.
vol: float, Dual, Dual2
The volatility (in %, e.g. 10.0) to use in calculations.
t_e: float, Dual, Dual2
The time to expiry.
z_w_0: float, Dual, Dual2
The scalar for 'spot' or 'forward' delta types.
If 'forward', this should equal 1.0.
If 'spot', this should be :math:`w_deli / w_spot`.
phi: float
1.0 if is call, -1.0 if is put.
Returns
-------
float, Dual or Dual2
"""
vol_sqrt_t = vol * t_e**0.5 / 100.0
_: DualTypes = dual_inv_norm_cdf(phi * delta / z_w_0)
_ = dual_exp(vol_sqrt_t * (0.5 * vol_sqrt_t - phi * _))
return _
================================================
FILE: python/rateslib/volatility/ir/__init__.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from rateslib.volatility.ir.base import _BaseIRCube, _BaseIRSmile
from rateslib.volatility.ir.sabr import IRSabrCube, IRSabrSmile
from rateslib.volatility.ir.spline import (
IRSplineCube,
IRSplineSmile,
_IRSplineSmileNodes,
_IRVolSpline,
)
from rateslib.volatility.ir.utils import _IRCubeMeta, _IRSmileMeta, _IRVolPricingParams
__all__ = [
"IRSabrSmile",
"IRSplineSmile",
"IRSabrCube",
"IRSplineCube",
"_BaseIRSmile",
"_BaseIRCube",
"_IRSmileMeta",
"_IRCubeMeta",
"_IRVolPricingParams",
"_IRSplineSmileNodes",
"_IRVolSpline",
]
IRVols = IRSabrSmile | IRSabrCube | IRSplineSmile | IRSplineCube
IRVolObj = (IRSabrSmile, IRSabrCube, IRSplineSmile, IRSplineCube)
================================================
FILE: python/rateslib/volatility/ir/base.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations # type hinting
from abc import ABC, abstractmethod
from datetime import datetime, timezone
from typing import TYPE_CHECKING, Generic, NoReturn, TypeAlias, TypeVar
import numpy as np
from rateslib.curves.interpolation import index_left
from rateslib.default import PlotOutput, plot
from rateslib.dual import Dual, Dual2, Variable
from rateslib.dual.utils import _dual_float
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import OptionPricingModel
from rateslib.mutability import _clear_cache_post, _new_state_post, _WithCache, _WithState
from rateslib.volatility.ir.utils import (
_bilinear_interp,
_get_ir_expiry,
_get_ir_tenor,
_IRCubeMeta,
_IRSmileMeta,
)
UTC = timezone.utc
T = TypeVar("T")
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
Arr1dObj,
Arr3dObj,
CurvesT_,
DualTypes_,
Iterable,
Sequence,
_IRVolPricingParams,
datetime_,
float_,
)
DualTypes: TypeAlias = "float | Dual | Dual2 | Variable" # if not defined causes _WithCache failure
class _WithMutability(ABC):
"""Abstract base class containing the necessary methods to interoperate with a
:class:`~rateslib.solver.Solver`."""
# Get methods allow the Solver to extract and order the parameters of the pricing object.
@property
@abstractmethod
def _n(self) -> int:
"""The number of parameters associated with the pricing object."""
pass
@property
@abstractmethod
def _ini_solve(self) -> int:
"""The number of parameters that are initially ignored by
:class:`~rateslib.solver.Solver` and not mutated during iterations."""
pass
@abstractmethod
def _get_node_vector(self) -> np.ndarray[tuple[int, ...], np.dtype[np.object_]]:
"""Get a 1d array of variables associated with nodes of this object updated by Solver"""
pass
@abstractmethod
def _get_node_vars(self) -> tuple[str, ...]:
"""Get the variable names of elements updated by a Solver"""
pass
# Set methods allow the Solver to make mutable updates to the pricing object
# Direct methods implement the underlying operations, wrapped methods (which are
# automatically provided) control additionals such as cache clearing and state management.
@abstractmethod
def _set_node_vector_direct(
self, vector: np.ndarray[tuple[int, ...], np.dtype[np.object_]], ad: int
) -> None:
"""
Allow Solver to update parameter values of the pricing object.
``ad`` in {1, 2}.
Only the real values in vector are used, dual components are dropped and restructured.
"""
pass
@abstractmethod
def _set_ad_order_direct(self, order: int | None) -> None:
"""
Update the parameter values of the pricing object.
None: Do nothing regardless of the AD order of the parameters as stated.
0: Convert all values to float.
1: Convert to Dual with vars ordered by `_get_node_vars`
2: Convert to Dual2 with vars ordered by `_get_node_vars`
"""
pass
@abstractmethod
def _set_single_node(self, key: Any, value: DualTypes) -> None:
"""
Update a single named node on the pricing object.
"""
pass
@_new_state_post
@_clear_cache_post
def _set_node_vector(
self, vector: np.ndarray[tuple[int, ...], np.dtype[np.object_]], ad: int
) -> None:
"""
Update the node values in a Solver. ``ad`` in {1, 2}.
Only the real values in vector are used, dual components are dropped and restructured.
"""
return self._set_node_vector_direct(vector, ad)
@_clear_cache_post
def _set_ad_order(self, order: int | None) -> None:
"""
When pricing objects are mutated by a Solver this method should convert pricing
parameters to DualTypes with `vars` as defined by the solver, i.e. overwriting
any user specific DualTypes.
If `order` is *None*, this method will do nothing.
If `order` is in [0, 1, 2] and that matches the existing AD order of the object then
nothing is also done.
If `order` is in [0, 1, 2] and that represents a new AD order then values are converted
using `vars` configured and expected by a Solver.
If `order` is in [-1, -2] this forces a conversion to the appropriate order, even if the
object matches the requested AD order. I.e. user variables will be overridden regardless.
"""
return self._set_ad_order_direct(order)
@_new_state_post
@_clear_cache_post
def update_node(self, key: str, value: DualTypes) -> None:
"""
Update a single node value on the *Smile*.
Parameters
----------
key: str in {"alpha", "beta", "rho", "nu"}
The node value to update.
value: float, Dual, Dual2, Variable
Value to update on the *Smile*.
Returns
-------
None
Notes
-----
.. warning::
*Rateslib* is an object-oriented library that uses complex associations. Although
Python may not object to directly mutating attributes of a *Curve* instance, this
should be avoided in *rateslib*. Only use official ``update`` methods to mutate the
values of an existing *Curve* instance.
This class is labelled as a **mutable on update** object.
"""
return self._set_single_node(key, value)
class _BaseIRSmile(_WithState, _WithCache[float, DualTypes], ABC):
"""
Abstract base class for implementing *IR Volatility Smiles*.
Any :class:`~rateslib.volatility._BaseIRSmile` is required to implement the following
**properties**:
- **id** (str)
- **ad** (int)
- **meta** (:class:`~rateslib.volatility._IRSmileMeta`)
- **pricing_params** (Iterable[float | Dual | Dual2 | Variable])
Any :class:`~rateslib.volatility._BaseIRSmile` is required to implement the following
**methods**:
- **_plot(x_axis, f, y_axis, curves)**
- **_get_from_strike(k, f, curves)**
- **_d_sigma_d_f(k, f)**
The directly provided methods with these implementations are:
- :meth:`~rateslib.volatility._BaseIRSmile.plot`.
- :meth:`~rateslib.volatility._BaseIRSmile.get_from_strike`.
"""
_default_plot_x_axis: str
@property
@abstractmethod
def id(self) -> str:
"""
A str identifier to name the *Smile* used in :class:`~rateslib.solver.Solver` mappings.
"""
pass
@property
@abstractmethod
def ad(self) -> int:
"""Int in {0,1,2} describing the AD order associated with the
:class:`~rateslib.volatility._BaseIRSmile`."""
pass
@property
@abstractmethod
def meta(self) -> _IRSmileMeta:
"""An instance of :class:`~rateslib.volatility.ir.utils._IRSmileMeta`."""
pass
@property
@abstractmethod
def pricing_params(self) -> Iterable[float | Dual | Dual2 | Variable]:
"""An ordered set of pricing parameters associated with the
:class:`~rateslib.volatility._BaseIRSmile`."""
pass
@abstractmethod
def _get_from_strike(
self,
k: DualTypes,
f: DualTypes,
) -> _IRVolPricingParams:
"""
Given an option strike and forward rate return the volatility.
Note this function does not validate the expiry and tenor of the intended option.
Parameters
-----------
k: float, Dual, Dual2
The strike of the option.
f: float, Dual, Dual2
The forward rate at delivery of the option.
Returns
-------
_IRVolPricingParams
"""
pass
@abstractmethod
def _plot(
self,
x_axis: str,
f: float,
y_axis: str,
tgt_shift: float_,
) -> tuple[Iterable[float], Iterable[float]]:
"""Perform the necessary calculation to derive (x,y) coordinates for a chart."""
pass
@abstractmethod
def _d_sigma_d_f(
self,
k: DualTypes,
f: DualTypes,
) -> DualTypes:
"""
Calculate the derivative :math:`\frac{d \\sigma}{d f}` for a generic spline model.
"""
pass
def _plot_conversion(
self,
y_axis: str,
x_axis: str,
f: float,
shift: float,
tgt_shift: float,
x: Iterable[float],
y: Iterable[float],
) -> tuple[Iterable[float], Iterable[float]]:
# def _hagan_convert(k: DualTypes, sigma_b: DualTypes) -> DualTypes:
# if abs(f - k) < 1e-13:
# center = f + shf
# else:
# center = (f - k) / dual_log((f + shf) / (k + shf))
# return sigma_b * center * (1 - sigma_b ** 2 * sq_t / 24)
match (self.meta.pricing_model, y_axis.lower()):
case (OptionPricingModel.Black76, "black_vol"):
if shift == tgt_shift:
y_ = y
else:
y_ = [
_
* (((f + shift) * (k + shift)) / ((f + tgt_shift) * (k + tgt_shift))) ** 0.5
for _, k in zip(y, x, strict=True)
]
case (OptionPricingModel.Bachelier, "normal_vol"):
y_ = y
case (OptionPricingModel.Black76, "normal_vol"):
y_ = [
sigma_b * ((f + shift) * (k + shift)) ** 0.5
for (k, sigma_b) in zip(x, y, strict=True)
]
case (OptionPricingModel.Bachelier, "black_vol"):
y_ = [
sigma_n * ((f + tgt_shift) * (k + tgt_shift)) ** -0.5
for (k, sigma_n) in zip(x, y, strict=True)
]
case _:
raise ValueError("`y_axis` must be in {'normal_vol', 'black_vol'}.")
if x_axis == "moneyness":
u: Iterable[float] = x / f # type: ignore[operator, assignment]
return u, y_
else: # x_axis = "strike"
return x, y_
def plot(
self,
comparators: list[_BaseIRSmile] | NoInput = NoInput(0),
labels: list[str] | NoInput = NoInput(0),
x_axis: str | NoInput = NoInput(0),
y_axis: str | NoInput = NoInput(0),
f: DualTypes | NoInput = NoInput(0),
curves: CurvesT_ = NoInput(0),
shift: float_ = NoInput(0),
) -> PlotOutput:
r"""
Plot volatilities associated with the *Smile*.
.. role:: green
.. role:: red
Parameters
----------
comparators: list[Smile], :green:`optional`
A list of Smiles which to include on the same plot as comparators.
labels : list[str], :green:`optional`
A list of strings associated with the plot and comparators. Must be same
length as number of plots.
x_axis : str in {"strike", "moneyness"}, :green:`optional (set by object)`
*'strike'* is the natural option for this *SabrSmile*.
If *'moneyness'* the strikes are converted using ``f``.
y_axis : str in {"black_vol", "normal_vol"}, :green:`optional (set by object)`
Convert the y-axis to a different representation using an approximation.
f: DualTypes, :green:`optional`
The mid-market IRS rate. If ``curves`` are not given then ``f`` is required.
curves: Curves, :green:`optional`
The *Curves* in the required form for an :class:`~rateslib.instruments.IRS`. If ``f``
is not given then ``curves`` are required.
shift: float, :green:`optional`
If plotting a *'black_vol'* this will use an approximation to convert any native
shift into another that is specified here. If not given uses the native shift meta
attribute of the *Smile*.
Returns
-------
(fig, ax, line) : Matplotlib.Figure, Matplotplib.Axes, Matplotlib.Lines2D
Notes
-----
Any approximations converting between *normal* and *black* vol are done so with the
first order approximation generally attributable to Fei Zhou. These approximations are only
used for charting. Actual instrument pricing metrics are determined more accurately
with root solvers.
.. math::
\sigma_{LN+h} \approx \frac{\sigma_{N}}{\sqrt{(F+h)(K+h)}}
and,
.. math::
\sigma_{LN+h} \approx \sigma_{LN+h2} \sqrt{ \frac{(F+h_2)(K+h_2)}{(F+h)(K+h)}}
for *h* and :math:`h_2` potentially different shifts.
""" # noqa: E501
if isinstance(f, NoInput) and isinstance(curves, NoInput):
raise ValueError("`f` (ATM-forward interest rate) is required by `_BaseIRSmile.plot`.")
elif isinstance(f, float | Dual | Dual2 | Variable):
f_: float = _dual_float(f)
elif not isinstance(curves, NoInput):
f_ = _dual_float(self.meta.irs_fixing.irs.rate(curves=curves))
del f
# reversed for intuitive strike direction
comparators = _drb([], comparators)
labels = _drb([], labels)
x_axis_: str = _drb(self.meta.plot_x_axis, x_axis)
y_axis_: str = _drb(self.meta.plot_y_axis, y_axis)
del x_axis, y_axis
x_, y_ = self._plot(x_axis_, f_, y_axis_, shift)
x: list[list[float]] = [list(x_)]
y: list[list[float]] = [list(y_)]
if not isinstance(comparators, NoInput):
for smile in comparators:
if not isinstance(smile, _BaseIRSmile):
raise ValueError("A `comparator` must be a valid IR Smile type.")
x_, y_ = smile._plot(x_axis_, f_, y_axis_, shift)
x.append(list(x_))
y.append(list(y_))
return plot(x, y, labels)
def get_from_strike(
self,
k: DualTypes,
expiry: datetime_ = NoInput(0),
tenor: datetime_ = NoInput(0),
f: DualTypes_ = NoInput(0),
curves: CurvesT_ = NoInput(0),
) -> _IRVolPricingParams:
"""
Given an option strike return the volatility.
Note if the ``expiry`` and ``tenor`` are given these will be validated against the
*_BaseIRSmile* *meta* parameters.
.. role:: red
.. role:: green
Parameters
-----------
k: float, Dual, Dual2, Variable, :red:`required`
The strike of the option.
expiry: datetime, :green:`optional`
The expiry of the option. Required for temporal interpolation.
tenor: datetime, :green:`optional`
The termination date of the underlying *IRS*, required for parameter interpolation.
f: float, Dual, Dual2, Variable, :green:`optional`
The forward rate at delivery of the option.
curves: _Curves, :green:`optional`
Pricing objects. See **Pricing** on :class:`~rateslib.instruments.IRSCall`
for details of allowed inputs. Required if ``f`` is not given.
Returns
-------
_IRVolPricingParams
"""
if not isinstance(expiry, NoInput) and self.meta.expiry != expiry:
raise ValueError(
f"`expiry` of _BaseIRSmile and intended price do not match. Got: {expiry} "
f"and {self.meta.expiry}.\nCalculation aborted due to potential pricing errors.",
)
if not isinstance(tenor, NoInput) and self.meta.irs_fixing.termination != tenor:
raise ValueError(
f"`tenor` of _BaseIRSmile and intended price do not match. Got: {tenor} "
f"and {self.meta.irs_fixing.termination}.\nCalculation aborted due to potential "
f"pricing errors.",
)
if isinstance(f, NoInput):
f_: DualTypes = self.meta.irs_fixing.irs.rate(curves=curves)
else:
f_ = f
del f
return self._get_from_strike(f=f_, k=k)
def __iter__(self) -> NoReturn:
raise TypeError("`_BaseIRSmile` types are not iterable.")
class _BaseIRCube(Generic[T], _WithState, _WithCache[tuple[datetime, datetime], _BaseIRSmile], ABC):
"""
Abstract base class for implementing *IR Volatility Cubes*.
Any :class:`~rateslib.volatility._BaseIRCube` is required to implement the following
**properties**:
- **id** (str)
- **ad** (int)
- **meta** (:class:`~rateslib.volatility._IRCubeMeta`)
- **pricing_params** (3D ndarray)
Any :class:`~rateslib.volatility._BaseIRCube` is required to implement the following
**methods**:
- **_construct_smile(expiry, tenor, params)**
- **_get_from_strike(k, f, curves)**
The directly provided methods with these implementations are:
- :meth:`~rateslib.volatility._BaseIRCube.plot`.
- :meth:`~rateslib.volatility._BaseIRCube.get_from_strike`.
"""
_SmileType: type[_BaseIRSmile]
_node_values_: Arr3dObj
@property
@abstractmethod
def id(self) -> str:
"""
A str identifier to name the *Cube* used in :class:`~rateslib.solver.Solver` mappings.
"""
pass
@property
@abstractmethod
def ad(self) -> int:
"""Int in {0,1,2} describing the AD order associated with the
:class:`~rateslib.volatility._BaseIRCube`."""
pass
@property
@abstractmethod
def meta(self) -> _IRCubeMeta:
"""An instance of :class:`~rateslib.volatility.ir.utils._IRCubeMeta`."""
pass
@property
@abstractmethod
def pricing_params(self) -> Arr3dObj:
"""A 3-d array of pricing parameters with axes (expiry, tenor, strike)."""
pass
def _bilinear_interpolation(
self,
expiry: datetime,
tenor: datetime,
) -> Arr1dObj:
"""
Linearly interpolate the expiries / tenors array and return interpolated values
for the alpha, rho and nu parameters.
Returns
-------
(alpha, rho, nu)
"""
# For out of bounds expiry values convert to boundary expiries with tenor time adjustment
if expiry < self.meta.expiry_dates[0]:
return self._bilinear_interpolation(
expiry=self.meta.expiry_dates[0],
tenor=tenor + (self.meta.expiry_dates[0] - expiry),
)
elif expiry > self.meta.expiry_dates[-1]:
return self._bilinear_interpolation(
expiry=self.meta.expiry_dates[-1],
tenor=tenor - (expiry - self.meta.expiry_dates[-1]),
)
e_posix = expiry.replace(tzinfo=UTC).timestamp()
t_posix = tenor.replace(tzinfo=UTC).timestamp()
match (self.meta._n_expiries, self.meta._n_tenors):
case (1, 1):
# nothing to interpolate: return the only parameters of the surface
return self.pricing_params[0, 0, :]
case (1, _):
# interpolate only over tenor
e_l = 0
e_l_p = 0
t_posix_1 = t_posix - (e_posix - self.meta.expiries_posix[0])
t_l_1 = index_left(
list_input=self.meta.tenor_dates_posix[0, :], # type: ignore[arg-type]
list_length=self.meta._n_tenors,
value=t_posix_1,
)
t_l_1_p = t_l_1 + 1
v_ = (0.0, 0.0) # only one expiry so no interpolation over that dimension
t_l_2, t_l_2_p = t_l_1, t_l_1_p
h_: tuple[float, float] = (
(t_posix_1 - self.meta.tenor_dates_posix[e_l, t_l_1])
/ (
self.meta.tenor_dates_posix[e_l, t_l_1_p]
- self.meta.tenor_dates_posix[e_l, t_l_1]
),
) * 2
case (_, 1):
# interpolate only over expiry
e_l = index_left(
list_input=self.meta.expiries_posix,
list_length=self.meta._n_expiries,
value=e_posix,
)
e_l_p = e_l + 1
t_l_1, t_l_2 = 0, 0
t_l_1_p, t_l_2_p = 0, 0
h_ = (0, 0)
v_ = (
(e_posix - self.meta.expiries_posix[e_l])
/ (self.meta.expiries_posix[e_l_p] - self.meta.expiries_posix[e_l]),
) * 2
case _:
# perform true bilinear interpolation
e_l = index_left(
list_input=self.meta.expiries_posix,
list_length=self.meta._n_expiries,
value=e_posix,
)
e_l_p = e_l + 1
v_ = (
(e_posix - self.meta.expiries_posix[e_l])
/ (self.meta.expiries_posix[e_l_p] - self.meta.expiries_posix[e_l]),
) * 2
# these are the relative tenors as measured per each benchmark expiry
t_posix_1 = t_posix - (e_posix - self.meta.expiries_posix[e_l])
t_posix_2 = t_posix - (e_posix - self.meta.expiries_posix[e_l_p])
t_l_1 = index_left(
list_input=self.meta.tenor_dates_posix[e_l, :], # type: ignore[arg-type]
list_length=self.meta._n_tenors,
value=t_posix_1,
)
t_l_1_p = t_l_1 + 1
t_l_2 = index_left(
list_input=self.meta.tenor_dates_posix[e_l_p, :], # type: ignore[arg-type]
list_length=self.meta._n_tenors,
value=t_posix_2,
)
t_l_2_p = t_l_2 + 1
h_ = (
(t_posix_1 - self.meta.tenor_dates_posix[e_l, t_l_1])
/ (
self.meta.tenor_dates_posix[e_l, t_l_1 + 1]
- self.meta.tenor_dates_posix[e_l, t_l_1]
),
(t_posix_2 - self.meta.tenor_dates_posix[e_l_p, t_l_2])
/ (
self.meta.tenor_dates_posix[e_l_p, t_l_2 + 1]
- self.meta.tenor_dates_posix[e_l_p, t_l_2]
),
)
h_ = (min(max(h_[0], 0), 1), min(max(h_[1], 0), 1))
return np.array(
[
_bilinear_interp(
tl=param[e_l, t_l_1],
tr=param[e_l, t_l_1_p],
bl=param[e_l_p, t_l_2],
br=param[e_l_p, t_l_2_p],
h=h_,
v=v_,
)
for param in [
self.pricing_params[:, :, i] for i in range(self.pricing_params.shape[2])
]
]
)
def _construct_smile(
self,
expiry: datetime,
tenor: datetime,
params: Sequence[DualTypes] | Arr1dObj,
) -> _BaseIRSmile:
if isinstance(self.meta.time_scalars, NoInput):
ts: DualTypes_ = NoInput(0)
else:
if expiry > self.meta.time_scalars.index[-1]:
ts = NoInput(0)
else:
ts = self.meta.time_scalars[expiry]
return self._SmileType( # type: ignore[call-arg]
nodes=dict(zip(self.meta.indexes, params, strict=True)),
eval_date=self.meta.eval_date,
expiry=expiry,
irs_series=self.meta.irs_series,
tenor=tenor,
shift=self.meta.shift,
ad=None, # inherit the AD variables from the params
time_scalar=ts,
**self.meta.smile_params,
)
def get_from_strike(
self,
k: DualTypes,
expiry: datetime,
tenor: datetime,
f: DualTypes_ = NoInput(0),
curves: CurvesT_ = NoInput(0),
) -> _IRVolPricingParams:
"""
Given an option strike, expiry and tenor, return the volatility.
.. role:: red
.. role:: green
Parameters
-----------
k: float, Dual, Dual2, Variable, :red:`required`
The strike of the option.
expiry: datetime, :red:`required`
The expiry of the option. Required for temporal interpolation.
tenor: datetime, :red:`required`
The termination date of the underlying *IRS*, required for parameter interpolation.
f: float, Dual, Dual2, :green:`optional`
The forward rate at delivery of the option.
curves: _Curves, :green:`optional`
Pricing objects. See **Pricing** notes of an :class:`~rateslib.instruments.IRSCall`
for details of allowed inputs.
Returns
-------
_IRVolPricingParams
"""
smile = self.get_smile(expiry, tenor)
return smile.get_from_strike(k=k, f=f, curves=curves)
def get_smile(self, expiry: datetime | str, tenor: datetime | str) -> _BaseIRSmile:
"""
Return a constructed :class:`~rateslib.volatility._BaseIRSmile` for a given
expiry and tenor.
.. role:: red
.. role:: green
Parameters
-----------
expiry: datetime, str, :red:`required`
The expiry of the option. Required for temporal interpolation.
tenor: datetime, str, :red:`required`
The termination date of the underlying *IRS*, required for parameter interpolation.
Returns
-------
_BaseIRSmile
"""
expiry_ = _get_ir_expiry(
eval_date=self.meta.eval_date, irs_series=self.meta.irs_series, expiry=expiry
)
tenor_ = _get_ir_tenor(expiry=expiry_, irs_series=self.meta.irs_series, tenor=tenor)
del expiry, tenor
if (expiry_, tenor_) in self._cache:
smile = self._cache[expiry_, tenor_]
else:
params = self._bilinear_interpolation(expiry=expiry_, tenor=tenor_)
smile = self._cached_value(
key=(expiry_, tenor_), val=self._construct_smile(expiry_, tenor_, params)
)
return smile
def _get_node_vector(self) -> Arr1dObj:
"""Get a 1d array of variables associated with nodes of this object updated by Solver"""
return self.pricing_params.ravel()
def _get_node_vars(self) -> tuple[str, ...]:
"""Get the variable names of elements updated by a Solver"""
vars_: tuple[str, ...] = tuple(
f"{self.id}{i}"
for i in range(self.meta._n_expiries * self.meta._n_tenors * len(self.meta.indexes))
)
return vars_
def _set_single_node(
self, key: tuple[datetime | str, datetime | str, T], value: DualTypes
) -> None:
"""
Update some generic parameters on the *SabrCube*.
Parameters
----------
key: tuple of (datetime, datetime, str in {"alpha", "rho", "nu"})
The node value to update, indexed by (expiry, tenor, SABR param).
value: Array, float, Dual, Dual2, Variable
Value to update on the *Cube*.
Returns
-------
None
Notes
-----
This function may update all of the AD variable names to be a consistent pricing object
familiar to a :class:`~rateslib.solver.Solver`.
.. warning::
*Rateslib* is an object-oriented library that uses complex associations. Although
Python may not object to directly mutating attributes of a *Curve* instance, this
should be avoided in *rateslib*. Only use official ``update`` methods to mutate the
values of an existing *Curve* instance.
This class is labelled as a **mutable on update** object.
"""
expiry_ = _get_ir_expiry(
eval_date=self.meta.eval_date, irs_series=self.meta.irs_series, expiry=key[0]
)
tenor_ = _get_ir_tenor(expiry=expiry_, irs_series=self.meta.irs_series, tenor=key[1])
if expiry_ not in self.meta.expiry_dates:
raise KeyError(f"'{expiry_}' is not in `meta.expiry_dates`.")
tenor_row = self.meta.expiry_dates.index(expiry_)
if tenor_ not in self.meta.tenor_dates[tenor_row]:
raise KeyError(f"'{tenor_}' is not in `meta.tenor_dates`.")
return self._set_single_node_direct((expiry_, tenor_, key[2]), value)
@abstractmethod
def _set_single_node_direct(self, key: tuple[datetime, datetime, T], value: DualTypes) -> None:
pass
================================================
FILE: python/rateslib/volatility/ir/sabr.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations # type hinting
from datetime import datetime, timezone
from typing import TYPE_CHECKING
from uuid import uuid4
import numpy as np
from pandas import DataFrame, Index
from rateslib.data.fixings import IRSSeries, _get_irs_series
from rateslib.dual import Dual, Dual2, Variable, set_order_convert
from rateslib.dual.utils import _dual_float, _to_number, dual_exp, dual_inv_norm_cdf
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import OptionPricingModel
from rateslib.mutability import (
_new_state_post,
)
from rateslib.volatility.ir.base import _BaseIRCube, _BaseIRSmile, _WithMutability
from rateslib.volatility.ir.utils import (
_IRCubeMeta,
_IRSmileMeta,
_IRVolPricingParams,
)
from rateslib.volatility.utils import _SabrModel, _SabrSmileNodes
UTC = timezone.utc
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Arr1dObj,
Arr2dObj,
Arr3dObj,
DualTypes,
DualTypes_,
Iterable,
Number,
Series,
float_,
)
class IRSabrSmile(_BaseIRSmile, _WithMutability):
r"""
Create an *IR Volatility Smile* at a given expiry indexed for a specific IRS tenor
using SABR parameters.
An *IRSabrSmile* is intended as a grid point element of the more general
:class:`~rateslib.volatility.IRSabrCube`, which users are recommended to use instead.
.. warning::
*Swaptions* and *IR Volatility* are in Beta status introduced in v2.7.0
.. role:: green
.. role:: red
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import IRSabrSmile, dt
.. ipython:: python
irss = IRSabrSmile(
eval_date=dt(2000, 1, 1),
expiry=dt(2000, 7, 1),
tenor="1y",
irs_series="usd_irs",
beta=0.5,
nodes=dict(alpha=0.2, rho=-0.05, nu=0.65),
shift=0.0,
)
irss.plot(f=2.5513, x_axis="strike", y_axis="normal_vol")
.. plot::
from rateslib import IRSabrSmile, dt
irss = IRSabrSmile(
eval_date=dt(2000, 1, 1),
expiry=dt(2000, 7, 1),
tenor="1y",
irs_series="usd_irs",
beta=0.5,
nodes=dict(alpha=0.2, rho=-0.05, nu=0.65),
shift=0.0,
)
fig, ax, lines = irss.plot(f=2.5513, x_axis="strike", y_axis="normal_vol")
plt.show()
plt.close()
For further examples see :ref:`Constructing a Smile `.
Parameters
----------
nodes: dict[str, float], :red:`required`
The parameters for the SABR model. Keys must be *'alpha', 'rho', 'nu'*. See below.
beta: float, Variable, :red:`required`
The SABR beta parameter assumed by this *Smile*.
eval_date: datetime, :red:`required`
Acts like the initial node of a *Curve*. Should be assigned today's immediate date.
expiry: datetime, :red:`required`
The expiry date of the options associated with this *Smile*.
irs_series: IRSSeries, :red:`required`
The :class:`~rateslib.data.fixings.IRSSeries` that contains the parameters for the
underlying :class:`~rateslib.instruments.IRS` that the swaptions are settled against.
tenor: datetime, str, :red:`required`
The tenor parameter for the underlying :class:`~rateslib.instruments.IRS` that the
swaptions are settled against.
shift: float, Variable, :green:`optional (set as zero)`
The number of basis points to apply to the strike and forward under a 'Black Shifted
Volatility' model.
time_scalar: float, Dual, Dual2, Variable, :green:`optional (set as one)`
A quantity to remap calendar day time to expiry from ``eval_date`` to another measure
of time.
id: str, optional, :green:`optional (set as random)`
The unique identifier to distinguish between *Smiles* in a multicurrency framework
and/or *Surface*.
ad: int, :green:`optional (set by default)`
Sets the automatic differentiation order. Defines whether to convert node
values to float, :class:`~rateslib.dual.Dual` or
:class:`~rateslib.dual.Dual2`. It is advised against
using this setting directly. It is mainly used internally.
Notes
-----
A SABR model uses a (shifted) Black (log-normal) volatility with a Black-76 option pricing
formula.
The keys for ``nodes`` are described as the following:
- ``alpha``: The initial volatility parameter (e.g. 0.10 for 10%) of the SABR model,
in (0, inf).
- ``rho``: The correlation between spot and volatility of the SABR model,
e.g. -0.10, in [-1.0, 1.0)
- ``nu``: The volatility of volatility parameter of the SABR model, e.g. 0.80.
The parameters :math:`\alpha, \rho, \nu` will be calibrated/mutated by
a :class:`~rateslib.solver.Solver` object. These should be entered as *float* and the argument
``ad`` can be used to automatically tag these as variables.
The parameter :math:`\beta` will **not** be calibrated/mutated by a
:class:`~rateslib.solver.Solver`. This value can be entered either as a *float*, or a
:class:`~rateslib.dual.Variable` to capture exogenous sensitivities.
"""
@_new_state_post
def __init__(
self,
nodes: dict[str, DualTypes],
beta: float | Variable,
eval_date: datetime,
expiry: datetime | str,
irs_series: IRSSeries | str,
tenor: datetime | str,
*,
shift: DualTypes_ = NoInput(0),
time_scalar: DualTypes_ = NoInput(0),
id: str | NoInput = NoInput(0), # noqa: A002
ad: int | None = 0,
):
self._id: str = (
uuid4().hex[:5] + "_" if isinstance(id, NoInput) else id
) # 1 in a million clash
self._meta: _IRSmileMeta = _IRSmileMeta(
_tenor_input=tenor,
_irs_series=_get_irs_series(irs_series),
_eval_date=eval_date,
_expiry_input=expiry,
_plot_x_axis="strike",
_plot_y_axis="black_vol",
_shift=_drb(0.0, shift),
_pricing_model=OptionPricingModel.Black76,
_time_scalar=_drb(1.0, time_scalar),
)
try:
self._nodes: _SabrSmileNodes = _SabrSmileNodes(
_alpha=_to_number(nodes["alpha"]),
_beta=beta,
_rho=_to_number(nodes["rho"]),
_nu=_to_number(nodes["nu"]),
)
except KeyError as e:
for _ in ["alpha", "rho", "nu"]:
if _ not in nodes:
raise ValueError(
f"'{_}' is a required SABR parameter that must be included in ``nodes``"
)
raise e # pragma: no cover
self._set_ad_order(ad)
### Object unique elements
@property
def _n(self) -> int:
return self.nodes.n
@property
def _ini_solve(self) -> int:
return 1
@property
def id(self) -> str:
"""A str identifier to name the *Smile* used in
:class:`~rateslib.solver.Solver` mappings."""
return self._id
@property
def nodes(self) -> _SabrSmileNodes:
"""An instance of :class:`~rateslib.volatility.utils._SabrSmileNodes`."""
return self._nodes
def _d_sabr_d_k_or_f(
self,
k: DualTypes,
f: DualTypes,
expiry: datetime,
as_float: bool,
derivative: int,
) -> tuple[DualTypes, DualTypes | None]:
"""Get the derivative of sabr vol with respect to strike
as_float: bool
Allow expedited calculation by avoiding dual numbers. Useful during the root solving
phase of Newton iterations.
derivative: int
For with respect to `k` use 1, or `f` use 2.
"""
t_e = _to_number(self.meta.t_expiry)
K = k + self.meta.rate_shift
F = f + self.meta.rate_shift
del k, f
if as_float:
k_: Number = _dual_float(K)
f_: Number = _dual_float(F)
a_: Number = _dual_float(self.nodes.alpha)
b_: float | Variable = _dual_float(self.nodes.beta)
p_: Number = _dual_float(self.nodes.rho)
v_: Number = _dual_float(self.nodes.nu)
else:
k_ = _to_number(K)
f_ = _to_number(F)
a_ = self.nodes.alpha #
b_ = self.nodes.beta
p_ = self.nodes.rho
v_ = self.nodes.nu
return _SabrModel._d_sabr_d_k_or_f(k_, f_, t_e, a_, b_, p_, v_, derivative)
### _WithMutability ABCs:
def _get_node_vector(self) -> np.ndarray[tuple[int, ...], np.dtype[np.object_]]:
"""Get a 1d array of variables associated with nodes of this object updated by Solver"""
return np.array([self.nodes.alpha, self.nodes.rho, self.nodes.nu])
def _get_node_vars(self) -> tuple[str, ...]:
"""Get the variable names of elements updated by a Solver"""
return tuple(f"{self.id}{i}" for i in range(3))
def _set_node_vector_direct(
self, vector: np.ndarray[tuple[int, ...], np.dtype[np.object_]], ad: int
) -> None:
"""
Update the node values in a Solver. ``ad`` in {1, 2}.
Only the real values in vector are used, dual components are dropped and restructured.
"""
DualType: type[Dual] | type[Dual2] = Dual if ad == 1 else Dual2
DualArgs: tuple[list[float]] | tuple[list[float], list[float]] = (
([],) if ad == 1 else ([], [])
)
base_obj = DualType(0.0, [f"{self.id}{i}" for i in range(3)], *DualArgs)
ident = np.eye(3)
self._nodes = _SabrSmileNodes(
_beta=self.nodes.beta,
_alpha=DualType.vars_from(
base_obj, # type: ignore[arg-type]
vector[0].real,
base_obj.vars,
ident[0, :].tolist(),
*DualArgs[1:],
),
_rho=DualType.vars_from(
base_obj, # type: ignore[arg-type]
vector[1].real,
base_obj.vars,
ident[1, :].tolist(),
*DualArgs[1:],
),
_nu=DualType.vars_from(
base_obj, # type: ignore[arg-type]
vector[2].real,
base_obj.vars,
ident[2, :].tolist(),
*DualArgs[1:],
),
)
def _set_ad_order_direct(self, order: int | None) -> None:
"""This does not alter the beta node, since that is not varied by a Solver.
beta values that are AD sensitive should be given as a Variable and not Dual/Dual2.
Using `None` allows this Smile to be constructed without overwriting any variable names.
"""
# -1, -2 force updates to new variables
if order is None or order == getattr(self, "ad", None):
return None
elif abs(order) not in [0, 1, 2]:
raise ValueError("`order` can only be in {0, 1, 2} for auto diff calcs.")
self._ad = abs(order)
self._nodes = _SabrSmileNodes(
_beta=self.nodes.beta,
_alpha=set_order_convert(self.nodes.alpha, order, [f"{self.id}0"]),
_rho=set_order_convert(self.nodes.rho, order, [f"{self.id}1"]),
_nu=set_order_convert(self.nodes.nu, order, [f"{self.id}2"]),
)
def _set_single_node(self, key: str, value: DualTypes) -> None:
params = ["alpha", "rho", "nu", "beta"]
if key not in params:
raise KeyError(f"'{key}' is not in `nodes`.")
kwargs = {f"_{_}": getattr(self.nodes, _) for _ in params if _ != key}
kwargs.update({f"_{key}": value})
self._nodes = _SabrSmileNodes(**kwargs)
self._set_ad_order(self.ad)
# _BaseIRSmile ABCS:
def _plot(
self,
x_axis: str,
f: float,
y_axis: str,
tgt_shift: float_,
) -> tuple[Iterable[float], Iterable[float]]:
shf = _dual_float(self.meta.shift) / 100.0
v_ = _dual_float(self.get_from_strike(k=f, f=f).vol) / 100.0
sq_t = self._meta.t_expiry_sqrt
x_low = _dual_float(
dual_exp(0.5 * v_**2 * sq_t**2 - dual_inv_norm_cdf(0.95) * v_ * sq_t) * (f + shf) - shf
)
x_top = _dual_float(
dual_exp(0.5 * v_**2 * sq_t**2 - dual_inv_norm_cdf(0.05) * v_ * sq_t) * (f + shf) - shf
)
x = np.linspace(x_low, x_top, 301, dtype=np.float64)
y: Iterable[float] = [_dual_float(self.get_from_strike(k=_, f=f).vol) for _ in x]
return self._plot_conversion(
y_axis=y_axis, x_axis=x_axis, f=f, shift=shf, tgt_shift=_drb(shf, tgt_shift), x=x, y=y
)
@property
def ad(self) -> int:
"""Int in {0,1,2} describing the AD order associated with the
:class:`~rateslib.volatility._BaseIRSmile`."""
return self._ad
@property
def pricing_params(self) -> tuple[float | Dual | Dual2 | Variable, ...]:
"""An ordered set of pricing parameters associated with the
:class:`~rateslib.volatility._BaseIRSmile`."""
return self.nodes.alpha, self.nodes.rho, self.nodes.nu
@property
def meta(self) -> _IRSmileMeta:
"""An instance of :class:`~rateslib.volatility.ir.utils._IRSmileMeta`."""
return self._meta
def _get_from_strike(self, k: DualTypes, f: DualTypes) -> _IRVolPricingParams:
"""
Given an option strike return the volatility.
Parameters
-----------
k: float, Dual, Dual2
The strike of the option.
f: float, Dual, Dual2
The forward rate at delivery of the option.
expiry: datetime, optional
The expiry of the option. Required for temporal interpolation.
tenor: datetime, optional
The termination date of the underlying *IRS*, required for parameter interpolation.
curves: _Curves,
Pricing objects. See **Pricing** on :class:`~rateslib.instruments.IRSCall`
for details of allowed inputs.
Returns
-------
_IRVolPricingParams
"""
vol_ = _SabrModel._d_sabr_d_k_or_f(
_to_number(k + self.meta.rate_shift),
_to_number(f + self.meta.rate_shift),
_to_number(self._meta.t_expiry),
self.nodes.alpha,
self.nodes.beta,
self.nodes.rho,
self.nodes.nu,
derivative=0,
)[0]
return _IRVolPricingParams(
vol=vol_ * 100.0,
k=k,
f=f,
shift=self.meta.shift,
pricing_model=OptionPricingModel.Black76,
t_e=self._meta.t_expiry,
)
def _d_sigma_d_f(
self,
k: DualTypes,
f: DualTypes,
) -> DualTypes:
"""
Calculate the derivative :math:`\frac{d \\sigma}{d f}` for a generic spline model.
"""
return _SabrModel._d_sabr_d_k_or_f( # type: ignore[return-value]
_to_number(k + self.meta.rate_shift),
_to_number(f + self.meta.rate_shift),
_to_number(self._meta.t_expiry),
self.nodes.alpha,
self.nodes.beta,
self.nodes.rho,
self.nodes.nu,
derivative=2,
)[1]
class IRSabrCube(_BaseIRCube[str], _WithMutability):
r"""
Create an *IR Volatility Cube* parametrized by :class:`~rateslib.volatility.IRSabrSmile` at
different *expiries* and *IRS* *tenors*.
.. warning::
*Swaptions* and *IR Volatility* are in Beta status introduced in v2.7.0
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import IRSabrCube, dt
.. ipython:: python
irsc = IRSabrCube(
eval_date=dt(2000, 1, 1),
expiries=["3m", "1y"],
tenors=["1y", "2y"],
irs_series="usd_irs",
beta=0.5,
alpha=[[0.21, 0.22], [0.20, 0.20]],
rho=-0.05, # <-- applied to all values in the array
nu=[[0.5, 0.55], [0.65, 0.65]],
)
irss = irsc.get_smile("6m", "1y")
irss.plot(f=2.5513, x_axis="strike", y_axis="normal_vol")
.. plot::
from rateslib import IRSabrCube, dt
irsc = IRSabrCube(
eval_date=dt(2000, 1, 1),
expiries=["3m", "1y"],
tenors=["1y", "2y"],
irs_series="usd_irs",
beta=0.5,
alpha=[[0.21, 0.22], [0.20, 0.20]],
rho=-0.05, # <-- applied to all values in the array
nu=[[0.5, 0.55], [0.65, 0.65]],
)
irss = irsc.get_smile("6m", "1y")
fig, ax, lines = irss.plot(f=2.5513, x_axis="strike", y_axis="normal_vol")
plt.show()
plt.close()
For further information see also the
:ref:`IR Vol Smiles & Cubes ` section in the user guide.
.. role:: green
.. role:: red
Parameters
----------
eval_date: datetime, :red:`required`
Acts as the initial node of a *Curve*. Should be assigned today's immediate date.
If expiry is given as string used to derive the specific date.
expiries: list[datetime | str], :red:`required`
Datetimes representing the expiries of each parametrized *Smile*, in ascending order.
tenors: list[str], :red:`required`
The tenors of each underlying *IRS* from each expiry for the parameterised *Smiles*.
alpha: float, Variable, or 2D-ndarray of such, :red:`required`
The alpha, :math:`\alpha_{expiry, tenor}`, parameters of each (expiry, tenor) node.
rho: float, Variable, or 2D-ndarray of such, :red:`required`
The rho, :math:`\rho_{expiry, tenor}`, parameters of each (expiry, tenor) node.
nu: float, Variable, or 2D-ndarray of such, :red:`required`
The nu, :math:`\nu_{expiry, tenor}`, parameters of each (expiry, tenor) node.
irs_series: str, IRSSeries, :red:`required`
The :class:`~rateslib.data.fixings.IRSSeries` that contains the parameters for the
underlying :class:`~rateslib.instruments.IRS` that the swaptions are settled against.
beta: float, Variable, :red:`required`
The beta, :math:`\beta`, parameter of the SABR model.
weights: Series, :green:`optional`
Weights used for temporal volatility interpolation. Please see
:ref:`IR vol time remapping ` before using this argument.
id: str, :green:`optional`
The unique identifier to label the *Surface* and its variables.
ad: int, :green:`optional`
Sets the automatic differentiation order. Defines whether to convert node
values to float, :class:`~rateslib.dual.Dual` or
:class:`~rateslib.dual.Dual2`. It is advised against
using this setting directly. It is mainly used internally.
Notes
-----
SABR parameters for any **(expiry, tenor)** pair are bilinearly interpolated from
immediately neighbouring grid points. Grid points outside of the domain of the given
``expiries`` and ``tenors`` assume values from the singular nearest grid point.
"""
_ini_solve = 0
_SmileType = IRSabrSmile
_meta: _IRCubeMeta
_id: str
def __init__(
self,
eval_date: datetime,
expiries: list[datetime | str],
tenors: list[str],
alpha: DualTypes | Arr2dObj,
rho: DualTypes | Arr2dObj,
nu: DualTypes | Arr2dObj,
irs_series: str | IRSSeries,
beta: DualTypes,
shift: DualTypes_ = NoInput(0),
weights: Series[float] | NoInput = NoInput(0),
id: str | NoInput = NoInput(0), # noqa: A002
ad: int = 0,
):
self._id: str = (
uuid4().hex[:5] + "_" if isinstance(id, NoInput) else id
) # 1 in a million clash
self._meta = _IRCubeMeta(
_eval_date=eval_date,
_tenors=tenors,
_weights=weights,
_expiries=expiries,
_irs_series=_get_irs_series(irs_series),
_shift=_drb(0.0, shift),
_indexes=["alpha", "rho", "nu"],
_smile_params=dict(beta=beta),
_pricing_model=OptionPricingModel.Black76,
)
_shape = (self.meta._n_expiries, self.meta._n_tenors)
self._node_values_: Arr3dObj = np.empty(shape=_shape + (3,), dtype=object)
for i, kw in enumerate([alpha, rho, nu]):
if isinstance(kw, float | Dual | Dual2 | Variable):
self._node_values_[:, :, i] = np.full(fill_value=kw, shape=_shape)
else:
self._node_values_[:, :, i] = np.asarray(kw)
self._set_ad_order(ad) # includes csolve on each smile
self._set_new_state()
@property
def beta(self) -> DualTypes:
"""The *beta* value of each :class:`~rateslib.volatility.IRSabrSmile` associated with
this *Cube*."""
return self.meta.smile_params["beta"] # type: ignore[no-any-return]
@property
def alpha(self) -> DataFrame:
"""The *alpha* value of each :class:`~rateslib.volatility.IRSabrSmile` associated with
this *Cube*."""
return DataFrame(
index=Index(data=self.meta.expiries, name="expiry"),
columns=Index(data=self.meta.tenors, name="tenor"),
data=self._node_values_[:, :, 0],
)
@property
def alpha_float(self) -> DataFrame:
"""The *alpha* value of each :class:`~rateslib.volatility.IRSabrSmile` associated with
this *Cube* in float format."""
return self.alpha.map(lambda x: _dual_float(x))
@property
def rho(self) -> DataFrame:
"""The *rho* value of each :class:`~rateslib.volatility.IRSabrSmile` associated with
this *Cube*."""
return DataFrame(
index=Index(data=self.meta.expiries, name="expiry"),
columns=Index(data=self.meta.tenors, name="tenor"),
data=self._node_values_[:, :, 1],
)
@property
def rho_float(self) -> DataFrame:
"""The *rho* value of each :class:`~rateslib.volatility.IRSabrSmile` associated with
this *Cube* in float format."""
return self.rho.map(lambda x: _dual_float(x))
@property
def nu(self) -> DataFrame:
"""The *nu* value of each :class:`~rateslib.volatility.IRSabrSmile` associated with
this *Cube*."""
return DataFrame(
index=Index(data=self.meta.expiries, name="expiry"),
columns=Index(data=self.meta.tenors, name="tenor"),
data=self._node_values_[:, :, 2],
)
@property
def nu_float(self) -> DataFrame:
"""The *nu* value of each :class:`~rateslib.volatility.IRSabrSmile` associated with
this *Cube* in float format."""
return self.nu.map(lambda x: _dual_float(x))
@property
def _n(self) -> int:
"""Number of pricing parameters of the *Cube*."""
en = self._node_values_.shape[0]
tn = self._node_values_.shape[1]
return en * tn * 3 # alpha, beta, rho
@property
def id(self) -> str:
"""A str identifier to name the *Surface* used in
:class:`~rateslib.solver.Solver` mappings."""
return self._id
@property
def meta(self) -> _IRCubeMeta:
"""An instance of :class:`~rateslib.volatility._IRCubeMeta`."""
return self._meta
@property
def pricing_params(self) -> Arr3dObj:
"""The pricing parameters of the *Cube* as 3-d array by (expiry, tenor, strike)."""
return self._node_values_
@property
def ad(self) -> int:
"""Int in {0,1,2} describing the AD order associated with the *Surface*."""
return self._ad
def _set_ad_order_direct(self, order: int | None) -> None:
if order == getattr(self, "ad", None):
return None
elif order not in [0, 1, 2]:
raise ValueError("`order` can only be in {0, 1, 2} for auto diff calcs.")
self._ad = order
vec = self._get_node_vector()
vars_ = self._get_node_vars()
new_vec = [set_order_convert(v, order, [t]) for v, t in zip(vec, vars_, strict=False)]
en = self._node_values_.shape[0]
tn = self._node_values_.shape[1]
n = en * tn
self._node_values_[:, :, 0] = np.reshape(list(new_vec[:n]), (en, tn))
self._node_values_[:, :, 1] = np.reshape(list(new_vec[n : 2 * n]), (en, tn))
self._node_values_[:, :, 2] = np.reshape(list(new_vec[2 * n :]), (en, tn))
return None
def _set_node_vector_direct(
self, vector: np.ndarray[tuple[int, ...], np.dtype[np.object_]], ad: int
) -> None:
en = self._node_values_.shape[0]
tn = self._node_values_.shape[1]
n = en * tn
if ad == 0:
self._node_values_[:, :, 0] = np.reshape([_dual_float(_) for _ in vector[:n]], (en, tn))
self._node_values_[:, :, 1] = np.reshape(
[_dual_float(_) for _ in vector[n : 2 * n]], (en, tn)
)
self._node_values_[:, :, 2] = np.reshape(
[_dual_float(_) for _ in vector[2 * n :]], (en, tn)
)
else:
DualType: type[Dual] | type[Dual2] = Dual if ad == 1 else Dual2
DualArgs: tuple[list[float]] | tuple[list[float], list[float]] = (
([],) if ad == 1 else ([], [])
)
vars_ = self._get_node_vars()
base_obj = DualType(0.0, vars_, *DualArgs)
ident = np.eye(len(vars_))
for i in range(3):
self._node_values_[:, :, i] = np.reshape(
[
DualType.vars_from(
base_obj, # type: ignore[arg-type]
_dual_float(vector[n * i + j]),
base_obj.vars,
ident[n * i + j, :].tolist(),
*DualArgs[1:],
)
for j in range(n)
],
(en, tn),
)
def _get_node_vector(self) -> Arr1dObj:
"""Get a 1d array of variables associated with nodes of this object updated by Solver"""
return np.block(
[
self._node_values_[:, :, 0].ravel(), # alphas
self._node_values_[:, :, 1].ravel(), # rhos
self._node_values_[:, :, 2].ravel(), # nus
]
)
def _get_node_vars(self) -> tuple[str, ...]:
"""Get the variable names of elements updated by a Solver"""
vars_: tuple[str, ...] = ()
for tag in ["_a_", "_p_", "_v_"]:
vars_ += tuple(
f"{self.id}{tag}{i}_{j}"
for i in range(self._node_values_.shape[0])
for j in range(self._node_values_.shape[1])
)
return vars_
def _set_single_node_direct(
self, key: tuple[datetime, datetime, str], value: DualTypes
) -> None:
"""
Update some generic parameters on the *SabrCube*.
Parameters
----------
key: tuple of (datetime, datetime, str in {"alpha", "rho", "nu"})
The node value to update, indexed by (expiry, tenor, SABR param).
value: Array, float, Dual, Dual2, Variable
Value to update on the *Cube*.
Returns
-------
None
Notes
-----
This function may update all of the AD variable names to be a consistent pricing object
familiar to a :class:`~rateslib.solver.Solver`.
.. warning::
*Rateslib* is an object-oriented library that uses complex associations. Although
Python may not object to directly mutating attributes of a *Curve* instance, this
should be avoided in *rateslib*. Only use official ``update`` methods to mutate the
values of an existing *Curve* instance.
This class is labelled as a **mutable on update** object.
"""
params = ["alpha", "rho", "nu"]
if key[2] not in params:
raise KeyError(f"'{key[2]}' is not in `nodes`.")
tenor_row = self.meta.expiry_dates.index(key[0])
self._node_values_[
self.meta.expiry_dates.index(key[0]),
self.meta.tenor_dates[tenor_row].tolist().index(key[1]),
self.meta.indexes.index(key[2]),
] = value
self._set_ad_order(self.ad)
return None
================================================
FILE: python/rateslib/volatility/ir/spline.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations # type hinting
from datetime import datetime, timezone
from functools import cached_property
from typing import TYPE_CHECKING
from uuid import uuid4
import numpy as np
from rateslib.data.fixings import IRSSeries, _get_irs_series
from rateslib.dual import Dual, Dual2, Variable, set_order_convert
from rateslib.dual.utils import _dual_float, _get_order_of, dual_exp, dual_inv_norm_cdf
from rateslib.enums.generics import NoInput, _drb
from rateslib.enums.parameters import OptionPricingModel, _get_option_pricing_model
from rateslib.mutability import (
_new_state_post,
)
from rateslib.splines import PPSplineDual, PPSplineDual2, PPSplineF64
from rateslib.splines.evaluate import evaluate
from rateslib.volatility.ir.base import _BaseIRCube, _BaseIRSmile, _WithMutability
from rateslib.volatility.ir.utils import (
_IRCubeMeta,
_IRSmileMeta,
_IRVolPricingParams,
)
UTC = timezone.utc
SPLINE_LOWER = -5000.0
SPLINE_UPPER = 10000.0
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
Arr3dObj,
DualTypes,
DualTypes_,
Iterable,
Number,
Sequence,
Series,
float_,
int_,
)
class _IRSplineSmileNodes:
"""
A container for data relating to interpolating the `nodes` of a
:class:`~rateslib.volatility.IRSplineSmile`.
"""
_nodes: dict[float, DualTypes]
_spline: _IRVolSpline
def __init__(self, nodes: dict[float, DualTypes], k: int) -> None:
self._nodes = dict(sorted(nodes.items()))
match (self.n, k):
case (1, _) | (2, _):
# 1 DoF yields a flat smile, but treat it as a line of zero gradient
# 2 DoF yields a straight line, usually with some non-zero gradient
k = 2
t = [SPLINE_LOWER, SPLINE_LOWER, SPLINE_UPPER, SPLINE_UPPER]
case (_, 2):
# 3 or more DoF but piecewise linear endpoints have 2 knots
t = [SPLINE_LOWER, SPLINE_LOWER] + self.keys[1:-1] + [SPLINE_UPPER, SPLINE_UPPER]
case (_, 4):
# 3 or more DoF but piecewise cubic ensure endpoints have 4 knots.
t = [SPLINE_LOWER] * 4 + self.keys[1:-1] + [SPLINE_UPPER] * 4
self._spline = _IRVolSpline(t=t, k=k)
def __eq__(self, other: Any) -> bool:
if not isinstance(other, _IRSplineSmileNodes):
return False
return self._nodes == other._nodes and self.k == other.k
@property
def nodes(self) -> dict[float, DualTypes]:
"""The initial nodes dict passed for construction of this class."""
return self._nodes
@cached_property
def keys(self) -> list[float]:
"""A list of the relative strike keys in ``nodes``."""
return list(self.nodes.keys())
@cached_property
def values(self) -> list[DualTypes]:
"""A list of the delta index values in ``nodes``."""
return list(self.nodes.values())
@property
def n(self) -> int:
"""The number of pricing parameters in ``nodes``."""
return len(self.keys)
@property
def k(self) -> int:
"""The order of the interpolating polynomial spline."""
return self.spline.k
@property
def spline(self) -> _IRVolSpline:
"""An instance of :class:`~rateslib.volatility.ir._IRVolSpline`."""
return self._spline
class _IRVolSpline:
"""
A container for data relating to interpolating the `nodes` of
a :class:`~rateslib.volatility.IRSplineSmile` using a PPSpline.
"""
_k: int
_t: list[float]
_spline: PPSplineF64 | PPSplineDual | PPSplineDual2
def __init__(self, t: list[float], k: int) -> None:
self._t = t
self._k = k
self._spline = PPSplineF64(k, [0.0] * 5, None) # placeholder: csolve will reengineer
@property
def t(self) -> list[float]:
"""The knot sequence of the PPSpline."""
return self._t
@property
def k(self) -> int:
"""The order of the spline."""
return self._k
@property
def spline(self) -> PPSplineF64 | PPSplineDual | PPSplineDual2:
"""An instance of :class:`~rateslib.splines.PPSplineF64`,
:class:`~rateslib.splines.PPSplineDual` or :class:`~rateslib.splines.PPSplineDual2`"""
return self._spline
def evaluate(self, x: DualTypes, m: int = 0) -> Number:
"""Perform the :meth:`~rateslib.splines.evaluate` method on the object's ``spline``."""
return evaluate(spline=self.spline, x=x, m=m)
def _csolve_n_other(
self, nodes: _IRSplineSmileNodes, ad: int
) -> tuple[list[float], list[DualTypes], int, int]:
"""
Solve a spline with more than one node value.
Premium adjusted delta types have an unbounded right side delta index so a derivative of
0 is applied to the spline as a boundary condition.
Premium unadjusted delta types have a right side delta index approximately equal to 1.0.
Use a natural spline boundary condition here.
"""
tau = nodes.keys.copy()
y = nodes.values.copy()
if self.k == 4:
# now insert the natural spline 2nd derivative constraint
y.insert(0, set_order_convert(0.0, ad, None))
tau.insert(0, SPLINE_LOWER)
left_n = 2 # natural spline
else: # == 2
left_n = 0
if self.k == 4:
tau.append(self.t[-1])
y.append(set_order_convert(0.0, ad, None))
right_n = 2 # natural spline
else: # == 2
right_n = 0
return tau, y, left_n, right_n
def csolve(self, nodes: _IRSplineSmileNodes, ad: int) -> None:
"""
Construct a spline of appropriate AD order and solve the spline coefficients for the
given ``nodes``.
Parameters
----------
nodes: _IRSplineSmileNodes
Required information for constructing a PPSpline.
ad: int
The AD order of the constructed PPSPline.
Returns
-------
None
"""
if ad == 0:
Spline: type[PPSplineF64] | type[PPSplineDual] | type[PPSplineDual2] = PPSplineF64
elif ad == 1:
Spline = PPSplineDual
else:
Spline = PPSplineDual2
if nodes.n == 1:
# one node defines a flat line, all spline coefficients are the equivalent value.
# no need to solve, just craft the spline directly.
self._spline = Spline(self.k, self.t, nodes.values * self.k) # type: ignore[arg-type]
else:
tau, y, left_n, right_n = self._csolve_n_other(nodes, ad)
self._spline = Spline(self.k, self.t, None)
self._spline.csolve(tau, y, left_n, right_n, False) # type: ignore[arg-type]
# def to_json(self) -> str:
# """
# Serialize this object to JSON format.
#
# The object can be deserialized using the :meth:`~rateslib.serialization.from_json` method.
#
# Returns
# -------
# str
# """
# obj = dict(
# PyNative=dict(
# _FXDeltaVolSpline=dict(
# t=self.t,
# )
# )
# )
# return json.dumps(obj)
#
# @classmethod
# def _from_json(cls, loaded_json: dict[str, Any]) -> _FXDeltaVolSpline:
# return _FXDeltaVolSpline(
# t=loaded_json["t"],
# )
def __eq__(self, other: Any) -> bool:
"""CurveSplines are considered equal if their knot sequence and endpoints are equivalent.
For the same nodes this will resolve to give the same spline coefficients.
"""
if not isinstance(other, _IRVolSpline):
return False
else:
return self.t == other.t and self.k == other.k
class IRSplineSmile(_BaseIRSmile, _WithMutability):
r"""
Create an *IR Volatility Smile* at a given expiry indexed for a specific IRS tenor
with volatility values interpolated by a polynomial spline curve.
An *IRSplineSmile* is intended as a grid point element of the more general
:class:`~rateslib.volatility.IRSplineCube`, which users are recommended to use instead.
.. warning::
*Swaptions* and *IR Volatility* are in Beta status introduced in v2.7.0
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import IRSplineSmile, dt
.. ipython:: python
irss = IRSplineSmile(
eval_date=dt(2000, 1, 1),
expiry=dt(2000, 7, 1),
tenor="1y",
irs_series="usd_irs",
nodes={
-25.0: 33.375,
-10.0: 32.551,
0.0: 32.488,
10.0: 32.859,
25.0: 34.164
},
k=4,
)
irss.plot(f=2.5513, x_axis="strike", y_axis="normal_vol")
.. plot::
from rateslib import IRSplineSmile, dt
irss = IRSplineSmile(
eval_date=dt(2000, 1, 1),
expiry=dt(2000, 7, 1),
tenor="1y",
irs_series="usd_irs",
nodes={
-25.0: 33.375,
-10.0: 32.551,
0.0: 32.488,
10.0: 32.859,
25.0: 34.164
},
k=4,
)
fig, ax, lines = irss.plot(f=2.5513, x_axis="strike", y_axis="normal_vol")
plt.show()
plt.close()
For further examples see :ref:`Constructing a Smile `.
.. role:: green
.. role:: red
Parameters
----------
nodes: dict[float, float], :red:`required`
The parameters for the spline. Keys must be basis points relative to the forward rate,
and values are normal volatility basis points.
eval_date: datetime, :red:`required`
Acts like the initial node of a *Curve*. Should be assigned today's immediate date.
expiry: datetime, :red:`required`
The expiry date of the options associated with this *Smile*.
irs_series: IRSSeries, :red:`required`
The :class:`~rateslib.data.fixings.IRSSeries` that contains the parameters for the
underlying :class:`~rateslib.instruments.IRS` that the swaptions are settled against.
tenor: datetime, str, :red:`required`
The tenor parameter for the underlying :class:`~rateslib.instruments.IRS` that the
swaptions are settled against.
k: int in {2, 4}, :green:`optional (set as 2)`
The order of the interpolating spline, with (2, 4) representing (linear, cubic)
interpolation respectively.
pricing_model: str, OptionPricingModel, :green:`optional (set as 'normal_vol')`
The option pricing model used by this object. Parameters must be represented
in the appropriate form for the model.
shift: float, :green:`optional (set as zero)`
The shift applied to the forward and strike in pricing formula or in plot conversions.
time_scalar: float, Dual, Dual2, Variable, :green:`optional (set as one)`
A quantity to remap calendar day time to expiry from ``eval_date`` to another measure
of time.
id: str, optional, :green:`optional (set as random)`
The unique identifier to distinguish between *Smiles* in a multicurrency framework
and/or *Surface*.
ad: int, :green:`optional (set by default)`
Sets the automatic differentiation order. Defines whether to convert node
values to float, :class:`~rateslib.dual.Dual` or
:class:`~rateslib.dual.Dual2`. It is advised against
using this setting directly. It is mainly used internally.
Notes
-----
The keys for ``nodes`` must be basis points relative to the forward rate. For example
.. code-block:: python
nodes = {-200.: 50.0, -100.: 47.0, 0.: 46.0, 100.: 48, 200.: 52.0}
This means that the volatility model of this spline is naturally dependent on the forward
*IRS* rate, very similar to an :class:`~rateslib.volatility.FXDeltaVolSmile`, and any type
SABR type *Smile*.
The value of ``nodes`` are treated as the parameters that will be calibrated/mutated by
a :class:`~rateslib.solver.Solver` object. The order of the spline, ``k``, in {2, 4} is a
hyper-parameter of this model and will not be mutated.
The primary reason for the implementation of this *IRSplineSmile* is generally for expression
of risk to normal volatility values. In particular using ``k=2`` allows a risk representation
with localized strikes. For a more thorough demonstration of this see
:ref:`IR Vol Pricing and Risks `.
""" # noqa: E501
@_new_state_post
def __init__(
self,
nodes: dict[float, DualTypes],
eval_date: datetime,
expiry: datetime | str,
irs_series: IRSSeries | str,
tenor: datetime | str,
*,
k: int_ = NoInput(0),
pricing_model: OptionPricingModel | str = "normal_vol",
shift: DualTypes_ = NoInput(0),
time_scalar: DualTypes_ = NoInput(0),
id: str | NoInput = NoInput(0), # noqa: A002
ad: int | None = 0,
):
k_ = _drb(2, k)
del k
if k_ not in [2, 4]:
raise ValueError(
f"`k` must imply linear(2) or cubic(4) spline interpolation. Got {k_}."
)
self._id: str = (
uuid4().hex[:5] + "_" if isinstance(id, NoInput) else id
) # 1 in a million clash
self._meta: _IRSmileMeta = _IRSmileMeta(
_tenor_input=tenor,
_irs_series=_get_irs_series(irs_series),
_eval_date=eval_date,
_expiry_input=expiry,
_plot_x_axis="moneyness",
_plot_y_axis="normal_vol",
_shift=_drb(0.0, shift),
_pricing_model=_get_option_pricing_model(pricing_model),
_time_scalar=_drb(1.0, time_scalar),
)
self._nodes = _IRSplineSmileNodes(nodes=nodes, k=k_)
self._set_ad_order(ad)
### Object unique elements
@property
def _n(self) -> int:
return self.nodes.n
@property
def _ini_solve(self) -> int:
return 0
@property
def id(self) -> str:
"""A str identifier to name the *Smile* used in
:class:`~rateslib.solver.Solver` mappings."""
return self._id
@property
def nodes(self) -> _IRSplineSmileNodes:
"""An instance of :class:`~rateslib.volatility._IRSplineSmileNodes`."""
return self._nodes
### _WithMutability ABCs:
def _get_node_vector(self) -> np.ndarray[tuple[int, ...], np.dtype[np.object_]]:
"""Get a 1d array of variables associated with nodes of this object updated by Solver"""
return np.array(self.nodes.values)
def _get_node_vars(self) -> tuple[str, ...]:
"""Get the variable names of elements updated by a Solver"""
return tuple(f"{self.id}{i}" for i in range(self._n))
def _set_node_vector_direct(
self, vector: np.ndarray[tuple[int, ...], np.dtype[np.object_]], ad: int
) -> None:
"""
Update the node values in a Solver. ``ad`` in {1, 2}.
Only the real values in vector are used, dual components are dropped and restructured.
"""
DualType: type[Dual] | type[Dual2] = Dual if ad == 1 else Dual2
DualArgs: tuple[list[float]] | tuple[list[float], list[float]] = (
([],) if ad == 1 else ([], [])
)
base_obj = DualType(0.0, [f"{self.id}{i}" for i in range(self.nodes.n)], *DualArgs)
ident = np.eye(self.nodes.n)
nodes_: dict[float, DualTypes] = {}
for i, k in enumerate(self.nodes.keys):
nodes_[k] = DualType.vars_from(
base_obj, # type: ignore[arg-type]
vector[i].real,
base_obj.vars,
ident[i, :].tolist(),
*DualArgs[1:],
)
self._nodes = _IRSplineSmileNodes(nodes=nodes_, k=self.nodes.k)
self.nodes.spline.csolve(self.nodes, self.ad)
def _set_ad_order_direct(self, order: int | None) -> None:
# -1, -2 force updates to new variables
if order is None or order == getattr(self, "ad", None):
if self.nodes.spline.spline.c is None:
self.nodes.spline.csolve(self.nodes, _get_order_of(self.pricing_params[0]))
return None
elif abs(order) not in [0, 1, 2]:
raise ValueError("`order` can only be in {0, 1, 2} for auto diff calcs.")
self._ad = abs(order)
nodes: dict[float, DualTypes] = {
k: set_order_convert(v, abs(order), [f"{self.id}{i}"])
for i, (k, v) in enumerate(self.nodes.nodes.items())
}
self._nodes = _IRSplineSmileNodes(nodes=nodes, k=self.nodes.spline.k)
self.nodes.spline.csolve(self.nodes, self.ad)
def _set_single_node(self, key: float, value: DualTypes) -> None:
if key not in self.nodes.keys:
raise KeyError(f"'{key}' is not in `nodes`.")
self.nodes._nodes[key] = value
self.nodes.spline.csolve(self.nodes, self.ad)
# _BaseIRSmile ABCS:
def _plot(
self,
x_axis: str,
f: float,
y_axis: str,
tgt_shift: float_,
) -> tuple[Iterable[float], Iterable[float]]:
# approximate a range for the x-axis
shf = _dual_float(self.meta.shift) / 100.0
sq_t = self._meta.t_expiry_sqrt
v_ = _dual_float(self.get_from_strike(k=f, f=f).vol) / 100.0
if self.meta.pricing_model == OptionPricingModel.Black76:
v_ = v_
else:
v_ = v_ / (f + shf)
x_low = _dual_float(
dual_exp(0.5 * v_**2 * sq_t**2 - dual_inv_norm_cdf(0.95) * v_ * sq_t) * (f + shf) - shf
)
x_top = _dual_float(
dual_exp(0.5 * v_**2 * sq_t**2 - dual_inv_norm_cdf(0.05) * v_ * sq_t) * (f + shf) - shf
)
x = np.linspace(x_low, x_top, 301, dtype=np.float64)
y: Iterable[float] = [_dual_float(self.get_from_strike(k=_, f=f).vol) for _ in x]
return self._plot_conversion(
y_axis=y_axis, x_axis=x_axis, f=f, shift=shf, tgt_shift=_drb(shf, tgt_shift), x=x, y=y
)
@property
def ad(self) -> int:
"""Int in {0,1,2} describing the AD order associated with the
:class:`~rateslib.volatility._BaseIRSmile`."""
return self._ad
@property
def pricing_params(self) -> Sequence[float | Dual | Dual2 | Variable]:
"""An ordered set of pricing parameters associated with the
:class:`~rateslib.volatility._BaseIRSmile`."""
return self.nodes.values
@property
def meta(self) -> _IRSmileMeta:
"""An instance of :class:`~rateslib.volatility.ir.utils._IRSmileMeta`."""
return self._meta
def _get_from_strike(self, k: DualTypes, f: DualTypes) -> _IRVolPricingParams:
"""
Given an option strike return the volatility.
Parameters
-----------
k: float, Dual, Dual2
The strike of the option.
f: float, Dual, Dual2
The forward rate at delivery of the option.
expiry: datetime, optional
The expiry of the option. Required for temporal interpolation.
tenor: datetime, optional
The termination date of the underlying *IRS*, required for parameter interpolation.
curves: _Curves,
Pricing objects. See **Pricing** on :class:`~rateslib.instruments.IRSCall`
for details of allowed inputs.
Returns
-------
_IRVolPricingParams
"""
vol_ = self.nodes.spline.evaluate(x=(k - f) * 100.0, m=0)
return _IRVolPricingParams(
vol=vol_,
k=k,
f=f,
shift=self.meta.shift,
pricing_model=self.meta.pricing_model,
t_e=self.meta.t_expiry,
)
def _d_sigma_d_f(
self,
k: DualTypes,
f: DualTypes,
) -> DualTypes:
"""
Calculate the derivative :math:`\frac{d \\sigma}{d f}` for a generic spline model.
"""
return self.nodes.spline.evaluate(x=(k - f) * 100.0, m=1) * -1.0
class IRSplineCube(_BaseIRCube[float | Variable], _WithMutability):
r"""
Create an *IR Volatility Cube* parametrized by :class:`~rateslib.volatility.IRSplineSmile` at
different *expiries* and *IRS* *tenors*.
.. warning::
*Swaptions* and *IR Volatility* are in Beta status introduced in v2.7.0
.. rubric:: Examples
.. ipython:: python
:suppress:
from rateslib import IRSplineCube, dt
.. ipython:: python
irsc = IRSplineCube(
eval_date=dt(2000, 1, 1),
expiries=["3m", "1y"],
tenors=["1y", "2y"],
strikes=[-25.0, 0.0, 25.0],
irs_series="usd_irs",
parameters=[ # <- normal vol at each strike for each row expiry and column tenor
[[33.5, 32.5, 34.1], [33.7, 32.6, 34.6]],
[[33.4, 32.2, 33.9], [33.1, 32.1, 34.1]],
],
k=4,
)
irss = irsc.get_smile("6m", "1y")
irss.plot(f=2.5513, x_axis="strike", y_axis="normal_vol")
.. plot::
from rateslib import IRSplineCube, dt
irsc = IRSplineCube(
eval_date=dt(2000, 1, 1),
expiries=["3m", "1y"],
tenors=["1y", "2y"],
strikes=[-25.0, 0.0, 25.0],
irs_series="usd_irs",
parameters=[ # <- normal vol at each strike for each row expiry and column tenor
[[33.5, 32.5, 34.1], [33.7, 32.6, 34.6]],
[[33.4, 32.2, 33.9], [33.1, 32.1, 34.1]],
],
k=4,
)
irss = irsc.get_smile("6m", "1y")
fig, ax, lines = irss.plot(f=2.5513, x_axis="strike", y_axis="normal_vol")
plt.show()
plt.close()
For further information see also the
:ref:`IR Vol Smiles & Cubes ` section in the user guide.
.. role:: green
.. role:: red
Parameters
----------
expiries: list[datetime | str], :red:`required`
Datetimes representing the expiries of each parametrised *Smile*, in ascending order.
tenors: list[str], :red:`required`
The tenors of each underlying *IRS* from each expiry for the parameterised *Smiles*.
strikes: list[float], :red:`required`
The indexes for the strike values on each *Smile*, expressed in basis points relative to the
ATM forward rate.
eval_date: datetime, :red:`required`
Acts as the initial node of a *Curve*. Should be assigned today's immediate date.
If expiry is given as string used to derive the specific date.
irs_series: str, IRSSeries, :red:`required`
The :class:`~rateslib.data.fixings.IRSSeries` that contains the parameters for the
underlying :class:`~rateslib.instruments.IRS` that the swaptions are settled against.
parameters: float, Dual, Dual2, Variable or 3d-ndarray of such
The parameters for each *Smile* either adopting a single universal value or as a 3D array
with axes (expiry, tenor, strike).
k: int in {2, 4}, :green:`optional (set as 2)`
The order of the interpolating spline, with (2, 4) representing (linear, cubic)
interpolation respectively.
pricing_model: str, OptionPricingModel, :green:`optional (set as 'normal_vol')`
The option pricing model used by this object. Parameters must be represented
in the appropriate form for the model.
shift: float, :green:`optional (set as zero)`
The shift applied to the forward and strike in pricing formula or in plot conversions.
weights: Series, :green:`optional`
Weights used for temporal volatility interpolation. Please see
:ref:`IR vol time remapping ` before using this argument.
id: str, :green:`optional`
The unique identifier to label the *Surface* and its variables.
ad: int, :green:`optional`
Sets the automatic differentiation order. Defines whether to convert node
values to float, :class:`~rateslib.dual.Dual` or
:class:`~rateslib.dual.Dual2`. It is advised against
using this setting directly. It is mainly used internally.
Notes
-----
Normal vol parameters for any **(expiry, tenor, strike)** triplet are bilinearly
interpolated from immediately neighbouring grid points. Grid points outside of the
domain of the given ``expiries`` and ``tenors`` assume values from the singular nearest
grid point.
"""
_ini_solve = 0
_SmileType = IRSplineSmile
_meta: _IRCubeMeta
_id: str
def __init__(
self,
expiries: list[datetime | str],
tenors: list[str],
strikes: list[float],
eval_date: datetime,
irs_series: str | IRSSeries,
parameters: DualTypes | Arr3dObj,
shift: DualTypes_ = NoInput(0),
pricing_model: OptionPricingModel | str = "normal_vol",
k: int_ = NoInput(0),
weights: Series[float] | NoInput = NoInput(0),
id: str | NoInput = NoInput(0), # noqa: A002
ad: int = 0,
):
self._id: str = (
uuid4().hex[:5] + "_" if isinstance(id, NoInput) else id
) # 1 in a million clash
self._meta = _IRCubeMeta(
_eval_date=eval_date,
_tenors=tenors,
_weights=weights,
_indexes=strikes,
_expiries=expiries,
_irs_series=_get_irs_series(irs_series),
_shift=_drb(0.0, shift),
_smile_params=dict(
k=_drb(2, k),
pricing_model=_get_option_pricing_model(pricing_model),
),
_pricing_model=_get_option_pricing_model(pricing_model),
)
_shape = (self.meta._n_expiries, self.meta._n_tenors, len(strikes))
self._node_values_: Arr3dObj = np.empty(shape=_shape, dtype=object)
if isinstance(parameters, float | Dual | Dual2 | Variable):
self._node_values_.fill(parameters)
else:
p = np.asarray(parameters)
if p.shape != _shape:
raise ValueError(
"If providing `parameters` must be a 3D array-like with shape "
"(expiries, tenors, strikes)."
)
self._node_values_ = p
self._set_ad_order(ad) # includes csolve on each smile
self._set_new_state()
@property
def _n(self) -> int:
"""Number of pricing parameters of the *Cube*."""
en = self._node_values_.shape[0]
tn = self._node_values_.shape[1]
sn = self._node_values_.shape[2]
return en * tn * sn
@property
def id(self) -> str:
"""A str identifier to name the *Surface* used in
:class:`~rateslib.solver.Solver` mappings."""
return self._id
@property
def meta(self) -> _IRCubeMeta:
"""An instance of :class:`~rateslib.volatility._IRCubeMeta`."""
return self._meta
@property
def pricing_params(self) -> Arr3dObj:
"""The pricing parameters of the *Cube* as 3-d array by (expiry, tenor, strike)."""
return self._node_values_
@property
def ad(self) -> int:
"""Int in {0,1,2} describing the AD order associated with the *Surface*."""
return self._ad
def _set_ad_order_direct(self, order: int | None) -> None:
# -1, and -2 input will force direct vars settings.
if order is None or order == getattr(self, "ad", None):
return None
elif abs(order) not in [0, 1, 2]:
raise ValueError("`order` can only be in {0, 1, 2} for auto diff calcs.")
self._ad = abs(order)
vec = self._get_node_vector()
vars_ = self._get_node_vars()
new_vec = [set_order_convert(v, abs(order), [t]) for v, t in zip(vec, vars_, strict=False)]
self._node_values_ = np.reshape(
np.array(new_vec), (self.meta._n_expiries, self.meta._n_tenors, len(self.meta.indexes))
)
return None
def _set_node_vector_direct(
self, vector: np.ndarray[tuple[int, ...], np.dtype[np.object_]], ad: int
) -> None:
shape = self._node_values_.shape
if ad == 0:
self._node_values_ = np.reshape([_dual_float(_) for _ in vector], shape)
else:
DualType: type[Dual] | type[Dual2] = Dual if ad == 1 else Dual2
DualArgs: tuple[list[float]] | tuple[list[float], list[float]] = (
([],) if ad == 1 else ([], [])
)
vars_ = self._get_node_vars()
base_obj = DualType(0.0, vars_, *DualArgs)
ident = np.eye(len(vars_))
self._node_values_ = np.reshape(
[
DualType.vars_from(
base_obj, # type: ignore[arg-type]
_dual_float(v),
base_obj.vars,
ident[j, :].tolist(),
*DualArgs[1:],
)
for j, v in enumerate(vector)
],
shape,
)
def _set_single_node_direct(
self, key: tuple[datetime, datetime, float | Variable], value: DualTypes
) -> None:
"""
Update some generic parameters on the *SplineCube*.
Parameters
----------
key: tuple of (datetime, datetime, float)
The node value to update, indexed by (expiry, tenor, strike).
value: Array, float, Dual, Dual2, Variable
Value to update on the *Cube*.
Returns
-------
None
Notes
-----
This function may update all of the AD variable names to be a consistent pricing object
familiar to a :class:`~rateslib.solver.Solver`.
.. warning::
*Rateslib* is an object-oriented library that uses complex associations. Although
Python may not object to directly mutating attributes of a *Curve* instance, this
should be avoided in *rateslib*. Only use official ``update`` methods to mutate the
values of an existing *Curve* instance.
This class is labelled as a **mutable on update** object.
"""
if key[2] not in self.meta.indexes:
raise KeyError(f"'{key[2]}' is not in `meta.indexes`.")
tenor_row = self.meta.expiry_dates.index(key[0])
self._node_values_[
self.meta.expiry_dates.index(key[0]),
self.meta.tenor_dates[tenor_row].tolist().index(key[1]),
self.meta.indexes.index(key[2]),
] = value
self._set_ad_order(self.ad)
return None
================================================
FILE: python/rateslib/volatility/ir/utils.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations # type hinting
from dataclasses import dataclass
from datetime import datetime, timezone
from functools import cached_property
from typing import TYPE_CHECKING, NamedTuple
import numpy as np
from pandas import Series
from rateslib import calendars
from rateslib.data.fixings import IRSFixing, _get_irs_series
from rateslib.enums.generics import NoInput
from rateslib.scheduling import Adjuster, add_tenor
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Any,
Arr2dObj,
DualTypes,
IRSSeries,
OptionPricingModel,
datetime_,
)
UTC = timezone.utc
class _IRVolPricingParams(NamedTuple):
"""Container for parameters for pricing IR options."""
vol: DualTypes
"""The volatility parameter associated with the specified ``pricing_model``."""
k: DualTypes
"""The strike price of the option."""
f: DualTypes
"""The mid-market forward rate of underlying."""
shift: DualTypes
"""The shift (basis points) applied to the strike and forward under the ``pricing_model``."""
t_e: DualTypes
"""The time to expiry used in the pricing formula."""
pricing_model: OptionPricingModel
"""The specific option pricing formula used for valuation."""
@property
def rate_shift(self) -> DualTypes:
"""The shift (rate percentage terms) applied to the strike and forward under
the ``pricing_model``."""
return self.shift / 100.0
class _IRSmileMeta:
"""
A container of meta data associated with a :class:`~rateslib.volatility._BaseIRSmile`
used to make calculations.
"""
def __init__(
self,
_eval_date: datetime,
_expiry_input: datetime | str,
_tenor_input: datetime | str,
_irs_series: IRSSeries,
_shift: DualTypes,
_plot_x_axis: str,
_plot_y_axis: str,
_pricing_model: OptionPricingModel,
_time_scalar: DualTypes,
):
self._eval_date = _eval_date
self._expiry_input = _expiry_input
self._tenor_input = _tenor_input
self._irs_series = _irs_series
self._plot_x_axis = _plot_x_axis
self._plot_y_axis = _plot_y_axis
self._time_scalar = _time_scalar
self._irs_fixing = IRSFixing(
irs_series=self.irs_series,
publication=self.expiry,
tenor=self.tenor_input,
value=NoInput(0),
identifier=NoInput(0),
)
self._shift = _shift
self._pricing_model = _pricing_model
@property
def time_scalar(self) -> DualTypes:
"""A quantity to multiple calendar day time to expiry to remap time."""
return self._time_scalar
@property
def pricing_model(self) -> OptionPricingModel:
"""The option pricing model associated with this *Smile* volatility output."""
return self._pricing_model
@property
def eval_date(self) -> datetime:
"""Evaluation date of the *Smile*."""
return self._eval_date
@property
def shift(self) -> DualTypes:
"""
The number of basis points used by this *Smile* when using 'Black Shifted Volatility'.
"""
return self._shift
@cached_property
def rate_shift(self) -> DualTypes:
"""
The ``shift`` amount expressed in rate percentage terms.
"""
return self.shift / 100.0
@property
def plot_x_axis(self) -> str:
"""The default ``x_axis`` parameter passed to
:meth:`~rateslib.volatility._BaseIRSmile.plot`"""
return self._plot_x_axis
@property
def plot_y_axis(self) -> str:
"""The default ``y_axis`` parameter passed to
:meth:`~rateslib.volatility._BaseIRSmile.plot`"""
return self._plot_y_axis
@property
def irs_series(self) -> IRSSeries:
"""The :class:`~rateslib.data.fixings.IRSSeries` of for the conventions of the *Smile*."""
return self._irs_series
@property
def expiry_input(self) -> datetime | str:
"""Expiry input of the options priced by this *Smile*."""
return self._expiry_input
@cached_property
def expiry(self) -> datetime:
"""Derived expiry date of the options priced by this *Smile*."""
if isinstance(self.expiry_input, str):
return add_tenor(
start=self.eval_date,
tenor=self.expiry_input,
modifier=self.irs_series.modifier,
calendar=self.irs_series.calendar,
)
else:
return self.expiry_input
@property
def tenor_input(self) -> datetime | str:
"""Tenor input of the underlying IRS priced by this *Smile*."""
return self._tenor_input
@property
def irs_fixing(self) -> IRSFixing:
"""The :class:`~rateslib.data.fixings.IRSFixing` underlying for the swaptions priced
by this *Smile*."""
return self._irs_fixing
@cached_property
def t_expiry(self) -> DualTypes:
"""Calendar days from eval to expiry divided by 365 multiplied by remapping."""
return (self.expiry - self.eval_date).days / 365.0 * self.time_scalar
def _t_expiry(self, expiry: datetime) -> DualTypes:
"""Calendar days from eval to specified expiry divided by 365 multiplied by remapping."""
return (expiry - self.eval_date).days / 365.0 * self.time_scalar
@cached_property
def t_expiry_sqrt(self) -> DualTypes:
"""Square root of ``t_expiry``."""
ret: DualTypes = self.t_expiry**0.5
return ret
@dataclass(frozen=True)
class _IRCubeMeta:
"""
An immutable container of meta data associated with a
:class:`~rateslib.volatility._BaseIRCube` used to make calculations.
"""
_eval_date: datetime
_weights: Series[float] | NoInput
_expiries: list[str | datetime]
_tenors: list[str]
_irs_series: IRSSeries
_shift: DualTypes
_indexes: list[Any]
_smile_params: dict[str, Any]
_pricing_model: OptionPricingModel
def __post_init__(self) -> None:
for idx in range(1, len(self.expiries)):
if self.expiry_dates[idx - 1] >= self.expiry_dates[idx]:
raise ValueError("Cube `expiries` are not sorted or contain duplicates.\n")
if not isinstance(self._weights, NoInput):
object.__setattr__(
self,
"_weights",
_scale_weights(
eval_date=self.eval_date,
weights=self._weights,
expiries=self.expiry_dates,
),
)
@property
def shift(self) -> DualTypes:
"""
The number of basis points used by any *Smile* when using 'Black Shifted Volatility'.
"""
return self._shift
@property
def _n_expiries(self) -> int:
"""The number of expiries."""
return len(self._expiries)
@property
def _n_tenors(self) -> int:
"""The number of tenors."""
return len(self._tenors)
@property
def irs_series(self) -> IRSSeries:
"""
The :class:`~rateslib.data.fixings.IRSSeries` of the underlying
:class:`~rateslib.instruments.IRS`
"""
return self._irs_series
@property
def smile_params(self) -> dict[str, Any]:
"""
A list of additional parameters used only by the specific *Cube* in constructing its
individual *Smile* types.
"""
return self._smile_params
@property
def weights(self) -> Series[float] | NoInput:
"""Weights used for temporal volatility interpolation."""
return self._weights
@cached_property
def time_scalars(self) -> Series[float] | NoInput:
"""Weight adjusted time to expiry (in calendar days) per date for temporal volatility
interpolation."""
if isinstance(self.weights, NoInput):
return NoInput(0)
else:
c = Series(index=self.weights.index, data=1.0)
c.iloc[0] = 0.0
return self.weights.cumsum() / c.cumsum()
@property
def tenors(self) -> list[str]:
"""A list of the tenors as measured according the underlying from each expiry."""
return self._tenors
@property
def indexes(self) -> list[Any]:
"""A list of the indexes used as strikes for the third dimension of the *Cube*."""
return self._indexes
@cached_property
def tenor_dates(self) -> Arr2dObj:
"""An array of *IRS* termination dates measured from each expiry's effective date."""
arr = np.empty(shape=(self._n_expiries, self._n_tenors), dtype=object)
for i, expiry in enumerate(self.expiry_dates):
effective = self.irs_series.calendar.adjust(expiry, self.irs_series.settle)
for j, tenor in enumerate(self.tenors):
arr[i, j] = add_tenor(
start=effective,
tenor=tenor,
modifier=self.irs_series.modifier,
calendar=self.irs_series.calendar,
)
return arr
@cached_property
def tenor_dates_posix(self) -> Arr2dObj:
"""An array of *IRS* termination dates as unix timestamp."""
return np.reshape(
[_.replace(tzinfo=UTC).timestamp() for _ in self.tenor_dates.ravel()],
(self._n_expiries, self._n_tenors),
)
def _t_expiry(self, expiry: datetime) -> float:
"""Calendar days from eval to specified expiry divided by 365."""
return (expiry - self.eval_date).days / 365.0
# @cached_property
# def tenor_posix(self) -> list[float]:
# """A list of the tenors as posix timestamp."""
# return [_.replace(tzinfo=UTC).timestamp() for _ in self.tenor_dates]
@property
def expiries(self) -> list[datetime | str]:
"""A list of the expiries."""
return self._expiries
@cached_property
def expiry_dates(self) -> list[datetime]:
"""A list of the expiries as datetime."""
_: list[datetime] = []
for date in self.expiries:
if isinstance(date, str):
_.append(
add_tenor(
start=self._eval_date,
tenor=date,
modifier=self.irs_series.modifier,
calendar=self.irs_series.calendar,
)
)
else:
_.append(date)
return _
@cached_property
def expiries_posix(self) -> list[float]:
"""A list of the unix timestamps of each date in ``expiries``."""
return [_.replace(tzinfo=UTC).timestamp() for _ in self.expiry_dates]
@cached_property
def eval_posix(self) -> float:
"""The unix timestamp of the ``eval_date``."""
return self.eval_date.replace(tzinfo=UTC).timestamp()
@property
def eval_date(self) -> datetime:
"""Evaluation date of the *Surface*."""
return self._eval_date
@property
def pricing_model(self) -> OptionPricingModel:
"""The option pricing model associated with this *Cube's* volatility output."""
return self._pricing_model
def _get_ir_expiry_and_payment(
eval_date: datetime_,
expiry: str | datetime,
irs_series: str | IRSSeries,
payment_lag: int | datetime_,
) -> tuple[datetime, datetime]:
"""
Determines the expiry and payment date of an IR option using the following rules.
Parameters
----------
eval_date: datetime
The evaluation date, which is today (if required)
expiry: str, datetime
The expiry date
irs_series: IRSSeries, str
The :class:`~rateslib.enums.parameters.IRSSeries` of the underlying IRS.
payment_lag: Adjuster, int, datetime
Number of business days to lag payment by after expiry.
Returns
-------
tuple of datetime
"""
irs_series_ = _get_irs_series(irs_series)
del irs_series
if isinstance(expiry, str):
# then use the objects to derive the expiry
if isinstance(eval_date, NoInput):
raise ValueError("`expiry` as string tenor requires `eval_date`.")
# then the expiry will be implied
expiry_ = add_tenor(
start=eval_date,
tenor=expiry,
modifier=irs_series_.modifier,
calendar=irs_series_.calendar,
roll=eval_date.day,
settlement=False,
mod_days=False,
)
else:
expiry_ = expiry
if isinstance(payment_lag, int):
payment_lag_: datetime | Adjuster = Adjuster.BusDaysLagSettle(payment_lag)
elif isinstance(payment_lag, NoInput):
payment_lag_ = irs_series_.settle
else:
payment_lag_ = payment_lag
del payment_lag
if isinstance(payment_lag_, datetime):
payment_ = payment_lag_
else:
payment_ = payment_lag_.adjust(expiry_, irs_series_.calendar)
return expiry_, payment_
def _get_ir_expiry(
eval_date: datetime,
irs_series: str | IRSSeries,
expiry: datetime | str,
) -> datetime:
"""
Determines the expiry of a Swaption possibly from string tenor.
Parameters
----------
eval_date: datetime
The horizon or evaluation date, i.e. today.
irs_series: IRSSeries, str
The :class:`~rateslib.enums.parameters.IRSSeries` of the underlying IRS.
expiry: str, datetime
The expiry for the swaption.
Returns
-------
datetime
"""
if isinstance(expiry, datetime):
return expiry
irs_series_ = _get_irs_series(irs_series)
del irs_series
expiry_ = add_tenor( # TODO: maybe adopt a Schedule here instead of add tenor
start=eval_date,
tenor=expiry,
modifier=irs_series_.modifier,
calendar=irs_series_.calendar,
roll=eval_date.day,
settlement=False,
mod_days=False,
)
return expiry_
def _get_ir_tenor(
expiry: datetime,
irs_series: str | IRSSeries,
tenor: str | datetime,
) -> datetime:
"""
Determines the termination of an IRS associated with a Swaption expiry.
Parameters
----------
expiry: datetime
The expiry date
irs_series: IRSSeries, str
The :class:`~rateslib.enums.parameters.IRSSeries` of the underlying IRS.
tenor: str, datetime
The tenor for the IRS
Returns
-------
tuple of datetime
"""
if isinstance(tenor, datetime):
return tenor
irs_series_ = _get_irs_series(irs_series)
del irs_series
effective = irs_series_.settle.adjust(expiry, irs_series_.calendar)
tenor_ = add_tenor( # TODO: maybe adopt a Schedule here instead of add tenor
start=effective,
tenor=tenor,
modifier=irs_series_.modifier,
calendar=irs_series_.calendar,
roll=effective.day,
settlement=False,
mod_days=False,
)
return tenor_
def _bilinear_interp(
tl: DualTypes,
tr: DualTypes,
bl: DualTypes,
br: DualTypes,
h: tuple[float, float],
v: tuple[float, float],
) -> DualTypes:
"""
tl, tr, bl, br: the values on the vertices of a unit square.
h: the progression along the horizontal top edge and the horizontal bottom edge in [0,1].
v: the progression along the vertical left edge and the vertical right edge in [0,1].
p: the interior point as the intersection when lines are drawn between the progression on edges.
"""
return (
tl * (1 - h[0]) * (1 - v[0])
+ tr * (h[0]) * (1 - v[1])
+ bl * (1 - h[1]) * v[0]
+ br * h[1] * v[1]
)
def _scale_weights(
eval_date: datetime,
weights: Series[float],
expiries: list[datetime],
) -> Series[float]:
# the last weight is considered the end point of interest
w = weights.sort_index(ascending=True) # sorted input
del weights
d = calendars.get("all").cal_date_range(eval_date, w.index[-1])
s = Series(data=1.0, index=d)
s.update(w)
s.update(Series(index=[eval_date], data=0.0))
c = s.cumsum()
adj_expiries = [eval_date] + expiries
for i, expiry in enumerate(adj_expiries):
if i == 0:
continue
if expiry < s.index[-1]:
# this expiry is within the middle of the weights series
left_index = (adj_expiries[i - 1] - eval_date).days
right_index = (expiry - adj_expiries[i - 1]).days + left_index
left_count = c[adj_expiries[i - 1]]
right_count = c[adj_expiries[i]]
s.iloc[left_index + 1 : right_index + 1] *= (right_index - left_index) / (
right_count - left_count
)
elif adj_expiries[i - 1] < s.index[-1]:
# the weights extend beyond the last expiry but to to the present expiry
left_index = (adj_expiries[i - 1] - eval_date).days
right_index = (s.index[-1] - adj_expiries[i - 1]).days + left_index
left_count = c[adj_expiries[i - 1]]
right_count = c[s.index[-1]]
s.iloc[left_index + 1 : right_index + 1] *= (right_index - left_index) / (
right_count - left_count
)
else:
# the weights have been exhausted
break
if s.index[-1] > expiries[-1]:
# scale the weights beyond last expiry
left_index = (adj_expiries[-1] - eval_date).days
right_index = (s.index[-1] - adj_expiries[-1]).days + left_index
left_count = c[adj_expiries[-1]]
right_count = c[s.index[-1]]
s.iloc[left_index + 1 : right_index + 1] *= (right_index - left_index) / (
right_count - left_count
)
return s
================================================
FILE: python/rateslib/volatility/utils.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from __future__ import annotations # type hinting
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
from typing import TYPE_CHECKING, TypeAlias
from pandas import Series
from rateslib.dual import (
Dual,
Dual2,
Variable,
dual_log,
dual_norm_cdf,
dual_norm_pdf,
ift_1dim,
)
from rateslib.dual.utils import _dual_float, _to_number
from rateslib.enums.generics import (
NoInput,
)
from rateslib.rs import _sabr_x0 as _rs_sabr_x0
from rateslib.rs import _sabr_x1 as _rs_sabr_x1
from rateslib.rs import _sabr_x2 as _rs_sabr_x2
from rateslib.rs import index_left_f64
from rateslib.scheduling import get_calendar
if TYPE_CHECKING:
from rateslib.local_types import ( # pragma: no cover
Number,
)
DualTypes: TypeAlias = "float | Dual | Dual2 | Variable" # if not defined causes _WithCache failure
TERMINAL_DATE = datetime(2100, 1, 1)
UTC = timezone.utc
@dataclass(frozen=True)
class _SabrSmileNodes:
"""
A container for data relating to the SABR parameters of a
:class:`~rateslib.volatility.FXSabrSmile` and :class:`~rateslib.volatility.IRSabrSmile`.
"""
_alpha: Number
_beta: float | Variable
_rho: Number
_nu: Number
@property
def alpha(self) -> Number:
"""The :math:`\\alpha` parameter of the SABR function."""
return self._alpha
@property
def beta(self) -> float | Variable:
"""The :math:`\\beta` parameter of the SABR function."""
return self._beta
@property
def rho(self) -> Number:
"""The :math:`\\rho` parameter of the SABR function."""
return self._rho
@property
def nu(self) -> Number:
"""The :math:`\\nu` parameter of the SABR function."""
return self._nu
@property
def n(self) -> int:
"""The number of parameters."""
return 4
def _validate_weights(
weights: Series[float] | NoInput,
eval_date: datetime,
expiries: list[datetime],
) -> Series[float] | None:
if isinstance(weights, NoInput):
return None
w: Series[float] = Series(
1.0, index=get_calendar("all").cal_date_range(eval_date, TERMINAL_DATE)
)
w.update(weights)
# restrict to sorted and filtered for outliers
w = w.sort_index()
w = w[eval_date:] # type: ignore[misc]
node_points: list[datetime] = [eval_date] + expiries + [TERMINAL_DATE]
for i in range(len(expiries) + 1):
s, e = node_points[i] + timedelta(days=1), node_points[i + 1]
days = (e - s).days + 1
w[s:e] = ( # type: ignore[misc]
w[s:e] * days / w[s:e].sum() # type: ignore[misc]
) # scale the weights to allocate the correct time between nodes.
w[eval_date] = 0.0 # type: ignore[call-overload]
return w
def _t_var_interp(
expiries: list[datetime],
expiries_posix: list[float],
expiry: datetime,
expiry_posix: float,
expiry_index: int,
expiry_next_index: int,
eval_posix: float,
weights_cum: Series[float] | None,
vol1: DualTypes,
vol2: DualTypes,
bounds_flag: int,
) -> DualTypes:
"""
Return the volatility of an intermediate timestamp via total linear variance interpolation.
Possibly scaled by time weights if weights is available.
Parameters
----------
expiry_index: int
The index defining the interval within which expiry falls.
expiries_posix: list[datetime]
The list of datetimes associated with the expiries of the *Surface*.
expiries_posix: list[float]
The list of posix timestamps associated with the expiries of the *Surface*.
expiry: datetime
The target expiry to be interpolated.
expiry_posix: float
The pre-calculated posix timestamp for expiry.
expiry_index: int
The integer index of the expiries period in which the expiry falls.
expiry_next_index: int
Will be expiry_index + 1, unless the surface only has one expiry, in which case it will
equal the expiry_index.
eval_posix: float
The pre-calculated posix timestamp for eval date of the *Surface*
weights_cum: Series[float] or NoInput
The cumulative sum of the weights indexes by date
vol1: float, Dual, DUal2
The volatility of the left side
vol2: float, Dual, Dual2
The volatility on the right side
bounds_flag: int
-1: left side extrapolation, 0: normal interpolation, 1: right side extrapolation
Notes
-----
This function performs different interpolation if weights are given or not. ``bounds_flag``
is used to parse the inputs when *Smiles* to the left and/or right are not available.
"""
return _t_var_interp_d_sabr_d_k_or_f(
expiries,
expiries_posix,
expiry,
expiry_posix,
expiry_index,
expiry_next_index,
eval_posix,
weights_cum,
vol1,
dvol1_dk=0.0,
vol2=vol2,
dvol2_dk=0.0,
bounds_flag=bounds_flag,
derivative=False,
)[0]
def _t_var_interp_d_sabr_d_k_or_f(
expiries: list[datetime],
expiries_posix: list[float],
expiry: datetime,
expiry_posix: float,
expiry_index: int,
expiry_next_index: int,
eval_posix: float,
weights_cum: Series[float] | None,
vol1: DualTypes,
dvol1_dk: DualTypes,
vol2: DualTypes,
dvol2_dk: DualTypes,
bounds_flag: int,
derivative: bool,
) -> tuple[DualTypes, DualTypes | None]:
if weights_cum is None: # weights must also be NoInput
if bounds_flag == 0:
t1 = expiries_posix[expiry_index] - eval_posix
t2 = expiries_posix[expiry_next_index] - eval_posix
elif bounds_flag == -1:
# left side extrapolation
t1 = 0.0
t2 = expiries_posix[expiry_index] - eval_posix
else: # bounds_flag == 1:
# right side extrapolation
t1 = expiries_posix[expiry_next_index] - eval_posix
t2 = TERMINAL_DATE.replace(tzinfo=UTC).timestamp() - eval_posix
t_hat = expiry_posix - eval_posix
t = expiry_posix - eval_posix
else:
if bounds_flag == 0:
t1 = weights_cum[expiries[expiry_index]]
t2 = weights_cum[expiries[expiry_next_index]]
elif bounds_flag == -1:
# left side extrapolation
t1 = 0.0
t2 = weights_cum[expiries[expiry_index]]
else: # bounds_flag == 1:
# right side extrapolation
t1 = weights_cum[expiries[expiry_next_index]]
t2 = weights_cum[TERMINAL_DATE]
t_hat = weights_cum[expiry] # number of vol weighted calendar days
t = (expiry_posix - eval_posix) / 86400.0 # number of calendar days
t_quotient = (t_hat - t1) / (t2 - t1)
vol = ((t1 * vol1**2 + t_quotient * (t2 * vol2**2 - t1 * vol1**2)) / t) ** 0.5
if derivative:
dvol_dk = (
(t2 / t) * t_quotient * vol2 * dvol2_dk + (t1 / t) * (1 - t_quotient) * vol1 * dvol1_dk
) / vol
else:
dvol_dk = None
return vol, dvol_dk
class _OptionModelBlack76:
"""Container for option pricing formulae relating to the lognormal Black-76 model."""
@staticmethod
def _d_plus_min(
K: DualTypes, f: DualTypes, rate_shift: DualTypes, vol_sqrt_t: DualTypes, eta: float
) -> DualTypes:
# AD preserving calculation of d_plus in Black-76 formula (eta should +/- 0.5)
return dual_log((f + rate_shift) / (K + rate_shift)) / vol_sqrt_t + eta * vol_sqrt_t
@staticmethod
def _d_plus_min_u(shifted_u: DualTypes, vol_sqrt_t: DualTypes, eta: float) -> DualTypes:
# AD preserving calculation of d_plus in Black-76 formula (eta should +/- 0.5)
return -dual_log(shifted_u) / vol_sqrt_t + eta * vol_sqrt_t
@staticmethod
def _d_min(
K: DualTypes, f: DualTypes, rate_shift: DualTypes, vol_sqrt_t: DualTypes
) -> DualTypes:
return _OptionModelBlack76._d_plus_min(K, f, rate_shift, vol_sqrt_t, -0.5)
@staticmethod
def _d_plus(
K: DualTypes, f: DualTypes, rate_shift: DualTypes, vol_sqrt_t: DualTypes
) -> DualTypes:
return _OptionModelBlack76._d_plus_min(K, f, rate_shift, vol_sqrt_t, +0.5)
@staticmethod
def _value(
F: DualTypes,
K: DualTypes,
rate_shift: DualTypes,
t_e: DualTypes,
v2: DualTypes,
vol: DualTypes,
phi: float,
) -> DualTypes:
"""
Option price in points terms for immediate premium settlement.
Parameters
-----------
F: float, Dual, Dual2
The forward price for settlement at the delivery date.
K: float, Dual, Dual2
The strike price of the option.
t_e: float, Dual, Dual2
The annualised time to expiry.
v2: float, Dual, Dual2
The discounting rate to delivery (ccy2 on FX options), at the appropriate collateral
rate.
vol: float, Dual, Dual2
The volatility measured over the period until expiry.
phi: float
Whether to calculate for call (1.0) or put (-1.0).
Returns
--------
float, Dual, Dual2
"""
vol_sqrt_t = vol * t_e**0.5
d1 = _OptionModelBlack76._d_plus(K, F, rate_shift, vol_sqrt_t)
d2 = d1 - vol_sqrt_t
Nd1, Nd2 = dual_norm_cdf(phi * d1), dual_norm_cdf(phi * d2)
_: DualTypes = phi * ((F + rate_shift) * Nd1 - (K + rate_shift) * Nd2)
# Spot formulation instead of F (Garman Kohlhagen formulation)
# https://quant.stackexchange.com/a/63661/29443
# r1, r2 = dual_log(df1) / -t, dual_log(df2) / -t
# S_imm = F * df2 / df1
# d1 = (dual_log(S_imm / K) + (r2 - r1 + 0.5 * vol ** 2) * t) / vs
# d2 = d1 - vs
# Nd1, Nd2 = dual_norm_cdf(d1), dual_norm_cdf(d2)
# _ = df1 * S_imm * Nd1 - K * df2 * Nd2
return _ * v2
@classmethod
def convert_to_bachelier(
cls,
f: DualTypes,
k: DualTypes,
shift: DualTypes,
vol: DualTypes,
t_e: DualTypes,
) -> DualTypes:
phi = 1.0 if k > f else -1.0
s_tgt = cls._value(
F=f, K=k, rate_shift=shift / 100.0, t_e=t_e, v2=1.0, vol=vol / 100.0, phi=phi
)
if vol < 0.0:
raise RuntimeError(
"`vol` cannot be negative.\nIf this has occurred during a Solver calibration:\n"
"- are your convergence tolerances wide enough?\n"
"- are your initial parameters too far from target? (perhaps use gradient_descent "
"to find a better starting point)\n"
"- have you tried slackening the `ini_lambda` to say (20000, 0.5, 4)?"
)
def s(g: DualTypes) -> DualTypes:
"""s(g) is the price, s, of an option given a volatility, g,"""
return _OptionModelBachelier._value(
F=f,
K=k,
t_e=t_e,
v2=1.0,
vol=g,
phi=phi,
)
ini_guess = _dual_float(vol * (f + shift / 100.0)) / 100.0
result = ift_1dim(
s=s,
s_tgt=s_tgt,
h="modified_brent",
ini_h_args=(0.01 * ini_guess, 10.0 * ini_guess),
)
g: DualTypes = result["g"]
return g * 100.0
@classmethod
def convert_to_new_shift(
cls,
f: DualTypes,
k: DualTypes,
old_shift: DualTypes,
target_shift: DualTypes,
vol: DualTypes,
t_e: DualTypes,
) -> DualTypes:
phi = -1.0 if k < f else 1.0
if old_shift == target_shift:
return vol
s_tgt = cls._value(
F=f,
K=k,
rate_shift=old_shift / 100.0,
t_e=t_e,
v2=1.0,
vol=vol / 100.0,
phi=phi,
)
def s(g: DualTypes) -> DualTypes:
"""s(g) is the price, s, of an option given a volatility, g,"""
return cls._value(
F=f,
K=k,
rate_shift=target_shift / 100.0,
t_e=t_e,
v2=1.0,
vol=g,
phi=phi,
)
ini_guess = (
_dual_float(
vol
* (
((f + old_shift / 100.0) * (k + old_shift / 100.0))
/ ((f + target_shift / 100.0) * (k + target_shift / 100.0))
)
** 0.5
)
/ 100.0
)
# result = ift_1dim(s=s, s_tgt=s_tgt, h="modified_brent", ini_h_args=(0.0001, 10.0))
result = ift_1dim(
s=s,
s_tgt=s_tgt,
h="modified_brent",
ini_h_args=(0.01 * ini_guess, 10.0 * ini_guess),
)
g: DualTypes = result["g"]
return g * 100.0
class _OptionModelBachelier:
"""Container for option pricing formulae relating to the lognormal Black-76 model."""
@staticmethod
def _value(
F: DualTypes,
K: DualTypes,
t_e: DualTypes,
v2: DualTypes,
vol: DualTypes,
phi: float,
) -> DualTypes:
"""
Option price in points terms for immediate premium settlement.
Parameters
-----------
F: float, Dual, Dual2
The forward price for settlement at the delivery date.
K: float, Dual, Dual2
The strike price of the option.
t_e: float, Dual, Dual2
The annualised time to expiry.
v2: float, Dual, Dual2
The discounting rate to delivery (ccy2 on FX options), at the appropriate collateral
rate.
vol: float, Dual, Dual2
The volatility measured over the period until expiry.
phi: float
Whether to calculate for call (1.0) or put (-1.0).
Returns
--------
float, Dual, Dual2
"""
vs = vol * t_e**0.5
d = (F - K) / vs
P = dual_norm_cdf(phi * d)
p = dual_norm_pdf(d)
_: DualTypes = phi * (F - K) * P + vs * p
return _ * v2
@classmethod
def convert_to_black76(
cls,
f: DualTypes,
k: DualTypes,
shift: DualTypes,
vol: DualTypes,
t_e: DualTypes,
) -> DualTypes:
phi = -1.0 if k < f else 1.0
s_tgt = cls._value(F=f, K=k, t_e=t_e, v2=1.0, vol=vol / 100.0, phi=phi)
def s(g: DualTypes) -> DualTypes:
"""s(g) is the price, s, of an option given a volatility, g,"""
return _OptionModelBlack76._value(
F=f,
K=k,
rate_shift=shift / 100.0,
t_e=t_e,
v2=1.0,
vol=g,
phi=phi,
)
ini_guess = vol / (100.0 * ((f + shift / 100.0) * (k + shift / 100.0)) ** 0.5)
# result = ift_1dim(s=s, s_tgt=s_tgt, h="modified_brent", ini_h_args=(0.0001, 10.0))
result = ift_1dim(
s=s,
s_tgt=s_tgt,
h="modified_brent",
ini_h_args=(0.01 * ini_guess, 10.0 * ini_guess),
func_tol=1e-11,
)
g: DualTypes = result["g"]
return g * 100.0
class _SabrModel:
"""Container for formulae relating to the SABR volatility model."""
@staticmethod
def _d_sabr_d_k_or_f(
k: Number,
f: Number,
t: Number,
a: Number,
b: float | Variable,
p: Number,
v: Number,
derivative: int,
) -> tuple[Number, Number | None]:
"""
Calculate the SABR function and its derivative with respect to k or f.
For formula see for example I. Clark "Foreign Exchange Option
Pricing" section 3.10.
Rateslib uses the representation sigma(k) = X0 * X1 * X2, with these variables as defined in
"Coding Interest Rates" chapter 13 to handle AD using dual numbers effectively.
For no derivative and just the SABR function value use 0.
For derivatives with respect to `k` use 1.
For derivatives with respect to `f` use 2.
See "Coding Interest Rates: FX Swaps and Bonds edition 2"
"""
b_: Number = _to_number(b)
X0, dX0 = _SabrModel._sabr_X0(k, f, t, a, b_, p, v, derivative)
X1, dX1 = _SabrModel._sabr_X1(k, f, t, a, b_, p, v, derivative)
X2, dX2 = _SabrModel._sabr_X2(k, f, t, a, b_, p, v, derivative)
if derivative == 0:
return X0 * X1 * X2, None
else:
return X0 * X1 * X2, dX0 * X1 * X2 + X0 * dX1 * X2 + X0 * X1 * dX2 # type: ignore[operator]
@staticmethod
def _sabr_X0(
k: Number,
f: Number,
t: Number,
a: Number,
b: Number,
p: Number,
v: Number,
derivative: int = 0,
) -> tuple[Number, Number | None]:
"""
X0 = a / ((fk)^((1-b)/2) * (1 + (1-b)^2/24 ln^2(f/k) + (1-b)^4/1920 ln^4(f/k) )
If ``derivative`` is 1 also returns dX0/dk, derived using sympy auto code generator.
If ``derivative`` is 2 also returns dX0/df, derived using sympy auto code generator.
"""
return _rs_sabr_x0(k, f, t, a, b, p, v, derivative)
@staticmethod
def _sabr_X1(
k: Number,
f: Number,
t: Number,
a: Number,
b: Number,
p: Number,
v: Number,
derivative: int = 0,
) -> tuple[Number, Number | None]:
"""
X1 = 1 + t ( (1-b)^2 / 24 * a^2 / (fk)^(1-b) + 1/4 p b v a / (fk)^((1-b)/2) + (2-3p^2)/24 v^2 )
If ``derivative`` also returns dX0/dk, calculated using sympy.
""" # noqa: E501
return _rs_sabr_x1(k, f, t, a, b, p, v, derivative)
@staticmethod
def _sabr_X2(
k: Number,
f: Number,
t: Number,
a: Number,
b: Number,
p: Number,
v: Number,
derivative: int = 0,
) -> tuple[Number, Number | None]:
"""
X2 = z / chi(z)
z = v / a * (fk) ^((1-b)/2) * ln(f/k)
chi(z) = ln( (sqrt(1-2pz+z^2) + z -p) / (1-p) )
If ``derivative`` = 1 also returns dX2/dk, calculated using sympy.
If ``derivative`` = 2 also returns dX2/df, calculated using sympy.
"""
return _rs_sabr_x2(k, f, t, a, b, p, v, derivative)
def _surface_index_left(expiries_posix: list[float], expiry_posix: float) -> tuple[int, int]:
"""use `index_left_f64` to derive left and right index,
but exclude surfaces with only one expiry."""
if len(expiries_posix) == 1:
return 0, 0
else:
e_idx = index_left_f64(expiries_posix, expiry_posix)
e_next_idx = e_idx + 1
return e_idx, e_next_idx
================================================
FILE: python/tests/curves/test_curves.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
from math import exp, log
import numpy as np
import pytest
from matplotlib import pyplot as plt
from pandas import Series
from rateslib import default_context, defaults, fixings
from rateslib.curves import (
CompositeCurve,
Curve,
LineCurve,
MultiCsaCurve,
average_rate,
index_left,
index_value,
)
from rateslib.curves.curves import CreditImpliedCurve, _BaseCurve, _CurveMeta, _try_index_value
from rateslib.curves.utils import _CurveNodes, _CurveSpline
from rateslib.data.loader import FixingMissingDataError
from rateslib.dual import Dual, Dual2, Variable, gradient
from rateslib.dual.utils import _get_order_of
from rateslib.enums.generics import Err, NoInput, Ok
from rateslib.fx import FXForwards, FXRates
from rateslib.instruments import IRS
from rateslib.periods import FloatPeriod
from rateslib.scheduling import Cal, dcf, get_calendar
from rateslib.solver import Solver
@pytest.fixture
def curve():
return Curve(
nodes={
dt(2022, 3, 1): 1.00,
dt(2022, 3, 31): 0.99,
},
interpolation="linear",
id="v",
convention="Act360",
ad=1,
)
@pytest.fixture
def line_curve():
return LineCurve(
nodes={
dt(2022, 3, 1): 2.00,
dt(2022, 3, 31): 2.01,
},
interpolation="linear",
id="v",
ad=1,
)
@pytest.fixture
def index_curve():
return Curve(
nodes={
dt(2022, 3, 1): 1.00,
dt(2022, 3, 31): 0.999,
},
interpolation="linear_index",
id="v",
ad=1,
index_base=110.0,
)
def test_meta_attribute(curve, line_curve):
assert isinstance(curve._meta, _CurveMeta)
assert isinstance(line_curve._meta, _CurveMeta)
@pytest.mark.parametrize("method", ["flat_forward", "flat_backward"])
def test_flat_interp(method) -> None:
curve = Curve(
{dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.9, dt(2002, 1, 1): 0.8},
interpolation=method,
)
assert curve[dt(2000, 1, 1)] == 1.0
assert curve[dt(2001, 1, 1)] == 0.9
assert curve[dt(2002, 1, 1)] == 0.8
if method == "flat_forward":
assert curve[dt(2000, 7, 1)] == 1.0
else:
assert curve[dt(2000, 7, 1)] == 0.9
@pytest.mark.parametrize(("curve_style", "expected"), [("df", 0.995), ("line", 2.005)])
def test_linear_interp(curve_style, expected, curve, line_curve) -> None:
if curve_style == "df":
obj = curve
else:
obj = line_curve
result = obj[dt(2022, 3, 16)]
assert abs(result - Dual(expected, ["v1", "v0"], [0.5, 0.5])) < 1e-10
assert np.all(np.isclose(result.dual, np.array([0.5, 0.5])))
def test_log_linear_interp() -> None:
curve = Curve(
nodes={
dt(2022, 3, 1): 1.00,
dt(2022, 3, 31): 0.99,
},
interpolation="log_linear",
id="v",
convention="Act360",
ad=1,
)
val = exp((log(1.00) + log(0.99)) / 2)
result = curve[dt(2022, 3, 16)]
expected = Dual(val, ["v0", "v1"], [0.49749372, 0.50251891])
assert abs(result - expected) < 1e-15
assert all(np.isclose(gradient(result, ["v0", "v1"]), expected.dual))
def test_linear_zero_rate_interp() -> None:
# not tested
pass
def test_line_curve_rate(line_curve) -> None:
expected = Dual(2.005, ["v0", "v1"], [0.5, 0.5])
result = line_curve.rate(effective=dt(2022, 3, 16))
assert abs(result - expected) < 1e-10
assert np.all(np.isclose(result.dual, np.array([0.5, 0.5])))
@pytest.mark.parametrize(
("scm", "exp"),
[
("none_simple", 5.56617834937),
("isda_flat_compounding", 5.57234801943),
("isda_compounding", 5.58359355318),
],
)
def test_curve_rate_floating_spread(scm, exp) -> None:
curve = Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.9985, dt(2022, 3, 1): 0.995})
result = curve.rate(dt(2022, 1, 1), dt(2022, 3, 1), None, 250, scm)
assert (result - exp) < 1e-8
def test_curve_rate_raises(curve) -> None:
with pytest.raises(ValueError, match="Must supply a valid `spread_compound"):
curve.rate(dt(2022, 3, 3), "7d", float_spread=10.0, spread_compound_method="bad")
@pytest.mark.parametrize(
("li", "ll", "val", "expected"),
[
([0, 1, 2, 3, 4], 5, 0, 0),
([0, 1, 2, 3, 4], 5, 0.5, 0),
([0, 1, 2, 3, 4], 5, 1, 0),
([0, 1, 2, 3, 4], 5, 1.5, 1),
([0, 1, 2, 3, 4], 5, 2, 1),
([0, 1, 2, 3, 4], 5, 2.5, 2),
([0, 1, 2, 3, 4], 5, 3, 2),
([0, 1, 2, 3, 4], 5, 3.5, 3),
([0, 1, 2, 3, 4], 5, 4, 3),
([0, 1, 2, 3, 4], 5, 4.5, 3), # extrapolate
([0, 1, 2, 3, 4], 5, -0.5, 0), # extrapolate
],
)
def test_index_left(li, ll, val, expected) -> None:
result = index_left(li, ll, val)
assert result == expected
def test_zero_rate_plot() -> None:
# test calcs without raise
curve_zero = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.99,
dt(2024, 1, 1): 0.979,
dt(2025, 1, 1): 0.967,
},
interpolation="linear_zero_rate",
)
curve_zero.plot("1d")
plt.close("all")
def test_curve_equality_type_differ(curve, line_curve) -> None:
assert curve != line_curve
def test_copy_curve(curve, line_curve) -> None:
copied = curve.copy()
assert copied == curve
assert id(copied) != id(curve)
copied = line_curve.copy()
assert copied == line_curve
assert id(copied) != id(line_curve)
@pytest.mark.parametrize(
("attr", "val"),
[
("_nodes", _CurveNodes({dt(2000, 1, 1): 1.0})),
("_interpolator", "some_value"),
("_id", "x"),
("_ad", 0),
("_meta", "some_value"),
],
)
def test_curve_equality_checks(attr, val, curve) -> None:
copied_curve = curve.copy()
assert copied_curve == curve
setattr(copied_curve, attr, val)
assert copied_curve != curve
def test_curve_equality_spline_coeffs() -> None:
curve = Curve(
nodes={
dt(2022, 3, 1): 1.00,
dt(2022, 3, 31): 0.99,
dt(2022, 5, 1): 0.98,
dt(2022, 6, 4): 0.97,
dt(2022, 7, 4): 0.96,
},
interpolation="linear",
id="v",
convention="Act360",
ad=0,
t=[
dt(2022, 5, 1),
dt(2022, 5, 1),
dt(2022, 5, 1),
dt(2022, 5, 1),
dt(2022, 6, 4),
dt(2022, 7, 4),
dt(2022, 7, 4),
dt(2022, 7, 4),
dt(2022, 7, 4),
],
)
curve2 = Curve(
nodes={
dt(2022, 3, 1): 1.00,
dt(2022, 3, 31): 0.99,
dt(2022, 5, 1): 0.98,
dt(2022, 6, 4): 0.97,
dt(2022, 7, 4): 0.93, # <- note generates different spline
},
interpolation="linear",
id="v",
convention="Act360",
ad=0,
t=[
dt(2022, 5, 1),
dt(2022, 5, 1),
dt(2022, 5, 1),
dt(2022, 5, 1),
dt(2022, 6, 4),
dt(2022, 7, 4),
dt(2022, 7, 4),
dt(2022, 7, 4),
dt(2022, 7, 4),
],
)
assert curve2 != curve # should detect on curve2.spline.c
curve2.update_node(dt(2022, 7, 4), 0.96)
assert curve2 == curve # spline.c will be resolved on calculation to the same values
def test_curve_interp_raises() -> None:
interp = "BAD"
err = "Curve interpolation: 'bad' not ava"
with pytest.raises(ValueError, match=err):
Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2022, 2, 1): 0.9,
},
id="curve",
interpolation=interp,
)
def test_curve_sorted_nodes_raises() -> None:
err = "Curve node dates are not sorted or contain duplicates."
with pytest.raises(ValueError, match=err):
Curve(
nodes={
dt(2022, 2, 1): 0.9,
dt(2022, 1, 1): 1.0,
},
id="curve",
)
def test_curve_interp_case() -> None:
curve_lower = Curve(
nodes={
dt(2022, 3, 1): 1.00,
dt(2022, 3, 31): 0.99,
},
interpolation="log_linear",
id="id",
convention="Act360",
ad=1,
)
curve_upper = Curve(
nodes={
dt(2022, 3, 1): 1.00,
dt(2022, 3, 31): 0.99,
},
interpolation="LOG_LINEAR",
id="id",
convention="Act360",
ad=1,
)
assert curve_lower[dt(2022, 3, 16)] == curve_upper[dt(2022, 3, 16)]
def test_custom_interpolator() -> None:
def interp(date, nodes):
return date
curve = Curve(
nodes={
dt(2022, 3, 1): 1.00,
dt(2022, 3, 31): 0.99,
},
interpolation=interp,
id="v",
convention="Act360",
ad=1,
)
assert curve[dt(2022, 3, 15)] == dt(2022, 3, 15)
def test_df_is_zero_in_past(curve) -> None:
assert curve[dt(1999, 1, 1)] == 0.0
def test_curve_none_return(curve) -> None:
result = curve.rate(dt(2022, 2, 1), dt(2022, 2, 2))
assert result is None
@pytest.mark.parametrize(
("endpoints", "expected"),
[
("natural", [1.0, 0.995913396831872, 0.9480730429565414, 0.95]),
("not_a_knot", [1.0, 0.9967668788593117, 0.9461282456344617, 0.95]),
(("not_a_knot", "natural"), [1.0, 0.9965809643843604, 0.9480575781858877, 0.95]),
(("natural", "not_a_knot"), [1.0, 0.9959615881004005, 0.9461971628597721, 0.95]),
],
)
def test_spline_endpoints(endpoints, expected) -> None:
curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.99,
dt(2024, 1, 1): 0.97,
dt(2025, 1, 1): 0.95,
dt(2026, 1, 1): 0.95,
},
endpoints=endpoints,
t=[
dt(2022, 1, 1),
dt(2022, 1, 1),
dt(2022, 1, 1),
dt(2022, 1, 1),
dt(2023, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2026, 1, 1),
dt(2026, 1, 1),
dt(2026, 1, 1),
],
)
for i, date in enumerate([dt(2022, 1, 1), dt(2022, 7, 1), dt(2025, 7, 1), dt(2026, 1, 1)]):
result = curve[date]
assert (result - expected[i]) < 1e-12
@pytest.mark.parametrize("endpoints", [("natural", "bad"), ("bad", "natural")])
def test_spline_endpoints_raise(endpoints) -> None:
with pytest.raises(NotImplementedError, match="Endpoint method"):
Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.99,
dt(2024, 1, 1): 0.97,
dt(2025, 1, 1): 0.95,
dt(2026, 1, 1): 0.95,
},
endpoints=endpoints,
t=[
dt(2022, 1, 1),
dt(2022, 1, 1),
dt(2022, 1, 1),
dt(2022, 1, 1),
dt(2023, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2026, 1, 1),
dt(2026, 1, 1),
dt(2026, 1, 1),
],
)
def test_not_a_knot_raises() -> None:
with pytest.raises(ValueError, match="`endpoints` cannot be 'not_a_knot'"):
Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2024, 1, 1): 0.97,
dt(2026, 1, 1): 0.95,
},
endpoints="not_a_knot",
t=[
dt(2022, 1, 1),
dt(2022, 1, 1),
dt(2022, 1, 1),
dt(2022, 1, 1),
dt(2024, 1, 1),
dt(2026, 1, 1),
dt(2026, 1, 1),
dt(2026, 1, 1),
dt(2026, 1, 1),
],
)
def test_set_ad_order_no_spline() -> None:
curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.99,
},
id="v",
)
assert curve[dt(2022, 1, 1)] == 1.0
assert curve.ad == 0
curve._set_ad_order(1)
assert curve[dt(2022, 1, 1)] == Dual(1.0, ["v0"], [])
assert curve.ad == 1
old_id = id(curve.nodes)
curve._set_ad_order(2)
assert curve[dt(2022, 1, 1)] == Dual2(1.0, ["v0"], [], [])
assert curve.ad == 2
assert id(curve.nodes) != old_id # new nodes object thus a new id
expected_id = id(curve.nodes)
curve._set_ad_order(2)
assert id(curve.nodes) == expected_id # new objects not created when order unchged
def test_set_ad_order_raises(curve) -> None:
with pytest.raises(ValueError, match="`order` can only be in {0, 1, 2}"):
curve._set_ad_order(100)
def test_index_left_raises() -> None:
with pytest.raises(ValueError, match="`index_left` designed for intervals."):
index_left([1], 1, 100)
# def test_curve_shift():
# curve = Curve(
# nodes={
# dt(2022, 1, 1): 1.0,
# dt(2023, 1, 1): 0.988,
# dt(2024, 1, 1): 0.975,
# dt(2025, 1, 1): 0.965,
# dt(2026, 1, 1): 0.955,
# dt(2027, 1, 1): 0.9475
# },
# t=[
# dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1), dt(2024, 1, 1),
# dt(2025, 1, 1),
# dt(2026, 1, 1),
# dt(2027, 1, 1), dt(2027, 1, 1), dt(2027, 1, 1), dt(2027, 1, 1),
# ],
# )
# result_curve = curve.shift(25)
# diff = np.array([
# result_curve.rate(_, "1D") - curve.rate(_, "1D") - 0.25 for _ in [
# dt(2022, 1, 10), dt(2023, 3, 24), dt(2024, 11, 11), dt(2026, 4, 5)
# ]
# ])
# assert np.all(np.abs(diff) < 1e-7)
@pytest.mark.parametrize("ad_order", [0, 1, 2])
# @pytest.mark.parametrize("composite", [True, False])
def test_curve_shift_ad_order(ad_order) -> None:
curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475,
},
t=[
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
],
ad=ad_order,
)
result_curve = curve.shift(25)
diff = np.array(
[
result_curve.rate(_, "1D") - curve.rate(_, "1D") - 0.25
for _ in [dt(2022, 1, 10), dt(2023, 3, 24), dt(2024, 11, 11), dt(2026, 4, 5)]
],
)
assert np.all(np.abs(diff) < 1e-7)
result_curve._set_ad_order((ad_order + 1) % 3)
assert result_curve.ad == (ad_order + 1) % 3
@pytest.mark.skip(reason="composite argument removed from shift method in v2.1")
def test_curve_shift_association() -> None:
# test a dynamic shift association with curves, active after a Solver mutation
args = (dt(2022, 2, 1), "1d")
curve = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.988},
)
solver = Solver(
curves=[curve],
instruments=[IRS(dt(2022, 1, 1), "1Y", "A", curves=curve)],
s=[2.0],
)
base = curve.rate(*args)
ass_shifted_curve = curve.shift(100)
stat_shifted_curve = curve.shift(100, composite=False)
assert abs(base - ass_shifted_curve.rate(*args) + 1.00) < 1e-5
assert abs(base - stat_shifted_curve.rate(*args) + 1.00) < 1e-5
solver.s = [3.0]
solver.iterate()
base = curve.rate(*args)
assert abs(base - ass_shifted_curve.rate(*args) + 1.00) < 1e-5
assert abs(ass_shifted_curve.rate(*args) - stat_shifted_curve.rate(*args)) > 0.95
def test_curve_shift_dual_input() -> None:
curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475,
},
t=[
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
],
)
result_curve = curve.shift(Dual(25, ["z"], []))
diff = np.array(
[
result_curve.rate(_, "1D") - curve.rate(_, "1D") - 0.25
for _ in [dt(2022, 1, 10), dt(2023, 3, 24), dt(2024, 11, 11), dt(2026, 4, 5)]
],
)
assert np.all(np.abs(diff) < 1e-7)
def test_composite_curve_shift() -> None:
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999})
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.998})
cc = CompositeCurve([c1, c2])
result = cc.shift(20).rate(dt(2022, 1, 1), "1d")
expected = c1.rate(dt(2022, 1, 1), "1d") + c2.rate(dt(2022, 1, 1), "1d") + 0.2
assert abs(result - expected) < 1e-3
@pytest.mark.parametrize("ad_order", [0, 1, 2])
# @pytest.mark.parametrize("composite", [True, False])
def test_linecurve_shift(ad_order) -> None:
curve = LineCurve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475,
},
t=[
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
],
ad=ad_order,
)
result_curve = curve.shift(25)
diff = np.array(
[
result_curve[_] - curve[_] - 0.25
for _ in [dt(2022, 1, 10), dt(2023, 3, 24), dt(2024, 11, 11), dt(2026, 4, 5)]
],
)
assert np.all(np.abs(diff) < 1e-7)
def test_linecurve_shift_dual_input() -> None:
curve = LineCurve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475,
},
t=[
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
],
)
result_curve = curve.shift(Dual(25, ["z"], []))
diff = np.array(
[
result_curve[_] - curve[_] - 0.25
for _ in [dt(2022, 1, 10), dt(2023, 3, 24), dt(2024, 11, 11), dt(2026, 4, 5)]
],
)
assert np.all(np.abs(diff) < 1e-7)
@pytest.mark.parametrize("ad_order", [0, 1, 2])
# @pytest.mark.parametrize("composite", [True, False])
def test_indexcurve_shift(ad_order) -> None:
curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475,
},
t=[
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
],
ad=ad_order,
index_base=110.0,
interpolation="log_linear",
)
result_curve = curve.shift(25)
diff = np.array(
[
result_curve.rate(_, "1D") - curve.rate(_, "1D") - 0.25
for _ in [dt(2022, 1, 10), dt(2023, 3, 24), dt(2024, 11, 11), dt(2026, 4, 5)]
],
)
assert np.all(np.abs(diff) < 1e-7)
assert result_curve.meta.index_base == curve.meta.index_base
def test_indexcurve_shift_dual_input() -> None:
curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475,
},
t=[
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
],
index_base=110.0,
interpolation="log_linear",
)
result_curve = curve.shift(Dual(25, ["z"], []))
diff = np.array(
[
result_curve.rate(_, "1D") - curve.rate(_, "1D") - 0.25
for _ in [dt(2022, 1, 10), dt(2023, 3, 24), dt(2024, 11, 11), dt(2026, 4, 5)]
],
)
assert np.all(np.abs(diff) < 1e-7)
assert result_curve.meta.index_base == curve.meta.index_base
@pytest.mark.parametrize("c_obj", ["c", "l", "i"])
@pytest.mark.parametrize("ini_ad", [0, 1, 2])
@pytest.mark.parametrize(
"spread", [1.0, Dual(1.0, ["z"], []), Dual2(1.0, ["z"], [], []), Variable(1.0, ["z"])]
)
# @pytest.mark.parametrize("composite", [False, True])
def test_curve_shift_ad_orders(curve, line_curve, index_curve, c_obj, ini_ad, spread):
if c_obj == "c":
c = curve
elif c_obj == "l":
c = line_curve
else:
c = index_curve
c._set_ad_order(ini_ad)
if ini_ad + _get_order_of(spread) == 3:
with pytest.raises(TypeError, match="Cannot create a ShiftedCurve with mixed AD orders"):
c.shift(spread)
return None
result = c.shift(spread)
expected = max(_get_order_of(spread), ini_ad)
assert result._ad == expected
@pytest.mark.parametrize(
("crv", "tol"),
[
(
Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475,
},
t=[
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
],
),
1e-8,
),
(
Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475,
},
t=[
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
],
index_base=110.0,
),
1e-8,
),
(
Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475,
},
t=[
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
],
index_base=110.0,
interpolation="linear_index",
),
1e-8,
),
(
LineCurve(
nodes={
dt(2022, 1, 1): 1.7,
dt(2023, 1, 1): 1.65,
dt(2024, 1, 1): 1.4,
dt(2025, 1, 1): 1.3,
dt(2026, 1, 1): 1.25,
dt(2027, 1, 1): 1.35,
},
t=[
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
],
),
1e-8,
),
(
Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 2): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475,
},
t=[
dt(2022, 1, 1),
dt(2022, 1, 1),
dt(2022, 1, 1),
dt(2022, 1, 1),
dt(2023, 1, 2),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
],
),
1e-3,
),
],
)
def test_curve_translate(crv, tol) -> None:
result_curve = crv.translate(dt(2023, 1, 1))
diff = np.array(
[
result_curve.rate(_, "1D") - crv.rate(_, "1D")
for _ in [dt(2023, 1, 25), dt(2023, 3, 24), dt(2024, 11, 11), dt(2026, 4, 5)]
],
)
assert np.all(np.abs(diff) < tol)
if not isinstance(result_curve.meta.index_base, NoInput):
projected_base = crv.index_value(dt(2023, 1, 1), crv.meta.index_lag)
assert abs(result_curve.meta.index_base - projected_base) < 1e-14
# test date between original initial and translated initial is zero
assert result_curve[dt(1900, 1, 1)] == 0.0
assert result_curve[dt(2022, 12, 31)] == 0.0
@pytest.mark.parametrize(
"crv",
[
Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475,
},
t=[
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
],
),
LineCurve(
nodes={
dt(2022, 1, 1): 1.7,
dt(2023, 1, 1): 1.65,
dt(2024, 1, 1): 1.4,
dt(2025, 1, 1): 1.3,
dt(2026, 1, 1): 1.25,
dt(2027, 1, 1): 1.35,
},
t=[
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
],
),
],
)
@pytest.mark.parametrize(
"dates",
[
("10d", "-10d"),
(dt(2022, 1, 11), dt(2021, 12, 22)),
(10, -10),
],
)
def test_curve_roll(crv, dates) -> None:
rolled_curve = crv.roll(dates[0])
rolled_curve2 = crv.roll(dates[1])
expected = np.array(
[
crv.rate(_, "1D")
for _ in [dt(2023, 1, 15), dt(2023, 3, 15), dt(2024, 11, 15), dt(2026, 4, 15)]
],
)
result = np.array(
[
rolled_curve.rate(_, "1D")
for _ in [dt(2023, 1, 25), dt(2023, 3, 25), dt(2024, 11, 25), dt(2026, 4, 25)]
],
)
result2 = np.array(
[
rolled_curve2.rate(_, "1D")
for _ in [dt(2023, 1, 5), dt(2023, 3, 5), dt(2024, 11, 5), dt(2026, 4, 5)]
],
)
assert np.all(np.abs(result - expected) < 1e-7)
assert np.all(np.abs(result2 - expected) < 1e-7)
# value prior to initial node
assert rolled_curve[dt(1900, 1, 1)] == 0.0
@pytest.mark.skip(reason="v2.1 uses a RolledCurve and does not return a compatible object for eq")
def test_curve_roll_copy(curve) -> None:
result = curve.roll("0d")
assert result == curve
def test_curve_spline_warning() -> None:
curve = Curve(
nodes={
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 0.99,
dt(2025, 1, 1): 0.97,
dt(2026, 1, 1): 0.94,
dt(2027, 1, 1): 0.91,
},
t=[
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
],
)
with pytest.warns(UserWarning):
curve[dt(2028, 1, 1)]
def test_index_curve_roll() -> None:
crv = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475,
},
t=[
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
],
index_base=110.0,
interpolation="log_linear",
)
rolled_curve = crv.roll("10d")
rolled_curve2 = crv.roll("-10d")
expected = np.array(
[
crv.rate(_, "1D")
for _ in [dt(2023, 1, 15), dt(2023, 3, 15), dt(2024, 11, 15), dt(2026, 4, 15)]
],
)
result = np.array(
[
rolled_curve.rate(_, "1D")
for _ in [dt(2023, 1, 25), dt(2023, 3, 25), dt(2024, 11, 25), dt(2026, 4, 25)]
],
)
result2 = np.array(
[
rolled_curve2.rate(_, "1D")
for _ in [dt(2023, 1, 5), dt(2023, 3, 5), dt(2024, 11, 5), dt(2026, 4, 5)]
],
)
assert np.all(np.abs(result - expected) < 1e-7)
assert np.all(np.abs(result2 - expected) < 1e-7)
assert rolled_curve.meta.index_base == crv.meta.index_base
@pytest.mark.parametrize(
"s",
[
Series(index=[dt(2000, 1, 1), dt(2000, 2, 1), dt(2000, 3, 1)], data=[100.0, 200, 300]),
158.62068965517238,
"KLMN",
],
)
def test_index_value_series(s) -> None:
# test that a Series input to fixings works
fixings.add(
"KLMN",
Series(index=[dt(2000, 1, 1), dt(2000, 2, 1), dt(2000, 3, 1)], data=[100.0, 200, 300]),
)
result = index_value(
index_lag=1,
index_method="daily",
index_fixings=s,
index_date=dt(2000, 2, 18),
index_curve=NoInput(0),
)
expected = 12 / 29 * 100.0 + 17 / 29 * 200
fixings.pop("KLMN")
assert abs(result - expected) < 1e-10
def test_curve_translate_raises(curve) -> None:
with pytest.raises(ValueError, match="Cannot translate into the past."):
curve.translate(dt(2020, 4, 1))
def test_curve_zero_width_rate_raises(curve) -> None:
with pytest.raises(ZeroDivisionError, match="effective:"):
curve.rate(dt(2022, 3, 10), dt(2022, 3, 10))
def test_set_node_vector_updates_ad_attribute(curve) -> None:
curve._set_node_vector([0.98], ad=2)
assert curve.ad == 2
@pytest.mark.parametrize(
("convention", "expected"),
[
("act360", 4.3652192566314705),
("30360", 4.372999441829487),
("act365f", 4.372518793743008),
("bus252", 4.354756779569957),
],
)
def test_average_rate(convention, expected):
start = dt(2000, 1, 1)
end = dt(2006, 1, 1)
rate = 5.0
d = dcf(start, end, convention, calendar="bus")
result, d_, n_ = average_rate(start, end, convention, rate, d)
assert abs(result - expected) < 1e-12
assert abs((1 + d * rate / 100.0) - (1 + d_ * result / 100.0) ** n_) < 1e-12
@pytest.mark.parametrize("curve", [Curve, LineCurve])
def test_spline_interpolation_feature(curve):
t = [dt(2000, 1, 1)] * 4 + [dt(2001, 1, 1)] + [dt(2002, 1, 1)] * 4
original = curve(nodes={dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98, dt(2002, 1, 1): 0.975}, t=t)
feature = curve(
nodes={dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98, dt(2002, 1, 1): 0.975},
interpolation="spline",
)
assert feature.interpolator.spline.t == t
assert feature.interpolator.spline.spline.c == original.interpolator.spline.spline.c
assert feature[dt(2000, 1, 1)] == original[dt(2000, 1, 1)]
assert feature[dt(1999, 1, 1)] == original[dt(1999, 1, 1)]
assert feature[dt(2001, 5, 1)] == original[dt(2001, 5, 1)]
def test_conventions_and_calendar_unnecessary():
# test that the calendar and the convention of a Curve is not required to forecast rates
# this test currently raises but in future versions the calendar and convention attributes
# of a curve may be separated from this mechanism.
curve = Curve({dt(2026, 4, 1): 1.0, dt(2028, 4, 1): 0.98}, calendar="nyc", convention="act360")
period = FloatPeriod(
start=dt(2026, 4, 1),
end=dt(2026, 7, 1),
payment=dt(2026, 7, 1),
frequency="Q",
convention="act365f",
calendar="osl",
)
with pytest.raises(ValueError, match="A `rate_curve` and `rate_index` have been supplied with"):
period.rate(rate_curve=curve)
class TestCurve:
def test_repr(self):
curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.98,
dt(2024, 1, 1): 0.965,
dt(2025, 1, 1): 0.955,
},
id="sofr",
)
expected = f""
assert expected == curve.__repr__()
def test_cache_clear_and_defaults(self):
curve = Curve({dt(2000, 1, 1): 1.0, dt(2002, 1, 1): 0.99})
curve[dt(2001, 1, 1)]
assert len(curve._cache) == 1
curve._clear_cache()
assert len(curve._cache) == 0
v1 = curve[dt(2001, 1, 1)]
curve.update_node(dt(2002, 1, 1), 0.98)
# cache cleared by function
assert len(curve._cache) == 0
v2 = curve[dt(2001, 1, 1)]
assert v2 != v1
with default_context("curve_caching", False):
curve.nodes.nodes[dt(2002, 1, 1)] = 0.90
# no clear cache required, but value will re-calc anyway
assert curve[dt(2001, 1, 1)] != v2
def test_typing_as_base_curve(self):
curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.98,
dt(2024, 1, 1): 0.965,
dt(2025, 1, 1): 0.955,
},
id="sofr",
)
assert isinstance(curve, _BaseCurve)
@pytest.mark.skip(reason="TranslatedCurve was constructed in v2.1 and bypasses this.")
def test_curve_translate_knots_raises(self) -> None:
curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475,
},
t=[
dt(2022, 1, 1),
dt(2022, 1, 1),
dt(2022, 1, 1),
dt(2022, 1, 1),
dt(2022, 12, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
],
)
with pytest.raises(ValueError, match="Cannot translate spline knots for given"):
curve.translate(dt(2022, 12, 15))
def test_calendar_passed_to_rate_dcf(self):
# Holidays on which no overnight DI rate is published
reserve_holidays = [
"2025-01-01",
"2025-03-03",
"2025-03-04",
"2025-04-18",
"2025-04-21",
"2025-05-01",
"2025-06-19",
"2025-09-07",
"2025-10-12",
"2025-11-02",
"2025-11-15",
"2025-11-20",
"2025-12-25",
"2026-01-01",
"2026-02-16",
"2026-02-17",
"2026-04-03",
"2026-04-21",
"2026-05-01",
"2026-06-04",
"2026-09-07",
"2026-10-12",
"2026-11-02",
"2026-11-15",
"2026-11-20",
"2026-12-25",
]
bra = Cal(holidays=[dt.strptime(h, "%Y-%m-%d") for h in reserve_holidays], week_mask=[5, 6])
curve = Curve(
nodes={
dt(2025, 5, 15): 1.0,
dt(2026, 1, 2): 0.919218,
},
convention="bus252",
calendar=bra,
)
d = dcf(dt(2025, 5, 15), dt(2026, 1, 2), "bus252", calendar=bra)
expected = (1 + 0.14) ** -d
assert abs(expected - curve[dt(2026, 1, 2)]) < 5e-7
# period rate
result = curve.rate(dt(2025, 5, 15), dt(2026, 1, 2))
expected = (1 / 0.919218 - 1) * 100 / d
assert abs(expected - result) < 5e-7
@pytest.mark.parametrize("interpolation", ["linear", "log_linear"])
def test_linear_bus_interpolation(self, interpolation) -> None:
curve = Curve(
nodes={dt(2000, 1, 3): 1.0, dt(2000, 1, 17): 0.9},
calendar="bus",
convention="act365f",
interpolation=interpolation,
)
curve2 = Curve(
nodes={dt(2000, 1, 3): 1.0, dt(2000, 1, 17): 0.9},
calendar="bus",
convention="bus252",
interpolation=interpolation,
)
assert curve[dt(2000, 1, 17)] == curve2[dt(2000, 1, 17)]
assert curve[dt(2000, 1, 3)] == curve2[dt(2000, 1, 3)]
assert curve[dt(2000, 1, 5)] != curve2[dt(2000, 1, 5)]
assert curve[dt(2000, 1, 10)] == curve2[dt(2000, 1, 10)] # half calendar and bus
assert curve[dt(2000, 1, 13)] != curve2[dt(2000, 1, 13)]
def test_update_meta(self, curve):
curve.update_meta("credit_discretization", 101)
assert curve.meta.credit_discretization == 101
def test_no_termination(self, curve):
with pytest.raises(ValueError, match="`termination` must be supplied"):
curve.rate(dt(2022, 3, 2))
def test_index_value_lag_mismatch(self, index_curve):
with pytest.raises(ValueError, match="'curve' interpolation can only be used"):
index_curve.index_value(
index_date=dt(2022, 3, 4),
index_lag=22,
index_method="curve",
)
def test_update_node_raises(self, curve):
with pytest.raises(KeyError, match="`key` is not in"):
curve.update_node(dt(2000, 1, 1), 1.0)
class TestLineCurve:
def test_repr(self):
curve = LineCurve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.98,
dt(2024, 1, 1): 0.965,
dt(2025, 1, 1): 0.955,
},
id="libor1m",
)
expected = f""
assert expected == curve.__repr__()
def test_typing_as_base_curve(self):
curve = LineCurve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.98,
dt(2024, 1, 1): 0.965,
dt(2025, 1, 1): 0.955,
},
id="libor1m",
)
assert isinstance(curve, _BaseCurve)
def test_index_values_raises(self, line_curve):
with pytest.raises(TypeError, match="A 'values' type Curve cannot"):
line_curve.index_value(dt(2022, 3, 3), index_lag=0)
class TestIndexCurve:
def test_curve_index_linear_daily_interp(self) -> None:
curve = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2022, 1, 5): 0.9999},
index_base=200.0,
interpolation="linear_index",
index_lag=2,
)
result = curve.index_value(dt(2022, 1, 5), 2)
expected = 200.020002002
assert abs(result - expected) < 1e-7
result = curve.index_value(dt(2022, 1, 3), 2)
expected = 200.010001001 # value is linearly interpolated between index values.
assert abs(result - expected) < 1e-7
# SKIP: with deprecation of IndexCurve errors must be deferred to price time.
# def test_indexcurve_raises(self) -> None:
# with pytest.raises(ValueError, match="`index_base` must be given"):
# Curve({dt(2022, 1, 1): 1.0})
def test_index_value_raises(self) -> None:
curve = Curve({dt(2022, 1, 1): 1.0}, index_base=100.0)
with pytest.raises(ValueError, match="`index_method` as string: 'BAD' is not a v"):
curve.index_value(dt(2022, 1, 1), 3, index_method="BAD")
@pytest.mark.parametrize("ad", [0, 1, 2])
def test_roll_preserves_ad(self, ad) -> None:
curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99},
index_base=100.0,
index_lag=3,
id="tags_",
ad=ad,
)
new_curve = curve.roll("1m")
assert new_curve.ad == curve.ad
def test_historic_rate_is_none(self) -> None:
curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99},
index_base=100.0,
index_lag=3,
id="tags_",
)
assert curve.rate(dt(2021, 3, 4), "1b", "f") is None
def test_repr(self):
curve = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2022, 1, 5): 0.9999}, index_base=200.0, id="us_cpi"
)
expected = f""
assert expected == curve.__repr__()
def test_typing_as_base_curve(self):
curve = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2022, 1, 5): 0.9999}, index_base=200.0, id="us_cpi"
)
assert isinstance(curve, _BaseCurve)
class TestCompositeCurve:
def test_long_1day_rate_captured(self):
c1 = Curve({dt(2000, 1, 1): 1.0, dt(2030, 1, 1): 0.8, dt(2030, 1, 2): 0.7999})
c2 = Curve({dt(2000, 1, 1): 1.0, dt(2030, 1, 1): 0.7, dt(2030, 1, 2): 0.6999})
r1 = c1.rate(dt(2030, 1, 1), dt(2030, 1, 2))
r2 = c2.rate(dt(2030, 1, 1), dt(2030, 1, 2))
cc = CompositeCurve([c1, c2])
result = cc.rate(dt(2030, 1, 1), dt(2030, 1, 2))
assert abs(result - r1 - r2) < 5e-4
def test_curve_df_based(self) -> None:
curve1 = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.98,
dt(2024, 1, 1): 0.965,
dt(2025, 1, 1): 0.955,
},
t=[
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2025, 1, 1),
dt(2025, 1, 1),
dt(2025, 1, 1),
],
)
curve2 = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2022, 6, 30): 1.0,
dt(2022, 7, 1): 0.999992,
dt(2022, 12, 31): 0.999992,
dt(2023, 1, 1): 0.999984,
dt(2023, 6, 30): 0.999984,
dt(2023, 7, 1): 0.999976,
dt(2023, 12, 31): 0.999976,
dt(2024, 1, 1): 0.999968,
dt(2024, 6, 30): 0.999968,
dt(2024, 7, 1): 0.999960,
dt(2025, 1, 1): 0.999960,
},
)
curve = CompositeCurve([curve1, curve2])
for date in [dt(2022, 12, 30), dt(2022, 12, 31), dt(2023, 1, 1)]:
result1 = curve.rate(date, "1d")
expected1 = curve1.rate(date, "1d") + curve2.rate(date, "1d")
assert abs(result1 - expected1) < 2e-8
result = curve.rate(dt(2022, 6, 1), "1Y")
expected = curve1.rate(dt(2022, 6, 1), "1Y") + curve2.rate(dt(2022, 6, 1), "1Y")
assert abs(result - expected) < 1e-4
def test_composite_curve_translate(self) -> None:
curve1 = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.98,
dt(2024, 1, 1): 0.965,
dt(2025, 1, 1): 0.955,
},
t=[
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2025, 1, 1),
dt(2025, 1, 1),
dt(2025, 1, 1),
],
)
curve2 = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2022, 6, 30): 1.0,
dt(2022, 7, 1): 0.999992,
dt(2022, 12, 31): 0.999992,
dt(2023, 1, 1): 0.999984,
dt(2023, 6, 30): 0.999984,
dt(2023, 7, 1): 0.999976,
dt(2023, 12, 31): 0.999976,
dt(2024, 1, 1): 0.999968,
dt(2024, 6, 30): 0.999968,
dt(2024, 7, 1): 0.999960,
dt(2025, 1, 1): 0.999960,
},
)
crv = CompositeCurve([curve1, curve2])
result_curve = crv.translate(dt(2022, 3, 1))
diff = np.array(
[
result_curve.rate(_, "1D") - crv.rate(_, "1D")
for _ in [dt(2023, 1, 25), dt(2023, 3, 24), dt(2024, 11, 11)]
],
)
assert np.all(np.abs(diff) < 1e-5)
def test_composite_curve_roll(self) -> None:
curve1 = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.98,
dt(2024, 1, 1): 0.965,
dt(2025, 1, 1): 0.955,
},
t=[
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2025, 1, 1),
dt(2025, 1, 1),
dt(2025, 1, 1),
],
)
curve2 = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2022, 6, 30): 1.0,
dt(2022, 7, 1): 0.999992,
dt(2022, 12, 31): 0.999992,
dt(2023, 1, 1): 0.999984,
dt(2023, 6, 30): 0.999984,
dt(2023, 7, 1): 0.999976,
dt(2023, 12, 31): 0.999976,
dt(2024, 1, 1): 0.999968,
dt(2024, 6, 30): 0.999968,
dt(2024, 7, 1): 0.999960,
dt(2025, 1, 1): 0.999960,
},
)
crv = CompositeCurve([curve1, curve2])
rolled_curve = crv.roll("10d")
expected = np.array(
[crv.rate(_, "1D") for _ in [dt(2023, 1, 15), dt(2023, 3, 15), dt(2024, 11, 15)]],
)
result = np.array(
[
rolled_curve.rate(_, "1D")
for _ in [dt(2023, 1, 25), dt(2023, 3, 25), dt(2024, 11, 25)]
],
)
assert np.all(np.abs(result - expected) < 1e-7)
@pytest.mark.parametrize(
("method", "args"),
[
("rate", (dt(2022, 1, 1), "1d")),
("roll", ("10d",)),
("translate", (dt(2022, 1, 10),)),
("shift", (10.0, "id")),
("__getitem__", (dt(2022, 1, 10),)),
("index_value", (dt(2022, 1, 10), 3)),
],
)
def test_composite_curve_precheck_cache(self, method, args) -> None:
# test precache_check on shift
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}, index_base=100.0, index_lag=3)
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.998})
cc = CompositeCurve([c1, c2])
cc._cache[dt(1980, 1, 1)] = 100.0
# mutate a curve to trigger cache id clear
c1._set_node_vector([0.99], 0)
getattr(cc, method)(*args)
assert dt(1980, 1, 1) not in cc._cache
def test_isinstance_raises(self) -> None:
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99})
line_curve = LineCurve({dt(2022, 1, 1): 10.0, dt(2023, 1, 1): 12.0})
with pytest.raises(TypeError, match="CompositeCurve can only contain curves of the same t"):
CompositeCurve([curve, line_curve])
@pytest.mark.parametrize(
("attribute", "val"),
[
("modifier", ["MF", "MP"]),
("calendar", ["ldn", "tgt"]),
("convention", ["act360", "act365f"]),
],
)
def test_attribute_error_raises(self, attribute, val) -> None:
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, **{attribute: val[0]})
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, **{attribute: val[1]})
with pytest.raises(ValueError, match="Cannot composite curves with dif"):
CompositeCurve([c1, c2])
def test_line_based(self) -> None:
c1 = LineCurve({dt(2022, 1, 1): 1.5, dt(2022, 1, 3): 1.0})
c2 = LineCurve({dt(2022, 1, 1): 2.0, dt(2022, 1, 3): 3.0})
cc = CompositeCurve([c1, c2])
expected = 3.75
result = cc.rate(dt(2022, 1, 2))
assert abs(result - expected) < 1e-8
result = cc[dt(2022, 1, 2)]
assert abs(result - expected) < 1e-8
def test_initial_node_raises(self) -> None:
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99})
c2 = Curve({dt(2022, 1, 2): 1.0, dt(2023, 1, 1): 0.99})
with pytest.raises(ValueError, match="`curves` must share the same ini"):
CompositeCurve([c1, c2])
@pytest.mark.parametrize(
("lag", "base"), [([2, 3], [100.0, 99.0]), ([4, NoInput(0)], [100.0, NoInput(0)])]
)
def test_index_curves_take_first_value(self, lag, base) -> None:
ic1 = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99},
index_lag=lag[0],
index_base=base[0],
)
ic2 = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99},
index_lag=lag[1],
index_base=base[1],
)
cc = CompositeCurve([ic1, ic2])
assert cc.meta.index_base == base[0]
assert cc.meta.index_lag == lag[0]
def test_index_curves_attributes_warns(self):
ic1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, index_lag=3, index_base=101.1)
ic2 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, index_lag=3, index_base=101.1)
cc = CompositeCurve([ic1, ic2])
with pytest.warns(UserWarning):
result = cc.index_value(dt(1999, 1, 1), 3)
expected = 0.0
assert abs(result - expected) < 1e-5
def test_index_curves_attributes(self) -> None:
ic1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, index_lag=3, index_base=101.1)
ic2 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, index_lag=3, index_base=101.1)
cc = CompositeCurve([ic1, ic2])
assert cc.meta.index_lag == 3
assert cc.meta.index_base == 101.1
result = cc.index_value(dt(2022, 1, 31), 3, index_method="monthly")
expected = 101.1
assert abs(result - expected) < 1e-5
result = cc.index_value(dt(2022, 1, 1), 3)
expected = 101.1
assert abs(result - expected) < 1e-5
def test_index_curves_interp_raises(self) -> None:
ic1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, index_lag=3, index_base=101.1)
ic2 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, index_lag=3, index_base=101.1)
cc = CompositeCurve([ic1, ic2])
with pytest.raises(ValueError, match="`index_method` as string: 'bad interp'"):
cc.index_value(index_date=dt(2022, 1, 31), index_lag=3, index_method="bad interp")
def test_composite_curve_proxies(self) -> None:
uu = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="uu")
ee = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.991}, id="ee")
eu = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.992}, id="eu")
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.1}, settlement=dt(2022, 1, 1)),
fx_curves={
"usdusd": uu,
"eureur": ee,
"eurusd": eu,
},
)
pc = MultiCsaCurve([uu, fxf.curve("usd", "eur")])
result = pc[dt(2023, 1, 1)]
expected = 0.98900
assert abs(result - expected) < 1e-4
pc = MultiCsaCurve(
[
fxf.curve("usd", "eur"),
uu,
],
)
result = pc[dt(2023, 1, 1)]
assert abs(result - expected) < 1e-4
def test_composite_curve_no_index_value_raises(self, curve) -> None:
cc = CompositeCurve([curve])
with pytest.raises(ValueError, match="Curve must be initialised with an `index_base`"):
cc.index_value(dt(2022, 1, 1), 3)
def test_historic_rate_is_none(self) -> None:
c1 = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2022, 1, 2): 0.99997260, # 1%
dt(2022, 1, 3): 0.99991781, # 2%
dt(2022, 1, 4): 0.99983564, # 3%
dt(2022, 1, 5): 0.99972608, # 4%
},
convention="Act365F",
)
c2 = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2022, 1, 2): 0.99989042, # 4%
dt(2022, 1, 3): 0.99980825, # 3%
dt(2022, 1, 4): 0.99975347, # 2%
dt(2022, 1, 5): 0.99972608, # 1%
},
convention="Act365F",
)
cc = CompositeCurve([c1, c2])
assert cc.rate(dt(2021, 3, 4), "1b", "f") is None
def test_repr(self):
curve1 = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.98,
dt(2024, 1, 1): 0.965,
dt(2025, 1, 1): 0.955,
},
t=[
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2025, 1, 1),
dt(2025, 1, 1),
dt(2025, 1, 1),
],
)
curve2 = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2022, 6, 30): 1.0,
dt(2022, 7, 1): 0.999992,
dt(2022, 12, 31): 0.999992,
dt(2023, 1, 1): 0.999984,
dt(2023, 6, 30): 0.999984,
dt(2023, 7, 1): 0.999976,
dt(2023, 12, 31): 0.999976,
dt(2024, 1, 1): 0.999968,
dt(2024, 6, 30): 0.999968,
dt(2024, 7, 1): 0.999960,
dt(2025, 1, 1): 0.999960,
},
)
curve = CompositeCurve([curve1, curve2])
expected = f""
assert expected == curve.__repr__()
assert isinstance(curve.id, str)
def test_typing_as_base_curve(self):
curve1 = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.98,
dt(2024, 1, 1): 0.965,
dt(2025, 1, 1): 0.955,
},
t=[
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2025, 1, 1),
dt(2025, 1, 1),
dt(2025, 1, 1),
],
)
curve2 = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2022, 6, 30): 1.0,
dt(2022, 7, 1): 0.999992,
dt(2022, 12, 31): 0.999992,
dt(2023, 1, 1): 0.999984,
dt(2023, 6, 30): 0.999984,
dt(2023, 7, 1): 0.999976,
dt(2023, 12, 31): 0.999976,
dt(2024, 1, 1): 0.999968,
dt(2024, 6, 30): 0.999968,
dt(2024, 7, 1): 0.999960,
dt(2025, 1, 1): 0.999960,
},
)
curve = CompositeCurve([curve1, curve2])
assert isinstance(curve, _BaseCurve)
def test_cache(self):
curve1 = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.98,
},
)
curve2 = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2022, 6, 30): 1.0,
dt(2022, 7, 1): 0.999992,
dt(2022, 12, 31): 0.999992,
dt(2023, 1, 1): 0.999984,
},
)
curve = CompositeCurve([curve1, curve2])
curve[dt(2022, 3, 1)]
assert curve._cache == {dt(2022, 3, 1): 0.9967396833121631}
# update a curve
curve2.update_node(dt(2022, 6, 30), 0.95)
curve[dt(2022, 3, 1)]
assert curve._cache == {dt(2022, 3, 1): 0.9801226964242061}
def test_composite_curve_of_composite_curve(self):
c1 = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.98,
},
)
c2 = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 30): 0.99,
}
)
cc1 = CompositeCurve([c1, c2])
cc2 = CompositeCurve([cc1, c1])
result = cc2.rate(dt(2022, 2, 15), "3m")
assert abs(result - 4.933123726330553) < 1e-8
def test_composite_curve_of_composite_line_curve(self):
c1 = LineCurve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.98,
},
)
c2 = LineCurve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 30): 0.99,
}
)
cc1 = CompositeCurve([c1, c2])
cc2 = CompositeCurve([cc1, c1])
result = cc2.rate(dt(2022, 2, 15), "3m")
assert abs(result - 2.993926361170989) < 1e-8
def test_ad_order_is_max(self):
c1 = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.99})
c2 = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.99})
c2._set_ad_order(2)
assert CompositeCurve([c1, c2])._ad == 2
assert CompositeCurve([c2, c1])._ad == 2
def test_initial_df(self):
curve1 = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.99}, ad=1, id="v")
curve2 = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98}, ad=1, id="w")
cc = CompositeCurve([curve1, curve2])
result = cc[dt(2000, 1, 1)]
expected = Dual(1.0, ["v0", "v1", "w0", "w1"], [1.0, 0.0, 1.0, 0.0])
assert result == expected
def test_update_meta_raises(self):
ic1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, index_lag=3, index_base=101.1)
ic2 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, index_lag=3, index_base=101.1)
cc = CompositeCurve([ic1, ic2])
with pytest.raises(AttributeError, match="'CompositeCurve' object has no attribute 'updat"):
cc.update_meta("h", 100.0)
def test_update_meta(self):
ic1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, index_lag=3, index_base=101.1)
ic2 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, index_lag=3, index_base=101.1)
cc = CompositeCurve([ic1, ic2])
before = cc.meta.credit_recovery_rate
ic1.update_meta("credit_recovery_rate", 0.88)
after = cc.meta.credit_recovery_rate
assert before != after
assert after == 0.88
class TestMultiCsaCurve:
def test_historic_rate_is_none(self) -> None:
c1 = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2022, 1, 2): 0.99997260, # 1%
dt(2022, 1, 3): 0.99991781, # 2%
dt(2022, 1, 4): 0.99983564, # 3%
dt(2022, 1, 5): 0.99972608, # 4%
},
convention="Act365F",
)
c2 = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2022, 1, 2): 0.99989042, # 4%
dt(2022, 1, 3): 0.99980825, # 3%
dt(2022, 1, 4): 0.99975347, # 2%
dt(2022, 1, 5): 0.99972608, # 1%
},
convention="Act365F",
)
cc = MultiCsaCurve([c1, c2])
assert cc.rate(dt(2021, 3, 4), "1b", "f") is None
def test_multi_raises(self, line_curve, curve) -> None:
with pytest.raises(TypeError, match="MultiCsaCurve must use discount factors"):
MultiCsaCurve([line_curve])
def test_multi_csa_shift(self) -> None:
c1 = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2022, 1, 2): 0.99997260, # 1%
dt(2022, 1, 3): 0.99991781, # 2%
dt(2022, 1, 4): 0.99983564, # 3%
dt(2022, 1, 5): 0.99972608, # 4%
},
convention="Act365F",
)
c2 = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2022, 1, 2): 0.99989042, # 4%
dt(2022, 1, 3): 0.99980825, # 3%
dt(2022, 1, 4): 0.99975347, # 2%
dt(2022, 1, 5): 0.99972608, # 1%
},
convention="Act365F",
)
c3 = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2022, 1, 2): 0.99989042, # 4%
dt(2022, 1, 3): 0.99979455, # 3.5%
dt(2022, 1, 4): 0.99969869, # 3.5%
dt(2022, 1, 5): 0.99958915, # 4%
},
convention="Act365F",
)
cc = MultiCsaCurve([c1, c2, c3])
cc_shift = cc.shift(100)
with default_context("multi_csa_steps", [1, 1, 1, 1, 1, 1, 1]):
r1 = cc_shift.rate(dt(2022, 1, 1), "1d")
r2 = cc_shift.rate(dt(2022, 1, 2), "1d")
r3 = cc_shift.rate(dt(2022, 1, 3), "1d")
r4 = cc_shift.rate(dt(2022, 1, 4), "1d")
assert abs(r1 - 5.0) < 1e-3
assert abs(r2 - 4.5) < 1e-3
assert abs(r3 - 4.5) < 1e-3
assert abs(r4 - 5.0) < 1e-3
@pytest.mark.parametrize("caching", [True, False])
def test_multi_csa(self, caching) -> None:
with default_context("curve_caching", caching):
c1 = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2022, 1, 2): 0.99997260, # 1%
dt(2022, 1, 3): 0.99991781, # 2%
dt(2022, 1, 4): 0.99983564, # 3%
dt(2022, 1, 5): 0.99972608, # 4%
},
convention="Act365F",
)
c2 = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2022, 1, 2): 0.99989042, # 4%
dt(2022, 1, 3): 0.99980825, # 3%
dt(2022, 1, 4): 0.99975347, # 2%
dt(2022, 1, 5): 0.99972608, # 1%
},
convention="Act365F",
)
c3 = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2022, 1, 2): 0.99989042, # 4%
dt(2022, 1, 3): 0.99979455, # 3.5%
dt(2022, 1, 4): 0.99969869, # 3.5%
dt(2022, 1, 5): 0.99958915, # 4%
},
convention="Act365F",
)
cc = MultiCsaCurve([c1, c2, c3])
with default_context("multi_csa_steps", [1, 1, 1, 1, 1, 1, 1]):
r1 = cc.rate(dt(2022, 1, 1), "1d")
r2 = cc.rate(dt(2022, 1, 2), "1d")
r3 = cc.rate(dt(2022, 1, 3), "1d")
r4 = cc.rate(dt(2022, 1, 4), "1d")
assert abs(r1 - 4.0) < 1e-3
assert abs(r2 - 3.5) < 1e-3
assert abs(r3 - 3.5) < 1e-3
assert abs(r4 - 4.0) < 1e-3
def test_multi_csa_granularity(self) -> None:
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2032, 1, 1): 0.9, dt(2072, 1, 1): 0.5})
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2032, 1, 1): 0.8, dt(2072, 1, 1): 0.7})
with default_context("multi_csa_max_step", 182, "multi_csa_min_step", 182):
cc = MultiCsaCurve([c1, c2])
r1 = cc.rate(dt(2052, 5, 24), "1d")
# r2 = cc.rate(dt(2052, 5, 25), "1d")
# r3 = cc.rate(dt(2052, 5, 26), "1d")
assert abs(r1 - 1.448374) < 1e-3
def test_repr(self):
c1 = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2022, 1, 2): 0.99997260, # 1%
dt(2022, 1, 3): 0.99991781, # 2%
dt(2022, 1, 4): 0.99983564, # 3%
dt(2022, 1, 5): 0.99972608, # 4%
},
convention="Act365F",
)
c2 = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2022, 1, 2): 0.99989042, # 4%
dt(2022, 1, 3): 0.99980825, # 3%
dt(2022, 1, 4): 0.99975347, # 2%
dt(2022, 1, 5): 0.99972608, # 1%
},
convention="Act365F",
)
c3 = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2022, 1, 2): 0.99989042, # 4%
dt(2022, 1, 3): 0.99979455, # 3.5%
dt(2022, 1, 4): 0.99969869, # 3.5%
dt(2022, 1, 5): 0.99958915, # 4%
},
convention="Act365F",
)
curve = MultiCsaCurve([c1, c2, c3])
expected = f""
assert expected == curve.__repr__()
assert isinstance(curve.id, str)
def test_typing_as_base_curve(self):
c1 = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2022, 1, 2): 0.99997260, # 1%
dt(2022, 1, 3): 0.99991781, # 2%
dt(2022, 1, 4): 0.99983564, # 3%
dt(2022, 1, 5): 0.99972608, # 4%
},
convention="Act365F",
)
c2 = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2022, 1, 2): 0.99989042, # 4%
dt(2022, 1, 3): 0.99980825, # 3%
dt(2022, 1, 4): 0.99975347, # 2%
dt(2022, 1, 5): 0.99972608, # 1%
},
convention="Act365F",
)
c3 = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2022, 1, 2): 0.99989042, # 4%
dt(2022, 1, 3): 0.99979455, # 3.5%
dt(2022, 1, 4): 0.99969869, # 3.5%
dt(2022, 1, 5): 0.99958915, # 4%
},
convention="Act365F",
)
curve = MultiCsaCurve([c1, c2, c3])
assert isinstance(curve, _BaseCurve)
@pytest.mark.parametrize(
("method", "args"),
[
("rate", (dt(2022, 1, 1), "1d")),
("roll", ("10d",)),
("translate", (dt(2022, 1, 10),)),
("shift", (10.0, "id")),
("__getitem__", (dt(2022, 1, 10),)),
],
)
def test_multi_csa_curve_precheck_cache(self, method, args) -> None:
# test precache_check on shift
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999})
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.998})
cc = MultiCsaCurve([c1, c2])
cc._cache[dt(1980, 1, 1)] = 100.0
# mutate a curve to trigger cache id clear
c1._set_node_vector([0.99], 0)
getattr(cc, method)(*args)
assert dt(1980, 1, 1) not in cc._cache
def test_multi_csa_curve_add_to_cache(self):
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2052, 2, 1): 0.9})
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2052, 2, 1): 0.8})
cc = MultiCsaCurve([c1, c2])
cc[dt(2052, 2, 1)]
assert len(cc._cache) == 31
class TestProxyCurve:
def test_repr(self) -> None:
fxr1 = FXRates({"usdeur": 0.95}, dt(2022, 1, 3))
fxr2 = FXRates({"usdcad": 1.1}, dt(2022, 1, 2))
fxf = FXForwards(
[fxr1, fxr2],
{
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 0.95}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 1.0}),
"eurusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 0.99}),
"cadusd": Curve({dt(2022, 1, 1): 1.00, dt(2022, 10, 1): 0.97}),
"cadcad": Curve({dt(2022, 1, 1): 1.00, dt(2022, 10, 1): 0.969}),
},
)
curve = fxf.curve("cad", "eur")
expected = f""
assert curve.__repr__() == expected
assert isinstance(curve.id, str)
def test_typing_as_basecurve(self):
fxr1 = FXRates({"usdeur": 0.95}, dt(2022, 1, 3))
fxr2 = FXRates({"usdcad": 1.1}, dt(2022, 1, 2))
fxf = FXForwards(
[fxr1, fxr2],
{
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 0.95}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 1.0}),
"eurusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 0.99}),
"cadusd": Curve({dt(2022, 1, 1): 1.00, dt(2022, 10, 1): 0.97}),
"cadcad": Curve({dt(2022, 1, 1): 1.00, dt(2022, 10, 1): 0.969}),
},
)
curve = fxf.curve("cad", "eur")
assert isinstance(curve, _BaseCurve)
def test_cache_is_validated_on_getitem_and_lookup(self):
fxr1 = FXRates({"usdeur": 0.95}, dt(2022, 1, 3))
fxr2 = FXRates({"usdcad": 1.1}, dt(2022, 1, 2))
fxf = FXForwards(
[fxr1, fxr2],
{
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 0.95}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 1.0}),
"eurusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 0.99}),
"cadusd": Curve({dt(2022, 1, 1): 1.00, dt(2022, 10, 1): 0.97}),
"cadcad": Curve({dt(2022, 1, 1): 1.00, dt(2022, 10, 1): 0.969}),
},
)
curve = fxf.curve("cad", "eur")
assert curve._state == fxf._state
fxr1.update({"usdeur": 100000000.0})
fxf.curve("eur", "eur")._set_node_vector([0.5], 1)
state1 = fxf._state
# performing an action on the proxy curve will validate and update states
curve[dt(2022, 1, 9)]
state2 = fxf._state
assert state1 != state2
fxr1.update({"usdeur": 10.0})
fxf.curve("eur", "eur")._set_node_vector([0.6], 1)
state3 = curve._state
assert state3 == state2 # becuase no method validation has yet occurred
def test_update(self):
fxr1 = FXRates({"usdeur": 0.95}, dt(2022, 1, 3))
fxr2 = FXRates({"usdcad": 1.1}, dt(2022, 1, 2))
fxf = FXForwards(
[fxr1, fxr2],
{
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 0.95}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 1.0}),
"eurusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 0.99}),
"cadusd": Curve({dt(2022, 1, 1): 1.00, dt(2022, 10, 1): 0.97}),
"cadcad": Curve({dt(2022, 1, 1): 1.00, dt(2022, 10, 1): 0.969}),
},
)
curve = fxf.curve("cad", "eur")
with pytest.raises(AttributeError):
curve.update_meta("h", 100.0)
with pytest.raises(AttributeError):
curve.update_node("h", 100.0)
with pytest.raises(AttributeError):
curve.update("h", 100.0)
class TestPlotCurve:
def test_plot_curve(self, curve) -> None:
fig, ax, lines = curve.plot("1d")
result = lines[0].get_data()
assert result[0][0] == dt(2022, 3, 1)
assert abs(result[1][0].real - 12.004001333774994) < 1e-6
plt.close("all")
def test_plot_linecurve(self, line_curve) -> None:
fig, ax, lines = line_curve.plot("0d")
result = lines[0].get_data()
assert result[0][0] == dt(2022, 3, 1)
assert abs(result[1][0].real - 2.0) < 1e-6
plt.close("all")
@pytest.mark.parametrize("left", ["1d", dt(2022, 3, 2)])
def test_plot_curve_left(self, curve, left) -> None:
fig, ax, lines = curve.plot("1d", left=left)
result = lines[0].get_data()
assert result[0][0] == dt(2022, 3, 2)
assert abs(result[1][0].real - 12.008005336896055) < 1e-6
plt.close("all")
def test_plot_curve_left_raise(self, curve) -> None:
with pytest.raises(ValueError, match="`left` must be supplied as"):
fig, ax, lines = curve.plot("1d", left=100.3)
plt.close("all")
@pytest.mark.parametrize("right", ["2d", dt(2022, 3, 3)])
def test_plot_curve_right(self, curve, right) -> None:
fig, ax, lines = curve.plot("1d", right=right)
result = lines[0].get_data()
assert result[0][-1] == dt(2022, 3, 3)
assert abs(result[1][-1].real - 12.012012012015738) < 1e-6
plt.close("all")
def test_plot_curve_right_raise(self, curve) -> None:
with pytest.raises(ValueError, match="`right` must be supplied as"):
fig, ax, lines = curve.plot("1d", right=100.3)
plt.close("all")
def test_plot_comparators(self, curve) -> None:
fig, ax, lines = curve.plot("1d", comparators=[curve])
assert len(lines) == 2
res1 = lines[0].get_data()
res2 = lines[1].get_data()
assert res1[0][0] == res2[0][0]
assert res1[1][0] == res2[1][0]
plt.close("all")
def test_plot_diff(self, curve) -> None:
fig, ax, lines = curve.plot("1d", comparators=[curve], difference=True)
assert len(lines) == 1
result = lines[0].get_data()
assert result[0][0] == dt(2022, 3, 1)
assert result[1][0] == 0
plt.close("all")
@pytest.mark.parametrize("left", [NoInput(0), dt(2022, 1, 1), "0d"])
@pytest.mark.parametrize("right", [NoInput(0), dt(2022, 2, 1), "0d"])
def test_plot_index(self, left, right) -> None:
i_curve = Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 1.0}, index_base=2.0)
fig, ax, lines = i_curve.plot_index(left=left, right=right)
result = lines[0].get_data()
assert result[0][0] == dt(2022, 1, 1)
assert abs(result[1][0].real - 2.0) < 1e-6
plt.close("all")
def test_plot_index_comparators(self) -> None:
i_curve = Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 1.0}, index_base=2.0)
i_curv2 = Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 1.0}, index_base=2.0)
fig, ax, lines = i_curve.plot_index(comparators=[i_curv2])
assert len(lines) == 2
res1 = lines[0].get_data()
res2 = lines[1].get_data()
assert res1[0][0] == res2[0][0]
assert res1[1][0] == res2[1][0]
plt.close("all")
def test_plot_index_diff(self) -> None:
i_curv = Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 1.0}, index_base=2.0)
i_curv2 = Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 1.0}, index_base=2.0)
fig, ax, lines = i_curv.plot_index("1d", comparators=[i_curv2], difference=True)
assert len(lines) == 1
result = lines[0].get_data()
assert result[0][0] == dt(2022, 1, 1)
assert result[1][0] == 0
plt.close("all")
def test_plot_index_raises(self) -> None:
i_curve = Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 1.0}, index_base=2.0)
with pytest.raises(ValueError, match="`left` must be supplied as"):
i_curve.plot_index(left=2.0)
with pytest.raises(ValueError, match="`right` must be supplied as"):
i_curve.plot_index(right=2.0)
def test_composite_curve_plot(self) -> None:
curve1 = Curve({dt(2022, 1, 1): 1.0, dt(2022, 12, 1): 0.95}, modifier="MF", calendar="bus")
curve2 = Curve({dt(2022, 1, 1): 1.0, dt(2022, 12, 1): 0.97}, modifier="MF", calendar="bus")
cc = CompositeCurve(curves=[curve1, curve2])
cc.plot("1m")
def test_plot_a_rolled_spline_curve(self) -> None:
curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.988,
dt(2024, 1, 1): 0.975,
dt(2025, 1, 1): 0.965,
dt(2026, 1, 1): 0.955,
dt(2027, 1, 1): 0.9475,
},
t=[
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
dt(2027, 1, 1),
],
)
rolled_curve = curve.roll("6m")
rolled_curve2 = curve.roll("-6m")
curve.plot(
"1d",
comparators=[rolled_curve, rolled_curve2],
labels=["orig", "rolled", "rolled2"],
right=dt(2026, 6, 30),
)
usd_curve = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2022, 7, 1): 0.98, dt(2023, 1, 1): 0.95},
calendar="nyc",
id="sofr",
)
usd_args = dict(effective=dt(2022, 1, 1), spec="usd_irs", curves="sofr")
Solver(
curves=[usd_curve],
instruments=[
IRS(**usd_args, termination="6M"),
IRS(**usd_args, termination="1Y"),
],
s=[4.35, 4.85],
instrument_labels=["6M", "1Y"],
id="us_rates",
)
usd_curve.plot("1b", labels=["SOFR o/n"])
class TestStateAndCache:
@pytest.mark.parametrize(
"curve",
[
Curve(nodes={dt(2000, 1, 1): 1.0, dt(2002, 1, 1): 0.99}),
LineCurve(nodes={dt(2000, 1, 1): 1.0, dt(2002, 1, 1): 0.99}),
Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.98,
},
index_base=200.0,
),
],
)
@pytest.mark.parametrize(("method", "args"), [("_set_ad_order", (1,))])
def test_method_does_not_change_state(self, curve, method, args):
before = curve._state
getattr(curve, method)(*args)
after = curve._state
assert before == after
@pytest.mark.parametrize(
"curve",
[
Curve(nodes={dt(2000, 1, 1): 1.0, dt(2002, 1, 1): 0.99, dt(2003, 1, 1): 0.98}),
LineCurve(nodes={dt(2000, 1, 1): 1.0, dt(2002, 1, 1): 0.99}),
Curve(
nodes={
dt(2000, 1, 1): 1.0,
dt(2002, 1, 1): 0.98,
},
index_base=200.0,
),
],
)
@pytest.mark.parametrize(
("method", "args"),
[
("_set_node_vector", ([0.99, 0.98], 1)),
("update_node", (dt(2002, 1, 1), 0.98)),
("update", ({dt(2000, 1, 1): 1.0, dt(2002, 1, 1): 0.99},)),
("csolve", tuple()),
],
)
def test_method_changes_state(self, curve, method, args):
before = curve._state
getattr(curve, method)(*args)
after = curve._state
assert before != after
@pytest.mark.parametrize(
"curve",
[
Curve(nodes={dt(2000, 1, 1): 1.0, dt(2002, 1, 1): 0.99}),
LineCurve(nodes={dt(2000, 1, 1): 1.0, dt(2002, 1, 1): 0.99}),
Curve(
nodes={
dt(2000, 1, 1): 1.0,
dt(2002, 1, 1): 0.98,
},
index_base=200.0,
),
],
)
def test_populate_cache(self, curve):
assert curve._cache == {}
curve[dt(2000, 5, 1)]
assert dt(2000, 5, 1) in curve._cache
@pytest.mark.parametrize(
"curve",
[
Curve(nodes={dt(2000, 1, 1): 1.0, dt(2002, 1, 1): 0.99, dt(2003, 1, 1): 0.98}),
LineCurve(nodes={dt(2000, 1, 1): 1.0, dt(2002, 1, 1): 0.99}),
Curve(
nodes={
dt(2000, 1, 1): 1.0,
dt(2002, 1, 1): 0.98,
},
index_base=200.0,
),
],
)
@pytest.mark.parametrize(
("method", "args"),
[
("_set_node_vector", ([0.99, 0.98], 1)),
("update_node", (dt(2002, 1, 1), 0.98)),
("update", ({dt(2000, 1, 1): 1.0, dt(2002, 1, 1): 0.99},)),
("csolve", tuple()),
("_set_ad_order", (1,)),
],
)
def test_method_clears_cache(self, curve, method, args):
curve[dt(2000, 5, 1)]
assert dt(2000, 5, 1) in curve._cache
getattr(curve, method)(*args)
assert curve._cache == {}
@pytest.mark.parametrize("Klass", [CompositeCurve, MultiCsaCurve])
def test_composite_curve_validation_cache_clearing_and_state(self, Klass):
# test that a composite curve will validate and clear its cache
# and following that update its own state to its composited state
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2024, 1, 1): 0.95})
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2024, 1, 1): 0.90})
cc = Klass([c1, c2])
cc_state_pre = cc._state
# get a value and check the cache
cc_result_pre = cc[dt(2022, 6, 1)]
_ = cc[dt(2022, 6, 30)]
assert dt(2022, 6, 1) in cc._cache
assert dt(2022, 6, 30) in cc._cache
# update an underlying curve
c2.update_node(dt(2024, 1, 1), 0.85)
# check the cache is cleared when using a get using
cc_result_post = cc[dt(2022, 6, 1)]
assert cc_result_post < cc_result_pre
# check that the state of the composite curve has changed
cc_state_post = cc._state
assert cc_state_pre != cc_state_post
assert cc_state_post == cc._get_composited_state()
# check that the cache is correct
assert dt(2022, 6, 1) in cc._cache
assert dt(2022, 6, 30) not in cc._cache
def test_max_cache_size(self):
with default_context("curve_caching_max", 3):
curve = Curve({dt(2022, 1, 1): 1.0, dt(2024, 1, 1): 0.95})
assert curve._cache_len == 0
curve[dt(2022, 2, 1)]
assert curve._cache_len == 1
curve[dt(2022, 3, 1)]
assert curve._cache_len == 2
curve[dt(2022, 4, 1)]
assert curve._cache_len == 3
curve[dt(2022, 5, 1)]
assert curve._cache_len == 3
assert dt(2022, 2, 1) not in curve._cache
assert dt(2022, 3, 1) in curve._cache
assert dt(2022, 4, 1) in curve._cache
assert dt(2022, 5, 1) in curve._cache
class TestIndexValue:
def test_dict_raise(self):
with pytest.raises(
NotImplementedError, match="`index_curve` cannot currently be supplied as dict"
):
index_value(0, "curve", NoInput(0), 0, {"a": 0, "b": 0})
def test_return_index_fixings_directly(self):
assert index_value(0, "curve", 2.5, NoInput(0), NoInput(0)) == 2.5
assert index_value(0, "curve", Dual(2, ["a"], []), NoInput(0), NoInput(0)) == Dual(
2, ["a"], []
)
@pytest.mark.parametrize("method", ["curve", "daily"])
def test_forecast_from_curve_no_fixings(self, method):
# these methods should be identical when using "linear_index" interpolation directly on the
# curve and parametrising the curve nodes with the start of month dates. See next test.
curve = Curve(
{dt(2000, 1, 1): 1.0, dt(2000, 2, 1): 0.99},
index_base=100.0,
index_lag=0,
interpolation="linear_index",
)
result = index_value(0, method, NoInput(0), dt(2000, 1, 15), curve)
expected = 100.0 / curve[dt(2000, 1, 15)]
assert abs(result - expected) < 1e-9
def test_forecast_from_curve_no_fixings_methods_identical(self):
curve = Curve(
{dt(2000, 1, 1): 1.0, dt(2000, 2, 1): 0.99},
index_base=100.0,
index_lag=0,
interpolation="linear_index",
)
result1 = index_value(0, "curve", NoInput(0), dt(2000, 1, 15), curve)
result2 = index_value(0, "daily", NoInput(0), dt(2000, 1, 15), curve)
assert abs(result1 - result2) < 1e-9
@pytest.mark.parametrize("date", [dt(2000, 2, 1), dt(2000, 2, 27)])
def test_forecast_from_curve_no_fixings_monthly(self, date):
# monthly interpolation should only require the date of 1st Feb from the curve
curve = Curve(
{dt(2000, 1, 1): 1.0, dt(2000, 2, 1): 0.99},
index_base=100.0,
index_lag=0,
interpolation="linear_index",
)
result = index_value(0, "monthly", NoInput(0), date, curve)
expected = 100.0 / curve[dt(2000, 2, 1)]
assert abs(result - expected) < 1e-9
@pytest.mark.parametrize("method", ["curve", "daily", "monthly"])
def test_no_input_return_result_err(self, method):
assert _try_index_value(0, method, NoInput(0), dt(2000, 1, 1), NoInput(0)).is_err
@pytest.mark.parametrize("method", ["curve", "daily", "monthly"])
def test_fixings_type_raises(self, method):
with pytest.raises(TypeError, match="`index_fixings` must be of type: Str, Series, DualTy"):
index_value(0, method, [1, 2], dt(2000, 1, 1), NoInput(0))
def test_no_index_date_raises(self):
with pytest.raises(ValueError, match="Must supply an `index_date` from whic"):
index_value(0, "curve", NoInput(0), NoInput(0), NoInput(0))
def test_non_zero_index_lag_with_curve_method_raises(self):
ser = Series([1.0], index=[dt(2000, 1, 1)])
fixings.add("1234FGFS6", ser)
with pytest.raises(ValueError, match="`index_lag` must be zero when using a 'Curve' `inde"):
index_value(
index_lag=4,
index_method="curve",
index_fixings="1234FGFS6",
index_date=dt(2000, 1, 1),
index_curve=NoInput(0),
)
fixings.pop("1234FGFS6")
def test_documentation_uk_dmo_replication(self):
# this is an example in the index value documentation
rpi_series = Series(
[172.2, 173.1, 174.2, 174.4],
index=[dt(2001, 3, 1), dt(2001, 4, 1), dt(2001, 5, 1), dt(2001, 6, 1)],
)
result = index_value(
index_lag=3, index_method="daily", index_fixings=rpi_series, index_date=dt(2001, 7, 20)
)
expected = 173.77419
assert abs(result - expected) < 5e-6
def test_no_input_return_if_future_based(self):
# the requested date is beyond the ability of the fixings series and no curve is provided
rpi_series = Series([172.2, 173.1], index=[dt(2001, 3, 1), dt(2001, 4, 1)])
res1 = _try_index_value(0, "curve", rpi_series, dt(2001, 4, 2))
assert res1.is_err
res2 = _try_index_value(0, "curve", rpi_series, dt(2001, 4, 1))
assert res2.is_ok
def test_mixed_forecast_value_fixings_with_curve(self):
rpi = Series([100.0], index=[dt(2000, 1, 1)])
curve = Curve({dt(2000, 1, 1): 1.0, dt(2000, 4, 1): 0.99}, index_base=110.0, index_lag=0)
date = dt(2000, 5, 15)
rpi_2 = 110 * 1.0 / curve[dt(2000, 2, 1)]
expected = 100.0 + (14 / 31) * (rpi_2 - 100.0)
result = index_value(4, "daily", rpi, date, curve)
assert abs(result - expected) < 1e-9
def test_mixed_forecast_value_fixings_with_curve2(self):
rpi = Series([100.0], index=[dt(2000, 1, 1)])
curve = Curve(
nodes={dt(2000, 2, 1): 1.0, dt(2000, 5, 1): 0.99}, index_base=110.0, index_lag=1
)
date = dt(2000, 5, 15)
rpi_2 = 110 * 1.0 / curve[dt(2000, 3, 1)]
expected = 100.0 + (14 / 31) * (rpi_2 - 100.0)
result = index_value(4, "daily", rpi, date, curve)
assert abs(result - expected) < 1e-9
def test_keyerror_for_series_using_curve_method(self):
rpi = Series([9.0, 8.0], index=[dt(1999, 1, 1), dt(2000, 1, 1)])
with pytest.raises(FixingMissingDataError, match="Fixing lookup for date "):
index_value(0, "curve", rpi, dt(1999, 12, 31), NoInput(0))
def test_daily_method_returns_directly_if_date_som(self):
rpi = Series([100.0], index=[dt(2000, 1, 1)])
assert index_value(0, "daily", rpi, dt(2000, 1, 1), NoInput(0)) == 100.0
def test_daily_method_returns_err_if_data_unavailable(self):
rpi = Series([100.0], index=[dt(2000, 1, 1)])
res = _try_index_value(0, "daily", rpi, dt(2000, 1, 2), NoInput(0))
assert res.is_err
def test_curve_method_from_curve_with_non_zero_index_lag(self):
curve = Curve(
nodes={dt(2000, 1, 1): 1.0, dt(2000, 2, 1): 0.99},
index_base=100.0,
index_lag=1,
)
result = index_value(1, "curve", NoInput(0), dt(2000, 1, 15), curve)
expected = 100.0 / curve[dt(2000, 1, 15)]
assert abs(result - expected) < 1e-9
@pytest.mark.parametrize(
("curve", "exp"),
[
(NoInput(0), Err),
(
Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.99}, index_base=100.0, index_lag=0),
Ok,
),
],
)
def test_series_len_zero(self, curve, exp):
s = Series(data=[], index=[], dtype=float)
result = _try_index_value(0, "curve", s, dt(2000, 1, 1), curve)
assert isinstance(result, exp)
def test_series_and_curve_aligns_with_som_date(self):
# the relevant value can be directly matched on the Series
s = Series(data=[100.0], index=[dt(2000, 1, 1)])
c = Curve({dt(2001, 1, 1): 1.0, dt(2002, 1, 1): 0.99}, index_base=100.0, index_lag=2)
result = index_value(1, "daily", s, dt(2000, 2, 1), c)
assert result == 100.0
def test_mixed_series_and_curve(self):
# the relevant value can be directly matched on the Series
s = Series(
data=[100.0, 200.0, 300.0], index=[dt(2000, 1, 1), dt(2000, 2, 1), dt(2000, 3, 1)]
)
c = Curve({dt(2001, 1, 1): 1.0, dt(2002, 1, 1): 0.99}, index_base=100.0, index_lag=2)
result = index_value(0, "curve", s, dt(2000, 2, 1), c)
assert result == 200.0
def test_mixed_series_and_curve_inside_range_raises(self):
s = Series(
data=[100.0, 200.0, 300.0], index=[dt(2000, 1, 1), dt(2000, 2, 1), dt(2000, 3, 1)]
)
c = Curve({dt(2001, 1, 1): 1.0, dt(2002, 1, 1): 0.99}, index_base=100.0, index_lag=2)
with pytest.raises(ValueError, match="The Series given for `index_fixings` requires, but"):
index_value(0, "curve", s, dt(2000, 2, 15), c)
def test_mixed_series_and_curve_inside_range_reverts_to_curve_due_to_lag(self):
s = Series(
data=[100.0, 200.0, 300.0], index=[dt(2000, 1, 1), dt(2000, 2, 1), dt(2000, 3, 1)]
)
c = Curve({dt(2001, 1, 1): 1.0, dt(2002, 1, 1): 0.99}, index_base=100.0, index_lag=1)
with pytest.warns(UserWarning):
# this warning exists when a curve returns 0.0 and the date is prior to curve start
index_value(1, "curve", s, dt(2000, 2, 15), c)
def test_mixed_series_and_curve_outside_range(self):
s = Series(
data=[100.0, 200.0, 300.0], index=[dt(2000, 1, 1), dt(2000, 2, 1), dt(2000, 3, 1)]
)
c = Curve({dt(2001, 1, 1): 1.0, dt(2002, 1, 1): 0.99}, index_base=100.0, index_lag=2)
with pytest.raises(ValueError, match="The Series given for `index_fixings` requires, but"):
index_value(0, "curve", s, dt(2000, 2, 15), c)
def test_mixed_series_and_curve_raises_on_lag(self):
s = Series(
data=[100.0, 200.0, 300.0], index=[dt(2000, 1, 1), dt(2000, 2, 1), dt(2000, 3, 1)]
)
c = Curve({dt(2001, 1, 1): 1.0, dt(2002, 1, 1): 0.99}, index_base=100.0, index_lag=2)
with pytest.raises(
ValueError, match="`index_lag` must be zero when using a 'curve' `index"
):
index_value(1, "curve", s, dt(2000, 2, 1), c)
class TestCurveSpline:
@pytest.mark.parametrize("endpoints", [("natural", "natural"), ("not-a-knot", "natural")])
@pytest.mark.parametrize("c", [NoInput(0), [1.0, 1.0, 1.0, 1.0, 1.0, 1.0]])
def test_equality(self, endpoints, c):
t = [
dt(2000, 1, 1),
dt(2000, 1, 1),
dt(2000, 1, 1),
dt(2000, 1, 1),
dt(2001, 1, 1),
dt(2001, 6, 1),
dt(2002, 1, 1),
dt(2002, 1, 1),
dt(2002, 1, 1),
dt(2002, 1, 1),
]
a = _CurveSpline(t=t, endpoints=endpoints)
b = _CurveSpline(t=t, endpoints=endpoints)
assert a == b
@pytest.mark.parametrize("differ", ["t", "end"])
def test_inequality(self, differ):
t = [
dt(2000, 1, 1),
dt(2000, 1, 1),
dt(2000, 1, 1),
dt(2000, 1, 1),
dt(2001, 1, 1),
dt(2001, 6, 1),
dt(2002, 1, 1),
dt(2002, 1, 1),
dt(2002, 1, 1),
dt(2002, 1, 1),
]
t_diff = [
dt(2000, 1, 1),
dt(2000, 1, 1),
dt(2000, 1, 1),
dt(2000, 1, 1),
dt(2001, 1, 1),
dt(2001, 7, 1),
dt(2002, 1, 1),
dt(2002, 1, 1),
dt(2002, 1, 1),
dt(2002, 1, 1),
]
end = ("natural", "natural")
end_diff = ("natural", "not-a-knot")
a = _CurveSpline(t=t, endpoints=end)
if differ == "t":
b = _CurveSpline(t=t_diff, endpoints=end)
else:
b = _CurveSpline(t=t, endpoints=end_diff)
assert a != b
assert a != 10.0
class Test_CreditImpliedCurve:
def test_credit_implied_rates(self):
risk_free = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98})
hazard = Curve(
nodes={dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.95},
credit_recovery_rate=Variable(0.4, ["RR"]),
)
implied = CreditImpliedCurve(risk_free=risk_free, hazard=hazard, id="my-id")
assert implied.id == "my-id"
rate1 = risk_free.rate(dt(2000, 2, 1), "1b")
rate2 = hazard.rate(dt(2000, 2, 1), "1b")
result = implied.rate(dt(2000, 2, 1), "1b")
approximate = rate1 + rate2 * (1 - 0.4)
assert abs(result - approximate) < 1e-9
def test_risk_free_rates(self):
credit = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98})
hazard = Curve(
nodes={dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.95},
credit_recovery_rate=Variable(0.4, ["RR"]),
)
implied = CreditImpliedCurve(credit=credit, hazard=hazard)
rate1 = credit.rate(dt(2000, 2, 1), "1b")
rate2 = hazard.rate(dt(2000, 2, 1), "1b")
result = implied.rate(dt(2000, 2, 1), "1b")
approximate = rate1 - rate2 * (1 - 0.4)
assert abs(result - approximate) < 1e-9
def test_hazard_rates(self):
risk_free = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98})
credit = Curve(
nodes={dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.95},
credit_recovery_rate=Variable(0.4, ["RR"]),
)
implied = CreditImpliedCurve(credit=credit, risk_free=risk_free)
rate1 = credit.rate(dt(2000, 2, 1), "1b")
rate2 = risk_free.rate(dt(2000, 2, 1), "1b")
result = implied.rate(dt(2000, 2, 1), "1b")
approximate = (rate1 - rate2) / (1 - 0.4)
assert abs(result - approximate) < 1e-9
def test_round_trip_hazard(self):
risk_free = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98})
credit = Curve(
nodes={dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.95},
credit_recovery_rate=Variable(0.4, ["RR"]),
)
implied = CreditImpliedCurve(credit=credit, risk_free=risk_free)
credit_implied = CreditImpliedCurve(hazard=implied, risk_free=risk_free)
rate1 = credit.rate(dt(2000, 2, 1), "1b")
rate2 = credit_implied.rate(dt(2000, 2, 1), "1b")
assert abs(rate1 - rate2) < 1e-9
def test_round_trip_credit(self):
risk_free = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98})
hazard = Curve(
nodes={dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.95},
credit_recovery_rate=Variable(0.4, ["RR"]),
)
implied = CreditImpliedCurve(hazard=hazard, risk_free=risk_free)
hazard_implied = CreditImpliedCurve(credit=implied, risk_free=risk_free)
rate1 = hazard.rate(dt(2000, 2, 1), "1b")
rate2 = hazard_implied.rate(dt(2000, 2, 1), "1b")
assert abs(rate1 - rate2) < 1e-9
def test_meta_dynacism(self):
risk_free = Curve(
{dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98},
)
hazard = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98}, credit_recovery_rate=0.25)
credit = CreditImpliedCurve(risk_free=risk_free, hazard=hazard)
result = credit.rate(dt(2000, 1, 10), "10b")
expected = 2.0 + 2.0 * 0.75
assert abs(result - expected) < 3e-2
hazard.update_meta("credit_recovery_rate", 0.90)
result = credit.rate(dt(2000, 1, 10), "10b")
expected = 2.0 + 2.0 * 0.1
assert abs(result - expected) < 2e-2
def test_meta_dynacism2(self):
risk_free = Curve(
{dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98},
)
hazard = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98}, credit_recovery_rate=0.25)
credit = CreditImpliedCurve(risk_free=risk_free, hazard=hazard)
hazard.update_meta("credit_recovery_rate", 0.90)
result = credit.meta.credit_recovery_rate
expected = 0.90
assert abs(result - expected) < 1e-12
class TestMeta:
def test_meta_mutation(self, curve, line_curve):
# test all the rateslib curve types metas can be mutated
curves = [curve, line_curve]
dependent_curves = []
dependent_curves.append(CompositeCurve([curve, curve]))
dependent_curves.append(curve.shift(10))
dependent_curves.append(curve.roll("10d"))
dependent_curves.append(curve.translate(dt(2022, 3, 14)))
dependent_curves.append(MultiCsaCurve([curve, curve]))
fxf = FXForwards(
FXRates({"eurusd": 1.10}, dt(2022, 3, 1)),
{"eureur": curve, "eurusd": curve, "usdusd": curve},
)
dependent_curves.append(fxf.curve("usd", "eur"))
dependent_curves.append(CreditImpliedCurve(risk_free=curve, hazard=curve))
for c in dependent_curves + curves:
from random import random
x = int(random() * 100.0)
c.meta._credit_discretization = x
assert c.meta.credit_discretization == x
curve.update_meta("credit_recovery_rate", 500.0)
for c in dependent_curves:
print(c)
assert c.meta.credit_recovery_rate == 500.0
================================================
FILE: python/tests/curves/test_curvesrs.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
import math
from datetime import datetime as dt
import pytest
from rateslib.curves.rs import (
CurveObj,
CurveRs,
FlatBackwardInterpolator,
FlatForwardInterpolator,
LinearInterpolator,
LinearZeroRateInterpolator,
LogLinearInterpolator,
_get_convention,
_get_interpolator,
)
from rateslib.dual import Dual2
from rateslib.dual.utils import ADOrder, _get_adorder
from rateslib.rs import Convention, Modifier
from rateslib.scheduling import get_calendar
from rateslib.serialization import from_json
@pytest.fixture
def curve():
return CurveObj(
nodes={
dt(2022, 3, 1): 1.00,
dt(2022, 3, 31): 0.99,
},
interpolator=_get_interpolator("linear"),
id="v",
ad=_get_adorder(1),
convention=_get_convention("Act360"),
modifier=Modifier.ModF,
calendar=get_calendar("all"),
)
@pytest.fixture
def curvers():
return CurveRs(
nodes={
dt(2022, 3, 1): 1.00,
dt(2022, 3, 31): 0.99,
},
interpolation="log_linear",
id="v",
ad=1,
)
@pytest.fixture
def indexcurvers():
return CurveObj(
nodes={
dt(2022, 3, 1): 1.00,
dt(2022, 3, 31): 0.99,
},
interpolator=_get_interpolator("linear"),
id="v",
ad=_get_adorder(1),
convention=_get_convention("Act360"),
modifier=Modifier.ModF,
calendar=get_calendar("all"),
index_base=100.0,
)
@pytest.mark.parametrize(
("name", "expected"),
[
("linear", LinearInterpolator),
("log_linear", LogLinearInterpolator),
("linear_zero_rate", LinearZeroRateInterpolator),
("flat_forward", FlatForwardInterpolator),
("flat_backward", FlatBackwardInterpolator),
],
)
def test_get_interpolator(name, expected) -> None:
result = _get_interpolator(name)
assert type(result) is expected
@pytest.mark.parametrize(
"name",
[
"linear",
"log_linear",
"linear_zero_rate",
"flat_forward",
"flat_backward",
],
)
def test_pickle_interpolator(name) -> None:
import pickle
obj = _get_interpolator(name)
bytes_ = pickle.dumps(obj)
pickle.loads(bytes_)
def test_get_interpolation(curve) -> None:
result = curve.interpolation
assert result == "linear"
def test_get_modifier(curvers) -> None:
result = curvers.modifier
assert result == "MF"
def test_get_convention(curvers) -> None:
result = curvers.convention
assert result == "Act360"
def test_get_ad(curvers) -> None:
result = curvers.ad
assert result == 1
def test_get_interpolator_raises() -> None:
with pytest.raises(ValueError, match="Interpolator `name` is invalid"):
_get_interpolator("bad")
def test_get_item(curve, curvers) -> None:
result = curve[dt(2022, 3, 16)]
assert abs(result - 0.995) < 1e-14
result = curvers[dt(2022, 3, 16)]
expected = math.log(1.0) + (16 - 1) / (31 - 1) * (math.log(0.99) - math.log(1.0))
expected = math.exp(expected)
assert abs(result - expected) < 1e-14
def test_json_round_trip(curvers) -> None:
json = curvers.to_json()
curve2 = from_json(json)
assert curvers == curve2
@pytest.mark.parametrize(
"kind",
[
"linear",
"log_linear",
"linear_zero_rate",
"flat_forward",
"flat_backward",
],
)
def test_interp_constructs(kind) -> None:
result = CurveRs(
nodes={
dt(2022, 3, 1): 1.00,
dt(2022, 3, 31): 0.99,
},
interpolation=kind,
id="v",
ad=1,
)
assert isinstance(result, CurveRs)
def test_index_value(indexcurvers) -> None:
result = indexcurvers.index_value(dt(2022, 3, 31))
assert abs(result - 100.0 / 0.99) < 1e-12
def test_set_ad_order(curvers) -> None:
curvers._set_ad_order(2)
assert curvers.nodes == {
dt(2022, 3, 1): Dual2(1.0, ["v0"], [], []),
dt(2022, 3, 31): Dual2(0.99, ["v1"], [], []),
}
def test_pickle(curvers) -> None:
import pickle
obj = pickle.dumps(curvers)
pickle.loads(obj)
================================================
FILE: python/tests/curves/test_ns.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
import numpy as np
import pytest
from rateslib.curves.academic import NelsonSiegelCurve
from rateslib.dual import Dual2
from rateslib.scheduling import Convention
def test_init():
ns = NelsonSiegelCurve(
dates=(dt(2000, 1, 1), dt(2030, 1, 1)),
parameters=(0.01, 0.01, 0.05, 1.0),
)
result = ns.rate(dt(2001, 1, 1), "1b")
expected = 3.206911736865603
assert abs(result - expected) < 1e-5
assert ns.meta.convention == Convention.ActActISDA
def test_cache():
ns = NelsonSiegelCurve(
dates=(dt(2000, 1, 1), dt(2030, 1, 1)),
parameters=(0.01, 0.01, 0.05, 1.0),
)
ns.rate(dt(2001, 1, 1), "1b")
assert dt(2001, 1, 1) in ns._cache
old_state = ns._state
ns._set_node_vector([1.0, 1.0, 1.0, 1.0], 0)
assert ns._state != old_state
assert dt(2001, 1, 1) not in ns._cache
def test_special_domain():
ns = NelsonSiegelCurve(
dates=(dt(2000, 1, 1), dt(2030, 1, 1)),
parameters=(0.01, 0.01, 0.05, 1.0),
)
assert ns[dt(2000, 1, 1)] == 1.0
assert ns[dt(1999, 12, 31)] == 0.0
def test_getters():
ns = NelsonSiegelCurve(
dates=(dt(2000, 1, 1), dt(2030, 1, 1)),
parameters=(1.0, 2.0, 3.0, 4.0),
id="v",
)
assert all(ns._get_node_vector() == np.array([1.0, 2.0, 3.0, 4.0]))
assert ns._get_node_vars() == ("v0", "v1", "v2", "v3")
def test_set_ad_order():
ns = NelsonSiegelCurve(
dates=(dt(2000, 1, 1), dt(2030, 1, 1)),
parameters=(1.0, 2.0, 3.0, 4.0),
id="v",
ad=2,
)
assert isinstance(ns.params[0], Dual2)
ns._set_ad_order(2) # does nothing
assert isinstance(ns.params[0], Dual2)
with pytest.raises(ValueError):
ns._set_ad_order(3)
================================================
FILE: python/tests/curves/test_nss.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
import numpy as np
import pytest
from rateslib.curves.academic import NelsonSiegelSvenssonCurve
from rateslib.dual import Dual2
from rateslib.scheduling import Convention
def test_init():
ns = NelsonSiegelSvenssonCurve(
dates=(dt(2000, 1, 1), dt(2030, 1, 1)),
parameters=(0.01, 0.01, 0.05, 1.0, 0.05, 1.0),
)
result = ns.rate(dt(2001, 1, 1), "1b")
expected = 5.046514607521035
assert abs(result - expected) < 1e-5
assert ns.meta.convention == Convention.ActActISDA
def test_cache():
ns = NelsonSiegelSvenssonCurve(
dates=(dt(2000, 1, 1), dt(2030, 1, 1)),
parameters=(0.01, 0.01, 0.05, 1.0, 0.05, 1.0),
)
ns.rate(dt(2001, 1, 1), "1b")
assert dt(2001, 1, 1) in ns._cache
old_state = ns._state
ns._set_node_vector([1.0, 1.0, 1.0, 1.0], 0)
assert ns._state != old_state
assert dt(2001, 1, 1) not in ns._cache
def test_special_domain():
ns = NelsonSiegelSvenssonCurve(
dates=(dt(2000, 1, 1), dt(2030, 1, 1)),
parameters=(0.01, 0.01, 0.05, 1.0, 0.05, 1.0),
)
assert ns[dt(2000, 1, 1)] == 1.0
assert ns[dt(1999, 12, 31)] == 0.0
def test_getters():
ns = NelsonSiegelSvenssonCurve(
dates=(dt(2000, 1, 1), dt(2030, 1, 1)),
parameters=(1.0, 2.0, 3.0, 4.0, 5.0, 6.0),
id="v",
)
assert all(ns._get_node_vector() == np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0]))
assert ns._get_node_vars() == ("v0", "v1", "v2", "v3", "v4", "v5")
def test_set_ad_order():
ns = NelsonSiegelSvenssonCurve(
dates=(dt(2000, 1, 1), dt(2030, 1, 1)),
parameters=(0.01, 0.01, 0.05, 1.0, 0.05, 1.0),
id="v",
ad=2,
)
assert isinstance(ns.params[0], Dual2)
ns._set_ad_order(2) # does nothing
assert isinstance(ns.params[0], Dual2)
with pytest.raises(ValueError):
ns._set_ad_order(3)
================================================
FILE: python/tests/curves/test_sw.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
from datetime import timedelta
import numpy as np
import pytest
from rateslib import dual_log
from rateslib.curves.academic import SmithWilsonCurve
from rateslib.dual import Dual2
from rateslib.scheduling import Convention
def test_init():
sw = SmithWilsonCurve(
nodes={dt(2000, 1, 1): 0.10, dt(2001, 1, 1): -0.1, dt(2002, 1, 1): 0.5},
ufr=4.2,
)
result = sw.rate(dt(2001, 1, 1), "1b")
expected = 3.3906104222626796
assert abs(result - expected) < 1e-5
assert sw.meta.convention == Convention.Act365_25
def test_cache():
sw = SmithWilsonCurve(
nodes={dt(2000, 1, 1): 0.10, dt(2001, 1, 1): -0.1, dt(2002, 1, 1): 0.5},
ufr=4.2,
)
sw.rate(dt(2001, 1, 1), "1b")
assert dt(2001, 1, 1) in sw._cache
old_state = sw._state
sw._set_node_vector([1.0, 1.0], 0)
assert sw._state != old_state
assert dt(2001, 1, 1) not in sw._cache
def test_special_domain():
sw = SmithWilsonCurve(
nodes={dt(2000, 1, 1): 0.10, dt(2001, 1, 1): -0.1, dt(2002, 1, 1): 0.5},
ufr=4.2,
)
assert sw[dt(2000, 1, 1)] == 1.0
assert sw[dt(1999, 12, 31)] == 0.0
def test_getters():
sw = SmithWilsonCurve(
nodes={dt(2000, 1, 1): 0.10, dt(2001, 1, 1): -0.1, dt(2002, 1, 1): 0.5},
ufr=4.2,
id="v",
)
assert all(sw._get_node_vector() == np.array([-0.1, 0.5]))
assert sw._get_node_vars() == ("v1", "v2")
sw = SmithWilsonCurve(
nodes={dt(2000, 1, 1): 0.10, dt(2001, 1, 1): -0.1, dt(2002, 1, 1): 0.5},
ufr=4.2,
solve_alpha=True,
id="v",
)
assert all(sw._get_node_vector() == np.array([0.10, -0.1, 0.5]))
assert sw._get_node_vars() == ("v0", "v1", "v2")
def test_set_ad_order():
sw = SmithWilsonCurve(
nodes={dt(2000, 1, 1): 0.10, dt(2001, 1, 1): -0.1, dt(2002, 1, 1): 0.5},
ufr=4.2,
id="v",
ad=2,
)
assert isinstance(sw.nodes.values[0], Dual2)
sw._set_ad_order(2) # does nothing
assert isinstance(sw.nodes.values[0], Dual2)
with pytest.raises(ValueError):
sw._set_ad_order(3)
def test_eiopa_example():
from rateslib import FixedRateBond, Solver
sw = SmithWilsonCurve(
nodes={dt(2000, 1, 1): 0.12376, **{dt(2000 + i, 1, 1): 0.1 for i in range(1, 21)}},
solve_alpha=False,
ufr=4.2,
id="academic_curve",
)
coupons = [
0.2,
0.225,
0.3,
0.425,
0.55,
0.7,
0.85,
1.0,
1.15,
1.275,
1.4,
1.475,
1.575,
1.65,
1.7,
1.75,
1.8,
1.825,
1.85,
1.875,
]
bonds = [
FixedRateBond(
effective=dt(2000, 1, 1),
termination=f"{i}Y",
fixed_rate=coupons[i - 1],
calendar="all",
ex_div=1,
convention="actacticma",
frequency="A",
curves="academic_curve",
metric="dirty_price",
)
for i in range(1, 21)
]
prices = [100.0] * 20
Solver(curves=[sw], instruments=bonds, s=prices)
assert abs(sw.k - 0.737944) < 5e-3
eiopa_u = [
0.00,
0.25,
0.50,
0.75,
1.00,
1.25,
1.50,
1.75,
2.00,
2.25,
2.50,
2.75,
3.00,
3.25,
3.50,
3.75,
4.00,
4.25,
4.50,
4.75,
5.00,
5.25,
5.50,
5.75,
6.00,
6.25,
6.50,
6.75,
7.00,
7.25,
7.50,
7.75,
8.00,
8.25,
8.50,
8.75,
9.00,
9.25,
9.50,
9.75,
10.00,
10.25,
10.50,
10.75,
11.00,
11.25,
11.50,
11.75,
12.00,
12.25,
12.50,
12.75,
13.00,
13.25,
13.50,
13.75,
14.00,
14.25,
14.50,
14.75,
15.00,
15.25,
15.50,
15.75,
16.00,
16.25,
16.50,
16.75,
17.00,
17.25,
17.50,
17.75,
18.00,
18.25,
18.50,
18.75,
19.00,
19.25,
19.50,
19.75,
20.00,
40.00,
60.0,
]
eiopa_v = [
1.0000,
0.9996,
0.9991,
0.9986,
0.9980,
0.9975,
0.9969,
0.9962,
0.9955,
0.9947,
0.9937,
0.9925,
0.9910,
0.9894,
0.9874,
0.9854,
0.9831,
0.9808,
0.9784,
0.9757,
0.9728,
0.9696,
0.9662,
0.9625,
0.9587,
0.9547,
0.9506,
0.9463,
0.9419,
0.9373,
0.9325,
0.9275,
0.9224,
0.9170,
0.9114,
0.9059,
0.9004,
0.8950,
0.8896,
0.8841,
0.8783,
0.8723,
0.8661,
0.8601,
0.8544,
0.8493,
0.8444,
0.8395,
0.8343,
0.8287,
0.8226,
0.8164,
0.8103,
0.8045,
0.7989,
0.7935,
0.7883,
0.7833,
0.7784,
0.7736,
0.7688,
0.7640,
0.7591,
0.7540,
0.7489,
0.7437,
0.7385,
0.7334,
0.7286,
0.7242,
0.7200,
0.7159,
0.7119,
0.7077,
0.7035,
0.6993,
0.6951,
0.6909,
0.6867,
0.6825,
0.6782,
0.3330,
0.1475,
]
for i in range(80):
date = dt(2000, 1, 1) + timedelta(days=round(eiopa_u[i] * 365.25, 0))
rateslib_v = sw[date]
assert abs(rateslib_v - eiopa_v[i]) < 2e-4
def test_2357_example():
from rateslib import FixedRateBond, Solver
sw = SmithWilsonCurve(
nodes={
dt(2000, 1, 1): 0.12376,
**{dt(2000 + i, 1, 1): 0.1 for i in [2, 3, 5, 7]},
# **{dt(2000+i, 1, 1): 0.1 for i in [1,2,3,4,5,6,7]}
},
solve_alpha=False,
ufr=4.2,
id="academic_curve",
)
sw2 = SmithWilsonCurve(
nodes={
dt(2000, 1, 1): 0.12376,
# **{dt(2000+i, 1, 1): 0.1 for i in [2,3,5,7]}
**{dt(2000 + i, 1, 1): 0.1 for i in [1, 2, 3, 4, 5, 6, 7]},
},
solve_alpha=False,
ufr=4.2,
id="academic_curve",
)
coupons = [1.5, 1.8, 2.2, 2.5]
bonds = [
FixedRateBond(
effective=dt(2000, 1, 1),
termination=f"{i}Y",
fixed_rate=coupons[idx],
frequency="A",
convention="ActActICMA",
calendar="all",
modifier="F",
curves="academic_curve",
metric="dirty_price",
)
for (idx, i) in enumerate([2, 3, 5, 7])
]
prices = [100.0] * 4
Solver(curves=[sw], instruments=bonds, s=prices)
Solver(curves=[sw2], instruments=bonds, s=prices)
eiopa_u = [
0.10,
0.20,
0.30,
0.40,
0.50,
0.60,
0.70,
0.80,
0.90,
1.00,
1.10,
1.20,
1.30,
1.40,
1.50,
1.60,
1.70,
1.80,
1.90,
2.00,
2.10,
2.20,
2.30,
2.40,
2.50,
2.60,
2.70,
2.80,
2.90,
3.00,
3.10,
3.20,
3.30,
3.40,
3.50,
3.60,
3.70,
3.80,
3.90,
4.00,
4.10,
4.20,
4.30,
4.40,
4.50,
4.60,
4.70,
4.80,
4.90,
5.00,
5.10,
5.20,
5.30,
5.40,
5.50,
5.60,
5.70,
5.80,
5.90,
6.00,
6.10,
6.20,
6.30,
6.40,
6.50,
6.60,
6.70,
6.80,
6.90,
7.00,
7.10,
7.20,
7.30,
7.40,
7.50,
7.60,
7.70,
7.80,
7.90,
8.00,
]
eiopa_v = [
0.9989,
0.9977,
0.9965,
0.9953,
0.9941,
0.9929,
0.9916,
0.9903,
0.9889,
0.9875,
0.9861,
0.9846,
0.9831,
0.9815,
0.9799,
0.9781,
0.9764,
0.9745,
0.9726,
0.9706,
0.9686,
0.9664,
0.9642,
0.9620,
0.9597,
0.9573,
0.9550,
0.9526,
0.9501,
0.9477,
0.9452,
0.9428,
0.9403,
0.9378,
0.9353,
0.9328,
0.9303,
0.9277,
0.9252,
0.9226,
0.9200,
0.9174,
0.9148,
0.9122,
0.9095,
0.9069,
0.9042,
0.9015,
0.8988,
0.8961,
0.8933,
0.8906,
0.8878,
0.8850,
0.8822,
0.8794,
0.8765,
0.8737,
0.8708,
0.8680,
0.8651,
0.8623,
0.8594,
0.8565,
0.8536,
0.8507,
0.8479,
0.8450,
0.8421,
0.8392,
0.8363,
0.8335,
0.8306,
0.8277,
0.8248,
0.8220,
0.8191,
0.8163,
0.8134,
0.8106,
]
# from matplotlib import pyplot as plt
# fig, ax, lines = sw.plot("Z", comparators=[sw2])
# ax.scatter(
# [dt(2000, 1, 1) + timedelta(days=round(u*365.25)) for u in eiopa_u],
# [100.0 * dual_log(v) / -t for v,t in zip(eiopa_v, eiopa_u)],
# )
# plt.show()
for i in range(80):
date = dt(2000, 1, 1) + timedelta(days=round(eiopa_u[i] * 365.25, 0))
rateslib_v = sw[date]
assert abs(rateslib_v - eiopa_v[i]) < 2e-4
================================================
FILE: python/tests/instruments/test_instruments_bonds_legacy.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
import os
from datetime import datetime as dt
from itertools import product
import numpy as np
import pytest
from pandas import DataFrame, Series, date_range
from pandas.testing import assert_frame_equal
from rateslib import defaults, fixings
from rateslib.curves import Curve, LineCurve
from rateslib.default import NoInput
from rateslib.dual import Dual, Dual2, Variable, gradient
from rateslib.enums import FloatFixingMethod
from rateslib.fx import FXForwards, FXRates
from rateslib.instruments import (
IRS,
Bill,
BondFuture,
FixedRateBond,
FloatRateNote,
IndexFixedRateBond,
)
from rateslib.instruments.bonds.conventions import US_GBB, BondCalcMode
from rateslib.instruments.protocols.pricing import _Curves
from rateslib.scheduling import dcf, get_calendar
from rateslib.solver import Solver
@pytest.fixture
def curve():
nodes = {
dt(2022, 1, 1): 1.00,
dt(2022, 4, 1): 0.99,
dt(2022, 7, 1): 0.98,
dt(2022, 10, 1): 0.97,
}
# convention = "Act360"
return Curve(nodes=nodes, interpolation="log_linear")
@pytest.fixture
def curve2():
nodes = {
dt(2022, 1, 1): 1.00,
dt(2022, 4, 1): 0.98,
dt(2022, 7, 1): 0.97,
dt(2022, 10, 1): 0.95,
}
return Curve(nodes=nodes, interpolation="log_linear")
class TestBondCalcMode:
def test_custom_function(self):
def _my_acc(*args):
return 0.5
my_calc = BondCalcMode(
settle_accrual=_my_acc,
ytm_accrual=_my_acc,
v1="compounding",
v2="regular",
v3="compounding",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
bond = FixedRateBond(dt(2022, 1, 1), "2y", spec="de_gb", fixed_rate=2.0, calc_mode=my_calc)
de_bond = FixedRateBond(
dt(2022, 1, 1),
"2y",
spec="de_gb",
fixed_rate=2.0,
)
assert bond.accrued(dt(2022, 2, 4)) == 1.0 # 0.5 * 2.0
assert bond.accrued(dt(2022, 2, 4)) != de_bond.accrued(dt(2022, 2, 4))
assert bond.ytm(100.0, dt(2022, 2, 4)) != de_bond.ytm(100.0, dt(2022, 2, 4))
assert my_calc.kwargs["settle_accrual"] == "custom"
assert my_calc.kwargs["ytm_accrual"] == "custom"
def test_custom_function_affects_ytm(self):
def _my_acc(*args):
return 0.4
my_calc = BondCalcMode(
settle_accrual="linear_days",
ytm_accrual=_my_acc,
v1="compounding_final_simple",
v2="regular",
v3="compounding",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
bond = FixedRateBond(dt(2022, 1, 1), "2y", spec="de_gb", fixed_rate=2.0, calc_mode=my_calc)
v2 = 1 / (1 + 0.02)
v1 = v2 ** (1 - 0.4)
expected = 2 * v1 + 102 * v1 * v2 - 0.4 * 2
result = bond.price(ytm=2.00, settlement=dt(2022, 1, 1))
assert abs(result - expected) < 1e-10
def test_custom_ytm_disc_funcs(self):
def _my_acc(*args):
return 0.0
def _v(*args):
return 1 / (1 + 0.02)
calc_mode = BondCalcMode(
settle_accrual=_my_acc,
ytm_accrual=_my_acc,
v1=_v,
v2=_v,
v3=_v,
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
bond = FixedRateBond(
effective=dt(2000, 1, 1),
termination="2y",
fixed_rate=2.00,
spec="de_gb",
calc_mode=calc_mode,
)
# custom funcs give the same clean price of 100 for any date
for date in [dt(2000, 1, 1), dt(2000, 2, 1), dt(2000, 11, 1), dt(2001, 6, 1)]:
result = bond.price(ytm=2.0, settlement=dt(2000, 1, 1))
assert abs(result - 100.0) < 1e-10
class TestFixedRateBond:
def test_metric_ytm_no_fx(self) -> None:
# GH 193
usd = Curve(nodes={dt(2000, 1, 1): 1.0, dt(2005, 1, 1): 0.9, dt(2010, 1, 5): 0.8})
gbp = Curve(nodes={dt(2000, 1, 1): 1.0, dt(2005, 1, 1): 0.9, dt(2010, 1, 5): 0.8})
fxf = FXForwards(
fx_rates=FXRates({"gbpusd": 1.25}, settlement=dt(2000, 1, 1)),
fx_curves={"gbpgbp": gbp, "usdusd": usd, "gbpusd": gbp},
)
expected = FixedRateBond(dt(2000, 1, 1), "10y", spec="uk_gb", fixed_rate=2.0).rate(
curves=gbp,
metric="ytm",
)
result = FixedRateBond(dt(2000, 1, 1), "10y", spec="uk_gb", fixed_rate=2.0).rate(
curves=gbp,
metric="ytm",
fx=fxf,
)
assert abs(result - expected) < 1e-9
def test_accrued_in_text(self) -> None:
bond = FixedRateBond(
effective=dt(2022, 1, 1),
termination=dt(2023, 1, 1),
fixed_rate=5.0,
spec="ca_gb",
)
assert abs(bond.accrued(dt(2022, 4, 15)) - 1.42465753) < 1e-8
bond = FixedRateBond(
effective=dt(2022, 1, 1),
termination=dt(2023, 1, 1),
fixed_rate=5.0,
spec="uk_gb",
)
assert abs(bond.accrued(dt(2022, 4, 15)) - 1.43646409) < 1e-8
# UK Gilts Tests: Data from public DMO website.
@pytest.mark.parametrize(
("settlement", "exp"),
[
(dt(1999, 5, 24), False),
(dt(1999, 5, 26), False),
(dt(1999, 5, 27), True),
(dt(1999, 6, 7), True), # on payment date the
],
)
def test_ex_div(self, settlement, exp) -> None:
ukg = FixedRateBond(
effective=dt(1998, 1, 1),
termination=dt(2015, 12, 7),
frequency="S",
fixed_rate=8.0,
convention="ActActICMA",
calendar="ldn",
ex_div=7,
modifier="NONE",
)
assert ukg.ex_div(settlement) is exp
def test_fixed_rate_bond_price_ukg(self) -> None:
# test pricing functions against Gilt Example prices from UK DMO
bond = FixedRateBond(
dt(1995, 1, 1),
dt(2015, 12, 7),
"S",
convention="ActActICMA",
fixed_rate=8,
ex_div=7,
calendar="ldn",
modifier="NONE",
)
assert abs(bond.price(4.445, dt(1999, 5, 24), True) - 145.012268) < 1e-6
assert abs(bond.price(4.445, dt(1999, 5, 26), True) - 145.047301) < 1e-6
assert abs(bond.price(4.445, dt(1999, 5, 27), True) - 141.070132) < 1e-6
assert abs(bond.price(4.445, dt(1999, 6, 7), True) - 141.257676) < 1e-6
bond = FixedRateBond(
dt(1997, 1, 1),
dt(2004, 11, 26),
"S",
convention="ActActICMA",
fixed_rate=6.75,
ex_div=7,
calendar="ldn",
modifier="F",
)
assert abs(bond.price(4.634, dt(1999, 5, 10), True) - 113.315543) < 1e-6
assert abs(bond.price(4.634, dt(1999, 5, 17), True) - 113.415969) < 1e-6
assert abs(bond.price(4.634, dt(1999, 5, 18), True) - 110.058738) < 1e-6
assert abs(bond.price(4.634, dt(1999, 5, 26), True) - 110.170218) < 1e-6
def test_fixed_rate_bond_price_ukg_back_stub(self) -> None:
bond = FixedRateBond(
dt(1995, 12, 7),
dt(2015, 1, 23),
"S",
stub="SHORTBACK",
roll=7,
convention="ActActICMA",
fixed_rate=8,
ex_div=7,
calendar="ldn",
modifier="NONE",
calc_mode="ukg",
)
result = bond.price(ytm=8.00, settlement=dt(1995, 12, 7))
expected = 100.00334028292 # compounded back stub does not yield par
assert abs(result - expected) < 1e-9
def test_fixed_rate_bond_yield_ukg(self) -> None:
# test pricing functions against Gilt Example prices from UK DMO
bond = FixedRateBond(
dt(1995, 1, 1),
dt(2015, 12, 7),
"S",
convention="ActActICMA",
fixed_rate=8,
ex_div=7,
calendar="ldn",
modifier="NONE",
)
assert bond.ytm(135.0, dt(1999, 5, 24), True) - 5.1620635 < 1e-6
assert bond.ytm(135.0, dt(1999, 5, 26), True) - 5.1649111 < 1e-6
assert bond.ytm(135.0, dt(1999, 5, 27), True) - 4.871425 < 1e-6
assert bond.ytm(135.0, dt(1999, 6, 7), True) - 4.8856785 < 1e-6
bond = FixedRateBond(
dt(1997, 1, 1),
dt(2004, 11, 26),
"S",
convention="ActActICMA",
fixed_rate=6.75,
ex_div=7,
calendar="ldn",
modifier="F",
)
assert bond.ytm(108.0, dt(1999, 5, 10), True) - 5.7009527 < 1e-6
assert bond.ytm(108.0, dt(1999, 5, 17), True) - 5.7253361 < 1e-6
assert bond.ytm(108.0, dt(1999, 5, 18), True) - 5.0413308 < 1e-6
assert bond.ytm(108.0, dt(1999, 5, 26), True) - 5.0652248 < 1e-6
def test_fixed_rate_bond_accrual(self) -> None:
# test pricing functions against Gilt Example prices from UK DMO, with stub
bond = FixedRateBond(
dt(1999, 5, 7),
dt(2002, 12, 7),
"S",
convention="ActActICMA",
front_stub=dt(1999, 12, 7),
fixed_rate=6,
ex_div=7,
calendar="ldn",
modifier="NONE",
)
bond.accrued(dt(1999, 5, 8)) == 0.016484
bond.accrued(dt(1999, 6, 8)) == 0.527382
bond.accrued(dt(1999, 7, 8)) == 1.019186
bond.accrued(dt(1999, 11, 8)) == 3.035579
bond.accrued(dt(1999, 11, 26)) == 3.330661
bond.accrued(dt(1999, 11, 27)) == -0.16393
bond.accrued(dt(1999, 12, 6)) == -0.01639
bond.accrued(dt(1999, 12, 7)) == 0.0
def test_fixed_rate_bond_stub_ytm(self) -> None:
# if a regular bond is set to stub similar output should be gotten
bond = FixedRateBond(
dt(1999, 6, 7),
dt(2002, 12, 7),
"S",
convention="ActActICMA",
fixed_rate=6,
ex_div=7,
calendar="ldn",
modifier="NONE",
)
regular_ytm = bond.ytm(101, dt(1999, 11, 8), dirty=True)
bond.leg1.periods[0].stub = True
stubbed_ytm = bond.ytm(101, dt(1999, 11, 8), dirty=True)
assert regular_ytm == stubbed_ytm
# US Treasury Tests. Examples from Rulebook.
@pytest.mark.parametrize(
("e", "t", "s", "fr", "ec", "ed", "y", "se"),
[
(
dt(1990, 5, 15),
dt(2020, 5, 15),
NoInput(0),
8.75,
99.057893,
99.057893,
8.84,
dt(1990, 5, 15),
), # A
(
dt(1990, 4, 2),
dt(1992, 3, 31),
NoInput(0),
8.5,
99.838183,
99.838183,
8.59,
dt(1990, 4, 2),
), # B
(
dt(1990, 3, 1),
dt(1995, 5, 15),
dt(1990, 11, 15),
8.5,
99.805118,
99.805118,
8.53,
dt(1990, 3, 1),
), # C
(
dt(1985, 11, 15),
dt(1995, 11, 15),
NoInput(0),
9.5,
99.730918,
100.098321,
9.54,
dt(1985, 11, 29),
), # D
(
dt(1985, 7, 2),
dt(2005, 8, 15),
dt(1986, 2, 15),
10.75,
102.214586,
105.887384,
10.47,
dt(1985, 11, 4),
), # E
(
dt(1983, 5, 16),
dt(1991, 5, 15),
dt(1983, 11, 15),
10.5,
99.777074,
102.373541,
10.53,
dt(1983, 8, 15),
), # F
(
dt(1988, 10, 15),
dt(1994, 12, 15),
dt(1989, 6, 15),
9.75,
99.738045,
100.563865,
9.79,
dt(1988, 11, 15),
), # G
],
)
def test_fixed_rate_bond_price_ust(self, e, t, s, fr, ec, ed, y, se) -> None:
# The UST tests are from:
# https://www.ecfr.gov/current/title-31/subtitle-B/chapter-II/subchapter-A/part-356/appendix-Appendix%20B%20to%20Part%20356
ust = FixedRateBond(
effective=e,
termination=t,
front_stub=s,
fixed_rate=fr,
frequency="S",
calendar="nyc",
convention="ActActICMA",
calc_mode="ust_31Bii",
ex_div=1,
modifier="NONE",
)
res1 = ust.price(ytm=y, settlement=se, dirty=False)
res2 = ust.price(ytm=y, settlement=se, dirty=True)
assert abs(res1 - ec) < 1e-6
assert abs(res2 - ed) < 1e-6
@pytest.mark.parametrize(
("s", "exp", "acc"),
[
(dt(2025, 2, 14), 99.106414, 1.926970),
(dt(2025, 2, 18), 99.107179, 0.032113),
(dt(2025, 8, 15), 99.151393, 0.0),
],
)
def test_ust_price_street(self, s, exp, acc) -> None:
bond = FixedRateBond(
effective=dt(2023, 8, 15),
termination=dt(2033, 8, 15),
fixed_rate=3.875,
spec="us_gb",
)
result = bond.price(ytm=4, settlement=s)
accrued = bond.accrued(settlement=s)
assert abs(accrued - acc) < 1e-6
assert abs(result - exp) < 1e-5
def test_long_stub_first_cashflow(self):
# test against 31.B.ii.A356.Appendix.B.I.A Example Long First
note = FixedRateBond(
effective=dt(1990, 12, 3),
termination=dt(1996, 2, 15),
stub="longfront",
spec="us_gb",
fixed_rate=7.875,
notional=-7000,
)
assert abs(note.leg1.periods[0].cashflow() - 386.474184670) < 5e-7
def test_calc_mode_ytm(self):
b = FixedRateBond(dt(1985, 11, 15), dt(1995, 11, 15), fixed_rate=9.5, spec="us_gb_tsy")
y1 = b.ytm(price=99.730918, settlement=dt(1985, 11, 29))
assert abs(y1 - 9.54) < 1e-6
b2 = FixedRateBond(dt(1985, 11, 15), dt(1995, 11, 15), fixed_rate=9.5, spec="us_gb")
exp_y2 = b2.ytm(price=99.730918, settlement=dt(1985, 11, 29))
# street convention
y2 = b.ytm(price=99.730918, settlement=dt(1985, 11, 29), calc_mode="us_gb")
assert abs(y2 - 9.54) > 1e-6
assert abs(y2 - exp_y2) < 1e-6
def test_street_convention_simple_first_period(self):
# US91282CLB53
bond = FixedRateBond(dt(2024, 7, 31), "2y", spec="us_gb", fixed_rate=4.375)
result = bond.price(ytm=4.0, settlement=dt(2026, 3, 31))
expected = 100.1152156
assert abs(result - expected) < 1e-6
result2 = bond.price(ytm=4.0, settlement=dt(2026, 1, 7))
expected2 = 100.205071
assert abs(result2 - expected2) < 1e-6
# Swedish Government Bond Tests. Data from alternative systems.
@pytest.mark.parametrize(
("settlement", "exp_accrued", "exp_price"),
[
(dt(2024, 5, 3), 0.73125, 88.134),
# (dt(2024, 5, 5), 0.735417, 88.150), # ambiguous Sunday
(dt(2024, 5, 6), -0.0125, 88.158),
(dt(2024, 5, 7), -0.0104, 88.165),
(dt(2024, 5, 8), -0.008333, 88.173),
(dt(2024, 5, 12), 0.0, 88.203),
(dt(2024, 5, 13), 0.002083, 88.210),
],
)
def test_sgb_1060s_price_and_accrued(self, settlement, exp_accrued, exp_price) -> None:
sgb = FixedRateBond(
effective=dt(2023, 5, 12),
termination=dt(2028, 5, 12),
frequency="A",
convention="ActActICMA",
calendar="stk",
ex_div=5,
modifier="NONE",
fixed_rate=0.75,
calc_mode="sgb",
)
accrued = sgb.accrued(settlement)
assert abs(accrued - exp_accrued) < 1e-4
price = sgb.price(ytm=4.0, settlement=settlement, dirty=False)
assert abs(price - exp_price) < 1e-3
def test_sgb_ultra_short_ytm(self):
# SE0010469205
komins = FixedRateBond(
effective=dt(2017, 10, 2), termination=dt(2024, 10, 2), fixed_rate=1.0, spec="se_gb"
)
dp = komins.price(ytm=3.42092, settlement=dt(2024, 9, 24), dirty=True)
cp = komins.price(ytm=3.42092, settlement=dt(2024, 9, 24), dirty=False)
assert abs(dp - cp - komins.accrued(settlement=dt(2024, 9, 24))) < 1e-10
assert abs(cp - 99.9455205) < 1e-4
def test_fixed_rate_bond_price_sgb_back_stub(self) -> None:
bond = FixedRateBond(
dt(1995, 12, 7),
dt(2015, 1, 23),
"A",
stub="SHORTBACK",
roll=7,
convention="ActActICMA",
fixed_rate=8,
ex_div=7,
calendar="ldn",
modifier="NONE",
calc_mode="sgb",
)
result = bond.price(ytm=8.00, settlement=dt(1995, 12, 7))
expected = 100.0018153890108 # simple period back stub yields close to par
assert abs(result - expected) < 1e-9
# Canadian Government Bond Tests. Data from alternative systems
# and from https://iiac-accvm.ca/wp-content/uploads/Canadian-Conventions-in-FI-Markets-Release-1.3.pdf
@pytest.mark.parametrize(
("settlement", "exp"),
[
(dt(2005, 12, 1), 1.671232),
(dt(2006, 1, 31), 2.486301),
],
)
def test_settlement_accrued(self, settlement, exp) -> None:
bond = FixedRateBond(
effective=dt(2004, 8, 1),
termination=dt(2008, 2, 1),
fixed_rate=5.0,
modifier="NONE",
frequency="S",
convention="ActActICMA_stub365f",
calc_mode="cadgb",
ex_div=1,
)
result = bond.accrued(settlement=settlement)
assert abs(result - exp) < 1e-6
@pytest.mark.skip(reason="<1Y CAD bonds NotImplemented")
@pytest.mark.parametrize(
("s", "exp", "acc"),
[
(dt(2024, 8, 1), 99.839907, 0.0),
(dt(2024, 7, 17), 99.866051, 1.715753),
(dt(2024, 8, 7), 99.842641, 0.061644),
],
)
def test_cadgb_price(self, s, exp, acc) -> None:
bond = FixedRateBond(
effective=dt(2022, 11, 2),
termination=dt(2025, 2, 1),
fixed_rate=3.75,
modifier="NONE",
convention="ActActICMA_STUB365f",
frequency="S",
calc_mode="cadgb",
roll=1,
stub="SHORTFRONT",
ex_div=1,
)
result = bond.price(ytm=4.0, settlement=s)
accrued = bond.accrued(settlement=s)
assert abs(accrued - acc) < 1e-6
# Price fails becuase bond is <1Y from maturity needs a branched formula.
assert abs(result - exp) < 1e-6
@pytest.mark.parametrize(
("s", "exp", "acc"),
[
(dt(2024, 11, 26), 91.055145, 1.341096),
(dt(2024, 12, 2), 91.069934, 0.007534),
(dt(2024, 6, 3), 90.634570, 0.015068),
],
)
def test_cadgb_price2(self, s, exp, acc) -> None:
bond = FixedRateBond(
effective=dt(2023, 2, 2),
termination=dt(2033, 6, 1),
fixed_rate=2.75,
modifier="NONE",
convention="ActActICMA_STUB365f",
frequency="S",
calc_mode="ca_gb",
roll=1,
stub="SHORTFRONT",
ex_div=1,
)
result = bond.price(ytm=4.0, settlement=s)
accrued = bond.accrued(settlement=s)
assert abs(accrued - acc) < 1e-6
assert abs(result - exp) < 1e-6
def test_cadgb_price3(self) -> None:
bond = FixedRateBond(
effective=dt(2018, 7, 27),
termination=dt(2029, 6, 1),
fixed_rate=2.25,
modifier="NONE",
convention="ActActICMA_STUB365f",
frequency="S",
calc_mode="cadgb",
roll=1,
stub="SHORTFRONT",
ex_div=1,
)
result = bond.price(ytm=2.249977, settlement=dt(2018, 10, 16))
accrued = bond.accrued(settlement=dt(2018, 10, 16))
stub_cash = bond.leg1.periods[0].cashflow()
assert abs(accrued - 0.499315) < 1e-6
assert abs(result - 100.00) < 1e-5
assert abs(stub_cash + 7828.77) < 1e-2
def test_cadgb_ytm_dirty_calc(self) -> None:
# Cad GB has different Accrual function for a YTM and physical settlement.
# If a price is supplied dirty it is expected to be a physical settlement dirty price
bond = FixedRateBond(
effective=dt(2018, 7, 27),
termination=dt(2029, 6, 1),
fixed_rate=2.25,
spec="ca_gb",
)
physical_accrued = bond._accrued(
dt(2019, 6, 10), bond.kwargs.meta["calc_mode"]._settle_accrual
)
ytm_accrued = bond._accrued(dt(2019, 6, 10), bond.kwargs.meta["calc_mode"]._ytm_accrual)
assert abs(physical_accrued - ytm_accrued) > 1e-4
clean_price = 101.00
clean_ytm = bond.ytm(clean_price, dt(2019, 6, 10))
dirty_ytm = bond.ytm(clean_price + physical_accrued, dt(2019, 6, 10), dirty=True)
assert abs(clean_ytm - dirty_ytm) < 1e-8
def test_cadgb_ytm_indexed_dirty_calc(self) -> None:
# Cad GB has different Accrual function for a YTM and physical settlement.
# If a price is supplied dirty it is expected to be a physical settlement dirty price
bond = IndexFixedRateBond(
effective=dt(2018, 7, 27),
termination=dt(2029, 6, 1),
fixed_rate=2.25,
spec="ca_gbi",
index_base=90.0,
)
curve = Curve({dt(2019, 1, 1): 1.0, dt(2030, 1, 1): 1.0}, index_base=99.0).shift(100.0)
physical_indexed_accrued = bond.accrued(dt(2019, 6, 10), indexed=True, index_curve=curve)
ytm_indexed_accrued = bond._accrued(
dt(2019, 6, 10), bond.kwargs.meta["calc_mode"]._ytm_accrual
) * bond.index_ratio(settlement=dt(2019, 6, 18), index_curve=curve)
assert abs(physical_indexed_accrued - ytm_indexed_accrued) > 1e-4
clean_price = 111.00
clean_ytm = bond.ytm(
clean_price, dt(2019, 6, 10), indexed_price=True, indexed_ytm=False, index_curve=curve
)
dirty_ytm = bond.ytm(
clean_price + physical_indexed_accrued,
dt(2019, 6, 10),
dirty=True,
indexed_price=True,
indexed_ytm=False,
index_curve=curve,
)
assert abs(clean_ytm - dirty_ytm) < 1e-8
## German gov bonds comparison with official bundesbank publications.
@pytest.mark.parametrize(
("sett", "price", "exp_ytm", "exp_acc"),
[
(dt(2024, 1, 10), 105.0, 1.208836, 0.321311),
(
dt(2024, 6, 12),
97.180,
2.66368627,
1.204918,
), # https://www.bundesbank.de/en/service/federal-securities/prices-and-yields
(dt(2022, 12, 20), 99.31, 2.208075, 0.350959),
# (dt(2022, 12, 20), 99.31, 2.20804175, 0.3452055), # Bundesbank official data:
# see link above (accrual is unexplained and does not match systems)
(
dt(2023, 11, 2),
97.04,
2.636708016,
2.174795,
), # Bundesbank official data: see link above (agrees with BXT)
(dt(2028, 11, 15), 97.5, 4.717949, 0.0), # YAS
],
)
def test_de_gb(self, sett, price, exp_ytm, exp_acc) -> None:
frb = FixedRateBond( # ISIN DE0001102622
effective=dt(2022, 10, 20),
termination=dt(2029, 11, 15),
stub="LONGFRONT",
fixed_rate=2.1,
spec="de_gb",
)
result = frb.accrued(settlement=sett)
assert abs(result - exp_acc) < 1e-6
result = frb.ytm(price=price, settlement=sett)
assert abs(result - exp_ytm) < 1e-6
@pytest.mark.parametrize(
("sett", "price", "exp_ytm", "exp_acc"),
[
(
dt(2024, 6, 12),
99.555,
3.5314195,
0.825137,
), # https://www.bundesbank.de/en/service/federal-securities/prices-and-yields
],
)
def test_de_gb_mm(self, sett, price, exp_ytm, exp_acc) -> None:
# tests the MoneyMarket simple yield for the final period.
frb = FixedRateBond( # ISIN DE0001102366
effective=dt(2014, 8, 15),
termination=dt(2024, 8, 15),
fixed_rate=1.0,
spec="de_gb",
)
result = frb.accrued(settlement=sett)
assert abs(result - exp_acc) < 1e-6
result = frb.ytm(price=price, settlement=sett)
assert abs(result - exp_ytm) < 1e-6
def test_long_stub(self):
# DE000BU2Z056
bond = FixedRateBond(dt(2025, 7, 4), dt(2035, 8, 15), spec="de_gb", fixed_rate=2.60)
assert bond.leg1.schedule.aschedule[0:2] == [dt(2025, 7, 4), dt(2026, 8, 15)]
def test_de_long_front_split_accrued_no_leap(self):
# this bond was not issued around a leap year so there is no difference between
# linear_days_long_front_split and linear_days
bond = FixedRateBond(dt(2025, 3, 12), dt(2056, 8, 15), spec="de_gb", fixed_rate=2.90)
result1 = bond.accrued(settlement=dt(2026, 1, 16))
result2 = bond.accrued(settlement=dt(2026, 7, 14))
expected1 = (dt(2026, 1, 16) - dt(2025, 3, 12)).days / 365 * 2.9
expected2 = (dt(2026, 7, 14) - dt(2025, 3, 12)).days / 365 * 2.9
assert abs(result1 - expected1) < 1e-6
assert abs(result2 - expected2) < 1e-6
def test_de_long_front_split_accrued_leap(self):
# this bond was issued in 2024 so there is a difference between linear_days and
# linear_days_long_front_split: ISIN DE000BU2D004
bond = FixedRateBond(dt(2024, 2, 6), dt(2054, 8, 15), spec="de_gb", fixed_rate=2.50)
result1 = bond.accrued(settlement=dt(2024, 7, 15))
result2 = bond.accrued(settlement=dt(2025, 7, 15))
expected1 = (dt(2024, 7, 15) - dt(2024, 2, 6)).days / 366 * 2.5
expected2 = (dt(2024, 8, 15) - dt(2024, 2, 6)).days / 366 * 2.5 + (
dt(2025, 7, 15) - dt(2024, 8, 15)
).days / 365 * 2.5
assert abs(result1 - expected1) < 1e-8
assert abs(result2 - expected2) < 1e-8
## French OAT
@pytest.mark.parametrize(
("sett", "price", "exp_ytm", "exp_acc"),
[
(dt(2024, 6, 14), 101.0, 2.886581, 1.655738),
(dt(2033, 11, 25), 99.75, 3.258145, 0.0),
(dt(2034, 6, 13), 101.0, 0.769200, 1.643836),
],
)
def test_fr_gb(self, sett, price, exp_ytm, exp_acc) -> None:
frb = FixedRateBond( # ISIN FR001400QMF9
effective=dt(2023, 11, 25),
termination=dt(2034, 11, 25),
fixed_rate=3.0,
spec="fr_gb",
)
result = frb.accrued(settlement=sett)
assert abs(result - exp_acc) < 1e-6
result = frb.ytm(price=price, settlement=sett)
assert abs(result - exp_ytm) < 1e-6
## Italian BTP
@pytest.mark.parametrize(
("sett", "price", "exp_ytm", "exp_acc"),
[
(dt(2024, 6, 14), 98.0, 4.730058, 0.526090),
(dt(2026, 4, 14), 99.0, 4.617209, 1.993370),
],
)
def test_regular_it_gb(self, sett, price, exp_ytm, exp_acc) -> None:
frb = FixedRateBond( # ISIN IT0005518128
effective=dt(2022, 11, 1),
termination=dt(2033, 5, 1),
fixed_rate=4.4,
spec="it_gb",
)
result = frb.accrued(settlement=sett)
assert abs(result - exp_acc) < 5e-6
result = frb.price(ytm=exp_ytm, settlement=sett)
result = frb.ytm(price=price, settlement=sett)
assert abs(result - exp_ytm) < 2e-4
@pytest.mark.parametrize(
("sett", "ytm", "exp_price", "exp_acc"),
[
(dt(2032, 11, 1), 6.5, 98.96593464, 0.0),
(dt(2032, 11, 2), 6.5, 98.97099073, 0.01215),
(dt(2033, 3, 15), 6.5, 99.69805695, 1.628730),
(dt(2033, 4, 29), 6.5, 99.96938727, 2.175690),
],
)
def test_regular_it_gb_final_simple_vs_excel(self, sett, ytm, exp_price, exp_acc) -> None:
# These values are not the same as for BBG YA.
# BBG YA can be replicated here if the number of days between payments
# in the last period 1/11/32 to 2/5/33 is 184 days and the pay_adj is 1/184 instead of
# 1/182. Problem is, the actual number of days between payments are 182.
frb = FixedRateBond( # ISIN IT0005518128
effective=dt(2022, 11, 1),
termination=dt(2033, 5, 1),
fixed_rate=4.4,
spec="it_gb",
)
result = frb.accrued(settlement=sett)
assert abs(result - exp_acc) < 5e-6
result = frb.price(ytm=ytm, settlement=sett)
assert abs(result - exp_price) < 1e-6
@pytest.mark.parametrize(
("sett", "ytm", "exp_price", "exp_acc"),
[
(dt(2032, 11, 1), 6.429702, 99.00, 0.0), # Last coupon simple rate
(dt(2032, 11, 2), 6.439891, 99.00, 0.01215), # Last coupon simple rate
(dt(2033, 3, 15), 6.862519, 99.65, 1.628730), # Last coupon simple rate
(dt(2033, 4, 29), 6.450803, 99.97, 2.175690), # Test accrual upto adjusted payment date
],
)
def test_regular_it_gb_final_simple(self, sett, ytm, exp_price, exp_acc) -> None:
frb = FixedRateBond( # ISIN IT0005518128
effective=dt(2022, 11, 1),
termination=dt(2033, 5, 1),
fixed_rate=4.4,
spec="it_gb",
)
result = frb.accrued(settlement=sett)
assert abs(result - exp_acc) < 5e-6
result = frb.price(ytm=ytm, settlement=sett)
assert abs(result - exp_price) < 3e-3
@pytest.mark.parametrize(
("sett", "ytm", "exp_price", "exp_acc"),
[
(dt(2026, 12, 13), 6.5, 98.77226353, 0.0),
(dt(2027, 1, 11), 6.5, 98.95139167, 0.318681),
],
)
def test_regular_it_gb_final_simple_vs_excel2(self, sett, ytm, exp_price, exp_acc) -> None:
frb = FixedRateBond( # ISIN IT0005547408
effective=dt(2023, 6, 13),
termination=dt(2027, 6, 13),
fixed_rate=4.00,
spec="it_gb",
)
result = frb.accrued(settlement=sett)
assert abs(result - exp_acc) < 5e-6
result = frb.price(ytm=ytm, settlement=sett)
assert abs(result - exp_price) < 1e-6
## Norwegian
@pytest.mark.parametrize(
("set_", "price", "exp_ytm", "exp_acc"),
[
(dt(2026, 4, 13), 99.3, 3.727804, 0.0), # YAS Coupon aligned
(dt(2033, 4, 13), 99.9, 3.728729, 0.0), # Last period
(dt(2033, 9, 12), 99.9, 3.772713, 1.509589), # Middle Last period
(dt(2024, 2, 13), 99.9, 3.638007, 0.0), # Start of bond
(
dt(2024, 3, 13),
99.9,
3.637518,
0.288014,
), # Mid stub period
],
)
def test_no_gb(self, set_, price, exp_ytm, exp_acc) -> None:
frb = FixedRateBond( # ISIN NO0013148338
effective=dt(2024, 2, 13),
termination=dt(2034, 4, 13),
fixed_rate=3.625,
spec="no_gb",
)
result = frb.accrued(settlement=set_)
assert abs(result - exp_acc) < 5e-6
result = frb.ytm(price=price, settlement=set_)
assert abs(result - exp_ytm) < 1e-5
## Dutch
@pytest.mark.parametrize(
("set_", "price", "exp_ytm", "exp_acc"),
[
(dt(2025, 6, 10), 98.0, 2.751162, 2.260274), # YAS Coupon aligned
(dt(2033, 7, 15), 99.8, 2.705411, 0.0), # Last period
(dt(2033, 7, 18), 99.9, 2.602897, 0.020548), # Middle Last period
(dt(2024, 2, 8), 99.0, 2.611616, 0.0), # Start of bond
(dt(2024, 3, 13), 99.0, 2.612194, 0.232240), # Mid stub period
],
)
def test_nl_gb(self, set_, price, exp_ytm, exp_acc) -> None:
frb = FixedRateBond( # ISIN NL0015001XZ6
effective=dt(2024, 2, 8),
termination=dt(2034, 7, 15),
fixed_rate=2.5,
spec="nl_gb",
)
result = frb.accrued(settlement=set_)
assert abs(result - exp_acc) < 5e-6
result = frb.ytm(price=price, settlement=set_)
assert abs(result - exp_ytm) < 1e-5
# US Corp: BNY Mello
@pytest.mark.parametrize(
("settlement", "price", "exp_ytm", "exp_acc"),
[
(dt(2025, 5, 6), 101.0, 3.493237, 0.08555556),
(dt(2028, 4, 3), 100.05, 3.077448, 1.65763889),
],
)
def test_bny_mellon(self, settlement, price, exp_ytm, exp_acc) -> None:
# BNY Mellon ISIN: US06406RAH03,
b = FixedRateBond(
effective=dt(2018, 4, 30),
termination=dt(2028, 4, 28),
fixed_rate=3.85,
convention="30u360",
spec="us_gb",
calc_mode="us_corp",
)
ytm = b.ytm(price, settlement)
acc = b.accrued(settlement)
assert abs(ytm - exp_ytm) < 1e-6
assert abs(acc - exp_acc) < 1e-8
@pytest.mark.parametrize(
("settlement", "price", "exp_ytm", "exp_acc"),
[
(dt(2018, 5, 30), 101.0, 3.728114, 0.32083333),
(dt(2018, 5, 31), 101.0, 3.728114, 0.32083333),
(dt(2025, 5, 6), 101.0, 3.493237, 0.08555556),
(dt(2028, 4, 3), 100.05, 3.077448, 1.65763889),
],
)
def test_bny_mellon_spec(self, settlement, price, exp_ytm, exp_acc) -> None:
# BNY Mellon ISIN: US06406RAH03,
b = FixedRateBond(
effective=dt(2018, 4, 30),
termination=dt(2028, 4, 28),
fixed_rate=3.85,
spec="us_corp",
)
assert abs(b.leg1.periods[0].cashflow() + 19036.1111) < 1e-4
ytm = b.ytm(price, settlement)
acc = b.accrued(settlement)
assert abs(acc - exp_acc) < 1e-8
assert abs(ytm - exp_ytm) < 1e-6
# US MUNI: Cali State
@pytest.mark.parametrize(
("settlement", "price", "exp_ytm", "exp_acc"),
[
(dt(2025, 5, 12), 102.35, 2.879, 1.819444),
(dt(2025, 1, 31), 100.1, 4.923, 0.416667),
(dt(2026, 1, 1), 102.35, 0.293, 0.0),
(dt(2026, 5, 19), 100.10, 4.061, 1.916667),
(dt(2026, 6, 30), 100.10, -30.219, 2.486111),
],
)
def test_cali_state_school(self, settlement, price, exp_ytm, exp_acc) -> None:
# LA Unif ISIN: US544647CW89,
b = FixedRateBond(
effective=dt(2020, 11, 10),
termination=dt(2026, 7, 1),
fixed_rate=5.0,
spec="us_muni",
)
ytm = b.ytm(price, settlement)
acc = b.accrued(settlement)
assert abs(ytm - exp_ytm) < 1e-3
assert abs(acc - exp_acc) < 1e-6
@pytest.mark.parametrize(
("settlement", "price", "exp_ytm", "exp_acc"),
[
(dt(2025, 3, 31), 110.0, -3.441, 1.356800),
(dt(2025, 5, 12), 101.0, 3.662, 1.881600),
(dt(2025, 5, 30), 100.02, 4.586, 2.11200),
(dt(2025, 12, 15), 101.0, 2.582, 0.0),
(dt(2026, 3, 19), 101.0, 0.413, 1.203200),
(dt(2026, 6, 11), 100.02, 2.746, 2.2528),
],
)
def test_new_jersey_transport(self, settlement, price, exp_ytm, exp_acc) -> None:
# NJ Transport ISIN: US64613CEZ77,
b = FixedRateBond(
effective=dt(2024, 10, 24),
termination=dt(2026, 6, 15),
fixed_rate=4.608,
spec="us_muni",
)
ytm = b.ytm(price, settlement)
acc = b.accrued(settlement)
assert abs(acc - exp_acc) < 1e-6
assert abs(ytm - exp_ytm) < 1e-3
# Customised Thai Government Bonds
def test_thai_example_a3(self):
# see file in _static/thai_standard_formula.pdf
def _v1_thb_gb(obj, ytm, f, settlement, acc_idx, v2, accrual, period_idx):
r_u = (obj.leg1.schedule.uschedule[acc_idx + 1] - settlement).days
return v2 ** (r_u * f / 365)
def _v3_thb_gb(obj, ytm, f, settlement, acc_idx, v2, accrual, period_idx):
r_u = (obj.leg1.schedule.uschedule[-1] - obj.leg1.schedule.uschedule[-2]).days
return v2 ** (r_u * f / 365)
thai_cm = BondCalcMode(
settle_accrual="linear_days",
ytm_accrual="linear_days",
v1=_v1_thb_gb,
v2="regular",
v3=_v3_thb_gb,
c1="full_coupon",
ci="full_coupon",
cn="cashflow",
)
b = FixedRateBond(
effective=dt(1993, 1, 15),
termination=dt(1996, 4, 30),
stub="shortback",
frequency="S",
fixed_rate=11.25,
convention="act365f",
modifier="none",
currency="thb",
calendar="bus",
calc_mode=thai_cm,
)
expected_acc = 4.86986301
expected_clean = 103.1099263
result_acc = b.accrued(settlement=dt(1994, 12, 20))
result_clean = b.price(ytm=8.75, settlement=dt(1994, 12, 20))
assert abs(result_acc - expected_acc) < 1e-8
assert abs(result_clean - expected_clean) < 1e-7
def test_thai_example_a3_exdiv(self):
# see file in _static/thai_standard_formula.pdf
def _v1_thb_gb(obj, ytm, f, settlement, acc_idx, v2, accrual, period_idx):
r_u = (obj.leg1.schedule.uschedule[acc_idx + 1] - settlement).days
return v2 ** (r_u * f / 365)
def _v3_thb_gb(obj, ytm, f, settlement, acc_idx, v2, accrual, period_idx):
r_u = (obj.leg1.schedule.uschedule[-1] - obj.leg1.schedule.uschedule[-2]).days
return v2 ** (r_u * f / 365)
thai_cm = BondCalcMode(
settle_accrual="linear_days",
ytm_accrual="linear_days",
v1=_v1_thb_gb,
v2="regular",
v3=_v3_thb_gb,
c1="cashflow",
ci="full_coupon",
cn="cashflow",
)
b = FixedRateBond(
effective=dt(1993, 1, 15),
termination=dt(1996, 4, 30),
stub="shortback",
frequency="S",
fixed_rate=11.25,
convention="act365f",
modifier="none",
currency="thb",
calendar="bus",
calc_mode=thai_cm,
ex_div=21,
)
result_acc = b.accrued(dt(1994, 12, 20))
expected_acc = -0.80136986
assert abs(result_acc - expected_acc) < 1e-8
result_clean = b.price(ytm=8.75, settlement=dt(1994, 12, 20))
expected_clean = 103.19036939
assert abs(result_clean - expected_clean) < 1e-8
# Swiss GB
@pytest.mark.parametrize(
("ytm", "sett", "exp"),
[
(2.01111, dt(2025, 5, 23), [92.724231, 0.095833333]),
(2.01111, dt(2018, 5, 29), [90.369254, 0.120833333]), # accrued DCF
(2.01111, dt(2018, 5, 30), [90.370093, 0.125000000]), # accrued DCF
(2.01111, dt(2018, 5, 31), [90.370093, 0.125000000]), # accrued DCF
(2.01111, dt(2018, 6, 1), [90.370931, 0.129166666]),
(2.01111, dt(2024, 4, 29), [92.343879, 1.49583333]), # Ex div
(2.01111, dt(2024, 4, 30), [92.344903, 0.000000000]), # Ex div
(2.01111, dt(2042, 4, 15), [99.978326, 1.43750000]), # Final period
],
)
def test_ch_gb(self, ytm, sett, exp):
# ISIN: CH0127181169
bond = FixedRateBond(dt(2012, 4, 30), dt(2042, 4, 30), fixed_rate=1.5, spec="ch_gb")
accrued = bond.accrued(sett)
assert abs(accrued - exp[1]) < 1e-8
price = bond.price(ytm=ytm, settlement=sett)
assert abs(price - exp[0]) < 1e-6
# New Zealand GB
@pytest.mark.parametrize(
("ytm", "sett", "maturity", "coupon", "exp"),
[
(4.355, dt(2022, 11, 22), dt(2034, 5, 15), 4.25, [99.0583817412, 0.0821823204]),
(
5.348,
dt(2051, 4, 15),
dt(2051, 5, 15),
2.75,
[99.7842450753699, 1.1470994475],
), # Last period simple_act365f
(0.745, dt(2021, 2, 10), dt(2026, 5, 15), 0.50, [98.7384877998, 0.1201657459]),
],
)
def test_nz_gb(self, ytm, sett, maturity, coupon, exp):
bond = FixedRateBond(dt(2020, 5, 15), maturity, fixed_rate=coupon, spec="nz_gb")
accrued = bond.accrued(sett)
assert abs(accrued - exp[1]) < 1e-8
price = bond.price(ytm=ytm, settlement=sett)
assert abs(price - exp[0]) < 1e-6
# Australian GB
@pytest.mark.parametrize(
("ytm", "sett", "maturity", "coupon", "exp"),
[
# these values are tested without australian rounding convention of 3 dp (5e-4 tol)
(4.0, dt(2026, 4, 8), dt(2051, 6, 21), 1.75, [64.479000, 0.5190]),
(4.0, dt(2050, 6, 21), dt(2051, 6, 21), 1.75, [97.816, 0.0]),
(4.0, dt(2051, 5, 8), dt(2051, 6, 21), 1.75, [99.727923, 0.6630]),
(4.0, dt(2050, 12, 21), dt(2051, 6, 21), 1.75, [98.902372, 0.0]),
# test ex div
(4.0, dt(2026, 6, 12), dt(2051, 6, 21), 1.75, [64.627, 0.832]),
(4.0, dt(2026, 6, 13), dt(2051, 6, 21), 1.75, [64.631, -0.038]),
(4.0, dt(2026, 6, 14), dt(2051, 6, 21), 1.75, [64.633, -0.034]),
(4.0, dt(2026, 6, 15), dt(2051, 6, 21), 1.75, [64.635, -0.029]),
],
)
def test_au_gb(self, ytm, sett, maturity, coupon, exp):
# AU0000097495
bond = FixedRateBond(dt(2020, 6, 21), maturity, fixed_rate=coupon, spec="au_gb")
accrued = bond.accrued(sett)
assert abs(accrued - exp[1]) < 5e-4
price = bond.price(ytm=ytm, settlement=sett)
assert abs(price - exp[0]) < 6e-4
def test_au_gb_docs_basic_formula_worked_example(self):
bond = FixedRateBond(dt(2018, 11, 21), dt(2029, 11, 21), fixed_rate=2.75, spec="au_gb")
result = bond.price(ytm=1.10, settlement=dt(2019, 9, 12), dirty=True)
expected = 116.716
assert abs(round(result, 3) - expected) < 1e-4
def test_au_gb_docs_ex_interest_formula_worked_example(self):
bond = FixedRateBond(dt(2018, 5, 21), dt(2030, 5, 21), fixed_rate=2.50, spec="au_gb")
result = bond.price(ytm=1.10, settlement=dt(2019, 11, 15), dirty=True)
expected = 113.827
assert abs(round(result, 3) - expected) < 1e-4
def test_au_gb_docs_near_maturing_worked_example(self):
bond = FixedRateBond(dt(2010, 4, 21), dt(2019, 10, 21), fixed_rate=2.75, spec="au_gb")
result = bond.price(ytm=1.00, settlement=dt(2019, 9, 26), dirty=True)
expected = 101.305613
assert abs(round(result, 6) - expected) < 5e-6
def test_au_gb_docs_near_maturing_ex_interest_worked_example(self):
bond = FixedRateBond(dt(2010, 4, 21), dt(2019, 10, 21), fixed_rate=2.75, spec="au_gb")
result = bond.price(ytm=1.00, settlement=dt(2019, 10, 16), dirty=True)
expected = 99.986303
assert abs(round(result, 6) - expected) < 5e-6
def test_au_gb_record_date_examples(self):
bond = FixedRateBond(dt(2023, 11, 21), dt(2028, 5, 21), fixed_rate=2.75, spec="au_gb")
record = bond.leg1.schedule.pschedule3[1]
assert record == dt(2024, 5, 13)
bond = FixedRateBond(dt(2024, 4, 21), dt(2026, 4, 21), fixed_rate=4.25, spec="au_gb")
record = bond.leg1.schedule.pschedule3[1]
assert record == dt(2024, 10, 11)
# Chinese GB
@pytest.mark.parametrize(
("ytm", "maturity", "coupon", "exp"),
[
# gen AI cross check
(2.35, dt(2036, 3, 15), 2.50, [101.3230902997, 0.1290760870]),
(2.15, dt(2031, 6, 20), 2.20, [100.243946673846, 0.6285714286]),
(2.37, dt(2056, 1, 15), 2.38, [100.211057878950, 0.512707182320]),
],
)
def test_cn_gb(self, ytm, maturity, coupon, exp):
bond = FixedRateBond(dt(2020, 6, 21), maturity, fixed_rate=coupon, spec="cn_gb")
accrued = bond.accrued(dt(2026, 4, 3))
price = bond.price(ytm=ytm, settlement=dt(2026, 4, 3))
assert abs(accrued - exp[1]) < 1e-2
assert abs(price - exp[0]) < 5e-5
# General Method Coverage
def test_fixed_rate_bond_yield_domains(self) -> None:
bond = FixedRateBond(
dt(1995, 1, 1),
dt(2015, 12, 7),
"S",
convention="ActActICMA",
fixed_rate=8,
ex_div=7,
calendar="ldn",
)
assert bond.ytm(500.0, dt(1999, 5, 24), True) + 5.86484231333 < 1e-8
assert bond.ytm(200, dt(1999, 5, 24), True) - 1.4366895440550 < 1e-8
assert bond.ytm(100, dt(1999, 5, 24), True) - 8.416909601459 < 1e-8
assert bond.ytm(50, dt(1999, 5, 24), True) - 18.486840866431 < 1e-6
assert bond.ytm(1, dt(1999, 5, 24), True) - 13421775210.82037 < 1e-3
def test_fixed_rate_bond_ytm_duals(self) -> None:
bond = FixedRateBond(
dt(1995, 1, 1),
dt(2015, 12, 7),
"S",
convention="ActActICMA",
fixed_rate=8,
ex_div=7,
calendar="ldn",
)
dPdy = bond.duration(4, dt(1995, 1, 1))
P = bond.price(4, dt(1995, 1, 1))
result = bond.ytm(Dual(P, ["a", "b"], [1, -0.5]), dt(1995, 1, 1))
expected = Dual(4.00, ["a", "b"], [-1 / dPdy, 0.5 / dPdy])
assert abs(result - expected) < 1e-11
assert all(np.isclose(expected.dual, result.dual))
d2ydP2 = -bond.convexity(4, dt(1995, 1, 1)) * -(dPdy**-3)
result = bond.ytm(Dual2(P, ["a", "b"], [1, -0.5], []), dt(1995, 1, 1))
expected = Dual2(
4.00,
["a", "b"],
[-1 / dPdy, 0.5 / dPdy],
[d2ydP2 * 0.5, d2ydP2 * -0.25, d2ydP2 * -0.25, d2ydP2 * 0.125],
)
assert abs(result - expected) < 1e-11
assert all(np.isclose(result.dual, expected.dual))
assert all(np.isclose(result.dual2, expected.dual2).flat)
@pytest.mark.skip(reason="Bills have Z frequency, this no longer raises")
def test_fixed_rate_bond_zero_frequency_raises(self) -> None:
with pytest.raises(ValueError, match="FixedRateBond `frequency`"):
FixedRateBond(dt(1999, 5, 7), dt(2002, 12, 7), "Z", convention="ActActICMA")
@pytest.mark.parametrize("metric", ["risk", "duration", "modified"])
def test_fixed_rate_bond_duration(self, metric) -> None:
gilt = FixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=8.0,
)
price0 = gilt.price(4.445, dt(1999, 5, 27), dirty=True)
price1 = gilt.price(4.446, dt(1999, 5, 27), dirty=True)
if metric == "risk":
numeric = price0 - price1
elif metric == "modified":
numeric = (price0 - price1) / price0 * 100
elif metric == "duration":
numeric = (price0 - price1) / price0 * (1 + 4.445 / (100 * 2)) * 100
result = gilt.duration(4.445, dt(1999, 5, 27), metric=metric)
assert (result - numeric * 1000) < 1e-1
def test_fixed_rate_bond_convexity(self) -> None:
gilt = FixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=8.0,
)
numeric = gilt.duration(4.445, dt(1999, 5, 27)) - gilt.duration(4.446, dt(1999, 5, 27))
result = gilt.convexity(4.445, dt(1999, 5, 27))
assert (result - numeric * 1000) < 1e-3
price = gilt.price(4.445, dt(1999, 5, 27), dirty=True)
result2 = gilt.convexity(4.445, dt(1999, 5, 27), "convexity")
assert abs(result2 - result * 100.0 / price) < 1e-6
def test_convexity_traditional(self):
aapl_bond = FixedRateBond(dt(2013, 5, 4), dt(2043, 5, 4), fixed_rate=3.85, spec="us_corp")
# c1 = aapl_bond.convexity(4.653674794785435, dt(2014, 3, 5))
c2 = aapl_bond.convexity(4.653674794785435, dt(2014, 3, 5), metric="convexity")
assert abs(c2 - 3.803) < 1e-4
def test_fixed_rate_bond_rate(self) -> None:
gilt = FixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=8.0,
settle=0,
)
curve = Curve({dt(1998, 12, 9): 1.0, dt(2015, 12, 7): 0.50})
clean_price = gilt.rate(curves=curve, metric="clean_price")
result = gilt.rate(
curves={"disc_curve": curve}, metric="clean_price", settlement=dt(1998, 12, 9)
)
assert abs(result - clean_price) < 1e-8
result = gilt.rate(curves=_Curves(disc_curve=curve), metric="dirty_price")
expected = clean_price + gilt.accrued(dt(1998, 12, 9))
assert result == expected
result = gilt.rate(curves=curve, metric="dirty_price", settlement=dt(1998, 12, 9))
assert abs(result - clean_price - gilt.accrued(dt(1998, 12, 9))) < 1e-8
result = gilt.rate(curves=curve, metric="ytm")
expected = gilt.ytm(clean_price, dt(1998, 12, 9), False)
assert abs(result - expected) < 1e-8
def test_initialisation_rate_metric(self) -> None:
gilt = FixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=8.0,
settle=0,
metric="ytm",
)
curve = Curve({dt(1998, 12, 9): 1.0, dt(2015, 12, 7): 0.50})
clean_price = gilt.rate(curves=curve, metric="clean_price")
expected = gilt.ytm(price=clean_price, settlement=dt(1998, 12, 9))
result = gilt.rate(curves=curve) # default metric is "ytm"
assert abs(result - expected) < 1e-8
def test_fixed_rate_bond_npv(self) -> None:
gilt = FixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=8.0,
notional=-100,
settle=0,
)
curve = Curve({dt(2010, 11, 25): 1.0, dt(2015, 12, 7): 0.75})
result = gilt.npv(curves=curve)
expected = 113.22198344812742
assert abs(result - expected) < 1e-6
gilt.kwargs.meta["settle"] = 2
result = gilt.npv(curves=curve) # bond is ex div on settlement 27th Nov 2010
expected = 109.229489312983 # bond has dropped a coupon payment of 4.
assert abs(result - expected) < 1e-6
result = gilt.npv(curves=curve, local=True)
assert abs(result["gbp"] - expected) < 1e-6
def test_fixed_rate_bond_npv_private(self) -> None:
# this test shadows 'fixed_rate_bond_npv' but extends it for projection on 27th Nov ex div.
curve = Curve({dt(2004, 11, 25): 1.0, dt(2010, 11, 25): 1.0, dt(2015, 12, 7): 0.75})
gilt = FixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=8.0,
notional=-100,
settle=0,
)
result = gilt.npv(curves=curve, settlement=dt(2010, 11, 27), forward=dt(2010, 11, 25))
expected = 109.229489312983 # npv should match associated test
assert abs(result - expected) < 1e-6
def test_fixed_rate_bond_analytic_delta(self) -> None:
gilt = FixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=8.0,
notional=-1000000,
settle=0,
)
curve = Curve({dt(2010, 11, 25): 1.0, dt(2015, 12, 7): 1.0})
result = gilt.analytic_delta(curves=curve)
expected = -550.0
assert abs(result - expected) < 1e-6
gilt.kwargs.meta["settle"] = 2
result = gilt.analytic_delta(curves=curve) # bond is ex div on settle 27th Nov 2010
expected = -500.0 # bond has dropped a 6m coupon payment
assert abs(result - expected) < 1e-6
def test_fixed_rate_bond_cashflows(self) -> None:
gilt = FixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=8.0,
notional=-100,
settle=1,
)
curve = Curve({dt(2010, 11, 25): 1.0, dt(2015, 12, 7): 0.75})
flows = gilt.cashflows(curves=curve) # bond is ex div on 26th nov 2010
result = flows[defaults.headers["npv"]].sum()
expected = gilt.npv(curves=curve)
assert abs(result - expected) < 1e-6
gilt.settle = 0
flows = gilt.cashflows(curves=curve) # settlement from curve initial node
result = flows[defaults.headers["npv"]].sum()
expected = gilt.npv(curves=curve)
assert abs(result - expected) < 1e-6
def test_fixed_rate_bond_rate_raises(self) -> None:
gilt = FixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=8.0,
notional=-100,
)
curve = Curve({dt(1998, 12, 7): 1.0, dt(2015, 12, 7): 0.50})
with pytest.raises(ValueError, match="`metric` must be in"):
gilt.rate(curves=curve, metric="bad_metric")
def test_fixed_rate_bond_no_amortization(self) -> None:
with pytest.raises(TypeError, match="got an unexpected keyword argument 'amortization"):
FixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=8.0,
notional=-100,
amortization=100,
)
@pytest.mark.parametrize(
("f_s", "exp"),
[
(dt(2001, 12, 31), 99.997513754), # compounding of mid year coupon
(dt(2002, 1, 1), 99.9975001688), # this is now ex div on last coupon
],
)
def test_fixed_rate_bond_forward_price_analogue(self, f_s, exp) -> None:
gilt = FixedRateBond(
effective=dt(2001, 1, 1),
termination=dt(2002, 1, 1),
frequency="S",
calendar=NoInput(0),
currency="gbp",
convention="Act365f",
ex_div=0,
fixed_rate=1.0,
notional=-100,
settle=0,
)
result = gilt.fwd_from_repo(100.0, dt(2001, 1, 1), f_s, 1.0, "act365f")
assert abs(result - exp) < 1e-6
@pytest.mark.parametrize(
("f_s", "exp"),
[
(dt(2001, 12, 31), 100.49888361793), # compounding of mid year coupon
(dt(2002, 1, 1), 99.9975001688), # this is now ex div on last coupon
],
)
def test_fixed_rate_bond_forward_price_analogue_dirty(self, f_s, exp) -> None:
gilt = FixedRateBond(
effective=dt(2001, 1, 1),
termination=dt(2002, 1, 1),
frequency="S",
calendar=NoInput(0),
currency="gbp",
convention="Act365f",
ex_div=1,
fixed_rate=1.0,
notional=-100,
settle=0,
)
result = gilt.fwd_from_repo(100.0, dt(2001, 1, 1), f_s, 1.0, "act365f", dirty=True)
assert abs(result - exp) < 1e-6
@pytest.mark.parametrize(
("s", "f_s", "exp"),
[
(dt(2010, 11, 25), dt(2011, 11, 25), 99.9975000187), # div div
(dt(2010, 11, 28), dt(2011, 11, 29), 99.997471945), # ex-div ex-div
(dt(2010, 11, 28), dt(2011, 11, 25), 99.997419419), # ex-div div
(dt(2010, 11, 25), dt(2011, 11, 29), 99.9975516607), # div ex-div
],
)
def test_fixed_rate_bond_forward_price_analogue_ex_div(self, s, f_s, exp) -> None:
gilt = FixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="act365f",
ex_div=7,
fixed_rate=1.0,
notional=-100,
settle=0,
)
result = gilt.fwd_from_repo(100.0, s, f_s, 1.0, "act365f")
assert abs(result - exp) < 1e-6
@pytest.mark.parametrize(
("f_s", "f_p"),
[
(dt(2001, 12, 31), 99.997513754), # compounding of mid year coupon
(dt(2002, 1, 1), 99.9975001688), # this is now ex div on last coupon
],
)
def test_fixed_rate_bond_implied_repo(self, f_s, f_p) -> None:
gilt = FixedRateBond(
effective=dt(2001, 1, 1),
termination=dt(2002, 1, 1),
frequency="S",
calendar=NoInput(0),
currency="gbp",
convention="Act365f",
ex_div=0,
fixed_rate=1.0,
notional=-100,
settle=0,
)
result = gilt.repo_from_fwd(100.0, dt(2001, 1, 1), f_s, f_p, "act365f")
assert abs(result - 1.00) < 1e-8
@pytest.mark.parametrize(
("f_s", "f_p"),
[
(dt(2001, 12, 31), 100.49888361793), # compounding of mid year coupon
(dt(2002, 1, 1), 99.9975001688), # this is now ex div on last coupon
],
)
def test_fixed_rate_bond_implied_repo_analogue_dirty(self, f_s, f_p) -> None:
gilt = FixedRateBond(
effective=dt(2001, 1, 1),
termination=dt(2002, 1, 1),
frequency="S",
calendar=NoInput(0),
currency="gbp",
convention="Act365f",
ex_div=1,
fixed_rate=1.0,
notional=-100,
settle=0,
)
result = gilt.repo_from_fwd(100.0, dt(2001, 1, 1), f_s, f_p, "act365f", dirty=True)
assert abs(result - 1.0) < 1e-8
@pytest.mark.parametrize(
("price", "tol"),
[(112.0, 5e-7), (104.0, 1e-8), (96.0, 1e-7), (91.0, 1e-6)],
)
def test_oaspread(self, price, tol) -> None:
gilt = FixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=8.0,
notional=-100,
settle=0,
)
curve = Curve({dt(2010, 11, 25): 1.0, dt(2015, 12, 7): 0.75})
# result = gilt.npv(curve) = 113.22198344812742
result = gilt.oaspread(curves=curve, price=price)
curve_z = curve.shift(result)
result = gilt.rate(curves=curve_z, metric="clean_price")
assert abs(result - price) < tol
@pytest.mark.parametrize(
("price", "tol"),
[
(85, 5e-8),
(75, 5e-8),
(65, 1e-7),
(55, 1e-7),
(45, 5e-8),
(35, 5e-8),
],
)
def test_oaspread_low_price(self, price, tol) -> None:
gilt = FixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=1.0,
notional=-100,
settle=0,
)
curve = Curve({dt(1999, 11, 25): 1.0, dt(2015, 12, 7): 0.85})
# result = gilt.npv(curve) = 113.22198344812742
result = gilt.oaspread(curves=curve, price=price)
curve_z = curve.shift(result)
result = gilt.rate(curves=curve_z, metric="clean_price")
assert abs(result - price) < tol
def test_oas_spread_with_solver(self):
gilt = FixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
spec="uk_gb",
fixed_rate=1.0,
)
curve = Curve({dt(1999, 11, 25): 1.0, dt(2015, 12, 7): 0.85})
Solver(
curves=[curve],
instruments=[
IRS(
effective=dt(1999, 12, 8),
termination="2y",
spec="gbp_irs",
curves=curve,
leg2_fixing_series="eur_rfr",
)
],
s=[2.5],
)
# gilt.rate(curve, metric="dirty_price") = 80.52025551638633
result = gilt.oaspread(curves=curve, price=95.00)
curve_z = curve.shift(result)
result = gilt.rate(curves=curve_z, metric="clean_price")
assert abs(result - 95.00) < 1e-8
def test_oaspread_ift_fwddiff(self):
bond = FixedRateBond(dt(2000, 1, 1), "3Y", fixed_rate=2.5, spec="us_gb")
curve = Curve({dt(2000, 7, 1): 1.0, dt(2005, 7, 1): 0.80})
# Add AD variables to the curve without a Solver
curve._set_ad_order(1)
result = bond.oaspread(curves=curve, price=Variable(95.0, ["price"], []))
grad = gradient(result, ["price"])[0]
assert abs(bond.oaspread(curves=curve, price=95.01) - result - 0.01 * grad) < 1e-3
assert abs(bond.oaspread(curves=curve, price=94.99) - result + 0.01 * grad) < 1e-3
def test_oas_spread_metric(self):
gilt = FixedRateBond(dt(1998, 12, 7), dt(2015, 12, 7), spec="uk_gb", fixed_rate=1.0)
curve = Curve({dt(1999, 11, 3): 1.0, dt(2015, 12, 7): 0.85})
result1 = gilt.oaspread(curves=curve, price=95.0, metric="clean_price")
result2 = gilt.oaspread(
curves=curve, price=95.0 + gilt.accrued(dt(1999, 11, 4)), metric="dirty_price"
)
result3 = gilt.oaspread(curves=curve, price=gilt.ytm(95.0, dt(1999, 11, 4)), metric="ytm")
assert abs(result1 - result2) < 1e-5
assert abs(result1 - result3) < 1e-5
def test_cashflows_no_curve(self) -> None:
gilt = FixedRateBond(
effective=dt(2001, 1, 1), termination="1Y", spec="uk_gb", fixed_rate=5.0
)
result = gilt.cashflows() # no curve argument is passed to cashflows
assert isinstance(result, DataFrame)
def test_schedule_start_non_business(self) -> None:
frb = FixedRateBond(
effective=dt(2000, 1, 1),
termination="1y",
spec="us_gb",
notional=5e6,
fixed_rate=2.0,
)
assert frb.leg1.periods[1].settlement_params.payment == dt(2001, 1, 2)
def test_random_ytm_collection(self):
NUMBER = 75
START = dt(2000, 1, 1)
TENORS = ["2y", "3y", "4y", "5y", "6y", "7y", "8y", "9y", "10y", "15y"]
COUPS = [
1.0,
2.0,
3.0,
4.0,
]
RAND_PRICES = np.random.rand(NUMBER) * 150 + 25.0
BONDS = [
FixedRateBond(
effective=START,
termination=TENORS[i % 10],
spec="us_gb",
fixed_rate=COUPS[i % 4],
)
for i in range(NUMBER)
]
for i in range(NUMBER):
BONDS[i].ytm(price=RAND_PRICES[i], settlement=dt(2001, 8, 30))
def test_custom_calc_mode(self):
cm = BondCalcMode(
settle_accrual="linear_days",
ytm_accrual="linear_days",
v1="compounding",
v2="regular",
v3="compounding",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
bond = FixedRateBond(
effective=dt(2001, 1, 1),
termination="10y",
frequency="s",
calendar="ldn",
convention="ActActICMA",
modifier="none",
settle=1,
calc_mode=cm,
fixed_rate=1.0,
)
bond2 = FixedRateBond(dt(2001, 1, 1), "10y", spec="uk_gb", fixed_rate=1.0)
assert bond.price(3.0, dt(2002, 3, 4)) == bond2.price(3.0, dt(2002, 3, 4))
assert bond.accrued(dt(2002, 3, 4)) == bond2.accrued(dt(2002, 3, 4))
def test_must_have_fixed_rate(self):
with pytest.raises(ValueError, match=r"`fixed_rate` must be provided for FixedRateBond."):
FixedRateBond(
effective=dt(2001, 1, 1),
termination="10y",
frequency="s",
calendar="ldn",
convention="ActActICMA",
modifier="none",
settle=1,
)
def test_ytm_domains2(self):
# the first pass in the quadratic approximator predicts a yield outside of the
# interval so a bisection method is adopted instead.
frb = FixedRateBond(
effective=dt(2000, 1, 15),
termination=dt(2030, 9, 25),
spec="uk_gb",
stub="shortfront",
fixed_rate=0.57744089871129,
)
result = frb.ytm(price=173.80904334438674, settlement=dt(2000, 1, 20))
assert abs(result + 1.3549202231746622) < 1e-10
def test_oas_coupon_on_non_bus_day(self):
# coupon falls on 30th Jun (sunday) and paid on 1st July. OAS spread now handles.
# dev gh 17
bond = FixedRateBond(dt(2023, 12, 31), "3y", fixed_rate=0.5, spec="us_gb")
curve = Curve({dt(2024, 6, 24): 1.0, dt(2028, 6, 25): 1.0})
for today in [
dt(2024, 6, 25),
dt(2024, 6, 26),
dt(2024, 6, 27),
dt(2024, 6, 28),
dt(2024, 6, 29),
dt(2024, 6, 30),
dt(2024, 7, 1),
dt(2024, 7, 2),
dt(2024, 7, 3),
]:
curve_ = curve.translate(today)
assert 49.1 < bond.oaspread(curves=curve_, price=100.0) < 49.2
def test_dirty_price_on_non_bus_day(self):
# coupon falls on 30th Jun (sunday) and paid on 1st July. OAS spread now handles.
# dev gh 17
bond = FixedRateBond(dt(2023, 12, 31), "3y", fixed_rate=0.5, spec="us_gb")
curve = Curve({dt(2024, 6, 24): 1.0, dt(2028, 6, 25): 1.0})
for today in [
dt(2024, 6, 25),
dt(2024, 6, 26),
dt(2024, 6, 27),
dt(2024, 6, 28),
dt(2024, 6, 29),
dt(2024, 6, 30),
dt(2024, 7, 1),
dt(2024, 7, 2),
dt(2024, 7, 3),
]:
curve_ = curve.translate(today)
if today <= dt(2024, 6, 27): # settlement Friday 28th June
assert bond.rate(curves=curve_, metric="dirty_price") == 101.5
else:
assert bond.rate(curves=curve_, metric="dirty_price") == 101.25
@pytest.mark.parametrize(
"bond",
[
FixedRateBond(dt(2023, 12, 31), dt(2025, 12, 31), fixed_rate=4.25, spec="us_gb"),
FixedRateBond(
dt(2023, 12, 31), dt(2025, 12, 31), fixed_rate=4.25, spec="us_gb", modifier="F"
),
],
)
def test_npv_and_oas_with_adjusted_accrual_on_non_bus_day(self, bond):
curve = Curve({dt(2024, 6, 28): 1.0, dt(2026, 6, 30): 0.96})
result = (
bond.npv(curves=curve),
bond.oaspread(curves=curve, price=97.0),
bond.rate(curves=curve, metric="clean_price"),
)
for date in [dt(2024, 7, 1), dt(2024, 7, 2)]:
curve_ = curve.translate(date)
assert abs(bond.npv(curves=curve_) - result[0]) < 250.0
assert abs(bond.oaspread(curves=curve_, price=97.0) - result[1]) < 0.75
assert abs(bond.rate(curves=curve_, metric="clean_price") - result[2]) < 0.03
@pytest.mark.parametrize(
("settlement", "forward_settlement", "expected"),
[
(dt(2024, 6, 27), dt(2024, 6, 28), 100.002503),
(dt(2024, 6, 27), dt(2024, 6, 29), 100.005596),
(dt(2024, 6, 27), dt(2024, 6, 30), 100.007805),
(dt(2024, 6, 27), dt(2024, 7, 1), 100.010140),
(dt(2024, 6, 27), dt(2024, 7, 2), 100.012475),
(dt(2024, 6, 29), dt(2024, 7, 1), 100.004550),
],
)
def test_fwd_from_repo_ex_div_and_holidays(self, settlement, forward_settlement, expected):
bond = FixedRateBond(dt(2023, 12, 31), dt(2025, 12, 31), fixed_rate=4.25, spec="us_gb")
result = bond.fwd_from_repo(
price=100.0,
settlement=settlement,
forward_settlement=forward_settlement,
repo_rate=5.0,
convention="Act360",
)
assert abs(result - expected) < 1e-6
@pytest.mark.parametrize(
("settlement", "forward_settlement", "fwd_price"),
[
(dt(2024, 6, 27), dt(2024, 6, 28), 100.002503),
(dt(2024, 6, 27), dt(2024, 6, 29), 100.005596),
(dt(2024, 6, 27), dt(2024, 6, 30), 100.007805),
(dt(2024, 6, 27), dt(2024, 7, 1), 100.010140),
(dt(2024, 6, 27), dt(2024, 7, 2), 100.012475),
(dt(2024, 6, 29), dt(2024, 7, 1), 100.004550),
],
)
def test_repo_from_fwd_ex_div_and_holidays(self, settlement, forward_settlement, fwd_price):
bond = FixedRateBond(dt(2023, 12, 31), dt(2025, 12, 31), fixed_rate=4.25, spec="us_gb")
result = bond.repo_from_fwd(
price=100.0,
settlement=settlement,
forward_settlement=forward_settlement,
forward_price=fwd_price,
convention="Act360",
)
assert abs(result - 5.00) < 2e-4
def test_183d_ytm(self):
bond_base = FixedRateBond(dt(2000, 1, 1), dt(2001, 1, 1), fixed_rate=5.0, spec="us_gb")
bond_test = FixedRateBond(
dt(2000, 1, 1), dt(2001, 1, 1), fixed_rate=5.0, spec="us_gb", frequency="183D"
)
expected = bond_base.ytm(100, dt(2000, 1, 1))
result = bond_test.ytm(100, dt(2000, 1, 1))
assert abs(expected - result) < 1e-5
def test_long_back_stub_split_accrued(self):
bond = FixedRateBond(
dt(2000, 1, 1), dt(2001, 2, 15), fixed_rate=20.0, spec="us_gb", stub="LongBack"
)
accrued = bond.accrued(dt(2001, 1, 15))
approximation = (dt(2001, 1, 15) - dt(2000, 7, 1)).days / 365 * 20.0
assert abs(accrued - approximation) < 1e-1
def test_long_back_front_stubs_split_accrued(self):
bond = FixedRateBond(
dt(2000, 1, 1),
dt(2002, 2, 15),
front_stub=dt(2000, 9, 8),
fixed_rate=20.0,
spec="us_gb",
stub="LongBack",
)
accrued = bond.accrued(dt(2002, 1, 15))
approximation = (dt(2002, 1, 15) - dt(2001, 3, 8)).days / 365 * 20.0
assert abs(accrued - approximation) < 1e-1
price = bond.price(ytm=20.0, settlement=dt(2002, 1, 15))
assert abs(price - 100.0) < 5e-1
def test_coupon_setter(self):
frb = FixedRateBond(dt(2000, 1, 1), dt(2005, 1, 1), fixed_rate=2.0, spec="uk_gb")
frb.fixed_rate = 3.0
assert frb.fixed_rate == 3.0
assert frb.kwargs.leg1["fixed_rate"] == 3.0
class TestIndexFixedRateBond:
def test_fixed_rate_bond_price(self) -> None:
# test pricing functions against Nominal Gilt Example prices from UK DMO
# these prices should be equivalent for the REAL component of Index Bonds
bond = IndexFixedRateBond(
dt(1995, 1, 1),
dt(2015, 12, 7),
"S",
convention="ActActICMA",
fixed_rate=8,
ex_div=7,
calendar="ldn",
index_base=100.0,
)
assert abs(bond.price(4.445, dt(1999, 5, 24), True) - 145.012268) < 1e-6
assert abs(bond.price(4.445, dt(1999, 5, 26), True) - 145.047301) < 1e-6
assert abs(bond.price(4.445, dt(1999, 5, 27), True) - 141.070132) < 1e-6
assert abs(bond.price(4.445, dt(1999, 6, 7), True) - 141.257676) < 1e-6
bond = IndexFixedRateBond(
dt(1997, 1, 1),
dt(2004, 11, 26),
"S",
convention="ActActICMA",
fixed_rate=6.75,
ex_div=7,
calendar="ldn",
index_base=100.0,
)
assert abs(bond.price(4.634, dt(1999, 5, 10), True) - 113.315543) < 1e-6
assert abs(bond.price(4.634, dt(1999, 5, 17), True) - 113.415969) < 1e-6
assert abs(bond.price(4.634, dt(1999, 5, 18), True) - 110.058738) < 1e-6
assert abs(bond.price(4.634, dt(1999, 5, 26), True) - 110.170218) < 1e-6
@pytest.mark.skip(reason="Frequency of zero calculates but is wrong. Docs do not allow.")
def test_fixed_rate_bond_zero_frequency_raises(self) -> None:
with pytest.raises(ValueError, match="`frequency` must be provided"):
IndexFixedRateBond(
dt(1999, 5, 7),
dt(2002, 12, 7),
"Z",
convention="ActActICMA",
fixed_rate=1.0,
)
def test_fixed_rate_bond_no_amortization(self) -> None:
with pytest.raises(TypeError, match="got an unexpected keyword argument 'amortization"):
IndexFixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=8.0,
notional=-100,
amortization=100,
index_base=100.0,
)
def test_fixed_rate_bond_rate_raises(self) -> None:
gilt = IndexFixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=8.0,
notional=-100,
index_base=100.0,
)
curve = Curve({dt(1998, 12, 7): 1.0, dt(2015, 12, 7): 0.50})
with pytest.raises(ValueError, match="`metric` must be in"):
gilt.rate(
curves=[
Curve({dt(1992, 1, 1): 1.0, dt(2070, 1, 1): 0.13}, index_base=100.0),
curve,
],
metric="bad_metric",
)
def test_initialisation_rate_metric(self) -> None:
gilt = IndexFixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=8.0,
settle=0,
index_base=100.0,
index_lag=3,
metric="ytm",
)
disc_curve = Curve(
{dt(1998, 12, 9): 1.0, dt(2015, 12, 7): 0.50}, index_base=100.0, index_lag=3
)
curve = Curve({dt(1998, 12, 1): 1.0, dt(2015, 12, 7): 0.50}, index_base=100.0, index_lag=3)
clean_price = gilt.rate(curves=[curve, disc_curve], metric="clean_price")
expected = gilt.ytm(price=clean_price, settlement=dt(1998, 12, 9))
result = gilt.rate(curves=[curve, disc_curve]) # default metric is "ytm"
assert abs(result - expected) < 1e-8
@pytest.mark.parametrize(
("i_fixings", "expected"),
[
(NoInput(0), 1.161227269),
("index_series", (90 + 14 / 30 * 200) / 95),
],
)
def test_index_ratio(self, i_fixings, expected) -> None:
if isinstance(i_fixings, str):
fixings.add("index_series", Series([90.0, 290], index=[dt(2022, 1, 1), dt(2022, 2, 1)]))
i_curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99},
index_lag=3,
index_base=110.0,
interpolation="linear_index",
)
bond = IndexFixedRateBond(
dt(2022, 1, 1),
"9m",
"Q",
convention="ActActICMA",
fixed_rate=4,
ex_div=0,
calendar="ldn",
index_base=95.0,
index_fixings=i_fixings,
index_method="daily",
index_lag=3,
)
result = bond.index_ratio(settlement=dt(2022, 4, 15), index_curve=i_curve)
if isinstance(i_fixings, str):
fixings.pop("index_series")
assert abs(result - expected) < 1e-5
@pytest.mark.skip(
reason="This will calculate from the curve but will not be aligned with the specific list "
"fixings, but since list fixings are not recommended in the documentation and the"
"advice is to use a `fixings` object then this is OK."
)
def test_index_ratio_raises_float_index_fixings(self) -> None:
i_curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99},
index_lag=3,
index_base=110.0,
interpolation="linear_index",
)
bond = IndexFixedRateBond(
dt(2022, 1, 1),
"9m",
"Q",
convention="ActActICMA",
fixed_rate=4,
ex_div=0,
calendar="ldn",
index_base=95.0,
index_fixings=[100.0, 200.0],
index_method="daily",
)
# with pytest.raises(TypeError, match="`index_fixings` must be of type: Str, Series, Dual"):
bond.index_ratio(settlement=dt(2022, 4, 15), curve=i_curve)
def test_fixed_rate_bond_npv_private(self) -> None:
# this test shadows 'fixed_rate_bond_npv' but extends it for projection
curve = Curve({dt(2004, 11, 25): 1.0, dt(2010, 11, 25): 1.0, dt(2015, 12, 7): 0.75})
index_curve = Curve(
{dt(2004, 11, 25): 1.0, dt(2034, 1, 1): 1.0},
index_base=100.0,
interpolation="linear_index",
)
gilt = IndexFixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=8.0,
notional=-100,
settle=0,
index_base=50.0,
index_lag=3,
index_method="daily",
)
with pytest.warns(UserWarning):
result = gilt.npv(
curves=[index_curve, curve], settlement=dt(2010, 11, 27), forward=dt(2010, 11, 25)
)
expected = 109.229489312983 * 2.0 # npv should match associated test
assert abs(result - expected) < 1e-6
def test_index_base_forecast(self, curve) -> None:
i_curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99},
index_lag=3,
index_base=95.0,
interpolation="linear_index",
)
bond = IndexFixedRateBond(
dt(2022, 1, 1),
"9m",
"Q",
convention="ActActICMA",
fixed_rate=4,
ex_div=0,
calendar=NoInput(0),
index_method="daily",
settle=0,
)
cashflows = bond.cashflows(curves=[i_curve, curve])
for i in range(4):
assert cashflows.iloc[i]["Index Base"] == 95.0
result = bond.npv(curves=[i_curve, curve])
expected = -1006875.3812
assert abs(result - expected) < 1e-4
result = bond.rate(curves=[i_curve, curve], metric="index_dirty_price")
assert abs(result * -1e4 - expected) < 1e-4
def test_fixed_rate_bond_fwd_rate(self) -> None:
gilt = IndexFixedRateBond(
effective=dt(1998, 12, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
fixed_rate=8.0,
settle=0,
index_base=50.0,
index_lag=3,
)
curve = Curve({dt(1998, 12, 9): 1.0, dt(2015, 12, 7): 0.50})
i_curve = Curve(
{dt(1998, 12, 1): 1.0, dt(2015, 12, 7): 1.0},
index_base=100.0,
interpolation="linear_index",
index_lag=3,
)
clean_price = gilt.rate(curves=[i_curve, curve], metric="clean_price")
index_clean_price = gilt.rate(curves=[i_curve, curve], metric="index_clean_price")
assert abs(index_clean_price * 0.5 - clean_price) < 1e-3
result = gilt.rate(
curves=[i_curve, curve],
metric="clean_price",
settlement=dt(1998, 12, 9),
# forward
)
assert abs(result - clean_price) < 1e-8
result = gilt.rate(
curves=[i_curve, curve],
metric="index_clean_price",
settlement=dt(1998, 12, 9),
# forward
)
assert abs(result * 0.5 - clean_price) < 1e-8
result = gilt.rate(curves=[i_curve, curve], metric="dirty_price")
expected = clean_price + gilt.accrued(dt(1998, 12, 9))
assert result == expected
result = gilt.rate(
curves=[i_curve, curve],
metric="dirty_price",
settlement=dt(1998, 12, 9),
)
assert abs(result - clean_price - gilt.accrued(dt(1998, 12, 9))) < 1e-8
result = gilt.rate(
curves=[i_curve, curve],
metric="index_dirty_price",
settlement=dt(1998, 12, 9),
)
assert abs(result * 0.5 - clean_price - gilt.accrued(dt(1998, 12, 9))) < 1e-8
result = gilt.rate(curves=[i_curve, curve], metric="ytm")
expected = gilt.ytm(clean_price, dt(1998, 12, 9), False)
assert abs(result - expected) < 1e-8
def test_base_setting_and_index_ratio(self):
# GB00BMY62Z61
name = str(hash(os.urandom(8)))
fixings.add(
name,
Series(
index=[
dt(2025, 3, 1),
dt(2025, 4, 1),
dt(2025, 5, 1),
dt(2025, 6, 1),
dt(2025, 7, 1),
dt(2025, 8, 1),
dt(2025, 9, 1),
dt(2025, 10, 1),
],
data=[395.3, 402.2, 402.9, 404.5, 406.2, 407.7, 406.1, 407.4],
),
)
gilt = IndexFixedRateBond(
effective=dt(2025, 6, 11),
termination=dt(2038, 9, 22),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
modifier="None",
ex_div=7,
fixed_rate=1.75,
settle=0,
index_fixings=name,
index_lag=3,
index_method="daily",
)
# these index base and index ratio are calculated externally and verified here
assert gilt.leg1.periods[0].index_params.index_base.value == 397.60
index_ratio = gilt.index_ratio(settlement=dt(2025, 9, 12), index_curve=NoInput(0))
fixings.pop(name)
assert abs(index_ratio - 1.018920) < 1e-5
def test_accrued_and_indexed_accrued(self):
# GB00BMY62Z61
name = str(hash(os.urandom(8)))
fixings.add(
name,
Series(
index=[
dt(2025, 3, 1),
dt(2025, 4, 1),
dt(2025, 5, 1),
dt(2025, 6, 1),
dt(2025, 7, 1),
dt(2025, 8, 1),
dt(2025, 9, 1),
dt(2025, 10, 1),
],
data=[395.3, 402.2, 402.9, 404.5, 406.2, 407.7, 406.1, 407.4],
),
)
gilt = IndexFixedRateBond(
effective=dt(2025, 6, 11),
termination=dt(2038, 9, 22),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
modifier="None",
ex_div=7,
fixed_rate=1.75,
settle=0,
index_fixings=name,
index_lag=3,
index_method="daily",
)
accrued = gilt.accrued(settlement=dt(2025, 9, 12))
index_ratio = gilt.index_ratio(settlement=dt(2025, 9, 12), index_curve=NoInput(0))
indexed_accrued = accrued * index_ratio
# this indexed accrued is calculated externally and verified here
assert abs(indexed_accrued + 0.048454076) < 1e-7
assert abs(gilt.accrued(settlement=dt(2025, 9, 12), indexed=True) - indexed_accrued) < 1e-7
fixings.pop(name)
@pytest.mark.parametrize(
("price", "indexed", "dirty", "expected"),
[
(99.423682, True, True, 100.2169930),
(99.1924173, True, False, 99.9007374),
(98.1322608, False, True, 98.0424122),
(97.904000, False, False, 97.733020),
],
)
def test_fwd_from_repo(self, price, indexed, dirty, expected):
# GB00BMY62Z61
name = str(hash(os.urandom(8)))
fixings.add(
name,
Series(
index=[
dt(2025, 3, 1),
dt(2025, 4, 1),
dt(2025, 5, 1),
dt(2025, 6, 1),
dt(2025, 7, 1),
dt(2025, 8, 1),
dt(2025, 9, 1),
dt(2025, 10, 1),
],
data=[395.3, 402.2, 402.9, 404.5, 406.2, 407.7, 406.1, 407.4],
),
)
gilt = IndexFixedRateBond(
effective=dt(2025, 6, 11),
termination=dt(2038, 9, 22),
fixed_rate=1.75,
spec="uk_gbi",
index_fixings=name,
)
fwd = gilt.fwd_from_repo(
price=price,
settlement=dt(2025, 7, 29),
forward_settlement=dt(2025, 11, 25),
repo_rate=4.00,
convention="act365F",
dirty=dirty,
indexed=indexed,
)
fixings.pop(name)
assert abs(fwd - expected) < 5e-4
@pytest.mark.parametrize(
("price", "indexed", "dirty", "fwd_price"),
[
(99.423682, True, True, 100.2169930),
(99.1924173, True, False, 99.9007374),
(98.1322608, False, True, 98.0424122),
(97.904000, False, False, 97.733020),
],
)
def test_repo_from_fwd(self, price, indexed, dirty, fwd_price):
# GB00BMY62Z61
name = str(hash(os.urandom(8)))
fixings.add(
name,
Series(
index=[
dt(2025, 3, 1),
dt(2025, 4, 1),
dt(2025, 5, 1),
dt(2025, 6, 1),
dt(2025, 7, 1),
dt(2025, 8, 1),
dt(2025, 9, 1),
dt(2025, 10, 1),
],
data=[395.3, 402.2, 402.9, 404.5, 406.2, 407.7, 406.1, 407.4],
),
)
gilt = IndexFixedRateBond(
effective=dt(2025, 6, 11),
termination=dt(2038, 9, 22),
fixed_rate=1.75,
spec="uk_gbi",
index_fixings=name,
)
repo = gilt.repo_from_fwd(
price=price,
settlement=dt(2025, 7, 29),
forward_settlement=dt(2025, 11, 25),
forward_price=fwd_price,
convention="act365F",
dirty=dirty,
indexed=indexed,
)
fixings.pop(name)
assert abs(repo - 4.00) < 2e-3
@pytest.mark.parametrize(
("indexed_price", "indexed_ytm"),
[(False, False), (False, True), (True, False), (True, True)],
)
def test_duration_index_linked_finite_diff(self, indexed_price, indexed_ytm):
# GB00BMY62Z61
name = str(hash(os.urandom(8)))
fixings.add(
name,
Series(
index=[
dt(2025, 3, 1),
dt(2025, 4, 1),
dt(2025, 5, 1),
dt(2025, 6, 1),
dt(2025, 7, 1),
dt(2025, 8, 1),
dt(2025, 9, 1),
dt(2025, 10, 1),
],
data=[395.3, 402.2, 402.9, 404.5, 406.2, 407.7, 406.1, 407.4],
),
)
index_curve = Curve({dt(2025, 10, 1): 1.0, dt(2045, 10, 1): 1.0}, index_base=407.4).shift(
100
)
gilt = IndexFixedRateBond(
effective=dt(2025, 6, 11),
termination=dt(2038, 9, 22),
fixed_rate=1.75,
spec="uk_gbi",
index_fixings=name,
)
value = gilt.duration(
ytm=2.00,
settlement=dt(2025, 7, 29),
metric="risk",
indexed_price=indexed_price,
indexed_ytm=indexed_ytm,
index_curve=index_curve,
)
# finite diff test:
original_price = gilt.price(
ytm=2.00,
settlement=dt(2025, 7, 29),
indexed_price=indexed_price,
indexed_ytm=indexed_ytm,
index_curve=index_curve,
dirty=True,
)
bumped_price = gilt.price(
ytm=1.999,
settlement=dt(2025, 7, 29),
indexed_ytm=indexed_ytm,
indexed_price=indexed_price,
index_curve=index_curve,
dirty=True,
)
expected = (bumped_price - original_price) * 1000.0
assert abs(value - expected) < 1e-3
## Test modified
modified = gilt.duration(
ytm=2.00,
settlement=dt(2025, 7, 29),
metric="modified",
indexed_price=indexed_price,
indexed_ytm=indexed_ytm,
index_curve=index_curve,
)
assert abs(value / original_price * 100.0 - modified) < 1e-6
# Test macauley
macauley = gilt.duration(
ytm=2.00,
settlement=dt(2025, 7, 29),
metric="duration",
indexed_price=indexed_price,
indexed_ytm=indexed_ytm,
index_curve=index_curve,
)
assert abs(modified * (1 + 0.02 / 2) - macauley) < 1e-6
fixings.pop(name)
# TODO: implement these tests
#
# def test_convexity(self):
# assert False
def test_latest_fixing(self) -> None:
# this is German government inflation bond with fixings given for a specific settlement
# calculation
ibnd = IndexFixedRateBond(
effective=dt(2021, 2, 11),
front_stub=dt(2022, 4, 15),
termination=dt(2033, 4, 15),
convention="ActActICMA",
calendar="tgt",
frequency="A",
index_lag=3,
index_base=124.17000 / 1.18851, # implying from 1st Jan 2024 on webpage
index_method="daily",
payment_lag=0,
currency="eur",
fixed_rate=0.1,
ex_div=1,
settle=1,
index_fixings=Series(data=[124.17, 123.46], index=[dt(2024, 1, 1), dt(2024, 2, 1)]),
)
result = ibnd.ytm(price=100.32, settlement=dt(2024, 1, 5))
expected = 0.065
assert (result - expected) < 1e-2
def test_rate_with_fx_is_same(self) -> None:
usd = Curve(nodes={dt(2000, 1, 1): 1.0, dt(2005, 1, 1): 0.9, dt(2010, 1, 5): 0.8})
gbp = Curve(nodes={dt(2000, 1, 1): 1.0, dt(2005, 1, 1): 0.9, dt(2010, 1, 5): 0.8})
gbpi = Curve(
nodes={dt(2000, 1, 1): 1.0, dt(2010, 1, 1): 0.95},
index_base=100.0,
interpolation="linear_index",
index_lag=3,
)
fxf = FXForwards(
fx_rates=FXRates({"gbpusd": 1.25}, settlement=dt(2000, 1, 1)),
fx_curves={"gbpgbp": gbp, "usdusd": usd, "gbpusd": gbp},
)
result = IndexFixedRateBond(
dt(2000, 1, 1),
"5y",
index_base=100.5,
spec="uk_gbi",
fixed_rate=1.0,
).rate(curves=[gbpi, gbp], metric="clean_price")
result2 = IndexFixedRateBond(
dt(2000, 1, 1),
"5y",
index_base=100.5,
spec="uk_gbi",
fixed_rate=1.0,
).rate(curves=[gbpi, gbp], metric="clean_price", fx=fxf)
assert result == result2
def test_spec_kwargs(self) -> None:
# GH346
fixings = Series(data=[314.175, 314.54], index=[dt(2024, 9, 1), dt(2024, 10, 1)])
tii_0728 = IndexFixedRateBond(
effective=dt(2018, 7, 31),
termination=dt(2028, 7, 15),
spec="us_gb_tsy",
fixed_rate=0.75,
notional=-100e6,
curves=["sofr", "sofr"],
index_lag=3,
index_method="monthly",
index_base=251.01658,
index_fixings=fixings,
)
result = tii_0728.ytm(100, dt(2024, 8, 26))
assert (result - 0.749935) < 1e-5
def test_custom_calc_mode(self):
cm = BondCalcMode(
settle_accrual="linear_days",
ytm_accrual="linear_days",
v1="compounding",
v2="regular",
v3="compounding",
c1="cashflow",
ci="cashflow",
cn="cashflow",
)
bond = IndexFixedRateBond(
effective=dt(2001, 1, 1),
termination="10y",
frequency="s",
calendar="ldn",
convention="ActActICMA",
modifier="none",
settle=1,
calc_mode=cm,
fixed_rate=1.0,
index_base=100.0,
)
bond2 = IndexFixedRateBond(
dt(2001, 1, 1), "10y", spec="uk_gb", fixed_rate=1.0, index_base=100.0
)
assert bond.price(3.0, dt(2002, 3, 4)) == bond2.price(3.0, dt(2002, 3, 4))
assert bond.accrued(dt(2002, 3, 4)) == bond2.accrued(dt(2002, 3, 4))
def test_fixed_rate_getter_and_setter(self):
tii_0728 = IndexFixedRateBond(
effective=dt(2018, 7, 31),
termination=dt(2028, 7, 15),
spec="us_gbi",
fixed_rate=0.75,
)
assert tii_0728.fixed_rate == 0.75
tii_0728.fixed_rate = 1.90
assert tii_0728.fixed_rate == 1.90
def test_no_fixed_rate_raises(self):
with pytest.raises(ValueError, match="`fixed_rate` must be provided for IndexFixedRateBo"):
IndexFixedRateBond(
effective=dt(2018, 7, 31),
termination=dt(2028, 7, 15),
spec="us_gbi",
)
def test_parse_curves(self, curve):
i_curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99},
index_lag=3,
index_base=95.0,
interpolation="linear_index",
)
bond = IndexFixedRateBond(
dt(2022, 1, 1),
"9m",
"Q",
convention="ActActICMA",
fixed_rate=4,
ex_div=0,
calendar=NoInput(0),
index_method="daily",
settle=0,
)
result1 = bond.npv(curves=[i_curve, curve])
result2 = bond.npv(curves={"index_curve": i_curve, "disc_curve": curve})
expected = -1006875.3812
assert abs(result1 - expected) < 1e-5
assert abs(result1 - result2) < 1e-5
def test_rate_docs(self):
disc_curve = Curve(
nodes={dt(2025, 7, 28): 1.0, dt(2045, 7, 25): 1.0}, convention="act365f"
).shift(250) # curve begins at 0% and gets shifted by 250 Act365F O/N basis points
index_curve = Curve(
nodes={dt(2025, 5, 1): 1.0, dt(2045, 5, 1): 1.0},
convention="act365f",
index_lag=0,
index_base=402.9,
).shift(100) # curves begins at 0% and gets shifted by 100 Ac6t365f O/N basis points
fixings.add(
"UK_RPI_987",
Series(
index=[dt(2025, 3, 1), dt(2025, 4, 1), dt(2025, 5, 1)], data=[395.3, 402.2, 402.9]
),
)
ukti = IndexFixedRateBond( # ISIN: GB00BMY62Z61
effective=dt(2025, 6, 11),
termination=dt(2038, 9, 22),
fixed_rate=1.75,
spec="uk_gbi",
index_fixings="UK_RPI_987",
)
a1 = ukti.rate(
curves=[index_curve, disc_curve], metric="clean_price"
) # settles T+1 i.e. 29th July
a2 = ukti.rate(curves=[index_curve, disc_curve], metric="dirty_price")
a3 = ukti.rate(curves=[index_curve, disc_curve], metric="index_clean_price")
a4 = ukti.rate(curves=[index_curve, disc_curve], metric="index_dirty_price")
a5 = ukti.rate(curves=[index_curve, disc_curve], metric="ytm")
a6 = ukti.accrued(settlement=dt(2025, 7, 29))
a7 = ukti.accrued(settlement=dt(2025, 7, 29), indexed=True)
a8 = ukti.rate(curves=[index_curve, disc_curve], metric="index_ytm")
assert abs(a1 - 102.90237315163287) < 1e-5
assert abs(a2 - 103.13063402119809) < 1e-5
assert abs(a3 - 104.25652750721756) < 1e-5
assert abs(a4 - 104.487792199156) < 1e-5
assert abs(a5 - 1.5058915118424034) < 1e-5
assert abs(a6 - 0.228260) < 1e-5
assert abs(a7 - 0.231264) < 1e-5
assert abs(a8 - 2.5174145913908443) < 1e-5
fixings.pop("UK_RPI_987")
def test_index_ytm(self):
fixings.add(
"UK_RPI_9843",
Series(
index=[
dt(2025, 1, 1),
dt(2026, 1, 1),
dt(2027, 1, 1),
dt(2028, 1, 1),
dt(2029, 1, 1),
dt(2030, 1, 1),
],
data=[100.0, 102, 103, 104, 105, 106],
),
)
bond = IndexFixedRateBond(
effective=dt(2025, 1, 6),
termination=dt(2030, 1, 6),
roll=6,
calendar="bus",
convention="actacticma",
frequency="A",
# index_base=100.0,
index_lag=0,
index_method="monthly",
ex_div=1,
fixed_rate=2.0,
index_fixings="UK_RPI_9843",
)
assert bond.leg1.periods[0].index_params.index_base.value == 100.0
result = bond.ytm(
price=101.9456166,
settlement=dt(2026, 1, 6),
indexed_price=True,
indexed_ytm=True,
dirty=True,
)
expected = 3.00
# 101.9456166 = 2 * 1.03/1.03 + 2 * 1.04/1.03**2 + 2 * 1.05/1.03**3 + 102 * 1.06/1.03**4
fixings.pop("UK_RPI_9843")
assert abs(result - expected) < 1e-6
result = bond.ytm(
price=101.9456166 / 1.02,
settlement=dt(2026, 1, 6),
indexed_price=False,
indexed_ytm=False,
dirty=True,
)
expected = 2.0140070859464996
assert abs(result - expected) < 1e-6
# clean yield is approximately 1% lower than indexed yield since inflation is approx 1%
def test_index_ytm2(self):
index_curve = Curve(
nodes={dt(2025, 5, 1): 1.0, dt(2045, 5, 1): 1.0},
convention="act365f",
index_lag=0,
index_base=402.9,
).shift(100) # curves begins at 0% and gets shifted by 100 Ac6t365f O/N basis points
ukti = IndexFixedRateBond( # ISIN: GB00BMY62Z61
effective=dt(2025, 6, 11),
termination=dt(2038, 9, 22),
fixed_rate=1.75,
spec="uk_gbi",
index_base=397.60,
)
prices = [104.62775438373183, 103.24009646398126, 104.3626899720302, 102.97854755093778]
for i, (dirty, indexed_price) in enumerate(product([True, False], [True, False])):
indexed_ytm = ukti.ytm(
price=prices[i],
settlement=dt(2025, 8, 5),
indexed_price=indexed_price,
indexed_ytm=True,
dirty=dirty,
index_curve=index_curve,
)
assert abs(indexed_ytm - 2.5100000) < 1e-8
for i, (dirty, indexed_price) in enumerate(product([True, False], [True, False])):
unindexed_ytm = ukti.ytm(
price=prices[i],
settlement=dt(2025, 8, 5),
indexed_price=indexed_price,
indexed_ytm=False,
dirty=dirty,
index_curve=index_curve,
)
assert abs(unindexed_ytm - 1.499260363) < 1e-8
def test_index_price(self):
index_curve = Curve(
nodes={dt(2025, 5, 1): 1.0, dt(2045, 5, 1): 1.0},
convention="act365f",
index_lag=0,
index_base=402.9,
).shift(100) # curves begins at 0% and gets shifted by 100 Ac6t365f O/N basis points
ukti = IndexFixedRateBond( # ISIN: GB00BMY62Z61
effective=dt(2025, 6, 11),
termination=dt(2038, 9, 22),
fixed_rate=1.75,
spec="uk_gbi",
index_base=397.60,
)
prices_from_indexed_ytm = []
for dirty, indexed_price in product([True, False], [True, False]):
prices_from_indexed_ytm.append(
ukti.price(
ytm=2.5100000,
settlement=dt(2025, 8, 5),
indexed_price=indexed_price,
indexed_ytm=True,
dirty=dirty,
index_curve=index_curve,
)
)
prices_from_unindexed_ytm = []
for dirty, indexed_price in product([True, False], [True, False]):
prices_from_unindexed_ytm.append(
ukti.price(
ytm=1.499260363,
settlement=dt(2025, 8, 5),
indexed_price=indexed_price,
indexed_ytm=False,
dirty=dirty,
index_curve=index_curve,
)
)
for p1, p2 in zip(prices_from_indexed_ytm, prices_from_unindexed_ytm):
assert abs(p1 - p2) < 1e-8
class TestBill:
def test_bill_discount_rate(self) -> None:
# test pricing functions against Treasury Bill Example from US Treasury
bill = Bill(
effective=dt(2004, 1, 22),
termination=dt(2004, 2, 19),
calendar="nyc",
currency="usd",
convention="Act360",
calc_mode="ustb",
)
assert bill.discount_rate(99.93777, dt(2004, 1, 22)) == 0.8000999999999543
assert bill.price(0.800, dt(2004, 1, 22)) == 99.93777777777778
def test_bill_ytm(self) -> None:
bill = Bill(
effective=dt(2004, 1, 22),
termination=dt(2004, 2, 19),
calendar="nyc",
currency="usd",
convention="Act360",
calc_mode="ustb",
)
# this YTM is equivalent to the FixedRateBond ytm with coupon of 0.0
result = bill.ytm(99.937778, dt(2004, 1, 22))
# TODO this does not match US treasury example because the method is different
assert abs(result - 0.814) < 1e-2
def test_bill_ytm2(self) -> None:
# this is a longer than 6m period
bill = Bill(
effective=dt(1990, 6, 7),
termination=dt(1991, 6, 6),
convention="act360",
calc_mode="ustb",
)
price = bill.price(7.65, settlement=dt(1990, 6, 7))
result = bill.ytm(price, settlement=dt(1990, 6, 7))
assert abs(result - 8.237) < 1e-3
def test_bill_simple_rate(self) -> None:
bill = Bill(
effective=dt(2004, 1, 22),
termination=dt(2004, 2, 19),
calendar="nyc",
currency="usd",
convention="Act360",
calc_mode="ustb",
)
d = dcf(dt(2004, 1, 22), dt(2004, 2, 19), "Act360")
expected = 100 * (1 / (1 - 0.0080009999999 * d) - 1) / d # floating point truncation
expected = 100 * (100 / 99.93777777777778 - 1) / d
result = bill.simple_rate(99.93777777777778, dt(2004, 1, 22))
assert abs(result - expected) < 1e-6
def test_bill_initialised_rate_metric(self) -> None:
curve = Curve({dt(2004, 1, 22): 1.00, dt(2005, 1, 22): 0.992})
bill = Bill(
effective=dt(2004, 1, 22),
termination=dt(2004, 2, 19),
calendar="nyc",
currency="usd",
convention="Act360",
settle=0,
calc_mode="ustb",
metric="simple_rate",
)
price = bill.rate(curves=curve, metric="price")
expected = bill.simple_rate(price, dt(2004, 1, 22))
result = bill.rate(curves=curve)
assert abs(result - expected) < 1e-6
def test_bill_rate(self) -> None:
curve = Curve({dt(2004, 1, 22): 1.00, dt(2005, 1, 22): 0.992})
bill = Bill(
effective=dt(2004, 1, 22),
termination=dt(2004, 2, 19),
calendar="nyc",
currency="usd",
convention="Act360",
settle=0,
calc_mode="ustb",
)
result = bill.rate(curves=curve, metric="price")
expected = 99.9385705675
assert abs(result - expected) < 1e-6
result = bill.rate(curves=curve, metric="discount_rate")
expected = bill.discount_rate(99.9385705675, dt(2004, 1, 22))
assert abs(result - expected) < 1e-6
result = bill.rate(curves=curve, metric="simple_rate")
expected = bill.simple_rate(99.9385705675, dt(2004, 1, 22))
assert abs(result - expected) < 1e-6
result = bill.rate(curves=curve, metric="ytm")
expected = bill.ytm(99.9385705675, dt(2004, 1, 22))
assert abs(result - expected) < 1e-6
bill.kwargs.meta["settle"] = 2 # set the bill to T+2 settlement and re-run the calculations
result = bill.rate(curves=curve, metric="price")
expected = 99.94734388985547
assert abs(result - expected) < 1e-6
result = bill.rate(curves=curve, metric="discount_rate")
expected = bill.discount_rate(99.94734388985547, dt(2004, 1, 26))
assert abs(result - expected) < 1e-6
result = bill.rate(curves=curve, metric="simple_rate")
expected = bill.simple_rate(99.94734388985547, dt(2004, 1, 26))
assert abs(result - expected) < 1e-6
result = bill.rate(curves=curve, metric="ytm")
expected = bill.ytm(99.94734388985547, dt(2004, 1, 26))
assert abs(result - expected) < 1e-6
def test_bill_default_calc_mode(self) -> None:
bill = Bill(
effective=dt(2004, 1, 22),
termination=dt(2004, 2, 19),
calendar="nyc",
currency="usd",
convention="Act360",
settle=0,
)
assert bill.kwargs.meta["calc_mode"] == US_GBB
def test_bill_rate_raises(self) -> None:
curve = Curve({dt(2004, 1, 22): 1.00, dt(2005, 1, 22): 0.992})
bill = Bill(
effective=dt(2004, 1, 22),
termination=dt(2004, 2, 19),
calendar="nyc",
currency="usd",
convention="Act360",
)
with pytest.raises(ValueError, match="`metric` must be in"):
bill.rate(curves=curve, metric="bad vibes")
def test_sgbb(self) -> None:
bill = Bill(
effective=dt(2023, 3, 15),
termination=dt(2024, 3, 20),
spec="se_gbb",
)
result = bill.price(3.498, settlement=dt(2023, 3, 15))
expected = 96.520547
assert abs(result - expected) < 1e-6
ytm = bill.ytm(price=96.520547, settlement=dt(2023, 3, 15))
assert abs(ytm - 3.5546338) < 1e-5
# norwegian
@pytest.mark.parametrize(
("e", "t", "price", "y"),
[
(dt(2025, 3, 19), dt(2026, 3, 18), 99.38775, 4.01095),
(dt(2025, 6, 18), dt(2026, 6, 17), 98.4218, 4.0012),
(dt(2025, 9, 17), dt(2026, 9, 16), 97.4707, 3.99),
(dt(2025, 12, 17), dt(2026, 12, 16), 96.5409, 3.9705),
],
)
def test_nogbb(self, e, t, price, y) -> None:
# prices obtained from Norges Bank on Friday 16th Jan 2026, settle 20th Jan
bill = Bill(effective=e, termination=t, spec="no_gbb")
ytm = bill.ytm(price=price, settlement=dt(2026, 1, 20))
assert abs(ytm - y) < 5e-5
def test_text_example(self) -> None:
bill = Bill(effective=dt(2023, 5, 17), termination=dt(2023, 9, 26), spec="us_gbb")
result = bill.ytm(99.75, settlement=dt(2023, 9, 7))
bond = FixedRateBond(
effective=dt(2023, 3, 26),
termination=dt(2023, 9, 26),
fixed_rate=0.0,
spec="us_gb",
)
expected = bond.ytm(99.75, settlement=dt(2023, 9, 7))
assert abs(result - expected) < 1e-14
assert abs(result - 4.854240865091567) < 1e-7
@pytest.mark.parametrize(
("price", "tol"), [(96.0, 1e-6), (95.0, 1e-6), (93.0, 1e-5), (80.0, 1e-2)]
)
def test_oaspread(self, price, tol) -> None:
bill = Bill(
effective=dt(1998, 12, 7),
termination=dt(1999, 10, 7),
spec="us_gbb",
)
curve = Curve({dt(1998, 12, 7): 1.0, dt(2015, 12, 7): 0.75})
# result = bill.rate(curve, metric="price") # = 98.605
result = bill.oaspread(curves=curve, price=price)
curve_z = curve.shift(result)
result = bill.rate(curves=curve_z, metric="clean_price")
assert abs(result - price) < tol
def test_with_fx_supplied(self) -> None:
usd = Curve(nodes={dt(2000, 1, 1): 1.0, dt(2005, 1, 1): 0.9, dt(2010, 1, 5): 0.8})
gbp = Curve(nodes={dt(2000, 1, 1): 1.0, dt(2005, 1, 1): 0.9, dt(2010, 1, 5): 0.8})
fxf = FXForwards(
fx_rates=FXRates({"gbpusd": 1.25}, settlement=dt(2000, 1, 1)),
fx_curves={"gbpgbp": gbp, "usdusd": usd, "gbpusd": gbp},
)
result = Bill(dt(2000, 1, 1), "3m", spec="us_gbb").rate(curves=gbp, metric="discount_rate")
result2 = Bill(dt(2000, 1, 1), "3m", spec="us_gbb").rate(
curves=gbp,
metric="discount_rate",
fx=fxf,
)
assert result == result2
def test_duration(self) -> None:
b = Bill(dt(2000, 1, 1), "6m", frequency="A", spec="us_gbb")
result = b.duration(ytm=5.0, settlement=dt(2000, 1, 10), metric="duration")
assert result == 0.5170058346378255
b = Bill(dt(2000, 1, 1), "6m", spec="us_gbb")
result = b.duration(ytm=5.0, settlement=dt(2000, 1, 10), metric="duration")
assert result == 0.5046961719083534
b = Bill(dt(2000, 1, 1), "6m", frequency="Q", spec="us_gbb")
result = b.duration(ytm=5.0, settlement=dt(2000, 1, 10), metric="duration")
assert result == 0.4985413405436174
def test_custom_calc_mode(self):
from rateslib.instruments.bonds import BillCalcMode
cm = BillCalcMode(price_type="simple", ytm_clone_kwargs="uk_gb")
bill = Bill(
effective=dt(2001, 1, 1),
termination="3m",
calendar="ldn",
convention="Act365f",
modifier="none",
settle=1,
calc_mode=cm,
)
bill2 = Bill(dt(2001, 1, 1), "3m", spec="uk_gbb")
assert bill.simple_rate(99.0, dt(2001, 2, 4)) == bill2.simple_rate(99.0, dt(2001, 2, 4))
def test_us_gbb_eom(self):
b = Bill(dt(2023, 2, 28), "3m", spec="us_gbb")
assert b.leg1._regular_periods[0].period_params.end == dt(2023, 5, 31)
def test_se_gbb_eom(self):
b = Bill(dt(2023, 2, 28), "3m", spec="se_gbb")
assert b.leg1._regular_periods[0].period_params.end == dt(2023, 5, 28)
def test_act_act_icma(self):
# gh 144
with pytest.warns(
UserWarning,
match="`frequency` cannot be 'Zero' variant in combination with 'ActActICMA",
):
bill_actacticma = Bill(
effective=dt(2024, 2, 29),
termination=dt(2024, 5, 29), # 90 calendar days
modifier="NONE",
calendar="bus",
payment_lag=0,
notional=-1000000,
currency="usd",
convention="ACTACTICMA",
settle=0,
calc_mode="us_gbb",
)
assert bill_actacticma.leg1._regular_periods[0].period_params.dcf == 0.2465753424657534
bill_act360 = Bill(
effective=dt(2024, 2, 29),
termination=dt(2024, 5, 29), # 90 calendar days
modifier="NONE",
calendar="bus",
payment_lag=0,
notional=-1000000,
currency="usd",
convention="ACT360",
settle=0,
calc_mode="us_gbb",
)
assert bill_act360.leg1._regular_periods[0].period_params.dcf == 0.25
def test_ex_div(self):
b1 = Bill(dt(2000, 1, 3), "3m", spec="us_gbb")
assert b1.ex_div(dt(200, 4, 3)) is False
b2 = Bill(dt(2000, 1, 3), "3m", ex_div=2, spec="us_gbb")
assert b2.ex_div(dt(2000, 4, 3)) is True
assert b2.ex_div(dt(2000, 3, 31)) is True
assert b2.ex_div(dt(2000, 3, 30)) is False
def test_bill_roll(self):
b1 = Bill(dt(2026, 1, 30), "6m", spec="us_gbb", roll=30)
b2 = Bill(dt(2026, 1, 30), "6m", spec="us_gbb", roll=31)
assert b1.leg1.schedule.termination == dt(2026, 7, 30)
assert b2.leg1.schedule.termination == dt(2026, 7, 31)
def test_bill_eom(self):
b1 = Bill(dt(2026, 1, 30), "6m", spec="us_gbb", eom=False)
b2 = Bill(dt(2026, 1, 30), "6m", spec="us_gbb", eom=True)
assert b1.leg1.schedule.termination == dt(2026, 7, 30)
assert b2.leg1.schedule.termination == dt(2026, 7, 31)
class TestFloatRateNote:
@pytest.mark.parametrize(
("curve_spd", "method", "float_spd", "expected"),
[
(10, NoInput(0), 0, 10.055032859883),
(500, NoInput(0), 0, 508.93107035125325),
(-200, NoInput(0), 0, -200.053341848676),
(10, "isda_compounding", 0, 10.00000120),
(500, "isda_compounding", 0, 499.9999999997),
(-200, "isda_compounding", 0, -199.99999999),
(10, NoInput(0), 25, 10.055032859883),
(500, NoInput(0), 250, 508.93107035125325),
(10, "isda_compounding", 25, 10.00000120),
(500, "isda_compounding", 250, 499.99999999975523),
(10, NoInput(0), -25, 10.055032859883),
(500, NoInput(0), -250, 508.93107035125325),
(10, "isda_compounding", -25, 10.00000120),
(500, "isda_compounding", -250, 499.9999999997),
],
)
def test_float_rate_bond_rate_spread(self, curve_spd, method, float_spd, expected) -> None:
"""
When a DF curve is shifted it bumps daily rates.
But under the "none_simple" compounding method this does not compound daily
therefore the `float_spread` should be slightly higher than the bumped curve.
When the method is "isda_compounding" this closely matches the bumping method
of the curve.
"""
bond = FloatRateNote(
effective=dt(2007, 1, 1),
termination=dt(2017, 1, 1),
frequency="S",
convention="Act365f",
ex_div=0,
settle=0,
float_spread=float_spd,
spread_compound_method=method,
)
curve = Curve({dt(2007, 1, 1): 1.0, dt(2017, 1, 1): 0.9}, convention="Act365f")
disc_curve = curve.shift(curve_spd)
result = bond.rate(curves=[curve, disc_curve], metric="spread")
assert abs(result - expected) < 1e-4
bond.float_spread = result
validate = bond.npv(curves=[curve, disc_curve])
assert abs(validate + bond.leg1.settlement_params.notional) < 0.30 * abs(curve_spd)
@pytest.mark.parametrize(
("curve_spd", "method", "float_spd", "expected"),
[
(10, "isda_compounding", 0, 10.00000120),
],
)
def test_float_rate_bond_rate_spread_fx(self, curve_spd, method, float_spd, expected) -> None:
bond = FloatRateNote(
effective=dt(2007, 1, 1),
termination=dt(2017, 1, 1),
frequency="S",
convention="Act365f",
ex_div=0,
settle=0,
float_spread=float_spd,
spread_compound_method=method,
)
curve = Curve({dt(2007, 1, 1): 1.0, dt(2017, 1, 1): 0.9}, convention="Act365f")
disc_curve = curve.shift(curve_spd)
fxr = FXRates({"usdnok": 10.0}, settlement=dt(2007, 1, 1))
result = bond.rate(
curves=[curve, disc_curve],
metric="spread",
fx=fxr,
)
assert abs(result - expected) < 1e-4
bond.float_spread = result
validate = bond.npv(curves=[curve, disc_curve], fx=fxr)
assert abs(validate + bond.leg1.settlement_params.notional) < 0.30 * abs(curve_spd)
def test_float_rate_bond_accrued(self) -> None:
name = str(hash(os.urandom(8)))
fixings.add(name + "_1B", Series(2.0, index=date_range(dt(2009, 12, 1), dt(2010, 3, 1))))
bond = FloatRateNote(
effective=dt(2007, 1, 1),
termination=dt(2017, 1, 1),
frequency="S",
convention="Act365f",
ex_div=3,
float_spread=100,
fixing_method=FloatFixingMethod.RFRObservationShift(5),
rate_fixings=name,
spread_compound_method="none_simple",
)
result = bond.accrued(dt(2010, 3, 3))
expected = 0.5019199020076 # 3% * 2 / 12
fixings.pop(name + "_1B")
assert abs(result - expected) < 1e-8
@pytest.mark.parametrize(
("metric", "spd", "exp"),
[
("clean_price", 0.0, 100.0),
("dirty_price", 0.0, 100.0),
("clean_price", 10.0, 99.99982764447981), # compounding diff between shift
("dirty_price", 10.0, 100.0165399732469),
],
)
def test_float_rate_bond_rate_metric(self, metric, spd, exp) -> None:
name = str(hash(os.urandom(8)))
fixings.add(name + "_1B", Series(0.0, index=date_range(dt(2009, 12, 1), dt(2010, 3, 1))))
bond = FloatRateNote(
effective=dt(2007, 1, 1),
termination=dt(2017, 1, 1),
frequency="S",
convention="Act365f",
ex_div=3,
float_spread=spd,
fixing_method="rfr_observation_shift(5)",
rate_fixings=name,
spread_compound_method="none_simple",
settle=2,
)
curve = Curve({dt(2010, 3, 1): 1.0, dt(2017, 1, 1): 1.0}, convention="act365f")
disc_curve = curve.shift(spd)
result = bond.rate(curves=[curve, disc_curve], metric=metric)
fixings.pop(name + "_1B")
assert abs(result - exp) < 1e-8
@pytest.mark.parametrize(
("metric", "spd", "exp"),
[
("clean_price", 10.0, 99.99982764447981), # compounding diff between shift
("dirty_price", 10.0, 100.0165399732469),
],
)
def test_initialised_rate_metric(self, metric, spd, exp) -> None:
name = str(hash(os.urandom(8)))
fixings.add(name + "_1B", Series(0.0, index=date_range(dt(2009, 12, 1), dt(2010, 3, 1))))
bond = FloatRateNote(
effective=dt(2007, 1, 1),
termination=dt(2017, 1, 1),
frequency="S",
convention="Act365f",
ex_div=3,
float_spread=spd,
fixing_method="rfr_observation_shift(5)",
rate_fixings=name,
spread_compound_method="none_simple",
settle=2,
metric=metric,
)
curve = Curve({dt(2010, 3, 1): 1.0, dt(2017, 1, 1): 1.0}, convention="act365f")
disc_curve = curve.shift(spd)
result = bond.rate(curves=[curve, disc_curve])
fixings.pop(name + "_1B")
assert abs(result - exp) < 1e-8
@pytest.mark.parametrize(
("settlement", "expected"),
[
(dt(2010, 3, 3), 0.501369863013698),
(dt(2010, 6, 30), -0.008219178082191761), # ex div with fixed IBOR
],
)
def test_float_rate_bond_accrued_ibor(self, settlement, expected) -> None:
name = str(hash(os.urandom(8)))
fixings.add(name + "_6M", Series(2.0, index=date_range(dt(2009, 12, 1), dt(2010, 3, 1))))
bond = FloatRateNote(
effective=dt(2007, 1, 1),
termination=dt(2017, 1, 1),
frequency="S",
convention="Act365f",
ex_div=3,
float_spread=100,
fixing_method=FloatFixingMethod.IBOR(2),
rate_fixings=name,
spread_compound_method="none_simple",
)
result = bond.accrued(settlement)
fixings.pop(name + "_6M")
assert abs(result - expected) < 1e-8
def test_float_rate_bond_raise_frequency(self) -> None:
with pytest.raises(ValueError, match="A `FloatRateNote` cannot have a 'zero' freq"):
FloatRateNote(
effective=dt(2007, 1, 1),
termination=dt(2017, 1, 1),
frequency="Z",
convention="Act365f",
ex_div=3,
float_spread=100,
fixing_method="rfr_observation_shift(5)",
rate_fixings=NoInput(0),
spread_compound_method="none_simple",
)
def test_negative_accrued_needs_forecasting(self) -> None:
name = str(hash(os.urandom(8)))
fixings.add(name + "_1B", Series(2.0, index=date_range(dt(2009, 12, 1), dt(2010, 3, 8))))
bond = FloatRateNote(
effective=dt(2009, 9, 16),
termination=dt(2017, 3, 16),
frequency="Q",
convention="Act365f",
ex_div=6,
float_spread=0,
fixing_method="rfr_observation_shift(5)",
rate_fixings=name,
spread_compound_method="none_simple",
calendar=NoInput(0),
)
from rateslib.data.fixings import FixingMissingForecasterError
with pytest.raises( # noqa: PT012
FixingMissingForecasterError,
match="A `rate_curve` is required to forecast missing RFR rates",
):
bond.accrued(dt(2010, 3, 11))
fixings.pop(name + "_1B")
# # approximate calculation 5 days of negative accrued at 2% = -0.027397
# assert abs(result + 2 * 5 / 365) < 1e-3
@pytest.mark.parametrize(
"rate_fixings",
[
NoInput(0),
],
)
def test_negative_accrued_raises(self, rate_fixings) -> None:
bond = FloatRateNote(
effective=dt(2009, 9, 16),
termination=dt(2017, 3, 16),
frequency="Q",
convention="Act365f",
ex_div=5,
float_spread=0,
fixing_method="rfr_observation_shift(5)",
rate_fixings=rate_fixings,
spread_compound_method="none_simple",
calendar=NoInput(0),
)
from rateslib.data.fixings import FixingMissingForecasterError
with pytest.raises(
FixingMissingForecasterError,
match="A `rate_curve` is required to forecast missing RFR rate",
):
bond.accrued(dt(2010, 3, 11))
@pytest.mark.skip(reason="v2.5 removed these validations")
def test_bad_accrued_parameter_combo_raises(self, rate_fixings) -> None:
with pytest.raises(ValueError, match="For RFR FRNs `ex_div` must be less than"):
FloatRateNote(
effective=dt(2009, 9, 16),
termination=dt(2017, 3, 16),
frequency="Q",
ex_div=5,
fixing_method="rfr_observation_shift(3)",
)
def test_accrued_no_fixings_in_period(self) -> None:
bond = FloatRateNote(
effective=dt(2010, 3, 16),
termination=dt(2017, 3, 16),
frequency="Q",
convention="Act365f",
ex_div=0,
float_spread=0,
fixing_method="rfr_observation_shift(0)",
rate_fixings=NoInput(0),
spread_compound_method="none_simple",
calendar=NoInput(0),
)
result = bond.accrued(dt(2010, 3, 16))
assert result == 0.0
def test_float_rate_bond_analytic_delta(self) -> None:
frn = FloatRateNote(
effective=dt(2010, 6, 7),
termination=dt(2015, 12, 7),
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
float_spread=100,
notional=-1000000,
settle=0,
fixing_method="ibor(2)",
rate_fixings=2.0,
)
curve = Curve({dt(2010, 11, 25): 1.0, dt(2015, 12, 7): 1.0})
result = frn.analytic_delta(curves=curve)
expected = -550.0
assert abs(result - expected) < 1e-6
frn.kwargs.meta["settle"] = 2
result = frn.analytic_delta(curves=curve) # bond is ex div on settle 27th Nov 2010
expected = -500.0 # bond has dropped a 6m coupon payment
assert abs(result - expected) < 1e-6
@pytest.mark.parametrize(
("metric", "spd", "exp"),
[
("clean_price", 0.0, 100),
("dirty_price", 0.0, 100),
("clean_price", 50.0, 99.99601798513253),
("dirty_price", 50.0, 100.03848373855718),
],
)
def test_float_rate_bond_forward_prices(self, metric, spd, exp) -> None:
name = str(hash(os.urandom(8)))
fixings.add(
name + "_1B",
Series(
data=2.0,
index=get_calendar("bus").bus_date_range(start=dt(2007, 1, 1), end=dt(2010, 2, 26)),
),
)
bond = FloatRateNote(
effective=dt(2007, 1, 1),
termination=dt(2017, 1, 1),
frequency="S",
convention="Act365f",
ex_div=3,
float_spread=spd,
fixing_method="rfr_observation_shift(5)",
calendar="bus",
rate_fixings=name,
spread_compound_method="none_simple",
settle=2,
)
curve = Curve(
{dt(2010, 3, 1): 1.0, dt(2017, 1, 1): 1.0},
convention="act365f",
calendar="bus",
)
disc_curve = curve.shift(spd)
result = bond.rate(
curves=[curve, disc_curve],
metric=metric,
settlement=dt(2010, 8, 1),
)
fixings.pop(name + "_1B")
assert abs(result - exp) < 1e-8
def test_float_rate_bond_forward_accrued(self) -> None:
bond = FloatRateNote(
effective=dt(2007, 1, 1),
termination=dt(2017, 1, 1),
frequency="S",
convention="Act365f",
ex_div=3,
float_spread=0,
fixing_method="rfr_observation_shift(5)",
spread_compound_method="none_simple",
settle=2,
)
curve = Curve({dt(2010, 3, 1): 1.0, dt(2017, 1, 1): 0.9}, convention="act365f")
# disc_curve = curve.shift(0)
result = bond.accrued(dt(2010, 8, 1), rate_curve=curve)
expected = 0.13083715795372267
assert abs(result - expected) < 1e-8
def test_rate_raises(self, curve) -> None:
bond = FloatRateNote(
effective=dt(2007, 1, 1),
termination=dt(2017, 1, 1),
frequency="S",
convention="Act365f",
ex_div=3,
float_spread=0.0,
fixing_method="rfr_observation_shift(5)",
spread_compound_method="none_simple",
settle=2,
curves=curve,
)
with pytest.raises(ValueError, match="`metric` must be in"):
bond.rate(metric="BAD")
def test_forecast_ibor(self, curve) -> None:
f_curve = LineCurve({dt(2022, 1, 1): 3.0, dt(2022, 2, 1): 4.0})
frn = FloatRateNote(
effective=dt(2022, 2, 1),
termination="3m",
frequency="Q",
fixing_method="ibor(0)",
)
result = frn.accrued(dt(2022, 2, 5), rate_curve=f_curve)
expected = 0.044444444
assert abs(result - expected) < 1e-4
@pytest.mark.parametrize(
("price", "tol"), [(98.0, 1e-7), (95.0, 1e-5), (90.0, 1e-3), (80.0, 1e-2)]
)
def test_oaspread(self, price, tol) -> None:
bond = FloatRateNote(
effective=dt(1998, 12, 7),
termination=dt(2008, 12, 7),
frequency="q",
fixing_method="rfr_payment_delay",
rate_fixings=[4.0],
)
curve = Curve({dt(1998, 12, 7): 1.0, dt(2015, 12, 7): 0.75})
# result = bond.rate(curve, metric="clean_price") = 99.999999999999953
result = bond.oaspread(curves=curve, price=price)
curve_z = curve.shift(result)
result = bond.rate(curves=[curve, curve_z], metric="clean_price")
assert abs(result - price) < tol
def test_settle_method_param_combinations(self) -> None:
# for RFR when method_param is less than settle curve based pricing methods will
# require forecasting from RFR curve to correctly calculate the accrued.
name = str(hash(os.urandom(8)))
fixings.add(
name + "_1B",
Series(
[2.0, 3.0, 4.0, 5.0, 6.0],
index=[
dt(2022, 1, 2),
dt(2022, 1, 3),
dt(2022, 1, 4),
dt(2022, 1, 5),
dt(2022, 1, 6),
],
),
)
frn = FloatRateNote(
effective=dt(2022, 1, 5),
termination="1Y",
frequency="Q",
settle=3,
fixing_method="rfr_observation_shift(2)",
rate_fixings=name,
convention="Act365F",
ex_div=1,
)
curve = Curve(
nodes={dt(2022, 1, 7): 1.0, dt(2023, 1, 7): 0.95},
convention="act365f",
)
# Case1: All fixings are known and are published
# in this case a Curve is not required and is not given
result = frn.accrued(settlement=dt(2022, 1, 9))
assert abs(result - 0.04932400) < 1e-6
# Case2: Some fixings are unknown and must be forecast by a curve.
# If a curve is not supplied this will error
from rateslib.data.fixings import FixingMissingForecasterError
with pytest.raises(
FixingMissingForecasterError, match="A `rate_curve` is required to forecast missing RFR"
):
frn.accrued(settlement=dt(2022, 1, 10))
# Case3: Some fixings are unknown and must be forecast by a curve.
# A curve is given so this is used to forecast the values.
result = frn.accrued(settlement=dt(2022, 1, 10), rate_curve=curve)
assert abs(result - 0.06338487826265116) < 1e-6
# Case4: The bond settles on Issue date and there is no accrued if curve supplied or not
result1 = frn.accrued(settlement=dt(2022, 1, 5))
result2 = frn.accrued(settlement=dt(2022, 1, 5), rate_curve=curve)
assert abs(result1) < 1e-6
assert abs(result2) < 1e-6
# Case5: The bond settles on a coupon date and there is no accrued if curve supplied or not
result1 = frn.accrued(settlement=dt(2022, 4, 5))
result2 = frn.accrued(settlement=dt(2022, 4, 5), rate_curve=curve)
assert abs(result1) < 1e-6
assert abs(result2) < 1e-6
# Case6: Bond settles on issue date and there is no accrued. No fixings are input
frn_no_fixings = FloatRateNote(
effective=dt(2022, 1, 5),
termination="1Y",
frequency="Q",
settle=3,
fixing_method="rfr_observation_shift(2)",
convention="Act365F",
ex_div=1,
)
result1 = frn_no_fixings.accrued(settlement=dt(2022, 1, 5))
result2 = frn_no_fixings.accrued(settlement=dt(2022, 1, 5), rate_curve=curve)
assert abs(result1) < 1e-6
assert abs(result2) < 1e-6
# Case7: Bond settles a few days forward(settle) no previous fixings are given, all
# can be forecast from curve
frn_no_fixings = FloatRateNote(
effective=dt(2022, 1, 7),
termination="1Y",
frequency="Q",
settle=3,
fixing_method="rfr_observation_shift(0)",
convention="Act365F",
ex_div=1,
)
result = frn_no_fixings.accrued(settlement=dt(2022, 1, 10), rate_curve=curve)
assert abs(result - 0.04216776020085078) < 1e-6
# Case8: bond settles a few days forward, no fixings are given and no curve. Must error.
with pytest.raises(
FixingMissingForecasterError,
match="A `rate_curve` is required to forecast missing RFR rates",
):
frn_no_fixings.accrued(settlement=dt(2022, 1, 10))
fixings.pop(name + "_1B")
def test_ibor_fixings_table_historical_before_curve(self, curve):
# see test FloatPeriod.test_ibor_fixings_table_historical_before_curve
bond = FloatRateNote(
effective=dt(2001, 11, 7),
termination=dt(2002, 8, 7),
frequency="q",
fixing_method="ibor(2)",
rate_fixings=[4.0],
curves=[curve],
)
result = bond.local_analytic_rate_fixings()
assert isinstance(result, DataFrame)
def test_ibor_fixings_table_with_fixing(self, curve):
# see test FloatPeriod.test_ibor_fixings_table_historical_before_curve
bond = FloatRateNote(
effective=dt(2021, 11, 7),
termination=dt(2022, 8, 7),
frequency="q",
fixing_method="ibor(2)",
rate_fixings=[4.0],
curves=[curve],
)
result = bond.local_analytic_rate_fixings()
assert isinstance(result, DataFrame)
assert result.iloc[0, 0] == 0.0
assert abs(result.iloc[1, 0] + 24.376897) < 1e-6
assert abs(result.iloc[2, 0] + 24.941351) < 1e-6
def test_ibor_ytm_rate(self, curve):
# test a FixedRateBond and FloatRateNote with same conventions and cashflows have same ytm
ibor_curve = LineCurve({dt(2021, 12, 1): 4.0, dt(2027, 12, 2): 4.0})
disc_curve = Curve({dt(2021, 12, 1): 1.0, dt(2027, 12, 2): 0.92})
frn = FloatRateNote(
effective=dt(2021, 11, 7),
termination=dt(2022, 8, 7),
frequency="q",
fixing_method="ibor(2)",
convention="actacticma",
calendar="nyc",
modifier="none",
rate_fixings=[4.0],
curves=[ibor_curve, disc_curve],
calc_mode="us_gb",
fixing_series="eur_ibor",
settle=1,
)
frb = FixedRateBond(
effective=dt(2021, 11, 7),
termination=dt(2022, 8, 7),
spec="us_gb",
frequency="q",
fixed_rate=4.0,
curves=[disc_curve],
)
dp1 = frn.rate(metric="dirty_price")
dp2 = frb.rate(metric="dirty_price")
assert abs(dp1 - dp2) < 1e-12 # FRN and equivalent FRB have the same dirty price.
y2 = frb.rate(metric="ytm")
y1 = frn.rate(metric="ytm")
assert abs(y1 - y2) < 1e-12 # FRN and equivalent FRB have the same yield-to-maturity.
def test_ytm_rate_fixings_provided(self, curve):
# test a FixedRateBond and FloatRateNote with same conventions and cashflows have same ytm
disc_curve = Curve({dt(2021, 12, 1): 1.0, dt(2027, 12, 2): 0.92})
frn = FloatRateNote(
effective=dt(2021, 11, 7),
termination=dt(2022, 8, 7),
frequency="q",
fixing_method="ibor(2)",
convention="actacticma",
calendar="nyc",
modifier="none",
rate_fixings=[4.0, 4.0, 4.0],
calc_mode="us_gb",
settle=1,
)
frb = FixedRateBond(
effective=dt(2021, 11, 7),
termination=dt(2022, 8, 7),
spec="us_gb",
frequency="q",
fixed_rate=4.0,
curves=[disc_curve],
)
dp1 = frn.rate(metric="dirty_price", curves=[None, disc_curve])
dp2 = frb.rate(metric="dirty_price")
assert abs(dp1 - dp2) < 1e-12 # FRN and equivalent FRB have the same dirty price.
y2 = frb.ytm(price=dp2, dirty=True, settlement=dt(2022, 12, 1))
y1 = frn.ytm(price=dp1, dirty=True, settlement=dt(2022, 12, 1))
assert abs(y1 - y2) < 1e-12 # FRN and equivalent FRB have the same yield-to-maturity.
def test_cashflows_known_fixings(self):
name = str(hash(os.urandom(8)))
fixings.add(name + "_1B", Series(2.0, index=date_range(dt(1999, 12, 1), dt(2004, 6, 2))))
frn = FloatRateNote(
effective=dt(2000, 12, 7),
termination=dt(2001, 12, 7),
frequency="S",
currency="gbp",
convention="Act365F",
ex_div=3,
rate_fixings=name,
fixing_method="rfr_observation_shift_avg(5)",
)
result = frn.cashflows()
fixings.pop(name + "_1B")
assert isinstance(result, DataFrame)
assert abs(result["Cashflow"].iloc[0] + 10000) < 50.0
assert abs(result["Cashflow"].iloc[1] + 10000) < 50.0
class TestBondFuture:
def test_repr(self):
kwargs = dict(
effective=dt(2020, 1, 1),
stub="ShortFront",
frequency="A",
calendar="tgt",
currency="eur",
convention="ActActICMA",
)
bond1 = FixedRateBond(termination=dt(2022, 3, 1), fixed_rate=1.5, **kwargs)
fut = BondFuture(delivery=dt(2021, 3, 1), coupon=6.0, basket=[bond1])
expected = f""
assert expected == fut.__repr__()
@pytest.mark.parametrize(
("delivery", "mat", "coupon", "exp"),
[
(dt(2023, 6, 12), dt(2032, 2, 15), 0.0, 0.603058),
(dt(2023, 6, 12), dt(2032, 8, 15), 1.7, 0.703125),
(dt(2023, 6, 12), dt(2033, 2, 15), 2.3, 0.733943),
(dt(2023, 9, 11), dt(2032, 8, 15), 1.7, 0.709321),
(dt(2023, 9, 11), dt(2033, 2, 15), 2.3, 0.739087),
(dt(2023, 12, 11), dt(2032, 8, 15), 1.7, 0.715464),
(dt(2023, 12, 11), dt(2033, 2, 15), 2.3, 0.744390),
],
)
def test_conversion_factors_eurex_bund_ytm(self, delivery, mat, coupon, exp) -> None:
# The expected results are downloaded from the EUREX website
# regarding precalculated conversion factors.
# this test allows for an error in the cf < 1e-4, due to YTM method
kwargs = dict(
effective=dt(2020, 1, 1),
stub="ShortFront",
frequency="A",
calendar="tgt",
currency="eur",
convention="ActActICMA",
)
bond1 = FixedRateBond(termination=mat, fixed_rate=coupon, **kwargs)
fut = BondFuture(delivery=delivery, coupon=6.0, basket=[bond1])
result = fut.cfs
assert abs(result[0] - exp) < 1e-4
@pytest.mark.parametrize(
("delivery", "issue", "mat", "coupon", "exp"),
[
(dt(2023, 6, 12), dt(2022, 7, 1), dt(2032, 2, 15), 0.0, 0.603058),
(dt(2023, 6, 12), dt(2022, 7, 8), dt(2032, 8, 15), 1.7, 0.703125),
(dt(2023, 6, 12), dt(2023, 1, 13), dt(2033, 2, 15), 2.3, 0.733943),
(dt(2023, 9, 11), dt(2022, 7, 8), dt(2032, 8, 15), 1.7, 0.709321),
(dt(2023, 9, 11), dt(2023, 1, 13), dt(2033, 2, 15), 2.3, 0.739087),
(dt(2023, 12, 11), dt(2022, 7, 8), dt(2032, 8, 15), 1.7, 0.715464),
(dt(2023, 12, 11), dt(2023, 1, 13), dt(2033, 2, 15), 2.3, 0.744390),
],
)
def test_conversion_factors_eurex_bund_method(self, delivery, issue, mat, coupon, exp) -> None:
# The expected results are downloaded from the EUREX website
# regarding precalculated conversion factors.
# these should be exact due to specifically coded methods
kwargs = dict(
effective=issue,
stub="LongFront",
frequency="A",
calendar="tgt",
currency="eur",
convention="ActActICMA",
modifier="none",
)
bond1 = FixedRateBond(termination=mat, fixed_rate=coupon, **kwargs)
fut = BondFuture(delivery=delivery, coupon=6.0, basket=[bond1], calc_mode="eurex_eur")
result = fut.cfs
assert result[0] == exp
@pytest.mark.parametrize(
("effective", "maturity", "delivery", "coupon", "exp"),
[
# (dt(2019, 6, 26), dt(2034, 6, 26), dt(2025, 6, 10), 0.0, 0.591898),
# (dt(2006, 3, 8), dt(2036, 3, 8), dt(2025, 6, 10), 2.5, 0.729825),
# (dt(2021, 6, 23), dt(2035, 6, 23), dt(2025, 6, 10), 0.25, 0.576795),
(dt(2012, 6, 27), dt(2037, 6, 27), dt(2025, 6, 10), 1.25, 0.601767),
],
)
def test_conversion_factors_eurex_chf_method_jun25(
self, effective, maturity, delivery, coupon, exp
) -> None:
# The expected results are downloaded from the EUREX website
# regarding precalculated conversion factors.
# these should be exact due to specifically coded methods
bond1 = FixedRateBond(effective, maturity, fixed_rate=coupon, spec="ch_gb")
fut = BondFuture(basket=[bond1], delivery=delivery, spec="ch_gb_10y")
result = fut.cfs
assert result[0] == exp
@pytest.mark.parametrize(
("effective", "maturity", "delivery", "coupon", "exp"),
[
(dt(2019, 1, 1), dt(2033, 4, 8), dt(2025, 3, 10), 3.5, 0.844755),
(dt(2019, 1, 1), dt(2034, 6, 26), dt(2025, 3, 10), 0.0, 0.583339),
(dt(2006, 1, 1), dt(2036, 3, 8), dt(2025, 3, 10), 2.5, 0.725400),
(dt(2021, 1, 1), dt(2035, 6, 23), dt(2025, 3, 10), 0.25, 0.569042),
(dt(2012, 1, 1), dt(2037, 6, 27), dt(2025, 3, 10), 1.25, 0.596009),
],
)
def test_conversion_factors_eurex_chf_method_mar25(
self, effective, maturity, delivery, coupon, exp
) -> None:
# The expected results are downloaded from the EUREX website
# regarding precalculated conversion factors.
# these should be exact due to specifically coded methods
bond1 = FixedRateBond(effective, maturity, fixed_rate=coupon, spec="ch_gb")
fut = BondFuture(basket=[bond1], delivery=delivery, spec="ch_gb_10y")
result = fut.cfs
assert result[0] == exp
@pytest.mark.parametrize(
("mat", "coupon", "exp"),
[
(dt(2032, 6, 7), 4.25, 1.0187757),
(dt(2033, 7, 31), 0.875, 0.7410593),
(dt(2034, 9, 7), 4.5, 1.0449380),
(dt(2035, 7, 31), 0.625, 0.6773884),
(dt(2036, 3, 7), 4.25, 1.0247516),
],
)
def test_conversion_factors_ice_gilt(self, mat, coupon, exp) -> None:
# The expected results are downloaded from the ICE LIFFE website
# regarding precalculated conversion factors.
# this test allows for an error in the cf < 1e-6.
kwargs = dict(
effective=dt(2020, 1, 1),
stub="ShortFront",
frequency="S",
calendar="ldn",
currency="gbp",
convention="ActActICMA",
ex_div=7,
)
bond1 = FixedRateBond(termination=mat, fixed_rate=coupon, **kwargs)
fut = BondFuture(delivery=(dt(2023, 6, 1), dt(2023, 6, 30)), coupon=4.0, basket=[bond1])
result = fut.cfs
assert abs(result[0] - exp) < 1e-6
def test_conversion_factors_ice_gilt_default_spec(self) -> None:
# this uses the v2.5 implementation that rounds exactly to the exchange quantity
# this tests data directly from ice rounded to 7 dp.
# note this requires 'linear_days_long_front_split' on GB00BTXS1K06 which is the last bond.
bf = BondFuture(
basket=[
FixedRateBond(dt(1999, 1, 1), dt(2035, 7, 31), fixed_rate=0.625, spec="uk_gb"),
FixedRateBond(dt(1999, 1, 1), dt(2038, 1, 29), fixed_rate=3.75, spec="uk_gb"),
FixedRateBond(dt(1999, 1, 1), dt(2034, 9, 7), fixed_rate=4.5, spec="uk_gb"),
FixedRateBond(dt(1999, 1, 1), dt(2035, 3, 7), fixed_rate=4.5, spec="uk_gb"),
FixedRateBond(dt(1999, 1, 1), dt(2036, 3, 7), fixed_rate=4.25, spec="uk_gb"),
FixedRateBond(dt(1999, 1, 1), dt(2037, 9, 7), fixed_rate=1.75, spec="uk_gb"),
FixedRateBond(dt(2025, 9, 3), dt(2035, 10, 22), fixed_rate=4.75, spec="uk_gb"),
],
delivery=(dt(2025, 12, 1), dt(2025, 12, 31)),
spec="uk_gb_10y",
)
expected = (
0.7316293,
0.9760712,
1.0366069,
1.0383390,
1.0208264,
0.7904642,
1.0606298,
)
assert bf.cfs == expected
def test_conversion_factors_ice_gilt_default_spec2(self) -> None:
# this test has the first calendar day of the month as a holiday
# this uses the v2.5 implementation that rounds exactly to the exchange quantity
# this tests data directly from ice rounded to 7 dp.
bf = BondFuture(
basket=[
FixedRateBond(dt(1999, 1, 1), dt(2034, 9, 7), fixed_rate=4.5, spec="uk_gb"),
FixedRateBond(dt(1999, 1, 1), dt(2038, 1, 29), fixed_rate=3.75, spec="uk_gb"),
FixedRateBond(dt(1999, 1, 1), dt(2034, 7, 31), fixed_rate=4.25, spec="uk_gb"),
FixedRateBond(dt(2025, 2, 12), dt(2035, 3, 7), fixed_rate=4.5, spec="uk_gb"),
FixedRateBond(dt(1999, 1, 1), dt(2037, 9, 7), fixed_rate=1.75, spec="uk_gb"),
FixedRateBond(dt(1999, 1, 1), dt(2036, 3, 7), fixed_rate=4.25, spec="uk_gb"),
FixedRateBond(dt(2020, 9, 11), dt(2035, 7, 31), fixed_rate=0.625, spec="uk_gb"),
],
delivery=(dt(2025, 6, 2), dt(2025, 6, 30)),
spec="uk_gb_10y",
)
expected = (1.0383429, 0.9753142, 1.0189797, 1.0400109, 0.7835277, 1.0216443, 0.7203475)
assert bf.cfs == expected
@pytest.mark.parametrize(
("mat", "coupon", "calc_mode", "exp"),
[
(dt(2010, 10, 31), 1.5, "ust_short", 0.9229),
(dt(2013, 10, 31), 2.75, "ust_short", 0.8653),
(dt(2018, 11, 15), 3.75, "ust_long", 0.8357),
(dt(2038, 5, 15), 4.5, "ust_long", 0.7943),
],
)
def test_conversion_factors_cme_treasury(self, mat, coupon, calc_mode, exp) -> None:
# The expected results are downloaded from the CME website
# regarding precalculated conversion factors.
# this test allows for an error in the cf < 1e-6.
kwargs = dict(
effective=dt(2005, 1, 1),
spec="us_gb",
)
bond1 = FixedRateBond(termination=mat, fixed_rate=coupon, **kwargs)
fut = BondFuture(
delivery=(dt(2008, 12, 1), dt(2008, 12, 29)),
coupon=6.0,
basket=[bond1],
calc_mode=calc_mode,
)
result = fut.cfs
assert abs(result[0] - exp) < 1e-6
def test_dlv_screen_print(self) -> None:
kws = dict(ex_div=7, frequency="S", convention="ActActICMA", calendar=NoInput(0))
bonds = [
FixedRateBond(dt(1999, 1, 1), dt(2009, 12, 7), fixed_rate=5.75, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2011, 7, 12), fixed_rate=9.00, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2010, 11, 25), fixed_rate=6.25, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2012, 8, 6), fixed_rate=9.00, **kws),
]
future = BondFuture(delivery=(dt(2000, 6, 1), dt(2000, 6, 30)), coupon=7.0, basket=bonds)
result = future.dlv(
future_price=112.98,
prices=[102.732, 131.461, 107.877, 134.455],
repo_rate=6.24,
settlement=dt(2000, 3, 16),
convention="Act365f",
)
expected = DataFrame(
{
"Bond": [
"5.750% 07-12-2009",
"9.000% 12-07-2011",
"6.250% 25-11-2010",
"9.000% 06-08-2012",
],
"Price": [102.732, 131.461, 107.877, 134.455],
"YTM": [5.384243, 5.273217, 5.275481, 5.193851],
"C.Factor": [0.914225, 1.152571, 0.944931, 1.161956],
"Gross Basis": [-0.557192, 1.243582, 1.118677, 3.177230],
"Implied Repo": [7.381345, 3.564685, 2.199755, -1.414670],
"Actual Repo": [6.24, 6.24, 6.24, 6.24],
"Net Basis": [-0.343654, 1.033668, 1.275866, 3.010371],
},
)
assert_frame_equal(result, expected)
result2 = future.dlv(
future_price=112.98,
prices=[102.732, 131.461, 107.877, 134.455],
repo_rate=[6.24, 6.24, 6.24, 6.24], # test individual repo input
settlement=dt(2000, 3, 16),
convention="Act365f",
)
assert_frame_equal(result2, expected)
def test_notional(self) -> None:
future = BondFuture(
coupon=0,
delivery=dt(2000, 6, 1),
basket=[],
nominal=100000,
contracts=10,
)
assert future.notional == -1e6
def test_dirty_in_methods(self) -> None:
kws = dict(ex_div=7, frequency="S", convention="ActActICMA", calendar=NoInput(0))
bonds = [
FixedRateBond(dt(1999, 1, 1), dt(2009, 12, 7), fixed_rate=5.75, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2011, 7, 12), fixed_rate=9.00, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2010, 11, 25), fixed_rate=6.25, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2012, 8, 6), fixed_rate=9.00, **kws),
]
future = BondFuture(delivery=(dt(2000, 6, 1), dt(2000, 6, 30)), coupon=7.0, basket=bonds)
prices = [102.732, 131.461, 107.877, 134.455]
basket = future.kwargs.meta["basket"]
dirty_prices = [
price + basket[i].accrued(dt(2000, 3, 16)) for i, price in enumerate(prices)
]
result = future.gross_basis(112.98, dirty_prices, dt(2000, 3, 16), True)
expected = future.gross_basis(112.98, prices, dt(2000, 3, 16), False)
assert result == expected
def test_delivery_in_methods(self) -> None:
kws = dict(ex_div=7, frequency="S", convention="ActActICMA", calendar=NoInput(0))
bonds = [
FixedRateBond(dt(1999, 1, 1), dt(2009, 12, 7), fixed_rate=5.75, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2011, 7, 12), fixed_rate=9.00, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2010, 11, 25), fixed_rate=6.25, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2012, 8, 6), fixed_rate=9.00, **kws),
]
future = BondFuture(delivery=(dt(2000, 6, 1), dt(2000, 6, 30)), coupon=7.0, basket=bonds)
prices = [102.732, 131.461, 107.877, 134.455]
expected = future.net_basis(112.98, prices, 6.24, dt(2000, 3, 16))
result = future.net_basis(112.98, prices, 6.24, dt(2000, 3, 16), delivery=dt(2000, 6, 30))
assert result == expected
expected = future.implied_repo(112.98, prices, dt(2000, 3, 16))
result = future.implied_repo(112.98, prices, dt(2000, 3, 16), delivery=dt(2000, 6, 30))
assert result == expected
expected = future.ytm(112.98)
result = future.ytm(112.98, delivery=dt(2000, 6, 30))
assert result == expected
expected = future.duration(112.98)
result = future.duration(112.98, delivery=dt(2000, 6, 30))
assert result == expected
def test_ctd_index(self) -> None:
kws = dict(ex_div=7, frequency="S", convention="ActActICMA", calendar=NoInput(0))
bonds = [
FixedRateBond(dt(1999, 1, 1), dt(2009, 12, 7), fixed_rate=5.75, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2011, 7, 12), fixed_rate=9.00, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2010, 11, 25), fixed_rate=6.25, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2012, 8, 6), fixed_rate=9.00, **kws),
]
future = BondFuture(delivery=(dt(2000, 6, 1), dt(2000, 6, 30)), coupon=7.0, basket=bonds)
prices = [102.732, 131.461, 107.877, 134.455]
assert future.ctd_index(112.98, prices, dt(2000, 3, 16)) == 0
@pytest.mark.parametrize(("metric", "expected"), [("future_price", 112.98), ("ytm", 5.301975)])
@pytest.mark.parametrize("delivery", [NoInput(0), dt(2000, 6, 30)])
def test_futures_rates(self, metric, expected, delivery) -> None:
curve = Curve(
nodes={
dt(2000, 3, 15): 1.0,
dt(2000, 6, 30): 1.0,
dt(2009, 12, 7): 1.0,
dt(2010, 11, 25): 1.0,
dt(2011, 7, 12): 1.0,
dt(2012, 8, 6): 1.0,
},
id="gilt_curve",
convention="act365f",
)
kws = dict(
ex_div=7,
frequency="S",
convention="ActActICMA",
calendar=NoInput(0),
settle=1,
curves="gilt_curve",
)
bonds = [
FixedRateBond(dt(1999, 1, 1), dt(2009, 12, 7), fixed_rate=5.75, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2011, 7, 12), fixed_rate=9.00, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2010, 11, 25), fixed_rate=6.25, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2012, 8, 6), fixed_rate=9.00, **kws),
]
solver = Solver(
curves=[curve],
instruments=[
IRS(
dt(2000, 3, 15),
dt(2000, 6, 30),
"A",
convention="act365f",
curves="gilt_curve",
),
]
+ bonds,
s=[7.381345, 102.732, 131.461, 107.877, 134.455],
) # note the repo rate as defined by 'gilt_curve' is set to analogue implied
future = BondFuture(
coupon=7.0,
delivery=(dt(2000, 6, 1), dt(2000, 6, 30)),
basket=bonds,
)
result = future.rate(
solver=solver,
metric=metric,
settlement=delivery,
)
assert abs(result - expected) < 1e-3
def test_future_rate_raises(self) -> None:
kws = dict(
ex_div=7,
frequency="S",
convention="ActActICMA",
calendar=NoInput(0),
settle=1,
curves="gilt_curve",
)
bonds = [
FixedRateBond(dt(1999, 1, 1), dt(2009, 12, 7), fixed_rate=5.75, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2011, 7, 12), fixed_rate=9.00, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2010, 11, 25), fixed_rate=6.25, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2012, 8, 6), fixed_rate=9.00, **kws),
]
future = BondFuture(
coupon=7.0,
delivery=(dt(2000, 6, 1), dt(2000, 6, 30)),
basket=bonds,
)
with pytest.raises(ValueError, match="`metric`"):
future.rate(metric="badstr")
def test_futures_npv(self) -> None:
curve = Curve(
nodes={
dt(2000, 3, 15): 1.0,
dt(2000, 6, 30): 1.0,
dt(2009, 12, 7): 1.0,
dt(2010, 11, 25): 1.0,
dt(2011, 7, 12): 1.0,
dt(2012, 8, 6): 1.0,
},
id="gilt_curve",
convention="act365f",
)
kws = dict(
ex_div=7,
frequency="S",
convention="ActActICMA",
calendar=NoInput(0),
settle=1,
curves="gilt_curve",
currency="gbp",
)
bonds = [
FixedRateBond(dt(1999, 1, 1), dt(2009, 12, 7), fixed_rate=5.75, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2011, 7, 12), fixed_rate=9.00, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2010, 11, 25), fixed_rate=6.25, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2012, 8, 6), fixed_rate=9.00, **kws),
]
solver = Solver(
curves=[curve],
instruments=[
IRS(
dt(2000, 3, 15),
dt(2000, 6, 30),
"A",
convention="act365f",
curves="gilt_curve",
),
]
+ bonds,
s=[7.381345, 102.732, 131.461, 107.877, 134.455],
algorithm="gauss_newton",
) # note the repo rate as defined by 'gilt_curve' is set to analogue implied
future = BondFuture(
coupon=7.0,
delivery=(dt(2000, 6, 1), dt(2000, 6, 30)),
basket=bonds,
nominal=100000,
contracts=10,
currency="gbp",
)
result = future.npv(solver=solver, local=False)
expected = 1129798.770872
assert abs(result - expected) < 1e-5
result2 = future.npv(solver=solver, local=True)
assert abs(result2["gbp"] - expected) < 1e-5
@pytest.mark.parametrize("delivery", [NoInput(0), dt(2000, 6, 30)])
def test_futures_duration_and_convexity(self, delivery) -> None:
kws = dict(
ex_div=7,
frequency="S",
convention="ActActICMA",
calendar=NoInput(0),
settle=1,
curves="gilt_curve",
)
bonds = [
FixedRateBond(dt(1999, 1, 1), dt(2009, 12, 7), fixed_rate=5.75, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2011, 7, 12), fixed_rate=9.00, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2010, 11, 25), fixed_rate=6.25, **kws),
FixedRateBond(dt(1999, 1, 1), dt(2012, 8, 6), fixed_rate=9.00, **kws),
]
future = BondFuture(
coupon=7.0,
delivery=(dt(2000, 6, 1), dt(2000, 6, 30)),
basket=bonds,
)
result = future.duration(112.98, delivery=delivery)[0]
expected = 8.20178546111
assert abs(result - expected) < 1e-3
expected = (
future.duration(112.98, delivery=delivery)[0]
- future.duration(112.98 - result / 100, delivery=delivery)[0]
)
result2 = future.convexity(112.98, delivery=delivery)[0]
assert abs(result2 - expected * 100) < 1e-3
# Bond future duration which is not risk is not adjusted by CFs
result = future.duration(112.98, delivery=delivery, metric="modified")[0]
expected = 7.23419455163
assert abs(result - expected) < 1e-3
def test_cms(self):
data = DataFrame(
data=[
[
FixedRateBond(
dt(2022, 1, 1),
dt(2039, 8, 15),
fixed_rate=4.5,
spec="us_gb",
curves="bcurve",
),
98.6641,
],
[
FixedRateBond(
dt(2022, 1, 1),
dt(2040, 2, 15),
fixed_rate=4.625,
spec="us_gb",
curves="bcurve",
),
99.8203,
],
[
FixedRateBond(
dt(2022, 1, 1),
dt(2041, 2, 15),
fixed_rate=4.75,
spec="us_gb",
curves="bcurve",
),
100.7734,
],
[
FixedRateBond(
dt(2022, 1, 1),
dt(2040, 5, 15),
fixed_rate=4.375,
spec="us_gb",
curves="bcurve",
),
96.6953,
],
[
FixedRateBond(
dt(2022, 1, 1),
dt(2042, 11, 15),
fixed_rate=4.00,
spec="us_gb",
curves="bcurve",
),
90.4766,
],
],
columns=["bonds", "prices"],
)
usz3 = BondFuture( # Construct the BondFuture Instrument
coupon=6.0,
delivery=(dt(2023, 12, 1), dt(2023, 12, 29)),
basket=data["bonds"],
nominal=100e3,
calendar="nyc",
currency="usd",
calc_mode="ust_long",
)
result = usz3.cms(prices=data["prices"], settlement=dt(2023, 11, 22), shifts=[-50, 0, 50])
expected = DataFrame(
data={
"Bond": [
"4.500% 15-08-2039",
"4.625% 15-02-2040",
"4.750% 15-02-2041",
"4.375% 15-05-2040",
"4.000% 15-11-2042",
],
-50: [
0.0,
0.10938764224876252,
0.32693578691382186,
0.24721845093496597,
1.1960030963801813,
],
0: [
0.0,
0.01148721023514554,
0.016282194434154462,
0.032902987886402,
0.33598669301149187,
],
50: [
0.43066112621522734,
0.3653207547713322,
0.19632745772335625,
0.27120849999053576,
0.0,
],
}
)
assert_frame_equal(result, expected)
def test_curves_on_individual_bonds(self):
# no curves are supplied to BondFuture meta or method an local instrument meta are used
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2042, 1, 1): 1.0})
c2 = Curve({dt(2022, 1, 2): 1.0, dt(2042, 1, 1): 0.5})
usz3 = BondFuture( # Construct the BondFuture Instrument
coupon=6.0,
delivery=(dt(2023, 12, 1), dt(2023, 12, 29)),
basket=[
FixedRateBond(dt(2022, 1, 1), "10y", fixed_rate=4.5, spec="us_gb", curves=c1),
FixedRateBond(dt(2022, 1, 1), "10Y", fixed_rate=4.5, spec="us_gb", curves=c2),
],
nominal=100e3,
calendar="nyc",
currency="usd",
calc_mode="ust_long",
)
cfs = usz3.cfs
assert cfs[0] == cfs[1]
result = usz3.rate()
assert abs(result - 118.06972328) < 1e-7
def test_curves_supplied_to_rate_method(self):
# the curves supplied to the method overrides the local instruments meta
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2042, 1, 1): 1.0})
c2 = Curve({dt(2022, 1, 2): 1.0, dt(2042, 1, 1): 0.5})
usz3 = BondFuture( # Construct the BondFuture Instrument
coupon=6.0,
delivery=(dt(2023, 12, 1), dt(2023, 12, 29)),
basket=[
FixedRateBond(dt(2022, 1, 1), "10y", fixed_rate=4.5, spec="us_gb", curves=c1),
FixedRateBond(dt(2022, 1, 1), "10Y", fixed_rate=4.5, spec="us_gb", curves=c2),
],
nominal=100e3,
calendar="nyc",
currency="usd",
calc_mode="ust_long",
)
cfs = usz3.cfs
assert cfs[0] == cfs[1]
# in this c1 is used as an override so both bonds are priced expensively - price is higher
result = usz3.rate(curves=c1)
assert abs(result - 150.184019411) < 1e-7
def test_curves_supplied_as_future_meta(self):
# the curves supplied to the BondFuture.curves meta override local instruments.
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2042, 1, 1): 1.0})
c2 = Curve({dt(2022, 1, 2): 1.0, dt(2042, 1, 1): 0.5})
usz3 = BondFuture( # Construct the BondFuture Instrument
coupon=6.0,
delivery=(dt(2023, 12, 1), dt(2023, 12, 29)),
basket=[
FixedRateBond(dt(2022, 1, 1), "10y", fixed_rate=4.5, spec="us_gb", curves=c1),
FixedRateBond(dt(2022, 1, 1), "10Y", fixed_rate=4.5, spec="us_gb", curves=c2),
],
nominal=100e3,
calendar="nyc",
currency="usd",
calc_mode="ust_long",
curves=c1,
)
cfs = usz3.cfs
assert cfs[0] == cfs[1]
# in this c1 is used as an override via the bond future metric so both bonds are priced
# expensively - price is higher
result = usz3.rate()
assert abs(result - 150.184019411) < 1e-7
================================================
FILE: python/tests/instruments/test_instruments_legacy.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
import os
from datetime import datetime as dt
import numpy as np
import pytest
from pandas import DataFrame, Index, MultiIndex, Series, isna
from pandas.testing import assert_frame_equal
from rateslib import default_context, defaults, fixings
from rateslib.curves import CompositeCurve, Curve, LineCurve, MultiCsaCurve
from rateslib.curves._parsers import _map_curve_from_solver
from rateslib.data.fixings import FloatRateSeries, FXIndex, IBORStubFixing
from rateslib.default import NoInput
from rateslib.dual import Dual, Dual2, Variable, dual_exp, dual_log, gradient
from rateslib.enums.parameters import FloatFixingMethod, LegMtm
from rateslib.fx import FXForwards, FXRates
from rateslib.instruments import (
CDS,
FRA,
IIRS,
IRS,
NDF,
NDXCS,
SBS,
XCS,
ZCIS,
ZCS,
Bill,
Fee,
FixedRateBond,
FloatRateNote,
Fly,
FXBrokerFly,
FXCall,
FXForward,
FXPut,
FXRiskReversal,
FXStraddle,
FXStrangle,
FXSwap,
FXVolValue,
IndexFixedRateBond,
IRSCall,
IRSPut,
IRSStraddle,
IRVolValue,
Loan,
Portfolio,
Spread,
STIRFuture,
Value,
YoYIS,
)
from rateslib.instruments.bonds.conventions import US_GB
from rateslib.instruments.protocols.kwargs import (
_KWArgs,
)
from rateslib.instruments.protocols.pricing import (
_Curves,
_Vol,
)
from rateslib.legs import Amortization
from rateslib.periods import Cashflow, ZeroFloatPeriod
from rateslib.scheduling import Adjuster, NamedCal, Schedule, add_tenor, get_imm
from rateslib.solver import Solver
from rateslib.volatility import (
FXDeltaVolSmile,
FXDeltaVolSurface,
FXSabrSmile,
FXSabrSurface,
IRSabrCube,
IRSabrSmile,
IRSplineSmile,
)
@pytest.fixture
def curve():
nodes = {
dt(2022, 1, 1): 1.00,
dt(2022, 4, 1): 0.99,
dt(2022, 7, 1): 0.98,
dt(2022, 10, 1): 0.97,
}
# convention = "Act360"
return Curve(nodes=nodes, interpolation="log_linear")
@pytest.fixture
def curve2():
nodes = {
dt(2022, 1, 1): 1.00,
dt(2022, 4, 1): 0.98,
dt(2022, 7, 1): 0.97,
dt(2022, 10, 1): 0.95,
}
return Curve(nodes=nodes, interpolation="log_linear", index_base=100.0)
@pytest.fixture
def usdusd():
nodes = {dt(2022, 1, 1): 1.00, dt(2022, 4, 1): 0.99}
return Curve(nodes=nodes, interpolation="log_linear")
@pytest.fixture
def eureur():
nodes = {dt(2022, 1, 1): 1.00, dt(2022, 4, 1): 0.997}
return Curve(nodes=nodes, interpolation="log_linear")
@pytest.fixture
def usdeur():
nodes = {dt(2022, 1, 1): 1.00, dt(2022, 4, 1): 0.996}
return Curve(nodes=nodes, interpolation="log_linear")
@pytest.fixture
def simple_solver():
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 1.0, dt(2024, 1, 1): 1.0}, id="curve")
solver = Solver(
curves=[curve],
instruments=[
IRS(dt(2022, 1, 1), "1Y", "A", curves="curve"),
IRS(dt(2022, 1, 1), "2Y", "A", curves="curve"),
],
s=[2.5, 3.0],
id="solver",
instrument_labels=["1Y", "2Y"],
)
return solver
@pytest.mark.parametrize(
"inst",
[
IRS(dt(2022, 7, 1), "3M", "A", curves="eureur", notional=1e6),
STIRFuture(
dt(2022, 3, 16),
dt(2022, 6, 15),
"Q",
curves="eureur",
spec="eur_stir",
contracts=-1,
),
FRA(dt(2022, 7, 1), "3M", "A", curves="eureur", notional=1e6),
SBS(
dt(2022, 7, 1),
"3M",
"A",
curves=["eureur", "eureur", "eurusd", "eureur"],
notional=-1e6,
),
ZCS(dt(2022, 7, 1), "3M", "A", curves="eureur", notional=1e6),
ZCIS(dt(2022, 1, 1), "1Y", "A", curves=["usdusd", "usdusd", "eu_cpi", "usdusd"]),
IIRS(
dt(2022, 7, 1),
"3M",
"A",
curves=["eu_cpi", "eureur", "eureur", "eureur"],
notional=1e6,
),
XCS( # XCS - FloatFloat
dt(2022, 7, 1),
"3M",
"A",
currency="usd",
pair="eurusd",
curves=["usdusd", "usdusd", "eureur", "eurusd"],
notional=1e6,
),
FXSwap(
dt(2022, 7, 1),
"3M",
pair="usdeur",
curves=["usdusd", "usdusd", "eureur", "eureur"],
notional=-1e6,
),
FXForward(
settlement=dt(2022, 10, 1),
pair="eurusd",
curves=[None, "eureur", None, "usdusd"],
notional=-1e6 * 25 / 74.27,
),
],
)
def test_instrument_repr(inst):
result = inst.__repr__()
expected = f""
assert result == expected
class TestCurvesandSolver:
def test_get_curve_from_solver(self) -> None:
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 1.0}, id="tagged")
inst = [(Value(dt(2023, 1, 1)), {"curves": "tagged"})]
solver = Solver([curve], [], inst, [0.975])
result = _map_curve_from_solver("tagged", solver)
assert result == curve
result = _map_curve_from_solver(curve, solver)
assert result == curve
no_curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 1.0}, id="not in solver")
with default_context("curve_not_in_solver", "ignore"):
result = _map_curve_from_solver(no_curve, solver)
assert result == no_curve
with pytest.warns(), default_context("curve_not_in_solver", "warn"):
result = _map_curve_from_solver(no_curve, solver)
assert result == no_curve
with (
pytest.raises(ValueError, match="`curve` must be in `solver`"),
default_context("curve_not_in_solver", "raise"),
):
_map_curve_from_solver(no_curve, solver)
with pytest.raises(AttributeError, match="`curve` has no attribute `id`, likely it not"):
_map_curve_from_solver(100.0, solver)
# @pytest.mark.parametrize("solver", [True, False])
# @pytest.mark.parametrize("fxf", [True, False])
# @pytest.mark.parametrize("fx", [NoInput(0), 2.0])
# @pytest.mark.parametrize("crv", [True, False])
# def test_get_curves_and_fx_from_solver(
# self,
# usdusd,
# usdeur,
# eureur,
# solver,
# fxf,
# fx,
# crv,
# ) -> None:
# curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 1.0}, id="tagged")
# inst = [Value(dt(2023, 1, 1), curves="tagged")]
# fxfs = FXForwards(
# FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3)),
# {"usdusd": usdusd, "usdeur": usdeur, "eureur": eureur},
# )
# solver = (
# Solver([curve], [], inst, [0.975], fx=fxfs if fxf else NoInput(0))
# if solver
# else NoInput(0)
# )
# curve = curve if crv else NoInput(0)
#
# if solver is not NoInput(0) and fxf and fx is not NoInput(0):
# with pytest.warns(UserWarning):
# # Solver contains an `fx` attribute but an `fx` argument has been supplied
# crv_result, fx_result, _ = _get_curves_fx_and_base_maybe_from_solver(
# NoInput(0),
# solver,
# curve,
# fx,
# NoInput(0),
# "usd",
# )
# else:
# crv_result, fx_result, _ = _get_curves_fx_and_base_maybe_from_solver(
# NoInput(0),
# solver,
# curve,
# fx,
# NoInput(0),
# "usd",
# )
#
# # check the fx results. If fx is specified directly it is returned
# # otherwsie it is returned from a solver object if it is available.
# if fx is not NoInput(0):
# assert fx_result == 2.0
# elif solver is NoInput(0):
# assert fx_result is NoInput(0)
# else:
# if fxf:
# assert fx_result == fxfs
# else:
# assert fx_result is NoInput(0)
#
# assert crv_result == (curve, curve, curve, curve)
# @pytest.mark.parametrize(
# "obj",
# [
# (Curve({dt(2000, 1, 1): 1.0})),
# (LineCurve({dt(2000, 1, 1): 1.0})),
# (Curve({dt(2000, 1, 1): 1.0}, index_base=100.0)),
# (CompositeCurve([Curve({dt(2000, 1, 1): 1.0})])),
# (MultiCsaCurve([Curve({dt(2000, 1, 1): 1.0})])),
# (
# FXDeltaVolSmile(
# {0.1: 1.0, 0.2: 2.0, 0.5: 3.0, 0.7: 4.0, 0.9: 5.0},
# dt(2023, 3, 16),
# dt(2023, 6, 16),
# "forward",
# )
# ),
# ],
# )
# def test_get_curves_fx_and_base_maybe_from_solver_object_types(self, obj) -> None:
# crv_result, _, _ = _get_curves_fx_and_base_maybe_from_solver(
# obj,
# NoInput(0),
# NoInput(0),
# NoInput(0),
# NoInput(0),
# NoInput(0),
# )
# assert crv_result == (obj,) * 4
# def test_get_curves_and_fx_from_solver_raises(self) -> None:
# from rateslib.solver import Solver
#
# curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 1.0}, id="tagged")
# inst = [Value(dt(2023, 1, 1), curves="tagged")]
# solver = Solver([curve], [], inst, [0.975])
#
# with pytest.raises(ValueError, match="`curves` must contain Curve, not str, if"):
# _get_curves_fx_and_base_maybe_from_solver(
# NoInput(0),
# NoInput(0),
# "tagged",
# NoInput(0),
# NoInput(0),
# "",
# )
#
# with pytest.raises(ValueError, match="`curves` must contain str curve `id` s"):
# _get_curves_fx_and_base_maybe_from_solver(
# NoInput(0),
# solver,
# "bad_id",
# NoInput(0),
# NoInput(0),
# "",
# )
#
# with pytest.raises(ValueError, match="Can only supply a maximum of 4 `curves`"):
# _get_curves_fx_and_base_maybe_from_solver(
# NoInput(0),
# solver,
# ["tagged"] * 5,
# NoInput(0),
# NoInput(0),
# "",
# )
# @pytest.mark.parametrize("num", [1, 2, 3, 4])
# def test_get_curves_from_solver_multiply(self, num) -> None:
# from rateslib.solver import Solver
#
# curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 1.0}, id="tagged")
# inst = [Value(dt(2023, 1, 1), curves="tagged")]
# solver = Solver([curve], [], inst, [0.975])
# result, _, _ = _get_curves_fx_and_base_maybe_from_solver(
# NoInput(0),
# solver,
# ["tagged"] * num,
# NoInput(0),
# NoInput(0),
# "",
# )
# assert result == (curve, curve, curve, curve)
def test_get_proxy_curve_from_solver(self, usdusd, usdeur, eureur) -> None:
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 1.0}, id="tagged")
inst = [Value(dt(2023, 1, 1), curves="tagged")]
fxf = FXForwards(
FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3)),
{"usdusd": usdusd, "usdeur": usdeur, "eureur": eureur},
)
solver = Solver([curve], [], inst, [0.975], fx=fxf)
curve = fxf.curve("eur", "usd")
irs = IRS(dt(2022, 1, 1), "3m", "Q")
# test the curve will return even though it is not included within the solver
# because it is a proxy curve.
irs.npv(curves=curve, solver=solver)
def test_ambiguous_curve_in_out_id_solver_raises(self) -> None:
curve = Curve({dt(2022, 1, 1): 1.0}, id="cloned-id")
curve2 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="cloned-id")
solver = Solver(
curves=[curve2],
instruments=[IRS(dt(2022, 1, 1), "1y", "A", curves="cloned-id")],
s=[5.0],
)
irs = IRS(dt(2022, 1, 1), "1y", "A", fixed_rate=2.0)
with pytest.raises(ValueError, match="A curve has been supplied, as part of ``curves``,"):
irs.npv(curves=curve, solver=solver)
def test_get_multicsa_curve_from_solver(self, usdusd, usdeur, eureur) -> None:
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 1.0}, id="tagged")
inst = [Value(dt(2023, 1, 1), curves="tagged")]
fxf = FXForwards(
FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3)),
{"usdusd": usdusd, "usdeur": usdeur, "eureur": eureur},
)
solver = Solver([curve], [], inst, [0.975], fx=fxf)
curve = fxf.curve("eur", ("usd", "eur"))
irs = IRS(dt(2022, 1, 1), "3m", "Q")
# test the curve will return even though it is not included within the solver
# because it is a proxy curve.
irs.npv(curves=curve, solver=solver)
class TestSolverFXandBase:
"""
Test the npv method with combinations of solver fx and base args.
"""
@classmethod
def setup_class(cls):
"""setup any state specific to the execution of the given class (which
usually contains tests).
"""
cls.curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.96}, id="curve")
cls.fxr = FXRates({"eurusd": 1.1, "gbpusd": 1.25}, base="gbp")
cls.irs = IRS(dt(2022, 2, 1), "6M", "A", curves=cls.curve, fixed_rate=4.0)
cls.solver = Solver(
curves=[cls.curve],
instruments=[IRS(dt(2022, 1, 1), "1y", "A", curves=cls.curve)],
s=[4.109589041095898],
id="Solver",
)
cls.nxcs = XCS(
dt(2022, 2, 1),
"6M",
"A",
fixed=False,
leg2_fixed=False,
leg2_mtm=False,
curves=[cls.curve] * 4,
currency="eur",
pair="eurusd",
float_spread=2.0,
)
@classmethod
def teardown_class(cls):
"""teardown any state that was previously setup with a call to
setup_class.
"""
pass
# ``base`` is explcit
def test_base_and_fx(self) -> None:
# calculable since base aligns with local currency
result = self.irs.npv(fx=self.fxr, base="eur")
expected = 330.4051154763001 / 1.1
assert abs(result - expected) < 1e-4
with pytest.warns(
DeprecationWarning,
match=r"Supplying `fx` as numeric is ambiguous, particularly with multi-curr",
):
# warn about numeric
self.irs.npv(fx=1 / 1.1, base="eur")
# raises because no FX data to calculate a conversion
with pytest.raises(KeyError, match="'usd'"):
self.irs.npv(fx=FXRates({"eurgbp": 1.1}), base="eur")
def test_base_and_solverfx(self) -> None:
# should take fx from solver and calculated
self.solver.fx = FXRates({"eurusd": 1.1})
self.solver._set_new_state()
result = self.irs.npv(solver=self.solver, base="eur")
expected = 330.4051154763001 / 1.1
assert abs(result - expected) < 1e-4
self.solver.fx = NoInput(0)
def test_base_and_fx_and_solverfx(self) -> None:
# should take fx and ignore solver.fx
fxr = FXRates({"eurusd": 1.2})
self.solver.fx = fxr
self.solver._set_new_state()
# no warning becuase objects are the same
result = self.irs.npv(solver=self.solver, base="eur", fx=fxr)
expected = 330.4051154763001 / 1.2
assert abs(result - expected) < 1e-4
# should give warning because obj id are different
with pytest.warns(UserWarning):
result = self.irs.npv(solver=self.solver, base="eur", fx=self.fxr)
expected = 330.4051154763001 / 1.1
assert abs(result - expected) < 1e-4
self.solver.fx = NoInput(0)
def test_base_only(self) -> None:
# calculable since base aligns with local currency
result = self.irs.npv(base="usd")
expected = 330.4051154763001
assert abs(result - expected) < 1e-4
# raises becuase no FX data to calculate a conversion
with pytest.raises(ValueError, match="`base` "):
result = self.irs.npv(base="eur")
def test_base_solvernofx(self) -> None:
# calculable since base aligns with local currency
result = self.irs.npv(base="usd", solver=self.solver)
expected = 330.4051154763001
assert abs(result - expected) < 1e-4
# raises becuase no FX data to calculate a conversion
with pytest.raises(ValueError, match="`base` "):
result = self.irs.npv(base="eur", solver=self.solver)
# ``base`` is inferred
def test_no_args(self) -> None:
# should result in a local NPV calculation
result = self.irs.npv()
expected = 330.4051154763001
assert abs(result - expected) < 1e-4
def test_fx(self) -> None:
# this was amended by v2.5. `base` must now be explicit and is not inherited.
result = self.irs.npv(fx=self.fxr)
expected = 330.4051154763001 # / 1.25
assert abs(result - expected) < 1e-4
def test_fx_solverfx(self) -> None:
# this was amended by v2.5. `base` must now be explicit and is not inherited.
fxr = FXRates({"eurusd": 1.2}, base="eur")
self.solver.fx = fxr
self.solver._set_new_state()
# no warning because objects are the same
result = self.irs.npv(solver=self.solver, fx=fxr)
expected = 330.4051154763001 # / 1.2
assert abs(result - expected) < 1e-4
# should give warning because obj id are different
with pytest.warns(UserWarning, match="Solver contains an `fx` attribute but an `fx` ar"):
result = self.irs.npv(solver=self.solver, fx=self.fxr)
expected = 330.4051154763001 # / 1.25
assert abs(result - expected) < 1e-4
self.solver.fx = NoInput(0)
def test_solverfx(self) -> None:
fxr = FXRates({"eurusd": 1.2}, base="eur")
self.solver.fx = fxr
self.solver._set_new_state()
# no warning becuase objects are the same
result = self.irs.npv(solver=self.solver)
expected = 330.4051154763001 # base in this should be local currency not eur.
assert abs(result - expected) < 1e-4
self.solver.fx = NoInput(0)
class TestNullPricing:
# test instruments can be priced without defining a pricing parameter.
@pytest.mark.parametrize(
"inst",
[
CDS(
dt(2022, 7, 1), "3M", "Q", curves=["eureur", "usdusd"], notional=1e6 * 25 / 14.91357
),
IRS(dt(2022, 7, 1), "3M", "A", curves="eureur", notional=1e6),
STIRFuture(
dt(2022, 3, 16),
dt(2022, 6, 15),
"Q",
curves="eureur",
spec="usd_stir",
contracts=-1,
),
FRA(dt(2022, 7, 1), "3M", "A", curves="eureur", notional=1e6),
SBS(
dt(2022, 7, 1),
"3M",
"A",
curves=["eureur", "eureur", "eurusd", "eureur"],
notional=-1e6,
),
ZCS(dt(2022, 7, 1), "3M", "A", curves="eureur", notional=1e6),
IIRS(
dt(2022, 7, 1),
"3M",
"A",
curves=["eu_cpi", "eureur", "eureur", "eureur"],
notional=1e6,
),
IIRS(
dt(2022, 7, 1),
"3M",
"A",
curves=["eu_cpi", "eureur", "eureur", "eureur"],
notional=1e6,
notional_exchange=True,
),
# TODO add a null price test for ZCIS
XCS( # XCS-FixedFloatNonMtm
dt(2022, 7, 1),
"3M",
"A",
fixed=True,
leg2_fixed=False,
leg2_mtm=False,
currency="eur",
pair="eurusd",
curves=["eureur", "eureur", "usdusd", "usdusd"],
notional=1e6,
),
XCS( # XCS-FixedFixedNonMtm
dt(2022, 7, 1),
"3M",
"A",
fixed=True,
leg2_fixed=True,
leg2_mtm=False,
currency="eur",
pair="eurusd",
fixed_rate=1.2,
curves=["eureur", "eureur", "usdusd", "usdusd"],
notional=1e6,
),
XCS( # XCS - FixedFloat
dt(2022, 7, 1),
"3M",
"A",
fixed=True,
leg2_fixed=False,
leg2_mtm=True,
currency="eur",
pair="eurusd",
curves=["eureur", "eureur", "usdusd", "usdusd"],
notional=1e6,
),
XCS( # XCS-FixedFixed
dt(2022, 7, 1),
"3M",
"A",
fixed=True,
leg2_fixed=True,
leg2_mtm=True,
currency="eur",
pair="eurusd",
leg2_fixed_rate=1.3,
curves=["eureur", "eureur", "usdusd", "usdusd"],
notional=1e6,
),
XCS( # XCS - FloatFixed
dt(2022, 7, 1),
"3M",
"A",
fixed=False,
leg2_fixed=True,
leg2_mtm=True,
currency="usd",
pair="usdeur",
curves=["usdusd", "usdusd", "eureur", "eureur"],
notional=-1e6,
),
],
)
def test_null_priced_delta(self, inst) -> None:
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="usdusd")
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}, id="eureur")
c3 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.982}, id="eurusd")
c4 = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.995},
id="eu_cpi",
index_base=100.0,
interpolation="linear_index",
index_lag=3,
)
fxf = FXForwards(
FXRates({"eurusd": 1.0}, settlement=dt(2022, 1, 1)),
{"usdusd": c1, "eureur": c2, "eurusd": c3},
)
ins = [
IRS(dt(2022, 1, 1), "1y", "A", curves="eureur"),
IRS(dt(2022, 1, 1), "1y", "A", curves="usdusd"),
IRS(dt(2022, 1, 1), "1y", "A", curves="eurusd"),
ZCIS(dt(2022, 1, 1), "1y", "A", curves=["eureur", "eureur", "eu_cpi", "eureur"]),
]
solver = Solver(
curves=[c1, c2, c3, c4, fxf.curve("usd", "eur", "usdeur")],
instruments=ins,
s=[1.2, 1.3, 1.33, 0.5],
id="solver",
instrument_labels=["eur 1y", "usd 1y", "eur 1y xcs adj.", "1y cpi"],
fx=fxf,
)
result = inst.delta(solver=solver)
assert abs(result.iloc[0, 0] - 25.0) < 1.0
result2 = inst.npv(solver=solver)
assert abs(result2) < 1e-3
# test that instruments have not been set by the previous pricing action
solver.s = [1.3, 1.4, 1.36, 0.55]
solver.iterate()
result3 = inst.npv(solver=solver)
assert abs(result3) < 1e-3
@pytest.mark.parametrize(
"inst",
[
FXSwap(
dt(2022, 7, 1),
"3M",
pair="eurusd",
curves=["eureur", "usdeur"],
notional=-1e6,
fx_rate=0.999851,
split_notional=-1003052.812,
points=-0.756443,
),
FXForward(
settlement=dt(2022, 10, 1),
pair="eurusd",
curves=["eureur", "usdeur"],
notional=-1e6 * 25 / 74.27,
),
],
)
def test_instruments_that_cannot_be_set_to_mid_market_if_null_priced(self, inst) -> None:
# These instruments behave differently when they have no pricing parameters with regards
# to risk becuase they cannot have FXFixings set, otherwise it breaks the fixings
# calculation (or each call manually requires a reset fixing process)
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="usdusd")
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}, id="eureur")
c3 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.982}, id="eurusd")
c4 = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.995},
id="eu_cpi",
index_base=100.0,
interpolation="linear_index",
index_lag=3,
)
fxf = FXForwards(
FXRates({"eurusd": 1.0}, settlement=dt(2022, 1, 1)),
{"usdusd": c1, "eureur": c2, "eurusd": c3},
)
ins = [
IRS(dt(2022, 1, 1), "1y", "A", curves="eureur"),
IRS(dt(2022, 1, 1), "1y", "A", curves="usdusd"),
IRS(dt(2022, 1, 1), "1y", "A", curves="eurusd"),
ZCIS(dt(2022, 1, 1), "1y", "A", curves=["eureur", "eureur", "eu_cpi", "eureur"]),
]
solver = Solver(
curves=[c1, c2, c3, c4, fxf.curve("usd", "eur", "usdeur")],
instruments=ins,
s=[1.2, 1.3, 1.33, 0.5],
id="solver",
instrument_labels=["eur 1y", "usd 1y", "eur 1y xcs adj.", "1y cpi"],
fx=fxf,
)
result = inst.delta(solver=solver)
assert abs(result.iloc[0, 0] - 25.0) < 1.0
result2 = inst.npv(solver=solver)
assert abs(result2) < 1e-3
# # test that instruments have not been set by the previous pricing action
# solver.s = [1.3, 1.4, 1.36, 0.55]
# solver.iterate()
# result3 = inst.npv(solver=solver)
# assert abs(result3) < 1e-3
@pytest.mark.parametrize(
"inst",
[
XCS( # XCS - FloatFloat
dt(2022, 7, 1),
"3M",
"A",
currency="usd",
pair="usdeur",
curves=["usdusd", "usdusd", "eureur", "eurusd"],
notional=1e6,
float_spread=-12.876098007605556,
),
XCS( # XCS-FloatFloatNonMtm
dt(2022, 7, 1),
"3M",
"A",
fixed=False,
leg2_fixed=False,
leg2_mtm=False,
currency="usd",
pair="usdeur",
curves=["usdusd", "usdusd", "eureur", "eurusd"],
notional=1e6,
metric="leg2",
leg2_float_spread=12.877093409125974,
),
],
)
def test_null_priced_delta_xcs_float_spread(self, inst) -> None:
# all float spreads are defaulted to 0.0 so are artificially priced contracts
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="usdusd")
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}, id="eureur")
c3 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.982}, id="eurusd")
c4 = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.995},
id="eu_cpi",
index_base=100.0,
interpolation="linear_index",
index_lag=3,
)
fxf = FXForwards(
FXRates({"eurusd": 1.0}, settlement=dt(2022, 1, 1)),
{"usdusd": c1, "eureur": c2, "eurusd": c3},
)
ins = [
IRS(dt(2022, 1, 1), "1y", "A", curves="eureur"),
IRS(dt(2022, 1, 1), "1y", "A", curves="usdusd"),
IRS(dt(2022, 1, 1), "1y", "A", curves="eurusd"),
ZCIS(dt(2022, 1, 1), "1y", "A", curves=["eureur", "eureur", "eu_cpi", "eureur"]),
]
solver = Solver(
curves=[c1, c2, c3, c4],
instruments=ins,
s=[1.2, 1.3, 1.33, 0.5],
id="solver",
instrument_labels=["eur 1y", "usd 1y", "eur 1y xcs adj.", "1y cpi"],
fx=fxf,
)
result = inst.delta(solver=solver)
# rate = inst.rate(solver=solver)
assert abs(result.iloc[0, 0] - 25.0) < 1.0
result2 = inst.npv(solver=solver)
assert abs(result2) < 1e-3
# test that instruments have not been set by the previous pricing action
solver.s = [1.3, 1.4, 1.36, 0.55]
solver.iterate()
result3 = inst.npv(solver=solver)
assert abs(result3) > 175 # becuase XCS is priced so its value has changed
@pytest.mark.parametrize(
"inst",
[
NDF(
pair="eurusd",
notional=1e6 * 0.333,
settlement=dt(2022, 10, 1),
curves="usdusd",
)
],
)
def test_null_priced_delta2(self, inst) -> None:
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="usdusd")
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}, id="eureur")
c3 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.982}, id="eurusd")
c4 = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.995},
id="eu_cpi",
index_base=100.0,
interpolation="linear_index",
index_lag=3,
)
fxf = FXForwards(
FXRates({"eurusd": 1.0}, settlement=dt(2022, 1, 1)),
{"usdusd": c1, "eureur": c2, "eurusd": c3},
)
ins = [
IRS(dt(2022, 1, 1), "1y", "A", curves="eureur"),
IRS(dt(2022, 1, 1), "1y", "A", curves="usdusd"),
IRS(dt(2022, 1, 1), "1y", "A", curves="eurusd"),
ZCIS(dt(2022, 1, 1), "1y", "A", curves=["eureur", "eureur", "eu_cpi", "eureur"]),
]
solver = Solver(
curves=[c1, c2, c3, c4],
instruments=ins,
s=[1.2, 1.3, 1.33, 0.5],
id="solver",
instrument_labels=["eur 1y", "usd 1y", "eur 1y xcs adj.", "1y cpi"],
fx=fxf,
)
result = inst.delta(solver=solver)
assert abs(result.iloc[1, 0] - 25.0) < 1.0
result2 = inst.npv(solver=solver)
assert abs(result2) < 1e-3
# test that instruments have not been set by the previous pricing action
solver.s = [1.3, 1.4, 1.36, 0.55]
solver.iterate()
result3 = inst.npv(solver=solver)
assert abs(result3) < 1e-3
@pytest.mark.parametrize(
"inst",
[
NDF(
pair="eurusd",
notional=1e6 * 0.333,
settlement=dt(2022, 10, 1),
curves="usdusd",
)
],
)
def test_null_priced_gamma2(self, inst) -> None:
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="usdusd")
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}, id="eureur")
c3 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.982}, id="eurusd")
c4 = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.995},
id="eu_cpi",
index_base=100.0,
interpolation="linear_index",
index_lag=3,
)
fxf = FXForwards(
FXRates({"eurusd": 1.0}, settlement=dt(2022, 1, 1)),
{"usdusd": c1, "eureur": c2, "eurusd": c3},
)
ins = [
IRS(dt(2022, 1, 1), "1y", "A", curves="eureur"),
IRS(dt(2022, 1, 1), "1y", "A", curves="usdusd"),
IRS(dt(2022, 1, 1), "1y", "A", curves="eurusd"),
ZCIS(dt(2022, 1, 1), "1y", "A", curves=["eureur", "eureur", "eu_cpi", "eureur"]),
]
solver = Solver(
curves=[c1, c2, c3, c4],
instruments=ins,
s=[1.2, 1.3, 1.33, 0.5],
id="solver",
instrument_labels=["eur 1y", "usd 1y", "eur 1y xcs adj.", "1y cpi"],
fx=fxf,
)
result = inst.gamma(solver=solver)
assert isinstance(result, DataFrame)
@pytest.mark.parametrize(
("inst", "param"),
[
(IRS(dt(2022, 7, 1), "3M", "A", curves="usdusd"), "fixed_rate"),
(FRA(dt(2022, 7, 1), "3M", "Q", curves="usdusd"), "fixed_rate"),
(
SBS(dt(2022, 7, 1), "3M", "Q", curves=["usdusd", "usdusd", "eureur", "usdusd"]),
"float_spread",
),
(ZCS(dt(2022, 1, 1), "1Y", "Q", curves=["usdusd"]), "fixed_rate"),
(
ZCIS(dt(2022, 1, 1), "1Y", "A", curves=["usdusd", "usdusd", "eu_cpi", "usdusd"]),
"fixed_rate",
),
(
IIRS(dt(2022, 1, 1), "1Y", "Q", curves=["eu_cpi", "usdusd", "usdusd", "usdusd"]),
"fixed_rate",
),
(
FXForward(
dt(2022, 3, 1),
pair="usdeur",
curves=[NoInput(0), "usdusd", NoInput(0), "eurusd"],
),
"fx_rate",
),
],
)
def test_null_priced_delta_round_trip_one_pricing_param(self, inst, param) -> None:
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="usdusd")
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}, id="eureur")
c3 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.982}, id="eurusd")
c4 = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.995},
id="eu_cpi",
index_base=100.0,
interpolation="linear_index",
index_lag=3,
)
fxf = FXForwards(
FXRates({"eurusd": 1.0}, settlement=dt(2022, 1, 1)),
{"usdusd": c1, "eureur": c2, "eurusd": c3},
)
ins = [
IRS(dt(2022, 1, 1), "1y", "A", curves="eureur"),
IRS(dt(2022, 1, 1), "1y", "A", curves="usdusd"),
IRS(dt(2022, 1, 1), "1y", "A", curves="eurusd"),
ZCIS(dt(2022, 1, 1), "1y", "A", curves=["eureur", "eureur", "eu_cpi", "eureur"]),
]
solver = Solver(
curves=[c1, c2, c3, c4],
instruments=ins,
s=[1.2, 1.3, 1.33, 0.5],
id="solver",
instrument_labels=["eur 1y", "usd 1y", "eur 1y xcs adj.", "1y cpi"],
fx=fxf,
)
unpriced_delta = inst.delta(solver=solver)
mid_market_price = inst.rate(solver=solver)
setattr(inst, param, float(mid_market_price))
priced_delta = inst.delta(solver=solver)
assert_frame_equal(unpriced_delta, priced_delta)
@pytest.mark.parametrize(
("inst", "param"),
[
(
FXSwap(
dt(2022, 2, 1),
"3M",
pair="eurusd",
curves=[NoInput(0), "eurusd", NoInput(0), "usdusd"],
),
"points",
),
],
)
def test_null_priced_delta_round_trip_one_pricing_param_fx_fix(self, inst, param) -> None:
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="usdusd")
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}, id="eureur")
c3 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.982}, id="eurusd")
c4 = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.995},
id="eu_cpi",
index_base=100.0,
interpolation="linear_index",
index_lag=3,
)
fxf = FXForwards(
FXRates({"eurusd": 1.0}, settlement=dt(2022, 1, 1)),
{"usdusd": c1, "eureur": c2, "eurusd": c3},
)
ins = [
IRS(dt(2022, 1, 1), "1y", "A", curves="eureur"),
IRS(dt(2022, 1, 1), "1y", "A", curves="usdusd"),
IRS(dt(2022, 1, 1), "1y", "A", curves="eurusd"),
ZCIS(dt(2022, 1, 1), "1y", "A", curves=["eureur", "eureur", "eu_cpi", "eureur"]),
]
solver = Solver(
curves=[c1, c2, c3, c4],
instruments=ins,
s=[1.2, 1.3, 1.33, 0.5],
id="solver",
instrument_labels=["eur 1y", "usd 1y", "eur 1y xcs adj.", "1y cpi"],
fx=fxf,
)
unpriced_delta = inst.delta(solver=solver, fx=fxf)
mid_market_price = inst.rate(solver=solver, fx=fxf)
setattr(inst, param, float(mid_market_price))
priced_delta = inst.delta(solver=solver, fx=fxf)
assert_frame_equal(unpriced_delta, priced_delta)
@pytest.mark.parametrize(
"inst",
[
CDS(dt(2022, 7, 1), "3M", "Q", notional=1e6 * 25 / 14.91357),
IRS(dt(2022, 7, 1), "3M", "A", notional=1e6),
FRA(dt(2022, 7, 1), "3M", "A", notional=1e6),
SBS(
dt(2022, 7, 1),
"3M",
"A",
notional=-1e6,
),
ZCS(dt(2022, 7, 1), "3M", "A", notional=1e6),
IIRS(
dt(2022, 7, 1),
"3M",
"A",
notional=1e6,
),
IIRS(
dt(2022, 7, 1),
"3M",
"A",
notional=1e6,
notional_exchange=True,
),
# # TODO add a null price test for ZCIS
XCS( # XCS - FloatFloat
dt(2022, 7, 1),
"3M",
"A",
currency="usd",
pair="usdeur",
notional=1e6,
),
XCS( # XCS-FloatFloatNonMtm
dt(2022, 7, 1),
"3M",
"A",
fixed=False,
leg2_fixed=False,
leg2_mtm=False,
currency="usd",
pair="usdeur",
notional=1e6,
),
XCS( # XCS-FixedFloatNonMtm
dt(2022, 7, 1),
"3M",
"A",
fixed=True,
leg2_fixed=False,
leg2_mtm=False,
currency="eur",
pair="eurusd",
notional=1e6,
),
XCS( # XCS-FixedFixedNonMtm
dt(2022, 7, 1),
"3M",
"A",
fixed=True,
leg2_fixed=True,
leg2_mtm=False,
currency="eur",
pair="eurusd",
leg2_fixed_rate=1.2,
notional=1e6,
),
XCS( # XCS - FixedFloat
dt(2022, 7, 1),
"3M",
"A",
fixed=True,
leg2_fixed=False,
leg2_mtm=True,
currency="eur",
pair="eurusd",
notional=1e6,
),
XCS( # XCS-FixedFixed
dt(2022, 7, 1),
"3M",
"A",
fixed=True,
leg2_fixed=True,
leg2_mtm=True,
currency="eur",
pair="eurusd",
leg2_fixed_rate=1.3,
notional=1e6,
),
FXSwap(
dt(2022, 7, 1),
"3M",
pair="usdeur",
notional=-1e6,
# fx_fixing=0.999851,
# split_notional=1003052.812,
# points=2.523505,
),
FXForward(
settlement=dt(2022, 10, 1),
pair="usdeur",
notional=-1e6 * 25 / 74.27,
),
# NDF(
# pair="eurusd", # settlement currency defaults to right hand side: usd
# settlement=dt(2022, 10, 1),
# ),
],
)
def test_set_pricing_does_not_overwrite_unpriced_status(self, inst):
# unpriced instruments run a `set_pricing_mid` method
# this test ensures that after that run the price is not permanently set and
# will reset when priced from an alternative set of curves.
# test is slightly different to null_priced_delta: uses fx and includes rate call
curve1 = Curve({dt(2022, 1, 1): 1.0, dt(2024, 1, 1): 0.99}, index_base=66)
curve2 = Curve({dt(2022, 1, 1): 1.0, dt(2024, 1, 1): 0.98}, index_base=66)
curve3 = Curve({dt(2022, 1, 1): 1.0, dt(2024, 1, 1): 0.97})
curve4 = Curve({dt(2022, 1, 1): 1.0, dt(2024, 1, 1): 0.96}, index_base=80)
curve5 = Curve({dt(2022, 1, 1): 1.0, dt(2024, 1, 1): 0.95}, index_base=80)
curve6 = Curve({dt(2022, 1, 1): 1.0, dt(2024, 1, 1): 0.94})
fxr1 = FXRates({"eurusd": 1.0}, settlement=dt(2022, 1, 1))
fxr2 = FXRates({"eurusd": 1.5}, settlement=dt(2022, 1, 1))
fxf1 = FXForwards(fxr1, {"usdusd": curve1, "eureur": curve2, "eurusd": curve3})
fxf2 = FXForwards(fxr2, {"usdusd": curve4, "eureur": curve5, "eurusd": curve6})
rate1 = inst.rate(
curves=dict(
rate_curve=curve1,
disc_curve=curve1,
index_curve=curve1,
leg2_rate_curve=curve2,
leg2_disc_curve=curve3,
leg2_index_curve=curve2,
),
fx=fxf1,
)
npv1 = inst.npv(
curves=dict(
rate_curve=curve1,
disc_curve=curve1,
index_curve=curve1,
leg2_rate_curve=curve2,
leg2_disc_curve=curve3,
leg2_index_curve=curve2,
),
fx=fxf1,
)
assert abs(npv1) < 1e-8
rate2 = inst.rate(
curves=dict(
rate_curve=curve4,
disc_curve=curve4,
index_curve=curve4,
leg2_rate_curve=curve5,
leg2_disc_curve=curve6,
leg2_index_curve=curve5,
),
fx=fxf2,
)
npv2 = inst.npv(
curves=dict(
rate_curve=curve4,
disc_curve=curve4,
index_curve=curve4,
leg2_rate_curve=curve5,
leg2_disc_curve=curve6,
leg2_index_curve=curve5,
),
fx=fxf2,
)
assert rate1 != rate2
assert abs(npv2) < 1e-8
@pytest.mark.parametrize(
"inst",
[
STIRFuture(
dt(2022, 3, 16),
dt(2022, 6, 15),
"Q",
spec="usd_stir",
contracts=-1,
),
# # TODO add a null price test for ZCIS
],
)
def test_set_pricing_does_not_overwrite_unpriced_status_single_currency_inst(self, inst):
# unpriced instruments run a `set_pricing_mid` method
# this test ensures that after that run the price is not permanently set and
# will reset when priced from an alternative set of curves.
# test is slightly different to null_priced_delta: uses fx and includes rate call
curve1 = Curve({dt(2022, 1, 1): 1.0, dt(2024, 1, 1): 0.99}, index_base=66)
curve2 = Curve({dt(2022, 1, 1): 1.0, dt(2024, 1, 1): 0.98}, index_base=66)
curve3 = Curve({dt(2022, 1, 1): 1.0, dt(2024, 1, 1): 0.97})
curve4 = Curve({dt(2022, 1, 1): 1.0, dt(2024, 1, 1): 0.96}, index_base=80)
curve5 = Curve({dt(2022, 1, 1): 1.0, dt(2024, 1, 1): 0.95}, index_base=80)
curve6 = Curve({dt(2022, 1, 1): 1.0, dt(2024, 1, 1): 0.94})
fxr1 = FXRates({"eurusd": 1.0}, settlement=dt(2022, 1, 1))
fxr2 = FXRates({"eurusd": 1.5}, settlement=dt(2022, 1, 1))
fxf1 = FXForwards(fxr1, {"usdusd": curve1, "eureur": curve2, "eurusd": curve3})
fxf2 = FXForwards(fxr2, {"usdusd": curve4, "eureur": curve5, "eurusd": curve6})
rate1 = inst.rate(
curves=dict(
rate_curve=curve1,
disc_curve=curve1,
index_curve=curve1,
leg2_rate_curve=curve2,
leg2_disc_curve=curve1,
leg2_index_curve=curve2,
),
fx=fxf1,
)
npv1 = inst.npv(
curves=dict(
rate_curve=curve1,
disc_curve=curve1,
index_curve=curve1,
leg2_rate_curve=curve2,
leg2_disc_curve=curve1,
leg2_index_curve=curve2,
),
fx=fxf1,
)
assert abs(npv1) < 1e-8
rate2 = inst.rate(
curves=dict(
rate_curve=curve4,
disc_curve=curve4,
index_curve=curve4,
leg2_rate_curve=curve5,
leg2_disc_curve=curve4,
leg2_index_curve=curve5,
),
fx=fxf2,
)
npv2 = inst.npv(
curves=dict(
rate_curve=curve4,
disc_curve=curve4,
index_curve=curve4,
leg2_rate_curve=curve5,
leg2_disc_curve=curve4,
leg2_index_curve=curve5,
),
fx=fxf2,
)
assert rate1 != rate2
assert abs(npv2) < 1e-8
class TestIRS:
@pytest.mark.parametrize(
("float_spread", "fixed_rate", "expected"),
[
(0, 4.03, 4.03637780),
(3, 4.03, 4.06637780),
(0, 5.10, 4.03637780),
],
)
def test_irs_rate(self, curve, float_spread, fixed_rate, expected) -> None:
# test the mid-market rate ignores the given fixed_rate and reacts to float_spread
irs = IRS(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
notional=1e9,
convention="Act360",
frequency="Q",
fixed_rate=fixed_rate,
stub="ShortFront",
leg2_float_spread=float_spread,
)
result = irs.rate(curves=curve)
assert abs(result - expected) < 1e-7
@pytest.mark.parametrize(
("float_spread", "fixed_rate", "expected"),
[
(0, 4.03, -0.63777963),
(200, 4.03, -0.63777963),
(500, 4.03, -0.63777963),
(0, 4.01, -2.63777963),
],
)
def test_irs_spread_none_simple(self, curve, float_spread, fixed_rate, expected) -> None:
# test the mid-market float spread ignores the given float_spread and react to fixed
irs = IRS(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
notional=1e9,
convention="Act360",
frequency="Q",
fixed_rate=fixed_rate,
leg2_float_spread=float_spread,
leg2_fixing_method="rfr_payment_delay",
leg2_spread_compound_method="none_simple",
stub="ShortFront",
)
result = irs.spread(curves=curve)
assert abs(result - expected) < 1e-7
irs.leg2_float_spread = result
validate = irs.npv(curves=curve)
assert abs(validate) < 1e-8
@pytest.mark.parametrize(
("float_spread", "fixed_rate", "expected"),
[
(0, 4.03, -0.6322524949759807),
(200, 4.03, -0.6322524951743129),
(500, 4.03, -0.6322524951743129),
(0, 4.01, -2.61497625534),
],
)
def test_irs_spread_isda_compound(self, curve, float_spread, fixed_rate, expected) -> None:
# test the mid-market float spread ignores the given float_spread and react to fixed
irs = IRS(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
notional=1e9,
convention="Act360",
frequency="Q",
fixed_rate=fixed_rate,
leg2_float_spread=float_spread,
leg2_fixing_method="rfr_payment_delay",
leg2_spread_compound_method="isda_compounding",
stub="ShortFront",
)
result = irs.spread(curves=curve)
assert abs(result - expected) < 1e-7
irs.leg2_float_spread = result
validate = irs.npv(curves=curve)
assert abs(validate) < 5e2
@pytest.mark.parametrize(
("float_spread", "fixed_rate", "expected"),
[
(0, 4.03, -0.63500600),
(200, 4.03, -0.6348797243),
(500, 4.03, -0.6346903026),
(0, 4.01, -2.626308241),
],
)
def test_irs_spread_isda_flat_compound(self, curve, float_spread, fixed_rate, expected) -> None:
# test the mid-market float spread ignores the given float_spread and react to fixed
irs = IRS(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
notional=1e9,
convention="Act360",
frequency="Q",
fixed_rate=fixed_rate,
leg2_float_spread=float_spread,
leg2_fixing_method="rfr_payment_delay",
leg2_spread_compound_method="isda_flat_compounding",
stub="ShortFront",
)
result = irs.spread(curves=curve)
assert abs(result - expected) < 1e-2
irs.leg2_float_spread = result
validate = irs.npv(curves=curve)
assert abs(validate) < 20
def test_irs_npv(self, curve) -> None:
irs = IRS(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
notional=1e9,
convention="Act360",
frequency="Q",
fixed_rate=4.035,
stub="ShortFront",
leg2_float_spread=0,
)
result = irs.npv(curves=curve)
expected = irs.analytic_delta(curves=curve) * (4.035 - irs.rate(curves=curve)) * -100
assert abs(result - expected) < 1e-7
assert abs(result - 5704.13604352) < 1e-7
def test_irs_cashflows(self, curve) -> None:
irs = IRS(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
notional=1e9,
convention="Act360",
frequency="Q",
fixed_rate=4.035,
leg2_float_spread=NoInput(0),
stub="ShortFront",
)
result = irs.cashflows(curves=curve)
assert isinstance(result, DataFrame)
assert result.index.nlevels == 2
def test_irs_npv_mid_mkt_zero(self, curve) -> None:
irs = IRS(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
notional=1e9,
convention="Act360",
frequency="Q",
stub="ShortFront",
)
result = irs.npv(curves=curve)
assert abs(result) < 1e-8
irs.fixed_rate = 1.0 # pay fixed low rate implies positive NPV
assert irs.npv(curves=curve) > 1
irs.fixed_rate = NoInput(0) # fixed rate set back to initial
assert abs(irs.npv(curves=curve)) < 1e-8
irs.fixed_rate = float(irs.rate(curves=curve))
irs.leg2_float_spread = 100
assert irs.npv(curves=curve) > 1
# irs.leg2_float_spread = NoInput(0)
# assert abs(irs.npv(curves=curve)) < 1e-8
@pytest.mark.skip(reason="unexpected attribute no longer raise exceptions")
def test_sbs_float_spread_raises(self, curve) -> None:
irs = IRS(dt(2022, 1, 1), "9M", "Q")
with pytest.raises(AttributeError, match="property 'float_spread' of 'IRS' object has no "):
irs.float_spread = 1.0
@pytest.mark.skip(reason="attribute mutation is not exhaustively blocked")
def test_index_base_raises(self) -> None:
irs = IRS(dt(2022, 1, 1), "9M", "Q")
with pytest.raises(AttributeError, match="property 'float_spread' of 'IRS' object has no"):
irs.index_base = 1.0
with pytest.raises(AttributeError, match="property 'float_spread' of 'IRS' object has no"):
irs.leg2_index_base = 1.0
def test_irs_interpolated_stubs(self, curve) -> None:
curve6 = LineCurve({dt(2022, 1, 1): 4.0, dt(2023, 2, 1): 4.0})
curve3 = LineCurve({dt(2022, 1, 1): 3.0, dt(2023, 2, 1): 3.0})
curve1 = LineCurve({dt(2022, 1, 1): 1.0, dt(2023, 2, 1): 1.0})
irs = IRS(
effective=dt(2022, 1, 3),
termination=dt(2023, 1, 3),
front_stub=dt(2022, 2, 10),
back_stub=dt(2022, 8, 10),
frequency="Q",
convention="act360",
curves=[{"3m": curve3, "1m": curve1, "6M": curve6}, curve],
leg2_fixing_method="ibor(2)",
)
cashflows = irs.cashflows()
assert (cashflows.loc[("leg2", 0), "Rate"] - 1.23729) < 1e-4
assert (cashflows.loc[("leg2", 3), "Rate"] - 3.58696) < 1e-4
def test_irs_interpolated_stubs_solver(self) -> None:
curve6 = Curve({dt(2022, 1, 1): 4.0, dt(2023, 2, 1): 4.0}, id="6m")
curve3 = Curve({dt(2022, 1, 1): 3.0, dt(2023, 2, 1): 3.0}, id="3m")
solver = Solver(
curves=[curve6, curve3],
instruments=[
IRS(dt(2022, 1, 1), "1Y", "A", curves=curve6),
IRS(dt(2022, 1, 1), "1Y", "A", curves=curve3),
],
s=[6.0, 3.0],
)
irs = IRS(
effective=dt(2022, 1, 3),
termination=dt(2022, 11, 3),
front_stub=dt(2022, 5, 3),
stub="Front",
frequency="Q",
convention="act360",
curves=[{"3m": "3m", "6m": "6m"}, "3m"],
leg2_fixing_method="ibor(2)",
)
cashflows = irs.cashflows(solver=solver)
assert (cashflows.loc[("leg2", 0), "Rate"] - 3.93693) < 1e-4
def test_no_rfr_fixings_raises(self) -> None:
# GH 170
T_irs = IRS(
effective=dt(2020, 12, 15),
termination=dt(2037, 12, 15),
notional=-600e6,
frequency="A",
leg2_frequency="A",
fixed_rate=4.5,
curves="curve",
)
par_curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
dt(2025, 1, 1): 1.0,
},
id="curve",
)
with pytest.raises(ValueError, match="`effective` date for rate period is before the init"):
T_irs.npv(curves=par_curve)
def test_no_rfr_fixings_raises2(self) -> None:
# GH 357
sofr = Curve(
id="sofr",
convention="Act360",
calendar="nyc",
modifier="MF",
interpolation="log_linear",
nodes={
dt(2023, 8, 21): 1.0,
dt(2026, 8, 25): 0.97,
},
)
irs = IRS(
effective=dt(2023, 8, 18),
termination=dt(2025, 8, 18),
notional=1e6,
curves=sofr,
fixed_rate=4.86,
spec="usd_irs",
)
with pytest.raises(ValueError, match="`effective` date for rate period is before the init"):
irs.npv()
def test_1b_tenor_swaps(self):
irs = IRS(dt(2024, 12, 30), "1b", spec="sek_irs") # 31st is a holiday.
assert irs.leg1.schedule.uschedule == [dt(2024, 12, 30), dt(2025, 1, 2)]
def test_1d_tenor_swaps(self):
irs = IRS(dt(2024, 12, 30), "1d", spec="sek_irs") # 31st is a holiday.
assert irs.leg1.schedule.uschedule == [dt(2024, 12, 30), dt(2025, 1, 2)]
def test_fixings_table(self, curve):
irs = IRS(dt(2022, 1, 15), "6m", spec="usd_irs", curves=curve)
result = irs.local_analytic_rate_fixings()
assert isinstance(result, DataFrame)
def test_1d_instruments(self):
# GH484
with pytest.raises(ValueError, match="A Schedule could not be generated from the pa"):
IRS(dt(2025, 1, 1), "1d", spec="sek_irs")
def test_custom_amortization_raises(self):
with pytest.raises(ValueError, match="Custom amortisation schedules must have `n-1` amort"):
IRS(dt(2000, 1, 1), dt(2000, 4, 1), "M", notional=1000, amortization=[100, 400, 50])
def test_custom_amortization(self):
irs = IRS(dt(2000, 1, 1), dt(2000, 5, 1), "M", notional=1000, amortization=[100, 400, 50])
assert irs.leg1.amortization.outstanding == (1000.0, 900.0, 500.0, 450.0)
assert irs.leg1.amortization.amortization == (100.0, 400.0, 50.0)
assert irs.leg2.amortization.outstanding == (-1000.0, -900.0, -500.0, -450.0)
assert irs.leg2.amortization.amortization == (-100.0, -400.0, -50.0)
def test_custom_amortization_as_object(self):
# test an Amortization object can be passed and is negated correctly
amort = Amortization(4, 1000.0, [100.0, 400.0, 50.0])
irs = IRS(dt(2000, 1, 1), dt(2000, 5, 1), "M", notional=1000, amortization=amort)
assert irs.leg1.amortization.outstanding == (1000.0, 900.0, 500.0, 450.0)
assert irs.leg1.amortization.amortization == (100.0, 400.0, 50.0)
assert irs.leg2.amortization.outstanding == (-1000.0, -900.0, -500.0, -450.0)
assert irs.leg2.amortization.amortization == (-100.0, -400.0, -50.0)
@pytest.mark.skip(reason="unexpected attribute no longer raise exceptions")
def test_irs_attributes(self):
irs = IRS(dt(2000, 1, 1), dt(2000, 5, 1), "M", fixed_rate=2.0)
assert irs.fixed_rate == 2.0
with pytest.raises(AttributeError, match="Attribute not available on IRS"):
irs.float_spread
with pytest.raises(AttributeError, match="Attribute not available on IRS"):
irs.leg2_fixed_rate
assert irs.leg2_float_spread == 0.0
def test_irs_parse_curves(self, curve):
irs = IRS(dt(2000, 1, 1), dt(2000, 5, 1), "M", fixed_rate=2.0)
r1 = irs.npv(curves=[curve])
r2 = irs.npv(curves={"rate_curve": curve, "disc_curve": curve})
assert r1 == r2
def test_modifier_as_adjuster(self):
irs = IRS(dt(2000, 1, 1), "1y", "S", modifier=Adjuster.CalDaysLagSettle(10), calendar="ldn")
assert irs.leg1.schedule.uschedule[0] == dt(2000, 1, 1)
assert irs.leg1.schedule.aschedule[0] == dt(2000, 1, 11)
def test_cny_zero_periods(self):
irs = IRS(
effective=dt(2026, 2, 4),
termination=dt(2031, 2, 4),
frequency="Q",
calendar="bjs",
modifier="F",
payment_lag=0,
leg2_fixing_method="ibor(1)",
convention="act365F",
leg2_fixing_frequency="7D",
leg2_fixing_series=FloatRateSeries(
lag=1,
convention="act365f",
calendar="bjs",
modifier="f",
tenors=["7D"],
zero_period_stub="shortback",
eom=False,
),
leg2_zero_periods=True,
)
assert isinstance(irs.leg2.periods[0], ZeroFloatPeriod)
fixing_dates = [
dt(2026, 2, 3),
dt(2026, 2, 10),
dt(2026, 2, 14),
dt(2026, 2, 24),
dt(2026, 3, 3),
dt(2026, 3, 10),
dt(2026, 3, 17),
dt(2026, 3, 24),
dt(2026, 3, 31),
dt(2026, 4, 7),
dt(2026, 4, 14),
dt(2026, 4, 21),
dt(2026, 4, 28),
]
for i, float_period in enumerate(irs.leg2.periods[0].float_periods):
assert float_period.rate_params.rate_fixing.date == fixing_dates[i]
# test even stub sub-periods are fixed veruss "7D"
assert isinstance(
irs.leg2.periods[1].float_periods[-1].rate_params.rate_fixing, IBORStubFixing
)
assert isinstance(
irs.leg2.periods[1].float_periods[-1].rate_params.rate_fixing.fixing2, NoInput
)
def test_cny_golden_week_npv(self):
fixings.add(
"CNR7_1W",
Series(
index=[dt(2025, 9, 23), dt(2025, 9, 30), dt(2025, 10, 14)],
data=[1.53, 1.65, 1.48],
),
)
irs = IRS(
effective=dt(2025, 9, 24),
termination=dt(2025, 10, 22),
frequency="Q",
payment_lag=0,
leg2_fixing_method="ibor(1)",
leg2_fixing_frequency="7D",
leg2_zero_periods=True,
leg2_rate_fixings="CNR7",
leg2_fixing_series=FloatRateSeries(
lag=1,
calendar="bjs",
convention="act365f",
tenors=["7D"],
zero_period_stub="shortback",
modifier="F",
eom=False,
),
fixed_rate=3.0,
calendar="bjs",
convention="act365F",
notional=-100e6,
)
curve = Curve(
{dt(2025, 10, 21): 1.0, dt(2026, 10, 21): 0.99}, calendar="bjs", convention="act365f"
)
_npv = irs.npv(curves=curve)
cf = irs.cashflows(curves=curve)
assert abs(cf.loc[("leg1", 0), "Cashflow"] - 230136.99) < 1e-2
assert abs(cf.loc[("leg2", 0), "Cashflow"] + 118426.165) < 1e-2
expected_rate = (
((1 + 1.53 * 15 / 36500) * (1 + 1.65 * 6 / 36500) * (1 + 1.48 * 7 / 36500) - 1)
* 36500
/ 28
)
assert abs(cf.loc[("leg2", 0), "Rate"] - expected_rate) < 1e-4
fixings.pop("CNR7_1W")
class TestNDIRS:
def test_irs_analytic_dv01(self, eureur, usdeur, usdusd) -> None:
# test the mid-market rate ignores the given fixed_rate and reacts to float_spread
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.10}, settlement=dt(2022, 1, 3)),
fx_curves={"eureur": eureur, "usdeur": usdeur, "usdusd": usdusd},
)
irs = IRS(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
notional=1e9,
convention="Act360",
currency="usd",
pair="eurusd",
frequency="Q",
fixed_rate=1.05,
stub="ShortFront",
leg2_float_spread=1.0,
)
anal_delta_l1_local = irs.analytic_delta(curves=[eureur, usdusd], fx=fxf)
anal_delta_l1_eur = irs.analytic_delta(curves=[eureur, usdusd], fx=fxf, base="eur")
anal_delta_l2_local = irs.analytic_delta(curves=[eureur, usdusd], fx=fxf, leg=2)
anal_delta_l2_eur = irs.analytic_delta(curves=[eureur, usdusd], fx=fxf, base="eur", leg=2)
assert abs(anal_delta_l1_local + anal_delta_l2_local) < 1e-8
assert abs(anal_delta_l1_eur + anal_delta_l2_eur) < 1e-8
assert abs(anal_delta_l1_local - anal_delta_l1_eur) > 4000
assert abs(anal_delta_l1_eur - 5 / 12 * 1e5) < 213
@pytest.mark.parametrize(
("float_spread", "fixed_rate", "expected"),
[
(0, 1.05, 1.2033904812590062),
(3, 1.05, 1.2333904812590062),
(0, 1.25, 1.2033904812590062),
],
)
def test_irs_rate(self, float_spread, fixed_rate, expected, eureur, usdeur, usdusd) -> None:
# test the mid-market rate ignores the given fixed_rate and reacts to float_spread
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.10}, settlement=dt(2022, 1, 3)),
fx_curves={"eureur": eureur, "usdeur": usdeur, "usdusd": usdusd},
)
irs = IRS(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
notional=1e9,
convention="Act360",
currency="usd",
pair="eurusd",
frequency="Q",
fixed_rate=fixed_rate,
stub="ShortFront",
leg2_float_spread=float_spread,
)
result = irs.rate(curves=[eureur, usdusd], fx=fxf)
assert abs(result - expected) < 1e-7
@pytest.mark.parametrize(
("float_spread", "fixed_rate", "expected"),
[
(0, 1.05, -15.33904812590061),
(200, 1.05, -15.33904812590061),
(500, 1.05, -15.33904812590061),
(0, 1.02, -18.33904812590061),
],
)
def test_irs_spread_none_simple(
self, curve, float_spread, fixed_rate, expected, eureur, usdeur, usdusd
) -> None:
# test the mid-market float spread ignores the given float_spread and react to fixed
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.10}, settlement=dt(2022, 1, 3)),
fx_curves={"eureur": eureur, "usdeur": usdeur, "usdusd": usdusd},
)
irs = IRS(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
notional=1e9,
convention="Act360",
frequency="Q",
currency="usd",
pair="eurusd",
fixed_rate=fixed_rate,
leg2_float_spread=float_spread,
leg2_fixing_method="rfr_payment_delay",
leg2_spread_compound_method="none_simple",
stub="ShortFront",
)
result = irs.spread(curves=[eureur, usdusd], fx=fxf)
assert abs(result - expected) < 1e-7
irs.leg2_float_spread = result
validate = irs.npv(curves=[eureur, usdusd], fx=fxf)
assert abs(validate) < 1e-8
@pytest.mark.parametrize(
("float_spread", "fixed_rate", "expected"),
[
(0, 1.05, -15.301676945861795),
(200, 1.05, -15.301676945861795),
(500, 1.05, -15.301676945861795),
(0, 1.02, -18.294961421921645),
],
)
def test_irs_spread_isda_compound(
self, float_spread, fixed_rate, expected, eureur, usdeur, usdusd
) -> None:
# test the mid-market float spread ignores the given float_spread and react to fixed
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.10}, settlement=dt(2022, 1, 3)),
fx_curves={"eureur": eureur, "usdeur": usdeur, "usdusd": usdusd},
)
irs = IRS(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
notional=1e9,
convention="Act360",
frequency="Q",
currency="usd",
pair="eurusd",
fixed_rate=fixed_rate,
leg2_float_spread=float_spread,
leg2_fixing_method="rfr_payment_delay",
leg2_spread_compound_method="isda_compounding",
stub="ShortFront",
)
result = irs.spread(curves=[eureur, usdusd], fx=fxf)
assert abs(result - expected) < 1e-7
irs.leg2_float_spread = result
validate = irs.npv(curves=[eureur, usdusd], fx=fxf)
assert abs(validate) < 5e-2
@pytest.mark.parametrize(
("float_spread", "fixed_rate", "expected"),
[
(0, 1.05, -15.319076706336164),
(200, 1.05, -15.319076706336164),
(500, 1.05, -15.319076706336164),
(0, 1.02, -18.315170710463878),
],
)
def test_irs_spread_isda_flat_compound(
self, float_spread, fixed_rate, expected, eureur, usdeur, usdusd
) -> None:
# test the mid-market float spread ignores the given float_spread and react to fixed
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.10}, settlement=dt(2022, 1, 3)),
fx_curves={"eureur": eureur, "usdeur": usdeur, "usdusd": usdusd},
)
irs = IRS(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
notional=1e9,
convention="Act360",
frequency="Q",
currency="usd",
pair="eurusd",
fixed_rate=fixed_rate,
leg2_float_spread=float_spread,
leg2_fixing_method="rfr_payment_delay",
leg2_spread_compound_method="isda_flat_compounding",
stub="ShortFront",
)
result = irs.spread(curves=[eureur, usdusd], fx=fxf)
assert abs(result - expected) < 1e-7
irs.leg2_float_spread = result
validate = irs.npv(curves=[eureur, usdusd], fx=fxf)
assert abs(validate) < 5e-2
def test_irs_npv(self, eureur, usdeur, usdusd) -> None:
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.10}, settlement=dt(2022, 1, 3)),
fx_curves={"eureur": eureur, "usdeur": usdeur, "usdusd": usdusd},
)
irs = IRS(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
notional=1e9,
convention="Act360",
frequency="Q",
currency="usd",
pair="eurusd",
fixed_rate=1.24,
leg2_float_spread=3.0,
leg2_fixing_method="rfr_payment_delay",
stub="ShortFront",
curves=[eureur, usdusd],
)
result = irs.npv(fx=fxf)
expected = irs.analytic_delta(fx=fxf) * (1.24 - irs.rate(fx=fxf)) * -100
assert abs(result - expected) < 1e-7
assert abs(result + 30138.5056568) < 1e-7
def test_irs_cashflows(self, eureur, usdeur, usdusd) -> None:
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.10}, settlement=dt(2022, 1, 3)),
fx_curves={"eureur": eureur, "usdeur": usdeur, "usdusd": usdusd},
)
irs = IRS(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
notional=1e9,
convention="Act360",
frequency="Q",
currency="usd",
pair="eurusd",
fixed_rate=1.24,
leg2_float_spread=3.0,
leg2_fixing_method="rfr_payment_delay",
stub="ShortFront",
curves=[eureur, usdusd],
)
result = irs.cashflows(fx=fxf)
assert isinstance(result, DataFrame)
assert all(result[defaults.headers["reference_currency"]] == ["EUR", "EUR", "EUR", "EUR"])
assert irs.kwargs.leg1["mtm"]
assert irs.kwargs.leg2["mtm"]
def test_irs_npv_mid_mkt_zero(self, eureur, usdeur, usdusd) -> None:
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.10}, settlement=dt(2022, 1, 3)),
fx_curves={"eureur": eureur, "usdeur": usdeur, "usdusd": usdusd},
)
irs = IRS(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
notional=1e9,
convention="Act360",
frequency="Q",
currency="usd",
pair="eurusd",
leg2_float_spread=3.0,
leg2_fixing_method="rfr_payment_delay",
stub="ShortFront",
curves=[eureur, usdusd],
)
result = irs.npv(fx=fxf)
assert abs(result) < 1e-8
irs.fixed_rate = 1.0 # pay fixed low rate implies positive NPV
assert irs.npv(fx=fxf) > 1
irs.fixed_rate = NoInput(0) # fixed rate set back to initial
assert abs(irs.npv(fx=fxf)) < 1e-8
irs.fixed_rate = float(irs.rate(fx=fxf))
irs.leg2_float_spread = 100
assert irs.npv(fx=fxf) > 1
def test_fixings_table(self, eureur, usdeur, usdusd):
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.10}, settlement=dt(2022, 1, 3)),
fx_curves={"eureur": eureur, "usdeur": usdeur, "usdusd": usdusd},
)
irs = IRS(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
notional=1e9,
convention="Act360",
frequency="Q",
currency="usd",
pair="eurusd",
leg2_float_spread=3.0,
leg2_fixing_method="rfr_payment_delay",
stub="ShortFront",
curves=[eureur, usdusd],
)
result = irs.local_analytic_rate_fixings(fx=fxf)
assert isinstance(result, DataFrame)
assert isinstance(result.iloc[0, 0], Dual)
assert abs(result.iloc[0, 0] - 304.26949) < 1e-5
assert abs(gradient(result.iloc[0, 0], vars=["fx_eurusd"])[0] - 276.6) < 1e-1
def test_irs_parse_curves(self, curve):
irs = IRS(dt(2000, 1, 1), dt(2000, 5, 1), "M", fixed_rate=2.0)
r1 = irs.npv(curves=[curve])
r2 = irs.npv(curves={"rate_curve": curve, "disc_curve": curve})
assert r1 == r2
def test_spec_ndset(self):
irs = IRS(effective=dt(2022, 1, 1), termination="1y", spec="inr_ndirs")
assert irs.kwargs.leg1["pair"] == "usdinr"
assert irs.kwargs.leg1["mtm"]
assert irs.kwargs.leg2["mtm"]
def test_real_mkt_example(self):
# An INRUSD NDIRS with market pricing
fxr = FXRates({"usdinr": 90.38}, settlement=dt(2025, 12, 19))
usdusd = Curve({dt(2025, 12, 17): 1.0, dt(2030, 12, 20): 0.9})
inrinr = Curve({dt(2025, 12, 17): 1.0, dt(2030, 12, 20): 0.9}, convention="act365F")
inrusd = Curve({dt(2025, 12, 17): 1.0, dt(2030, 12, 20): 0.9}, convention="act365F")
fxf = FXForwards(
fx_rates=fxr, fx_curves={"usdusd": usdusd, "inrinr": inrinr, "inrusd": inrusd}
)
Solver(
curves=[usdusd, inrinr, inrusd],
instruments=[
IRS(dt(2025, 12, 19), "5y", spec="usd_irs", curves=[usdusd]),
FXSwap(dt(2025, 12, 19), "5y", "usdinr", curves=[usdusd, inrusd]),
IRS(dt(2025, 12, 18), "5Y", spec="inr_ndirs", curves=[inrinr, usdusd]),
],
s=[3.447, 148300.0, 5.9075],
fx=fxf,
)
ndirs = IRS(dt(2025, 12, 18), "5y", spec="inr_ndirs", fixed_rate=5.8775, notional=250e6)
npv = ndirs.npv(fx=fxf, curves=[inrinr, usdusd])
assert abs(npv - 3489.2) < 1e-1
a_delta = ndirs.analytic_delta(fx=fxf, curves=[inrinr, usdusd])
assert abs(a_delta - 1163.1) < 1e-1
df = ndirs.cashflows(fx=fxf, curves=[inrinr, usdusd])
assert isinstance(df, DataFrame)
class TestIIRS:
@pytest.mark.skip(reason="v2.5 new IndexFixing handles setting and updating")
def test_index_base_none_populated(self, curve) -> None:
i_curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.5, dt(2034, 1, 1): 0.4},
index_lag=3,
index_base=100.0,
interpolation_method="linear_index",
)
iirs = IIRS(
effective=dt(2022, 2, 1),
termination="1y",
frequency="Q",
index_lag=3,
notional_exchange=False,
)
for period in iirs.leg1.periods:
assert period.index_params.index_base.value is NoInput(0)
iirs.rate(curves=[i_curve, curve])
for period in iirs.leg1.periods:
assert period.index_params.index_base.value is NoInput(0)
def test_iirs_npv_mid_mkt_zero(self, curve) -> None:
i_curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.5, dt(2034, 1, 1): 0.4},
index_lag=3,
index_base=100.0,
interpolation="linear_index",
)
name = str(hash(os.urandom(8)))
fixings.add(name=name, series=Series(index=[dt(2000, 1, 1)], data=[1.00]))
iirs = IIRS(
effective=dt(2022, 2, 1),
termination=dt(2022, 7, 1),
payment_lag=0,
notional=1e9,
convention="Act360",
frequency="Q",
stub="ShortFront",
index_lag=3,
index_fixings=name,
)
initial_mid = iirs.rate(curves=[i_curve, curve, curve])
result = iirs.npv(curves=[i_curve, curve, curve, curve])
assert abs(result) < 1e-8
iirs.fixed_rate = iirs.rate(curves=[i_curve, curve, curve])
fixings.pop(name)
fixings.add(name=name, series=Series(index=[dt(2021, 11, 1)], data=[500.0]))
result2 = iirs.npv(curves=[i_curve, curve, curve])
assert result2 > 1
assert iirs.leg1._regular_periods[0].index_params.index_base.value == 500.0
new_mid = iirs.rate(curves=[i_curve, curve, curve])
assert abs(new_mid - initial_mid) > 5.00
def test_cashflows(self, curve) -> None:
i_curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 2, 1): 0.99},
index_lag=3,
index_base=100.0,
interpolation="linear_index",
)
iirs = IIRS(
effective=dt(2022, 2, 1),
termination="9M",
frequency="Q",
index_base=Series([100.0], index=[dt(2021, 11, 1)]),
index_fixings=Series([110.0, 115], index=[dt(2022, 2, 1), dt(2022, 5, 1)]),
index_lag=3,
index_method="monthly",
fixed_rate=1.0,
)
result = iirs.cashflows(curves=[i_curve, curve, curve, curve])
expected = DataFrame(
{
"Index Val": [110.0, 115.0, 100.7754, np.nan, np.nan, np.nan],
"Index Ratio": [1.10, 1.15, 1.00775, np.nan, np.nan, np.nan],
"NPV": [-2682.655, -2869.534, -2488.937, 9849.93, 10070.85, 9963.277],
"Type": ["FixedPeriod"] * 3 + ["FloatPeriod"] * 3,
},
index=MultiIndex.from_tuples(
[("leg1", 0), ("leg1", 1), ("leg1", 2), ("leg2", 0), ("leg2", 1), ("leg2", 2)],
),
)
assert_frame_equal(
expected,
result[["Index Val", "Index Ratio", "NPV", "Type"]],
rtol=1e-3,
)
def test_npv_no_index_base(self, curve) -> None:
i_curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.5, dt(2034, 1, 1): 0.4},
index_lag=3,
index_base=100.0,
interpolation="linear_index",
)
iirs = IIRS(
effective=dt(2022, 2, 1),
termination="1y",
frequency="Q",
fixed_rate=2.0,
index_lag=3,
notional_exchange=False,
)
result = iirs.npv(curves=[i_curve, curve, curve, curve])
expected = 19792.08369745
assert abs(result - expected) < 1e-6
def test_cashflows_no_index_base(self, curve) -> None:
i_curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.5, dt(2034, 1, 1): 0.4},
index_lag=3,
index_base=100.0,
interpolation="linear_index",
)
iirs = IIRS(
effective=dt(2022, 2, 1),
termination="1y",
frequency="Q",
fixed_rate=2.0,
index_lag=3,
notional_exchange=False,
)
result = iirs.cashflows(curves=[i_curve, curve, curve, curve])
for i in range(4):
assert result.iloc[i]["Index Base"] == 200.0
def test_fixings_table(self, curve):
i_curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.5, dt(2034, 1, 1): 0.4},
index_lag=3,
index_base=100.0,
interpolation="linear_index",
)
iirs = IIRS(dt(2022, 1, 15), "6m", "Q", curves=[i_curve, curve, curve])
result = iirs.local_analytic_rate_fixings()
assert isinstance(result, DataFrame)
def test_fixing_in_the_past(self):
# this test will also initialise `index_base` from the provided `index_fixings`
discount = Curve({dt(2025, 5, 15): 1.0, dt(2027, 5, 15): 0.96})
inflation = Curve(
{dt(2025, 4, 1): 1.0, dt(2027, 5, 1): 0.98}, index_base=100.0, index_lag=0
)
fixings = Series(
[97, 98, 99, 100.0],
index=[dt(2025, 1, 1), dt(2025, 2, 1), dt(2025, 3, 1), dt(2025, 4, 1)],
)
iirs = IIRS(
dt(2025, 5, 15),
"1y",
"Q",
index_fixings=fixings,
curves=[inflation, discount, discount],
)
result = iirs.rate()
assert abs(result - 1.9775254614497422) < 1e-8
class TestYoYIS:
def test_index_fixings(self, curve) -> None:
name = str(hash(os.urandom(2)))
fixings.add(
name,
Series(
index=[
dt(2025, 11, 1),
dt(2026, 11, 1),
dt(2027, 11, 1),
dt(2028, 11, 1),
dt(2029, 11, 1),
dt(2030, 11, 1),
],
data=[324.09771, 332.32169, 340.43872, 348.73351, 357.21860, 366.05583],
),
)
yoyis = YoYIS(
effective=dt(2026, 2, 11),
termination="5y",
frequency="A",
fixed_rate=2.473874,
convention="ActActIsda",
leg2_index_lag=3,
leg2_index_method="monthly",
leg2_index_fixings=name,
notional=10e6,
calendar="nyc",
)
expected_cashflows = [253750.018, 244177.225, 244392.329, 242644.969, 247389.973]
cashflows = yoyis.cashflows(curves=[NoInput(0), curve])
for i in range(5):
value = cashflows.loc["leg2", "Cashflow"].iloc[i]
assert abs(value - expected_cashflows[i]) < 1e-2
expected_cashflows = [
-247387.40,
-247311.47,
-248141.10,
-246709.63,
-247387.40,
]
cashflows = yoyis.cashflows(curves=[NoInput(0), curve])
for i in range(5):
value = cashflows.loc["leg1", "Cashflow"].iloc[i]
assert abs(value - expected_cashflows[i]) < 1e-2
npv = yoyis.npv(curves=[NoInput(0), curve])
assert abs(npv + 3002.4397) < 1e-3
rate = yoyis.rate(curves=[NoInput(0), curve])
analytic_delta = yoyis.analytic_delta(curves=[NoInput(0), curve])
assert abs((2.473874 - rate) * analytic_delta * 100.0 - 3002.4397) < 1e-3
def test_cashflows_no_index_base(self, curve) -> None:
i_curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.5, dt(2034, 1, 1): 0.4},
index_lag=3,
index_base=100.0,
interpolation="linear_index",
)
yoyis = YoYIS(
effective=dt(2022, 2, 1),
termination="3y",
frequency="A",
fixed_rate=2.0,
convention="One",
leg2_index_lag=3,
)
result = yoyis.cashflows(curves=[i_curve, curve])
expected = [200.0, 204.193474, 208.386949]
for i in range(3):
assert abs(result.loc["leg2", "Index Base"].iloc[i] - expected[i]) < 1e-6
expected_cashflows = [204.193474 / 200.0, 208.386949 / 204.193474]
for i in range(2):
expected = 1e6 * (expected_cashflows[i] - 1)
assert abs(result.loc["leg2", "Cashflow"].iloc[i] - expected) < 1e-2
def test_yoyis_npv_mid_mkt_zero(self, curve) -> None:
i_curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.5, dt(2034, 1, 1): 0.4},
index_lag=3,
index_base=100.0,
interpolation="linear_index",
)
name = str(hash(os.urandom(8)))
fixings.add(name=name, series=Series(index=[dt(2000, 1, 1)], data=[1.00]))
yoyis = YoYIS(
effective=dt(2022, 2, 1),
termination="3y",
frequency="A",
convention="One",
leg2_index_lag=3,
leg2_index_fixings=name,
leg2_index_method="monthly",
)
initial_mid = yoyis.rate(curves=[i_curve, curve])
result = yoyis.npv(curves=[i_curve, curve])
assert abs(result) < 1e-8
yoyis.fixed_rate = initial_mid
fixings.pop(name)
fixings.add(name=name, series=Series(index=[dt(2021, 11, 1)], data=[500.0]))
result2 = yoyis.npv(curves=[i_curve, curve])
assert result2 < 500000
assert yoyis.leg2._regular_periods[0].index_params.index_base.value == 500.0
new_mid = yoyis.rate(curves=[i_curve, curve])
assert new_mid - initial_mid < -20.0
def test_fixings_table(self, curve):
i_curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.5, dt(2034, 1, 1): 0.4},
index_lag=3,
index_base=100.0,
interpolation="linear_index",
)
yoyis = YoYIS(dt(2022, 1, 15), "6m", "Q", curves=[i_curve, curve])
result = yoyis.local_analytic_rate_fixings()
assert result.empty
class TestSBS:
def test_sbs_npv(self, curve) -> None:
sbs = SBS(dt(2022, 1, 1), "9M", "Q", float_spread=3.0)
a_delta = sbs.analytic_delta(curves=[curve, curve, curve], leg=1)
npv = sbs.npv(curves=[curve, curve, curve])
assert abs(npv + 3.0 * a_delta) < 1e-9
sbs.leg2_float_spread = 4.5
npv = sbs.npv(curves=[curve, curve, curve])
assert abs(npv - 1.5 * a_delta) < 1e-9
def test_sbs_rate(self, curve) -> None:
sbs = SBS(dt(2022, 1, 1), "9M", "Q", float_spread=3.0)
result = sbs.rate(curves=[curve] * 3)
alias = sbs.spread(curves=[curve] * 3)
assert abs(result - 0) < 1e-8
assert abs(alias - 0) < 1e-8
result = sbs.rate(curves=[curve] * 3, metric="leg2_float_spread")
alias = sbs.rate(curves=[curve] * 3, metric="leg2_float_spread")
assert abs(result - 3.0) < 1e-8
assert abs(alias - 3.0) < 1e-8
def test_sbs_cashflows(self, curve) -> None:
sbs = SBS(dt(2022, 1, 1), "9M", "Q", float_spread=3.0)
result = sbs.cashflows(curves=[curve] * 3)
expected = DataFrame(
{
"Type": ["FloatPeriod", "FloatPeriod"],
"Period": ["Regular", "Regular"],
"Spread": [3.0, 0.0],
},
index=MultiIndex.from_tuples([("leg1", 0), ("leg2", 2)]),
)
assert_frame_equal(
result.loc[[("leg1", 0), ("leg2", 2)], ["Type", "Period", "Spread"]],
expected,
)
@pytest.mark.skip(reason="exceptions are no longer raised for unexpected attributes.")
def test_sbs_fixed_rate_raises(self, curve) -> None:
sbs = SBS(dt(2022, 1, 1), "9M", "Q", float_spread=3.0)
with pytest.raises(AttributeError, match="property 'fixed_rate' of 'SBS' object has no se"):
sbs.fixed_rate = 1.0
with pytest.raises(AttributeError, match="property 'leg2_fixed_rate' of 'SBS' object has"):
sbs.leg2_fixed_rate = 1.0
def test_fixings_table(self, curve):
inst = SBS(dt(2022, 1, 15), "6m", spec="usd_irs", curves=[curve] * 3)
result = inst.local_analytic_rate_fixings()
assert isinstance(result, DataFrame)
def test_fixings_table_3s1s(self, curve, curve2):
inst = SBS(
dt(2022, 1, 15),
"6m",
fixing_method="ibor(0)",
leg2_fixing_method="ibor(1)",
frequency="Q",
leg2_frequency="m",
curves=[curve, curve, curve2, curve],
)
result = inst.local_analytic_rate_fixings()
assert isinstance(result, DataFrame)
assert len(result.columns) == 2
assert len(result.index) == 8
class TestFRA:
def test_fra_rate(self, curve) -> None:
# test the mid-market rate ignores the given fixed_rate and reacts to float_spread
fra = FRA(
effective=dt(2022, 1, 1),
termination=dt(2022, 7, 1),
notional=1e9,
convention="Act360",
frequency="S",
fixed_rate=4.00,
)
result = fra.rate(curves=curve)
expected = 4.0590821964144
assert abs(result - expected) < 1e-7
def test_fra_rate_with_spec(self):
curve = Curve(
{dt(2026, 1, 14): 1.0, dt(2027, 1, 14): 0.98},
calendar="stk",
convention="act360",
)
fra = FRA(get_imm(code="H26"), get_imm(code="M26"), spec="sek_fra3", curves="sek_3m")
result = fra.rate(curves=curve)
expected = 1.9976777500828364
assert fra.leg1.settlement_params.notional == 1e6
assert fra.leg2.settlement_params.notional == -1e6
assert abs(result - expected) < 1e-5
def test_negated_notional(self):
fra = FRA(
get_imm(code="H26"), get_imm(code="M26"), spec="sek_fra3", curves="sek_3m", notional=7.0
)
assert fra.leg1.settlement_params.notional == 7.0
assert fra.leg2.settlement_params.notional == -7.0
def test_fra_npv(self, curve) -> None:
fra = FRA(
effective=dt(2022, 1, 1),
termination="6m",
payment_lag=2,
notional=1e9,
convention="Act360",
modifier="mf",
frequency="S",
fixed_rate=4.035,
)
result = fra.npv(curves=curve)
expected = fra.analytic_delta(curves=curve) * (4.035 - fra.rate(curves=curve)) * -100
assert abs(result - expected) < 1e-8
assert abs(result - 118631.8350458332) < 1e-7
def test_fra_cashflows(self, curve) -> None:
fra = FRA(
effective=dt(2022, 1, 1),
termination=dt(2022, 7, 1),
payment_lag=2,
notional=1e9,
convention="Act360",
frequency="s",
fixed_rate=4.035,
)
result = fra.cashflows(curves=curve)
assert isinstance(result, DataFrame)
assert result.index.nlevels == 2
def test_fra_cashflows_with_rate_fixing(self) -> None:
fra = FRA(
effective=dt(2022, 1, 1),
termination=dt(2022, 7, 1),
payment_lag=2,
notional=1e9,
convention="Act360",
frequency="s",
fixed_rate=4.035,
leg2_rate_fixings=2.99,
)
result = fra.cashflows()
assert isinstance(result, DataFrame)
subsample = result.loc[:, "Cashflow"]
d = 181.0 / 360.0
x = 0.04035 * d / (1 + d * 0.0299)
y = 0.0299 * d / (1 + d * 0.0299)
for a, b in zip(subsample, [-1e9 * x, 1e9 * y]):
assert abs(a - b) < 1e-4
def test_irs_npv_mid_mkt_zero(self, curve) -> None:
fra = FRA(
effective=dt(2022, 1, 1),
termination=dt(2022, 7, 1),
payment_lag=2,
notional=1e9,
convention="Act360",
frequency="S",
)
result = fra.npv(curves=curve)
assert abs(result) < 1e-9
fra.fixed_rate = 1.0 # pay fixed low rate implies positive NPV
assert fra.npv(curves=curve) > 1
fra.fixed_rate = NoInput(0) # fixed rate set back to initial
assert abs(fra.npv(curves=curve)) < 1e-9
@pytest.mark.parametrize(("eom", "exp"), [(True, dt(2021, 5, 31)), (False, dt(2021, 5, 26))])
def test_fra_roll_inferral(self, eom, exp) -> None:
fra = FRA(
effective=dt(2021, 2, 26),
termination="3m",
frequency="Q",
eom=eom,
calendar="bus",
)
assert fra.leg1.schedule.termination == exp
def test_imm_dated(self):
FRA(effective=dt(2024, 12, 18), termination=dt(2025, 3, 19), spec="sek_fra3", roll="imm")
def test_fra_fixings_table(self, curve) -> None:
fra = FRA(
effective=dt(2022, 1, 1),
termination="6m",
payment_lag=2,
notional=1e9,
convention="Act360",
modifier="mf",
frequency="S",
fixed_rate=4.035,
curves=curve,
)
result = fra.local_analytic_rate_fixings(curves=curve)
assert isinstance(result, DataFrame)
def test_imm_dated_fixings_table(self, curve):
# This is an IMM FRA: the DCF is different to standard tenor.
fra = FRA(
effective=dt(2024, 12, 18),
termination=dt(2025, 3, 19),
spec="sek_fra3",
roll="imm",
curves=curve,
notional=1e9,
)
result = fra.local_analytic_rate_fixings()
analytic_delta = fra.analytic_delta()
assert isinstance(result, DataFrame)
assert abs(result.iloc[0, 0] - analytic_delta) < 1
def test_fra_ex_div_and_payment(self):
fra = FRA(
effective=dt(2024, 12, 18),
termination=dt(2025, 3, 19),
spec="sek_fra3",
roll="imm",
curves=curve,
notional=1e9,
payment_lag=2,
ex_div=-1,
)
assert fra.leg1.periods[0].period_params.start == dt(2024, 12, 18)
assert fra.leg1.periods[0].settlement_params.payment == dt(2024, 12, 20)
assert fra.leg1.periods[0].settlement_params.ex_dividend == dt(2024, 12, 19)
def test_fra_cashflows_no_curve(self):
fra = FRA(
effective=dt(2000, 1, 1),
termination="6m",
spec="eur_fra6",
fixed_rate=2.0,
)
assert isinstance(fra.cashflows(), DataFrame)
class TestZCS:
@pytest.mark.parametrize(("freq", "exp"), [("Q", 3.53163356950), ("S", 3.54722411409218)])
def test_zcs_rate(self, freq, exp) -> None:
usd = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2027, 1, 1): 0.85, dt(2032, 1, 1): 0.70},
id="usd",
calendar="bus",
)
zcs = ZCS(
effective=dt(2022, 1, 1),
termination="10Y",
frequency=freq,
leg2_frequency="Q",
calendar="bus",
modifier="MF",
currency="usd",
fixed_rate=4.0,
convention="Act360",
notional=100e6,
curves=["usd"],
)
result = zcs.rate(curves=usd)
assert abs(result - exp) < 1e-7
def test_zcs_analytic_delta(self) -> None:
usd = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2027, 1, 1): 0.85, dt(2032, 1, 1): 0.70},
id="usd",
)
zcs = ZCS(
effective=dt(2022, 1, 1),
termination="10Y",
frequency="Q",
leg2_frequency="Q",
calendar="nyc",
currency="usd",
fixed_rate=4.0,
convention="Act360",
notional=100e6,
curves=["usd"],
)
result = zcs.analytic_delta(curves=usd)
expected = 105186.21760654295
assert abs(result - expected) < 1e-7
def test_zcs_raise_frequency(self) -> None:
with pytest.raises(ValueError, match="`frequency` for a ZeroFixedLeg should not be 'Z'."):
ZCS(
effective=dt(2022, 1, 5),
termination="10Y",
modifier="mf",
frequency="Z",
fixed_rate=4.22566695954813,
)
def test_fixings_table(self, curve):
zcs = ZCS(
effective=dt(2022, 1, 15),
termination="2y",
frequency="Q",
leg2_fixing_method="ibor(2)",
calendar="all",
convention="30e360",
leg2_convention="30e360",
leg2_fixing_series="eur_ibor",
curves=curve,
)
result = zcs.local_analytic_rate_fixings()
assert isinstance(result, DataFrame)
for i in range(8):
abs(result.iloc[i, 0] - 24.678) < 1e-3
class TestZCIS:
def test_leg2_index_base(self, curve) -> None:
i_curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99},
index_base=200.0,
interpolation="linear_index",
index_lag=3,
)
zcis = ZCIS(
effective=dt(2022, 1, 1),
termination="9m",
frequency="Q",
)
prior = zcis.rate(curves=[curve, curve, i_curve, curve])
zcis = ZCIS(
effective=dt(2022, 1, 1),
termination="9m",
frequency="Q",
leg2_index_base=100.0,
)
result = zcis.rate(curves=[curve, curve, i_curve, curve])
assert result > (prior + 100)
def test_solver_failure_unspecified_index_base(self, curve) -> None:
# GH 349
curve = Curve({dt(2022, 1, 15): 1.0, dt(2023, 1, 1): 0.98})
i_curve = Curve(
{dt(2022, 1, 15): 1.0, dt(2023, 1, 1): 0.99},
index_base=200.0,
interpolation="linear_index",
)
zcis = ZCIS(
effective=dt(2022, 1, 15),
termination="9m",
frequency="A",
convention="1+",
calendar="nyc",
leg2_index_method="monthly",
currency="usd",
curves=[curve, curve, i_curve, curve],
leg2_index_lag=3,
)
with pytest.raises(ZeroDivisionError): # noqa: SIM117
with pytest.warns(
UserWarning, match="The date queried on the Curve for an `index_value` is prior"
):
zcis.rate()
def test_fixing_in_the_past(self):
# this test will also initialise `index_base` from the provided `index_fixings`
discount = Curve({dt(2025, 5, 15): 1.0, dt(2027, 5, 15): 0.96})
inflation = Curve(
{dt(2025, 4, 1): 1.0, dt(2027, 5, 1): 0.98}, index_base=100.0, index_lag=0
)
name = str(hash(os.urandom(8)))
fixings.add(
name,
Series(
[97, 98, 99, 100.0],
index=[dt(2025, 1, 1), dt(2025, 2, 1), dt(2025, 3, 1), dt(2025, 4, 1)],
),
)
zcis = ZCIS(dt(2025, 5, 15), "1y", spec="eur_zcis", leg2_index_fixings=name)
result = zcis.rate(curves=[inflation, discount])
assert abs(result - 2.8742266148532813) < 1e-8
class TestValue:
def test_npv_adelta_cashflows_raises(self) -> None:
value = Value(dt(2022, 1, 1))
with pytest.raises(NotImplementedError):
value.npv()
with pytest.raises(NotImplementedError):
value.cashflows()
with pytest.raises(NotImplementedError):
value.analytic_delta()
def test_cc_zero_rate(self, curve) -> None:
v = Value(effective=dt(2022, 7, 1), metric="cc_zero_rate")
result = v.rate(curves=curve)
t = (dt(2022, 7, 1) - dt(2022, 1, 1)).days / 360
expected = 100 * dual_log(curve[dt(2022, 7, 1)]) / -t
assert abs(result - expected) < 1e-12
def test_on_rate(self, curve) -> None:
c = Curve({dt(2000, 1, 1): 1.0, dt(2000, 7, 1): 1.0})
v = Value(effective=dt(2000, 2, 1), metric="o/n_rate")
result = v.rate(curves=c)
expected = 0.0
assert abs(result - expected) < 1e-8
def test_index_value(self) -> None:
curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.995},
id="eu_cpi",
index_base=100.0,
interpolation="linear_index",
)
v = Value(effective=dt(2022, 7, 1), metric="index_value")
result = v.rate(curves=curve)
expected = 100.24919116128588
assert result == expected
def test_value_raise(self, curve) -> None:
with pytest.raises(ValueError):
Value(effective=dt(2022, 7, 1), metric="bad").rate(curves=curve)
class TestFXForward:
def test_cashflows(self) -> None:
fxe = FXForward(
settlement=dt(2022, 10, 1),
pair="eurusd",
notional=-1e6,
fx_rate=2.05,
)
result = fxe.cashflows()
expected = DataFrame(
{
"Type": ["Cashflow", "Cashflow"],
"Ccy": ["EUR", "USD"],
"Payment": [dt(2022, 10, 1), dt(2022, 10, 1)],
"Notional": [1e6, -1e6],
"FX Fixing": [None, 2.05],
"Cashflow": [-1e6, 2050000.0],
},
index=MultiIndex.from_tuples([("leg1", 0), ("leg2", 0)]),
)
result = result[["Type", "Ccy", "Payment", "Notional", "FX Fixing", "Cashflow"]]
assert_frame_equal(result, expected, rtol=1e-6)
@pytest.mark.parametrize(
("base", "fx"),
[
("usd", FXRates({"eurusd": 1.20})),
("eur", FXRates({"eurusd": 1.20})),
],
)
def test_npv_at_mid_market(self, curve, curve2, base, fx) -> None:
fxe = FXForward(
settlement=dt(2022, 3, 1),
pair="eurusd",
fx_rate=1.2080131682341035,
)
result = fxe.npv(
curves=[NoInput(0), curve, NoInput(0), curve2],
fx=fx,
base=base,
local=False,
)
assert abs(result - 0.0) < 1e-8
def test_rate(self, curve, curve2) -> None:
fxe = FXForward(
settlement=dt(2022, 3, 1),
pair="eurusd",
fx_rate=1.2080131682341035,
)
result = fxe.rate(
curves=[NoInput(0), curve, NoInput(0), curve2], fx=FXRates({"eurusd": 1.20})
)
expected = 1.2080131682341035
assert abs(result - expected) < 1e-7
def test_npv_fx_numeric(self, curve) -> None:
# This demonstrates the ambiguity and poor practice of
# using numeric fx as pricing input, although it will return.
fxe = FXForward(
settlement=dt(2022, 3, 1),
pair="eurusd",
fx_rate=1.2080131682341035,
notional=-1e6,
)
# # real_result_ = fxe.npv(curves=[curve] * 4, fx=FXRates({"eurusd": 2.0}), local=True)
with pytest.warns(
DeprecationWarning,
match="Supplying `fx` as numeric is ambiguous, particularly with multi-curre",
):
fxe.npv(curves=[curve] * 4, fx=2.0, base="bad")
def test_npv_no_fx_raises(self, curve) -> None:
fxe = FXForward(
settlement=dt(2022, 3, 1),
pair="eurusd",
fx_rate=1.2080131682341035,
)
with pytest.raises(
ValueError,
match=r"`base` \(eur\) cannot be requested without supplying `fx` as a valid FXRates",
):
fxe.npv(curves=[curve, curve])
def test_notional_direction(self, curve, curve2) -> None:
fx1 = FXForward(notional=1e6, pair="eurusd", settlement=dt(2022, 1, 1), fx_rate=1.20)
fx2 = FXForward(notional=-1e6, pair="eurusd", settlement=dt(2022, 1, 1), fx_rate=1.30)
pf = Portfolio([fx1, fx2])
fx = FXRates({"eurusd": 1.30}, base="usd")
result = pf.npv(curves=[None, curve, None, curve2], fx=fx)
expected = 100000.0 / 1.30
assert abs(result - expected) < 1e-8
result = pf.npv(curves=[None, curve, None, curve2], fx=fx, base="usd")
expected = 100000.0
assert abs(result - expected) < 1e-8
def test_analytic_delta_is_zero(self, curve, curve2) -> None:
result = FXForward(
settlement=dt(2022, 3, 1),
pair="eurusd",
fx_rate=1.2080131682341035,
).analytic_delta(curves=[curve, curve2])
assert abs(result - 0.0) < 1e-8
def test_error_msg_for_no_fx(self) -> None:
eur = Curve({dt(2024, 6, 20): 1.0, dt(2024, 9, 30): 1.0}, calendar="tgt")
usd = Curve({dt(2024, 6, 20): 1.0, dt(2024, 9, 30): 1.0}, calendar="nyc")
eurusd = Curve({dt(2024, 6, 20): 1.0, dt(2024, 9, 30): 1.0})
with pytest.raises(ValueError, match="`fx` must be supplied to price FXExchange"):
Solver(
curves=[eur, usd, eurusd],
instruments=[
IRS(dt(2024, 6, 24), "3m", spec="eur_irs", curves=eur),
IRS(dt(2024, 6, 24), "3m", spec="usd_irs", curves=usd),
FXForward(
pair="eurusd",
settlement=dt(2024, 9, 24),
curves=[None, eurusd, None, usd],
),
],
s=[3.77, 5.51, 1.0775],
)
def test_leg2_notional(self, curve, curve2) -> None:
fx1 = FXForward(
leg2_notional=-1.2e6, pair="eurusd", settlement=dt(2022, 1, 1), fx_rate=1.20
)
fx2 = FXForward(leg2_notional=1.3e6, pair="eurusd", settlement=dt(2022, 1, 1), fx_rate=1.30)
pf = Portfolio([fx1, fx2])
fx = FXRates({"eurusd": 1.30}, base="usd")
result = pf.npv(curves=[None, curve, None, curve2], fx=fx)
expected = 100000.0 / 1.30
assert abs(result - expected) < 1e-8
result = pf.npv(curves=[None, curve, None, curve2], fx=fx, base="usd")
expected = 100000.0
assert abs(result - expected) < 1e-8
class TestNDF:
def test_2ccy_constructions(self):
# no notionals or fx_rate, notional=1mm by default, pricing set at price time.
a1 = NDF(pair="eurusd", currency="eur", settlement=dt(2000, 1, 1))
a2 = NDF(pair="eurusd", currency="eur", notional=1e6, settlement=dt(2000, 1, 1))
assert a1.kwargs.leg1["notional"] == 1e6
assert a2.kwargs.leg1["notional"] == 1e6
assert a1.kwargs.leg2["notional"] == NoInput(0)
assert a2.kwargs.leg2["notional"] == NoInput(0)
assert a1.kwargs.meta["fx_rate"] == NoInput(0)
assert a2.kwargs.meta["fx_rate"] == NoInput(0)
# no notional with fx_rate, notional=1mm by default and ==> leg2_notional
b1 = NDF(pair="eurusd", currency="eur", fx_rate=2.0, settlement=dt(2000, 1, 1))
b2 = NDF(
pair="eurusd", currency="eur", fx_rate=2.0, notional=1e6, settlement=dt(2000, 1, 1)
)
b3 = NDF(
pair="eurusd",
currency="eur",
fx_rate=2.0,
leg2_notional=-2e6,
settlement=dt(2000, 1, 1),
)
assert b1.kwargs.leg1["notional"] == 1e6
assert b2.kwargs.leg1["notional"] == 1e6
assert b3.kwargs.leg1["notional"] == 1e6
assert b1.kwargs.leg2["notional"] == -2e6
assert b2.kwargs.leg2["notional"] == -2e6
assert b3.kwargs.leg2["notional"] == -2e6
assert b1.kwargs.meta["fx_rate"] == 2.0
assert b2.kwargs.meta["fx_rate"] == 2.0
assert b3.kwargs.meta["fx_rate"] == 2.0
# reversed pair
c1 = NDF(
pair="usdeur", currency="eur", fx_rate=0.5, notional=-2e6, settlement=dt(2000, 1, 1)
)
c2 = NDF(
pair="usdeur", currency="eur", fx_rate=0.5, leg2_notional=1e6, settlement=dt(2000, 1, 1)
)
assert c1.kwargs.leg1["notional"] == -2e6
assert c2.kwargs.leg1["notional"] == -2e6
assert c1.kwargs.leg2["notional"] == 1e6
assert c2.kwargs.leg2["notional"] == 1e6
assert c1.kwargs.meta["fx_rate"] == 0.5
assert c2.kwargs.meta["fx_rate"] == 0.5
# 2 notionals imply fx rate
d1 = NDF(
pair="eurusd",
currency="eur",
notional=-1e6,
leg2_notional=2e6,
settlement=dt(2000, 1, 1),
)
d2 = NDF(
pair="usdeur",
currency="eur",
notional=2e6,
leg2_notional=-1e6,
settlement=dt(2000, 1, 1),
)
assert d1.kwargs.meta["fx_rate"] == 2.0
assert d2.kwargs.meta["fx_rate"] == 0.5
def test_construction(self) -> None:
ndf = NDF(
pair=FXIndex("brlusd", "all", 0),
settlement=dt(2022, 1, 1),
fx_rate=1.20,
fx_fixings=2.25,
notional=1e6, # <- should be expressed in BRL
currency="usd",
)
assert ndf.leg1.periods[0].settlement_params.currency == "usd"
assert ndf.leg1.periods[0].non_deliverable_params.reference_currency == "brl"
assert ndf.leg1.periods[0].non_deliverable_params.fx_reversed is False
# value is 1.05mm usd
c = Curve({dt(2022, 1, 1): 1.0, dt(2022, 7, 1): 1.0})
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2022, 7, 1): 1.0})
fxf = FXForwards(
fx_rates=FXRates({"brlusd": 2.20}, settlement=dt(2022, 1, 1)),
fx_curves={"brlbrl": c2, "brlusd": c2, "usdusd": c},
)
result = ndf.npv(curves=[c], fx=fxf) # value should be expressed in USD
assert abs(result - 1000000.0 * (2.25 - 1.2)) < 1e-8
def test_construction_opposite(self) -> None:
ndf = NDF(
pair=FXIndex("brlusd", "all", 0),
settlement=dt(2022, 1, 1),
fx_rate=1.20,
leg2_fx_fixings=2.25,
notional=1e6, # <- should be expressed in BRL
currency="brl",
)
assert ndf.leg2.periods[0].settlement_params.currency == "brl"
assert ndf.leg2.periods[0].non_deliverable_params.reference_currency == "usd"
assert ndf.leg2.periods[0].non_deliverable_params.fx_reversed is True
# value is 1.05mm usd
c = Curve({dt(2022, 1, 1): 1.0, dt(2022, 7, 1): 1.0})
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2022, 7, 1): 1.0})
fxf = FXForwards(
fx_rates=FXRates({"brlusd": 2.20}, settlement=dt(2022, 1, 1)),
fx_curves={"brlbrl": c2, "brlusd": c2, "usdusd": c},
)
result = ndf.npv(curves=[c], fx=fxf) # value should be expressed in BRL
assert abs(result - 1000000.0 * (2.25 - 1.2) / 2.25) < 1e-8
def test_construction_reversed(self) -> None:
ndf = NDF(
pair=FXIndex("usdbrl", "all", 0),
settlement=dt(2022, 1, 1),
currency="usd",
fx_rate=1.20,
leg2_fx_fixings=2.25,
notional=1e6, # <- should be expressed in USD
)
assert ndf.leg1.periods[0].settlement_params.currency == "usd"
assert ndf.leg2.periods[0].non_deliverable_params.reference_currency == "brl"
assert ndf.leg2.periods[0].non_deliverable_params.fx_reversed is True
# value is 1.05mm BRL
c = Curve({dt(2022, 1, 1): 1.0, dt(2022, 7, 1): 1.0})
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2022, 7, 1): 1.0})
fxf = FXForwards(
fx_rates=FXRates({"brlusd": 2.20}, settlement=dt(2022, 1, 1)),
fx_curves={"brlbrl": c2, "brlusd": c2, "usdusd": c},
)
result = ndf.npv(curves=[c], fx=fxf) # value should be expressed in USD
assert abs(result - 1000000.0 * (2.25 - 1.2) / 2.25) < 1e-8
def test_construction_reversed_opposite(self) -> None:
ndf = NDF(
pair=FXIndex("usdbrl", "all", 0),
settlement=dt(2022, 1, 1),
currency="brl",
fx_rate=1.20,
fx_fixings=2.25,
notional=1e6, # <- should be expressed in USD
)
assert ndf.leg1.periods[0].settlement_params.currency == "brl"
assert ndf.leg1.periods[0].non_deliverable_params.reference_currency == "usd"
assert ndf.leg1.periods[0].non_deliverable_params.fx_reversed is False
# value is 1.05mm BRL
c = Curve({dt(2022, 1, 1): 1.0, dt(2022, 7, 1): 1.0})
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2022, 7, 1): 1.0})
fxf = FXForwards(
fx_rates=FXRates({"brlusd": 2.20}, settlement=dt(2022, 1, 1)),
fx_curves={"brlbrl": c2, "brlusd": c2, "usdusd": c},
)
result = ndf.npv(curves=[c], fx=fxf) # value should be expressed in USD
assert abs(result - 1000000.0 * (2.25 - 1.2)) < 1e-8
def test_3ccy_constructions(self):
# no notionals or fx_rate, notional=1mm by default, pricing set at price time.
a1 = NDF(
pair="seknok", currency="usd", notional=1e6, fx_rate=2.0, settlement=dt(2000, 1, 1)
)
a2 = NDF(
pair="seknok",
currency="usd",
notional=1e6,
fx_rate=2.0,
reversed=True,
settlement=dt(2000, 1, 1),
)
a3 = NDF(
pair="seknok",
currency="usd",
notional=1e6,
fx_rate=2.0,
leg2_reversed=True,
settlement=dt(2000, 1, 1),
)
a4 = NDF(
pair="seknok",
currency="usd",
notional=1e6,
fx_rate=2.0,
reversed=True,
leg2_reversed=True,
settlement=dt(2000, 1, 1),
)
assert a1.leg1.periods[0].non_deliverable_params.fx_index.pair == "usdsek"
assert a1.leg2.periods[0].non_deliverable_params.fx_index.pair == "usdnok"
assert a2.leg1.periods[0].non_deliverable_params.fx_index.pair == "sekusd"
assert a2.leg2.periods[0].non_deliverable_params.fx_index.pair == "usdnok"
assert a3.leg1.periods[0].non_deliverable_params.fx_index.pair == "usdsek"
assert a3.leg2.periods[0].non_deliverable_params.fx_index.pair == "nokusd"
assert a4.leg1.periods[0].non_deliverable_params.fx_index.pair == "sekusd"
assert a4.leg2.periods[0].non_deliverable_params.fx_index.pair == "nokusd"
@pytest.mark.parametrize(
("lag", "eval1", "exp2"),
[
(2, dt(2009, 8, 11), dt(2009, 11, 13)),
(3, dt(2009, 8, 10), dt(2009, 11, 13)),
],
)
def test_dates(self, lag, eval1, exp2):
ndf = NDF(
pair=FXIndex("eurusd", "tgt|fed", lag),
settlement="3m",
eval_date=eval1,
currency="usd",
)
assert ndf.leg1.periods[0].settlement_params.payment == exp2
@pytest.mark.parametrize(
("eom", "exp"),
[
(True, dt(2025, 5, 30)),
(False, dt(2025, 5, 28)),
],
)
def test_roll(self, eom, exp):
ndf = NDF(
pair="eurusd",
settlement="3m",
eval_date=dt(2025, 2, 26),
currency="usd",
eom=eom,
)
assert ndf.leg1.periods[0].settlement_params.payment == exp
def test_zero_analytic_delta(self):
curve = Curve({dt(2009, 1, 1): 1.0, dt(2020, 1, 1): 1.0})
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.10}, settlement=dt(2009, 1, 1)),
fx_curves={"eureur": curve, "eurusd": curve, "usdusd": curve},
)
ndf = NDF(
pair="eurusd",
settlement="3m",
eval_date=dt(2009, 8, 13),
currency="usd",
)
assert ndf.analytic_delta(curves=curve, fx=fxf) == 0.0
@pytest.mark.skip(reason="v2.5 allows third currency settlement currency.")
def test_bad_currency_raises(self):
with pytest.raises(ValueError, match="`currency` must be one of the currencies in `pair`."):
NDF(
pair="eurusd",
currency="jpy",
settlement="3m",
eval_date=dt(2009, 8, 13),
)
def test_cashflows(self, usdusd, usdeur, eureur):
fxf = FXForwards(
FXRates({"eurusd": 1.02}, settlement=dt(2022, 1, 3)),
{"eureur": eureur, "usdeur": usdeur, "usdusd": usdusd},
)
ndf = NDF(
pair="eurusd",
settlement="3m",
eval_date=dt(2022, 1, 1),
currency="usd",
fx_rate=1.05,
)
result = ndf.cashflows(curves=usdusd, fx=fxf)
assert result.loc[("leg1", 0), "Type"] == "Cashflow"
assert result.loc[("leg1", 0), "Notional"] == -1e6
assert result.loc[("leg1", 0), "Ccy"] == "USD"
assert result.loc[("leg1", 0), "Reference Ccy"] == "EUR"
assert result.loc[("leg1", 0), "Payment"] == dt(2022, 4, 4)
assert result.loc[("leg1", 0), "FX Fixing"] == 1.0210354810081033
assert result.loc[("leg2", 0), "Notional"] == 1050000.0
assert result.loc[("leg2", 0), "Ccy"] == "USD"
@pytest.mark.parametrize(("base", "expected"), [("eur", -28103.831), ("usd", -28665.269)])
def test_npv(self, usdusd, usdeur, eureur, base, expected):
fxf = FXForwards(
FXRates({"eurusd": 1.02}, settlement=dt(2022, 1, 3)),
{"eureur": eureur, "usdeur": usdeur, "usdusd": usdusd},
)
ndf = NDF(
pair="eurusd",
settlement="3m",
eval_date=dt(2022, 1, 1),
currency="usd",
fx_rate=1.05,
notional=1e6,
)
result = ndf.npv(curves=usdusd, fx=fxf, base=base)
assert abs(result - expected) < 1e-3
expected = {"usd": -28665.269}
local_result = ndf.npv(curves=usdusd, fx=fxf, base=base, local=True)
assert len(local_result.keys()) == 1
assert abs(local_result["usd"] - expected["usd"]) < 1e-3
@pytest.mark.parametrize(("base", "expected"), [("eur", -28103.831), ("usd", -28665.269)])
def test_npv_leg2_notional(self, usdusd, usdeur, eureur, base, expected):
# same test as above expressed with leg2 notional
fxf = FXForwards(
FXRates({"eurusd": 1.02}, settlement=dt(2022, 1, 3)),
{"eureur": eureur, "usdeur": usdeur, "usdusd": usdusd},
)
ndf = NDF(
pair="eurusd",
settlement="3m",
eval_date=dt(2022, 1, 1),
currency="usd",
fx_rate=1.05,
# notional=1e6,
leg2_notional=-1.05e6,
)
result = ndf.npv(curves=usdusd, fx=fxf, base=base)
assert abs(result - expected) < 1e-3
expected = {"usd": -28665.269}
local_result = ndf.npv(curves=usdusd, fx=fxf, base=base, local=True)
assert len(local_result.keys()) == 1
assert abs(local_result["usd"] - expected["usd"]) < 1e-3
@pytest.mark.parametrize(("pair", "rate"), [("eurusd", 1.05), ("usdeur", 0.952380952)])
def test_npv_direction(self, usdusd, usdeur, eureur, pair, rate):
fxf = FXForwards(
FXRates({"eurusd": 1.02}, settlement=dt(2022, 1, 3)),
{"eureur": eureur, "usdeur": usdeur, "usdusd": usdusd},
)
ndf = NDF(
pair=pair,
settlement="3m",
eval_date=dt(2022, 1, 1),
currency="usd",
fx_rate=rate,
notional=1e6 if pair[:3] == "eur" else -1e6 / rate,
)
result = ndf.npv(curves=usdusd, fx=fxf)
expected = -28665.26900
assert abs(result - expected) < 1e-3
@pytest.mark.parametrize(("base", "expected"), [("eur", 0.0), ("usd", 0.0)])
def test_npv_unpriced(self, usdusd, usdeur, eureur, base, expected):
fxf = FXForwards(
FXRates({"eurusd": 1.02}, settlement=dt(2022, 1, 3)),
{"eureur": eureur, "usdeur": usdeur, "usdusd": usdusd},
)
ndf = NDF(
pair="eurusd",
settlement="3m",
eval_date=dt(2022, 1, 1),
currency="usd",
)
result = ndf.npv(curves=usdusd, fx=fxf, base=base)
assert abs(result - expected) < 1e-3
local_result = ndf.npv(curves=usdusd, fx=fxf, base=base, local=True)
expected = {"usd": 0.0}
assert len(local_result.keys()) == 1
assert abs(local_result["usd"] - expected["usd"]) < 1e-3
@pytest.mark.parametrize(("base", "expected"), [("eur", 0.0), ("usd", 0.0)])
def test_npv_unpriced_leg2_notional(self, usdusd, usdeur, eureur, base, expected):
fxf = FXForwards(
FXRates({"eurusd": 1.02}, settlement=dt(2022, 1, 3)),
{"eureur": eureur, "usdeur": usdeur, "usdusd": usdusd},
)
ndf = NDF(
pair="eurusd",
settlement="3m",
eval_date=dt(2022, 1, 1),
currency="usd",
leg2_notional=-1e6,
)
result = ndf.npv(curves=usdusd, fx=fxf, base=base)
assert abs(result - expected) < 1e-3
local_result = ndf.npv(curves=usdusd, fx=fxf, base=base, local=True)
expected = {"usd": 0.0}
assert len(local_result.keys()) == 1
assert abs(local_result["usd"] - expected["usd"]) < 1e-3
def test_rate(self, usdusd, usdeur, eureur):
fxf = FXForwards(
FXRates({"eurusd": 1.02}, settlement=dt(2022, 1, 3)),
{"eureur": eureur, "usdeur": usdeur, "usdusd": usdusd},
)
ndf = NDF(
pair="eurusd",
settlement="3m",
eval_date=dt(2022, 1, 1),
currency="usd",
)
result = ndf.rate(curves=usdusd, fx=fxf)
expected = 1.021035
assert abs(result - expected) < 1e-6
def test_raising(self):
with pytest.raises(ValueError, match="`notional`, `leg2_notional` and `fx_rate` cannot"):
NDF(
pair="eurusd",
settlement=dt(2000, 1, 1),
notional=1e6,
leg2_notional=-1e6,
fx_rate=1.05,
)
with pytest.raises(ValueError, match="Leg1 of NDF is directly deliverable"):
NDF(pair="eurusd", settlement=dt(2000, 1, 1), notional=1e6, fx_fixings=1.0)
with pytest.raises(ValueError, match="Leg2 of NDF is directly deliverable"):
NDF(
pair="eurusd",
currency="usd",
settlement=dt(2000, 1, 1),
notional=1e6,
leg2_fx_fixings=1.0,
)
with pytest.raises(ValueError, match="When providing `notional` and `leg2_notional` on an"):
NDF(pair="eurusd", settlement=dt(2000, 1, 1), notional=1e6, leg2_notional=1.2e6)
# test the commented out FXSwap variant
# def test_fx_swap(curve, curve2):
# fxs = FXSwap(dt(2022, 1, 15), "3M", notional=1000, fx_fixing_points=(10.1, 105),
# currency="eur", leg2_currency="sek")
# assert len(fxs.leg1.periods) == 2
# assert len(fxs.leg2.periods) == 2
#
# assert fxs.leg1.periods[0].notional == 1000
# assert fxs.leg1.periods[0].payment == dt(2022, 1, 15)
# assert fxs.leg1.periods[1].notional == -1000
# assert fxs.leg1.periods[1].payment == dt(2022, 4, 15)
#
# assert fxs.leg2.periods[0].notional == -10100
# assert fxs.leg2.periods[0].payment == dt(2022, 1, 15)
# assert fxs.leg2.periods[1].notional == 10110.5
# assert fxs.leg2.periods[1].payment == dt(2022, 4, 15)
#
# fxs.fx_fixing_points = NoInput(0)
# points = fxs._rate_alt(curve, curve2, 10.0)
# npv = fxs._npv_alt(curve, curve2, 10.0)
# assert abs(npv) < 1e-9
#
# fxf = FXForwards(
# FXRates({"eursek": 10.0}, dt(2022, 1, 1)),
# {"eureur": curve, "seksek": curve2, "sekeur": curve2}
# )
# points2 = fxs.rate(fxf)
# npv2 = fxs.npv(fxf, NoInput(0), "eur")
# assert abs(npv2) < 1e-9
class TestNonMtmXCS:
def test_nonmtmxcs_npv(self, curve, curve2) -> None:
fxf = FXForwards(
FXRates({"eurusd": 1.1}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "eurusd": curve2, "eureur": curve2},
)
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=False,
leg2_fixed=False,
leg2_mtm=False,
payment_lag=0,
currency="eur",
pair="eurusd",
payment_lag_exchange=0,
)
# npv2 = xcs._npv2(curve2, curve2, curve, curve, 1.10)
npv = xcs.npv(curves=[curve2, curve2, curve, curve], fx=fxf)
assert abs(npv) < 1e-9
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=False,
leg2_fixed=False,
leg2_mtm=False,
payment_lag=0,
amortization=100e3,
currency="eur",
pair="eurusd",
payment_lag_exchange=0,
)
# npv2 = xcs._npv2(curve2, curve2, curve, curve, 1.10)
npv = xcs.npv(curves=[curve2, curve2, curve, curve], fx=fxf)
assert abs(npv) < 1e-9
def test_nonmtmxcs_fx_notional(self) -> None:
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=False,
leg2_fixed=False,
leg2_mtm=False,
payment_lag=0,
currency="eur",
pair="eurusd",
payment_lag_exchange=0,
leg2_fx_fixings=2.0,
notional=1e6,
)
for period in xcs.leg2.periods:
assert period.non_deliverable_params.fx_fixing.value == 2.0
@pytest.mark.parametrize(
("float_spd", "compound", "expected"),
[
(10, "none_simple", 10.160794),
(100, "none_simple", 101.60794),
(100, "isda_compounding", 101.023590),
(100, "isda_flat_compounding", 101.336040),
],
)
def test_nonmtmxcs_spread(self, curve, curve2, float_spd, compound, expected) -> None:
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=False,
leg2_fixed=False,
leg2_mtm=False,
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
float_spread=float_spd,
leg2_spread_compound_method=compound,
)
result = xcs.rate(curves=[curve, curve, curve2, curve2], fx=fxf, metric="leg2")
assert abs(result - expected) < 1e-4
alias = xcs.spread(curves=[curve, curve, curve2, curve2], fx=fxf, metric="leg2")
assert alias == result
xcs.leg2_float_spread = result
validate = xcs.npv(curves=[curve, curve, curve2, curve2], fx=fxf)
assert abs(validate) < 1e-2
result2 = xcs.rate(curves=[curve, curve, curve2, curve2], fx=fxf, metric="leg2")
assert abs(result - result2) < 1e-3
# reverse legs
xcs_reverse = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=False,
leg2_fixed=False,
leg2_mtm=False,
payment_lag=0,
currency="usd",
pair="usdnok",
payment_lag_exchange=0,
notional=1e6,
leg2_float_spread=float_spd,
spread_compound_method=compound,
)
result = xcs_reverse.rate(curves=[curve2, curve2, curve, curve], fx=fxf)
assert abs(result - expected) < 1e-4
def test_no_fx_raises(self, curve, curve2) -> None:
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=False,
leg2_fixed=False,
leg2_mtm=False,
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
float_spread=0.0,
)
with (
pytest.raises(ValueError, match="Must provide `fx` argument to forecast FXFixing"),
default_context("no_fx_fixings_for_xcs", "raise"),
):
xcs.npv(curves=[curve, curve, curve2, curve2])
# no error
xcs.cashflows(curves=[curve, curve, curve2, curve2])
# with pytest.warns():
# with default_context("no_fx_fixings_for_xcs", "warn"):
# xcs.npv([curve, curve, curve2, curve2])
def test_nonmtmxcs_cashflows(self, curve, curve2) -> None:
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=False,
leg2_fixed=False,
leg2_mtm=False,
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
)
result = xcs.cashflows(
curves=[curve, curve, curve2, curve2],
fx=fxf,
base="usd",
)
expected = DataFrame(
{
"Type": ["Cashflow", "FloatPeriod"],
"Period": [np.nan, "Regular"],
"Ccy": ["NOK", "USD"],
"Notional": [-10000000, -10000000.0],
"FX Rate": [0.10002256337062124, 1.0],
"FX Fixing": [np.nan, 0.09967340252423884],
},
index=MultiIndex.from_tuples([("leg1", 0), ("leg2", 8)]),
)
assert_frame_equal(
result.loc[
[("leg1", 0), ("leg2", 8)],
["Type", "Period", "Ccy", "Notional", "FX Rate", "FX Fixing"],
],
expected,
)
@pytest.mark.parametrize("fix", ["float", "dual", "variable"])
def test_nonmtm_fx_fixing(self, curve, curve2, fix) -> None:
fxr = FXRates({"usdnok": 10}, settlement=dt(2022, 1, 1))
fxf = FXForwards(fxr, {"usdusd": curve, "nokusd": curve2, "noknok": curve2})
mapping = {
"float": 1 / 10.0,
"dual": Dual(1 / 10.0, ["x"], []),
"variable": Variable(1 / 10.0, ["x"], []),
}
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=False,
leg2_fixed=False,
leg2_mtm=False,
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
leg2_fx_fixings=mapping[fix],
)
assert abs(xcs.npv(curves=[curve, curve, curve2, curve2], fx=fxf)) < 1e-7
def test_nonmtm_fx_fixing_raises_type_crossing(self, curve, curve2):
fxr = FXRates({"usdnok": 10}, settlement=dt(2022, 1, 1))
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=False,
leg2_fixed=False,
leg2_mtm=False,
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
leg2_fx_fixings=Dual2(10.0, ["x"], [], []),
)
# the given fixing is not downcast to Float because it is a specific user provided value.
# Users should technically use a Variable.
with pytest.raises(TypeError, match=r"Dual2 operation with incompatible type \(Dual\)"):
xcs.npv(curves=[curve, curve, curve2, curve2], fx=fxr) < 1e-7
def test_is_priced(self, curve, curve2) -> None:
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=False,
leg2_fixed=False,
leg2_mtm=False,
leg2_float_spread=1.0,
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
metric="leg2",
)
result = xcs.npv(curves=[curve2, curve2, curve, curve], fx=fxf, base="usd")
assert abs(result - 65.766356) < 1e-5
@pytest.mark.skip(reason="no fx fixings no longer allows warnings")
def test_no_fx_warns(self, curve, curve2) -> None:
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=False,
leg2_fixed=False,
leg2_mtm=False,
leg2_float_spread=1.0,
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
)
with default_context("no_fx_fixings_for_xcs", "warn"), pytest.warns(UserWarning):
xcs.npv(curves=[curve2, curve2, curve, curve], local=True)
def test_npv_fx_as_float_raises(self) -> None:
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=False,
leg2_fixed=False,
leg2_mtm=False,
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
)
curve = Curve({dt(2022, 2, 1): 1.0, dt(2024, 2, 1): 0.9})
with pytest.raises(AttributeError, match="'float' object has no attribute 'rate'"):
xcs.npv(curves=[curve] * 4, fx=10.0)
@pytest.mark.skip(reason="v2.5 uses FXForwards as a more explicit input type.")
def test_npv_fx_as_rates_valid(self) -> None:
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=False,
leg2_fixed=False,
leg2_mtm=False,
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
)
curve = Curve({dt(2022, 2, 1): 1.0, dt(2024, 2, 1): 0.9})
result = xcs.npv(curves=[curve] * 2, fx=FXRates({"usdnok": 10.0}))
assert abs(result) < 1e-6
def test_setting_fx_fixing_no_input(self):
# Define the interest rate curves for EUR, USD and X-Ccy basis
usdusd = Curve({dt(2024, 5, 7): 1.0, dt(2024, 11, 7): 0.98}, calendar="nyc", id="usdusd")
eureur = Curve({dt(2024, 5, 7): 1.0, dt(2024, 11, 7): 0.99}, calendar="tgt", id="eureur")
eurusd = Curve({dt(2024, 5, 7): 1.0, dt(2024, 11, 7): 0.992}, id="eurusd")
# Create an FX Forward market with spot FX rate data
fxr = FXRates({"eurusd": 1.0760}, settlement=dt(2024, 5, 9))
fxf = FXForwards(
fx_rates=fxr,
fx_curves={"eureur": eureur, "usdusd": usdusd, "eurusd": eurusd},
)
xcs = XCS(
dt(2024, 5, 9),
"6M",
"Q",
fixed=False,
leg2_fixed=False,
leg2_mtm=False,
payment_lag=0,
currency="eur",
pair="eurusd",
payment_lag_exchange=0,
notional=10e6,
)
xcs.npv(curves=[eureur, eurusd, usdusd, usdusd], fx=fxf)
xcs.leg2.periods[0].non_deliverable_params.fx_fixing.value_or_forecast(fx=fxf) == Dual(
1.0760, ["fx_eurusd"], []
)
class TestNonMtmFixedFloatXCS:
@pytest.mark.parametrize(
("float_spd", "compound", "expected"),
[
(10, "none_simple", 6.70955968),
(100, "isda_compounding", 7.62137047),
],
)
def test_nonmtmfixxcs_rate_npv(self, curve, curve2, float_spd, compound, expected) -> None:
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
payment_lag=0,
fixed=True,
leg2_fixed=False,
leg2_mtm=False,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
leg2_spread_compound_method=compound,
leg2_float_spread=float_spd,
)
result = xcs.rate(curves=[curve2, curve2, curve, curve], fx=fxf, metric="leg1")
assert abs(result - expected) < 1e-4
npv = xcs.npv(curves=[curve2, curve2, curve, curve], fx=fxf)
assert abs(npv) < 1e-6
xcs.fixed_rate = result # set the fixed rate and check revalues to zero
assert abs(xcs.npv(curves=[curve2, curve2, curve, curve], fx=fxf)) < 1e-6
irs = IRS(
dt(2022, 2, 1),
"8M",
"M",
payment_lag=0,
currency="nok",
leg2_spread_compound_method=compound,
leg2_float_spread=float_spd,
)
validate = irs.rate(curves=curve2)
assert abs(result - validate) < 1e-2
def test_nonmtmfixxcs_fx_notional(self) -> None:
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=True,
leg2_fixed=False,
leg2_mtm=False,
payment_lag=0,
currency="eur",
pair="eurusd",
payment_lag_exchange=0,
leg2_fx_fixings=2.0,
notional=1e6,
)
for period in xcs.leg2.periods:
assert period.non_deliverable_params.fx_fixing.value == 2.0
def test_nonmtmfixxcs_no_fx_raises(self, curve, curve2) -> None:
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=True,
leg2_fixed=False,
leg2_mtm=False,
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
)
with (
pytest.raises(ValueError, match="Must provide `fx` argument to forecast FXFixing"),
default_context("no_fx_fixings_for_xcs", "raise"),
):
xcs.npv(curves=[curve, curve, curve2, curve2])
xcs.cashflows(curves=[curve, curve, curve2, curve2])
def test_nonmtmfixxcs_cashflows(self, curve, curve2) -> None:
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=True,
leg2_fixed=False,
leg2_mtm=False,
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
)
result = xcs.cashflows(curves=[curve, curve, curve2, curve2], fx=fxf, base="usd")
expected = DataFrame(
{
"Type": ["Cashflow", "FloatPeriod"],
"Period": [np.nan, "Regular"],
"Ccy": ["NOK", "USD"],
"Notional": [-10000000, -10000000.0],
"FX Rate": [0.10002256337062124, 1.0],
"FX Fixing": [np.nan, 0.09967340252423884],
},
index=MultiIndex.from_tuples([("leg1", 0), ("leg2", 8)]),
)
assert_frame_equal(
result.loc[
[("leg1", 0), ("leg2", 8)],
["Type", "Period", "Ccy", "Notional", "FX Rate", "FX Fixing"],
],
expected,
)
@pytest.mark.parametrize("fix", ["float", "dual", "variable"])
def test_nonmtmfixxcs_fx_fixing(self, curve, curve2, fix) -> None:
fxr = FXRates({"usdnok": 10}, settlement=dt(2022, 1, 1))
fxf = FXForwards(fxr, {"usdusd": curve, "nokusd": curve2, "noknok": curve2})
mapping = {
"float": 10.0,
"dual": Dual(10.0, ["x"], []),
"variable": Variable(10.0, ["x"], []),
}
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=True,
leg2_fixed=False,
leg2_mtm=False,
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
leg2_fx_fixings=mapping[fix],
leg2_float_spread=10.0,
)
assert abs(xcs.npv(curves=[curve2, curve2, curve, curve], fx=fxf)) < 1e-7
def test_nonmtmfixxcs_fx_fixing_raises_type_crossing(self, curve, curve2) -> None:
fxr = FXRates({"usdnok": 10}, settlement=dt(2022, 1, 1))
fxf = FXForwards(fxr, {"usdusd": curve, "nokusd": curve2, "noknok": curve2})
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=True,
leg2_fixed=False,
leg2_mtm=False,
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
leg2_fx_fixings=Dual2(2.0, ["c"], [], []),
leg2_float_spread=10.0,
)
with pytest.raises(TypeError, match=r"Dual2 operation with incompatible type \(Dual\)."):
xcs.npv(curves=[curve2, curve2, curve, curve], fx=fxf)
def test_nonmtmfixxcs_raises(self, curve, curve2) -> None:
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=True,
leg2_fixed=False,
leg2_mtm=False,
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
)
with pytest.raises(ValueError, match="A `fixed_rate` must be set for a cashflow to be"):
xcs.rate(curves=[curve, curve, curve2, curve2], fx=fxf, metric="leg2")
class TestNonMtmFixedFixedXCS:
# @pytest.mark.parametrize("float_spd, compound, expected",[
# (10, "none_simple", 6.70955968),
# (100, "isda_compounding", 7.62137047),
# ])
# def test_nonmtmfixxcs_rate_npv(self, curve, curve2, float_spd, compound, expected):
# fxf = FXForwards(
# FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
# {"usdusd": curve, "nokusd": curve2, "noknok": curve2}
# )
# xcs = NonMtmFixedFloatXCS(dt(2022, 2, 1), "8M", "M",
# payment_lag=0, currency="nok", leg2_currency="usd",
# payment_lag_exchange=0, notional=10e6,
# leg2_spread_compound_method=compound, leg2_float_spread=float_spd
# )
#
# result = xcs.rate([curve2, curve2, curve, curve], NoInput(0), fxf, 1)
# assert abs(result - expected) < 1e-4
# assert abs(xcs.npv([curve2, curve2, curve, curve], NoInput(0), fxf)) < 1e-6
#
# xcs.fixed_rate = result # set the fixed rate and check revalues to zero
# assert abs(xcs.npv([curve2, curve2, curve, curve], NoInput(0), fxf)) < 1e-6
#
# irs = IRS(dt(2022, 2, 1), "8M", "M",
# payment_lag=0, currency="nok",
# leg2_spread_compound_method=compound, leg2_float_spread=float_spd)
# validate = irs.rate(curve2)
# assert abs(result - validate) < 1e-2
#
# def test_nonmtmfixxcs_fx_notional(self):
# xcs = NonMtmFixedFloatXCS(dt(2022, 2, 1), "8M", "M",
# payment_lag=0, currency="eur", leg2_currency="usd",
# payment_lag_exchange=0, fx_fixing=2.0, notional=1e6)
# assert xcs.leg2_notional == -2e6
#
# def test_nonmtmfixxcs_no_fx_raises(self, curve, curve2):
# xcs = NonMtmFixedFloatXCS(dt(2022, 2, 1), "8M", "M",
# payment_lag=0, currency="nok", leg2_currency="usd",
# payment_lag_exchange=0, notional=10e6)
#
# with pytest.raises(ValueError, match="`fx` is required when `fx_fixing` is"):
# xcs.npv([curve, curve, curve2, curve2])
#
# with pytest.raises(ValueError, match="`fx` is required when `fx_fixing` is"):
# xcs.cashflows([curve, curve, curve2, curve2])
#
# def test_nonmtmfixxcs_cashflows(self, curve, curve2):
# fxf = FXForwards(
# FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
# {"usdusd": curve, "nokusd": curve2, "noknok": curve2}
# )
#
# xcs = NonMtmFixedFloatXCS(dt(2022, 2, 1), "8M", "M",
# payment_lag=0, currency="nok", leg2_currency="usd",
# payment_lag_exchange=0, notional=10e6)
#
# result = xcs.cashflows([curve, curve, curve2, curve2], NoInput(0), fxf)
# expected = DataFrame({
# "Type": ["Cashflow", "FloatPeriod"],
# "Period": ["Exchange", "Regular"],
# "Ccy": ["NOK", "USD"],
# "Notional": [-10000000, -996734.0252423884],
# "FX Rate": [0.10002256337062124, 1.0],
# }, index=MultiIndex.from_tuples([("leg1", 0), ("leg2", 8)]))
# assert_frame_equal(
# result.loc[[("leg1", 0), ("leg2", 8)],
# ["Type", "Period", "Ccy", "Notional", "FX Rate"]],
# expected,
# )
@pytest.mark.parametrize("fix", ["float", "dual", "variable"])
def test_nonmtmfixxcs_fx_fixing(self, curve, curve2, fix) -> None:
fxr = FXRates({"usdnok": 10}, settlement=dt(2022, 1, 1))
fxf = FXForwards(fxr, {"usdusd": curve, "nokusd": curve2, "noknok": curve2})
mapping = {
"float": 10.0,
"dual": Dual(10.0, ["x"], []),
"variable": Variable(10.0, ["x"], []),
}
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=True,
leg2_fixed=True,
leg2_mtm=False,
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
leg2_fx_fixings=mapping[fix],
leg2_fixed_rate=2.0,
)
assert abs(xcs.npv(curves=[curve2, curve2, curve, curve], fx=fxf)) < 1e-7
def test_nonmtmfixxcs_fx_fixing_type_crossing_raises(self, curve, curve2) -> None:
fxr = FXRates({"usdnok": 10}, settlement=dt(2022, 1, 1))
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=True,
leg2_fixed=True,
leg2_mtm=False,
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
leg2_fx_fixings=Dual2(10.0, ["s"], [], []),
leg2_fixed_rate=2.0,
)
with pytest.raises(TypeError, match=r"Dual2 operation with incompatible type \(Dual\)."):
xcs.npv(curves=[curve2, curve2, curve, curve], fx=fxr)
def test_nonmtmfixfixxcs_raises(self, curve, curve2) -> None:
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
payment_lag=0,
fixed=True,
leg2_fixed=True,
leg2_mtm=False,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
)
with pytest.raises(ValueError, match="A `fixed_rate` must be set for a cashflow to be det"):
xcs.rate(curves=[curve, curve, curve2, curve2], fx=fxf, metric="leg2")
with pytest.raises(AttributeError, match="Leg2 is of type"):
xcs.leg2_float_spread = 2.0
@pytest.fixture
def isda_credit_curves_40rr_20quote():
# https://www.cdsmodel.com/rfr-test-grids.html?
# USD 22 June 2022
# from rateslib.scheduling import get_calendar
# trade = dt(2022, 6, 22)
# spot = get_calendar("nyc").add_bus_days(trade, 2, False)
# tenors = ["1m", "2m", "3m", "6m", "1y", "2y", "3y", "4y", "5y", "6y", "7y", "8y", "9y"]
# tenors += ["10y", "12y", "15y", "20y", "25y", "30y"]
# curve = Curve(
# nodes={
# trade: 1.0,
# **{add_tenor(spot, _, "f", "nyc"): 1.0 for _ in tenors},
# },
# interpolation="log_linear",
# )
# solver = Solver(
# curves=[curve],
# instruments=[IRS(spot, _, spec="usd_irs", curves=curve) for _ in tenors],
# s=[1.5088, 1.8228, 1.9729, 2.5640, 3.1620, 3.3169, 3.2441, 3.1771, 3.1371, 3.1131, 3.0951,
# 3.0841, 3.0811, 3.0871, 3.1061, 3.1201, 3.0601, 2.9381, 2.8221]
# )
#
# credit_curve = Curve(
# nodes={trade: 1.0, dt(2055, 1, 1): 1.0}, credit_recovery_rate=0.4
# )
# solver2 = Solver(
# curves=[credit_curve],
# pre_solvers=[solver],
# instruments=[
# CDS(dt(2022, 6, 20), dt(2023, 6, 20), spec="us_ig_cds", curves=[credit_curve, curve])], #noqa: E501
# s=[0.20]
# )
curve = Curve(
{
dt(2022, 6, 22, 0, 0): 1.0,
dt(2022, 7, 25, 0, 0): 0.9986187857823194,
dt(2022, 8, 24, 0, 0): 0.9968373705612348,
dt(2022, 9, 26, 0, 0): 0.994791605422867,
dt(2022, 12, 27, 0, 0): 0.9868431949407511,
dt(2023, 6, 26, 0, 0): 0.9686906539113461,
dt(2024, 6, 24, 0, 0): 0.9357773336285784,
dt(2025, 6, 24, 0, 0): 0.9073411683282268,
dt(2026, 6, 24, 0, 0): 0.8808780124060293,
dt(2027, 6, 24, 0, 0): 0.8551765951547667,
dt(2028, 6, 26, 0, 0): 0.8298749243478529,
dt(2029, 6, 25, 0, 0): 0.8056454824131845,
dt(2030, 6, 24, 0, 0): 0.7819517736960135,
dt(2031, 6, 24, 0, 0): 0.7584699996495646,
dt(2032, 6, 24, 0, 0): 0.7349334728363958,
dt(2034, 6, 26, 0, 0): 0.6890701260967745,
dt(2037, 6, 24, 0, 0): 0.62634116393611,
dt(2042, 6, 24, 0, 0): 0.5441094046550682,
dt(2047, 6, 24, 0, 0): 0.4864281755586489,
dt(2052, 6, 24, 0, 0): 0.4409891618081753,
}
)
return (None, curve)
class TestCDS:
def okane_curve(self):
today = dt(2019, 8, 12)
spot = dt(2019, 8, 14)
tenors = [
"1b",
"1m",
"2m",
"3m",
"6m",
"12M",
"2y",
"3y",
"4y",
"5y",
"6y",
"7y",
"8y",
"9y",
"10y",
]
ibor = Curve(
nodes={today: 1.0, **{add_tenor(spot, _, "mf", "nyc"): 1.0 for _ in tenors}},
convention="act360",
calendar="nyc",
id="ibor",
)
rates = [
2.2,
2.2009,
2.2138,
2.1810,
2.0503,
1.9930,
1.591,
1.499,
1.4725,
1.4664,
1.48,
1.4995,
1.5118,
1.5610,
1.6430,
]
ib_sv = Solver(
curves=[ibor],
instruments=[
IRS(
spot,
_,
leg2_fixing_method="ibor(2)",
calendar="nyc",
payment_lag=0,
convention="30e360",
leg2_convention="act360",
frequency="s",
curves=ibor,
)
for _ in tenors
],
s=rates,
)
cds_tenor = ["6m", "12m", "2y", "3y", "4y", "5y", "7y", "10y"]
credit_curve = Curve(
nodes={today: 1.0, **{add_tenor(today, _, "mf", "nyc"): 1.0 for _ in cds_tenor}},
convention="act365f",
calendar="all",
id="credit",
credit_discretization=5,
)
cc_sv = Solver(
curves=[credit_curve],
pre_solvers=[ib_sv],
instruments=[
CDS(
today,
add_tenor(dt(2019, 9, 20), _, "mf", "nyc"),
front_stub=dt(2019, 9, 20),
frequency="q",
convention="act360",
payment_lag=0,
curves=["credit", "ibor"],
fixed_rate=4.00,
premium_accrued=True,
calendar="nyc",
)
for _ in cds_tenor
],
s=[4.00, 4.00, 4.00, 4.00, 4.00, 4.00, 4.00, 4.00],
)
return credit_curve, ibor, cc_sv
def test_okane_values(self):
# These values are validated against finance Py. Not identical but within tolerance.
cds = CDS(
dt(2019, 8, 12),
dt(2029, 6, 20),
front_stub=dt(2019, 9, 20),
frequency="q",
fixed_rate=1.50,
curves=["credit", "ibor"],
calendar="nyc",
)
c1, c2, solver = self.okane_curve()
result1 = cds.rate(solver=solver)
assert abs(result1 - 3.9999960) < 5e-5
result2 = cds.npv(solver=solver)
assert abs(result2 - 170739.5956) < 180
result3 = cds.leg1.npv(rate_curve=c1, disc_curve=c2)
assert abs(result3 + 104508.9265 - 2125) < 50
result4 = cds.leg2.npv(rate_curve=c1, disc_curve=c2)
assert abs(result4 - 273023.5221) < 110
def test_unpriced_npv(self, curve, curve2) -> None:
cds = CDS(
dt(2022, 2, 1),
"8M",
"M",
payment_lag=0,
currency="eur",
)
npv = cds.npv(curves=[curve2, curve], solver=NoInput(0))
assert abs(npv) < 1e-9
def test_rate(self, curve, curve2) -> None:
hazard_curve = curve
disc_curve = curve2
cds = CDS(
dt(2022, 2, 1),
"8M",
"M",
payment_lag=0,
currency="eur",
)
rate = cds.rate(curves=[hazard_curve, disc_curve])
expected = 2.4164004881061285
assert abs(rate - expected) < 1e-9
def test_npv(self, curve, curve2) -> None:
hazard_curve = curve
disc_curve = curve2
cds = CDS(
dt(2022, 2, 1),
"8M",
"M",
payment_lag=0,
currency="eur",
fixed_rate=1.00,
)
npv = cds.npv(curves=[hazard_curve, disc_curve])
expected = 9075.835204292109 # uses cds_discretization = 23 as default
assert abs(npv - expected) < 1e-5
def test_analytic_delta(self, curve, curve2) -> None:
hazard_curve = curve
disc_curve = curve2
cds = CDS(
dt(2022, 2, 1),
"8M",
"M",
payment_lag=0,
currency="eur",
)
result = cds.analytic_delta(curves=[hazard_curve, disc_curve], leg=1)
expected = 64.07675851924779
assert abs(result - expected) < 1e-7
result = cds.analytic_delta(curves=[hazard_curve, disc_curve], leg=2)
expected = 0.0
assert abs(result - expected) < 1e-7
def test_cds_cashflows(self, curve, curve2) -> None:
hazard_curve = curve
disc_curve = curve2
cds = CDS(
dt(2022, 2, 1),
"8M",
"M",
payment_lag=0,
currency="eur",
)
result = cds.cashflows(curves=[hazard_curve, disc_curve])
assert isinstance(result, DataFrame)
assert result.index.nlevels == 2
def test_solver(self, curve2):
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="disc")
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2022, 7, 1): 0.99, dt(2023, 1, 1): 0.98}, id="haz")
solver = Solver(
curves=[c2],
instruments=[
CDS(dt(2022, 1, 1), "6m", frequency="Q", curves=["haz", c1]),
CDS(dt(2022, 1, 1), "12m", frequency="Q", curves=["haz", c1]),
],
s=[0.30, 0.40],
instrument_labels=["6m", "12m"],
)
inst = CDS(dt(2022, 7, 1), "3M", "Q", curves=["haz", c1], notional=1e6)
result = inst.delta(solver=solver)
assert abs(result.sum().iloc[0] - 25.294894375736) < 1e-6
def test_okane_paper(self):
# Figure 12 of Turnbull and O'Kane 2003 Valuation of CDS
usd_libor = Curve(
nodes={
dt(2003, 6, 19): 1.0,
dt(2003, 12, 23): 1.0,
dt(2004, 6, 23): 1.0,
dt(2005, 6, 23): 1.0,
dt(2006, 6, 23): 1.0,
dt(2007, 6, 23): 1.0,
dt(2008, 6, 23): 1.0,
},
convention="act360",
calendar="nyc",
id="libor",
)
args = dict(spec="eur_irs6", frequency="s", calendar="nyc", curves="libor", currency="usd")
solver = Solver(
curves=[usd_libor],
instruments=[
IRS(dt(2003, 6, 23), "6m", **args),
IRS(dt(2003, 6, 23), "1y", **args),
IRS(dt(2003, 6, 23), "2y", **args),
IRS(dt(2003, 6, 23), "3y", **args),
IRS(dt(2003, 6, 23), "4y", **args),
IRS(dt(2003, 6, 23), "5y", **args),
],
s=[1.35, 1.43, 1.90, 2.47, 2.936, 3.311],
)
haz_curve = Curve(
nodes={
dt(2003, 6, 19): 1.0,
dt(2004, 6, 20): 1.0,
dt(2005, 6, 20): 1.0,
dt(2006, 6, 20): 1.0,
dt(2007, 6, 20): 1.0,
dt(2008, 6, 20): 1.0,
},
convention="act365f",
calendar="all",
id="hazard",
)
args = dict(
calendar="nyc", frequency="q", roll=20, curves=["hazard", "libor"], convention="act360"
)
solver = Solver(
curves=[haz_curve],
pre_solvers=[solver],
instruments=[
CDS(dt(2003, 6, 20), "1y", **args),
CDS(dt(2003, 6, 20), "2y", **args),
CDS(dt(2003, 6, 20), "3y", **args),
CDS(dt(2003, 6, 20), "4y", **args),
CDS(dt(2003, 6, 20), "5y", **args),
],
s=[1.10, 1.20, 1.30, 1.40, 1.50],
)
cds = CDS(dt(2003, 6, 20), dt(2007, 9, 20), fixed_rate=2.00, notional=10e6, **args)
result = cds.rate(solver=solver)
assert abs(result - 1.427) < 0.0030
_table = cds.cashflows(solver=solver)
leg1_npv = cds.leg1.npv(rate_curve=haz_curve, disc_curve=usd_libor)
leg2_npv = cds.leg2.npv(rate_curve=haz_curve, disc_curve=usd_libor)
assert abs(leg1_npv + 781388) < 250
assert abs(leg2_npv - 557872) < 900
a_delta = cds.analytic_delta(curves=[haz_curve, usd_libor])
assert abs(a_delta - 3899) < 10
npv = cds.npv(solver=solver)
assert abs(npv + 223516) < 670
def test_accrued(self):
cds = CDS(
dt(2022, 1, 1), "6M", "Q", payment_lag=0, currency="eur", notional=1e9, fixed_rate=2.0
)
result = cds.accrued(dt(2022, 2, 1))
assert abs(result + 0.25 * 1e9 * 0.02 * 31 / 90) < 1e-6
def test_accrued_raises(self):
cds = CDS(dt(2022, 1, 1), "6M", "Q", payment_lag=0, currency="eur", notional=1e9)
with pytest.raises(ValueError, match="A `fixed_rate` must be set for a cashflo"):
cds.accrued(dt(2022, 2, 1))
@pytest.mark.parametrize(
("cash", "tenor", "quote"),
[
(-79690.03, "1y", 0.20),
(-156453.96, "2y", 0.20),
(-230320.76, "3y", 0.20),
(-370875.32, "5y", 0.20),
(-502612.64, "7y", 0.20),
(-684299.75, "10y", 0.20),
(116199.85, "1y", 2.20),
(225715.34, "2y", 2.20),
(327602.22, "3y", 2.20),
(512001.20, "5y", 2.20),
(673570.58, "7y", 2.20),
(878545.53, "10y", 2.20),
],
)
def test_standard_model_test_grid(self, cash, tenor, quote, isda_credit_curves_40rr_20quote):
# https://www.cdsmodel.com/rfr-test-grids.html?
# USD 22 June 2022
credit_curve, curve = isda_credit_curves_40rr_20quote
credit_curve = Curve({dt(2022, 6, 22): 1.0, dt(2052, 6, 30): 1.0}, credit_recovery_rate=0.4)
Solver(
curves=[credit_curve],
instruments=[
CDS(dt(2022, 6, 20), tenor, spec="us_ig_cds", curves=[credit_curve, curve])
],
s=[quote],
)
cds = CDS(
dt(2022, 6, 20), tenor, spec="us_ig_cds", curves=[credit_curve, curve], notional=10e6
)
result = cds.npv()
assert abs(result - cash) < 875
def test_cds_attributes(self):
cds = CDS(
dt(2022, 1, 1), "6M", "Q", payment_lag=0, currency="eur", notional=1e9, fixed_rate=2.0
)
assert cds.fixed_rate == 2.0
cds.fixed_rate = 1.0
assert cds.fixed_rate == 1.0
def test_cds_parse_curves(self, curve, curve2):
cds = CDS(
dt(2022, 1, 1), "6M", "Q", payment_lag=0, currency="eur", notional=1e9, fixed_rate=2.0
)
r1 = cds.npv(curves={"rate_curve": curve, "disc_curve": curve2})
r2 = cds.npv(curves=[curve, curve2])
assert r1 == r2
with pytest.raises(ValueError, match="CDS requires 2"):
cds.npv(curves=curve)
def test_analytic_rec_risk(self):
irs_tenor = [
"1m",
"2m",
"3m",
"6m",
"12m",
"2y",
"3y",
"4y",
"5y",
"6y",
"7y",
"8y",
"9y",
"10y",
"12y",
]
irs_rates = [
4.8457,
4.7002,
4.5924,
4.3019,
3.8992,
3.5032,
3.3763,
3.3295,
3.3165,
3.3195,
3.3305,
3.3450,
3.3635,
3.3830,
3.4245,
]
cds_tenor = ["6m", "12m", "2y", "3y", "4y", "5y", "7y", "10y"]
cds_rates = [0.11011, 0.14189, 0.20750, 0.26859, 0.32862, 0.37861, 0.51068, 0.66891]
today = dt(2024, 10, 4) # Friday 4th October 2024
spot = dt(2024, 10, 8) # Tuesday 8th October 2024
disc_curve = Curve(
nodes={today: 1.0, **{add_tenor(spot, _, "mf", "nyc"): 1.0 for _ in irs_tenor}},
calendar="nyc",
convention="act360",
interpolation="log_linear",
id="sofr",
)
us_rates_sv = Solver(
curves=[disc_curve],
instruments=[IRS(spot, _, spec="usd_irs", curves="sofr") for _ in irs_tenor],
s=irs_rates,
instrument_labels=irs_tenor,
id="us_rates",
)
cds_eff = dt(2024, 9, 20)
cds_mats = [add_tenor(dt(2024, 12, 20), _, "mf", "all") for _ in cds_tenor]
hazard_curve = Curve(
nodes={today: 1.0, **{add_tenor(spot, _, "mf", "nyc"): 1.0 for _ in cds_tenor}},
calendar="all",
convention="act365f",
interpolation="log_linear",
id="pfizer",
)
pfizer_sv = Solver(
curves=[hazard_curve],
pre_solvers=[us_rates_sv],
instruments=[
CDS(cds_eff, _, spec="us_ig_cds", curves=["pfizer", "sofr"]) for _ in cds_mats
],
s=cds_rates,
instrument_labels=cds_tenor,
id="pfizer_cds",
)
cds = CDS(
effective=dt(2024, 9, 20),
termination=dt(2029, 12, 20),
spec="us_ig_cds",
curves=["pfizer", "sofr"],
notional=10e6,
)
result = cds.analytic_rec_risk(solver=pfizer_sv)
assert abs(result + 3031.0076128941) < 1e-8
class TestXCS:
def test_mtmxcs_npv(self, curve, curve2) -> None:
fxf = FXForwards(
FXRates({"eurusd": 1.1}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "eurusd": curve2, "eureur": curve2},
)
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
payment_lag=0,
currency="eur",
pair="eurusd",
payment_lag_exchange=0,
leg2_mtm=True,
)
npv = xcs.npv(curves=[curve2, curve2, curve, curve], fx=fxf)
assert abs(npv) < 1e-9
def test_mtmxcs_cashflows(self, curve, curve2) -> None:
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
leg2_mtm=True,
)
result = xcs.cashflows(curves=[curve, curve, curve2, curve2], fx=fxf, base="usd")
expected = DataFrame(
{
"Type": ["Cashflow", "FloatPeriod", "MtmCashflow"],
"Period": [np.nan, "Regular", np.nan],
"Ccy": ["NOK", "USD", "USD"],
"Notional": [-10000000, -10000000.0, 10000000.0],
"Rate": [np.nan, 8.181151773810475, np.nan],
"FX Rate": [0.10002256337062124, 1.0, 1.0],
"FX Fixing": [np.nan, 0.0990019249688802, 0.09829871161519926],
},
index=MultiIndex.from_tuples([("leg1", 0), ("leg2", 11), ("leg2", 14)]),
)
assert_frame_equal(
result.loc[
[("leg1", 0), ("leg2", 11), ("leg2", 14)],
["Type", "Period", "Ccy", "Notional", "Rate", "FX Rate", "FX Fixing"],
],
expected,
)
@pytest.mark.skip(
reason="After merging all XCS to one class inputting `fx_fixings` as list was changed.",
)
def test_mtmxcs_fx_fixings_raises(self) -> None:
with pytest.raises(ValueError, match="`fx_fixings` for MTM XCS should"):
_ = XCS(
dt(2022, 2, 1),
"8M",
"M",
fx_fixings=NoInput(0),
currency="usd",
pair="eurusd",
)
with pytest.raises(ValueError, match="`fx_fixings` for MTM XCS should"):
_ = XCS(
dt(2022, 2, 1),
"8M",
"M",
fx_fixings=NoInput(0),
fixed=True,
leg2_fixed=False,
leg2_mtm=True,
currency="usd",
pair="eurusd",
)
with pytest.raises(ValueError, match="`fx_fixings` for MTM XCS should"):
_ = XCS(
dt(2022, 2, 1),
"8M",
"M",
fx_fixings=NoInput(0),
fixed=True,
leg2_fixed=True,
leg2_mtm=True,
currency="usd",
pair="eurusd",
)
with pytest.raises(ValueError, match="`fx_fixings` for MTM XCS should"):
_ = XCS(
dt(2022, 2, 1),
"8M",
"M",
fx_fixings=NoInput(0),
fixed=False,
leg2_fixed=True,
leg2_mtm=True,
currency="usd",
pair="eurusd",
)
@pytest.mark.parametrize(
("float_spd", "compound", "expected"),
[
(10, "none_simple", 9.97839804),
(100, "none_simple", 99.78398037),
(100, "isda_compounding", 99.418428),
(100, "isda_flat_compounding", 99.621117),
],
)
def test_mtmxcs_rate(self, float_spd, compound, expected, curve, curve2) -> None:
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
float_spread=float_spd,
leg2_spread_compound_method=compound,
leg2_mtm=True,
)
result = xcs.rate(curves=[curve2, curve2, curve, curve], fx=fxf, metric="leg2")
assert abs(result - expected) < 1e-4
alias = xcs.spread(curves=[curve2, curve2, curve, curve], fx=fxf, metric="leg2")
assert alias == result
xcs.leg2_float_spread = result
validate = xcs.npv(curves=[curve2, curve2, curve, curve], fx=fxf)
assert abs(validate) < 1e-2
result2 = xcs.rate(curves=[curve2, curve2, curve, curve], fx=fxf, metric="leg2")
assert abs(result - result2) < 1e-3
def test_fx_fixings_2_tuple(self) -> None:
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
leg2_fx_fixings=(1.25, Series([1.5, 1.75], index=[dt(2022, 2, 25), dt(2022, 3, 30)])),
leg2_mtm=True,
)
assert xcs.leg2._regular_periods[0].non_deliverable_params.fx_fixing.value == 1.25
assert xcs.leg2._regular_periods[1].non_deliverable_params.fx_fixing.value == 1.50
assert xcs.leg2._regular_periods[2].non_deliverable_params.fx_fixing.value == 1.75
assert xcs.leg2._regular_periods[3].non_deliverable_params.fx_fixing.value == NoInput(0)
def test_initialisation_nonmtm_xcs_notional_raises(self) -> None:
with pytest.raises(ValueError, match="The `notional` can only be provided on one leg"):
XCS(
effective=dt(2000, 1, 1),
termination="1y",
frequency="q",
notional=135e6,
fx_fixings=0.7407407407407407,
leg2_notional=20e6,
currency="cad",
pair="cadusd",
leg2_mtm=False,
)
@pytest.mark.parametrize("fixed1", [True, False])
@pytest.mark.parametrize("fixed2", [True, False])
@pytest.mark.parametrize("mtm", [True, False])
def test_fixings_table(self, curve, curve2, fixed1, fixed2, mtm):
curve._id = "c1"
curve2._id = "c2"
fxf = FXForwards(
FXRates({"eurusd": 1.1}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "eurusd": curve2, "eureur": curve2},
)
xcs = XCS(
dt(2022, 2, 1),
"8M",
frequency="M",
payment_lag=0,
currency="eur",
pair="eurusd",
payment_lag_exchange=0,
fixed=fixed1,
leg2_fixed=fixed2,
leg2_mtm=mtm,
fixing_method="ibor(2)",
leg2_fixing_method=FloatFixingMethod.IBOR(2),
)
result = xcs.local_analytic_rate_fixings(curves=[curve, curve, curve2, curve2], fx=fxf)
assert isinstance(result, DataFrame)
def test_initialisation_bug(self):
XCS(
dt(2000, 1, 7),
"9m",
spec="eurusd_xcs",
leg2_fixed=True,
leg2_mtm=False,
fixing_method="ibor(2)",
leg2_fixed_rate=2.4,
)
XCS(dt(2000, 1, 7), "9m", spec="eurusd_xcs", fixed=True, fixed_rate=3.0)
def test_fixing_doc(self):
# tests a series as string can be provided to XCS in tuple
name = str(hash(os.urandom(8)))
fixings.add(
name + "_GBPUSD",
Series(
index=[dt(2023, 1, 13), dt(2023, 4, 13), dt(2023, 7, 13)],
data=[1.19, 1.21, 1.24],
),
)
xcs = XCS(
effective=dt(2023, 1, 15),
termination="9M",
spec="gbpusd_xcs",
leg2_fx_fixings=(1.20, name),
)
result = xcs.cashflows()
assert isinstance(result, DataFrame)
expected = [1.20, 1.21, 1.24]
for i, period in enumerate(xcs.leg2._regular_periods):
period.non_deliverable_params.fx_fixing.value == expected[i]
def test_notional_on_mtm_leg_raises(self):
with pytest.raises(ValueError, match="Setting `mtm` on a Leg requires a non-deli"):
XCS(
effective=dt(2025, 1, 8),
termination="6m",
frequency="Q",
currency="usd",
mtm=True,
leg2_fx_fixings=155.0,
pair="usdjpy",
notional=1e9,
)
def test_attributes_get(self):
xcs = XCS(dt(2000, 1, 1), "6m", "Q", fixed=True, leg2_fixed=True, pair="eurusd")
assert xcs.fixed_rate == NoInput(0)
assert xcs.leg2_fixed_rate == NoInput(0)
with pytest.raises(AttributeError, match="Leg1 is of type"):
xcs.float_spread
with pytest.raises(AttributeError, match="Leg2 is of type"):
xcs.leg2_float_spread
def test_attributes_get_float(self):
xcs = XCS(dt(2000, 1, 1), "6m", "Q", pair="eurusd")
assert xcs.float_spread == 0.0
assert xcs.leg2_float_spread == 0.0
with pytest.raises(AttributeError, match="Leg1 is of type"):
xcs.fixed_rate
with pytest.raises(AttributeError, match="Leg2 is of type"):
xcs.leg2_fixed_rate
def test_attributes_set(self):
xcs = XCS(dt(2000, 1, 1), "6m", "Q", fixed=True, leg2_fixed=True, pair="eurusd")
xcs.fixed_rate = 2.0
xcs.leg2_fixed_rate = 1.5
with pytest.raises(AttributeError, match="Leg1 is of type"):
xcs.float_spread = 2.0
with pytest.raises(AttributeError, match="Leg2 is of type"):
xcs.leg2_float_spread = 1.5
def test_attributes_set_float(self):
xcs = XCS(dt(2000, 1, 1), "6m", "Q", pair="eurusd")
xcs.float_spread = 2.0
xcs.leg2_float_spread = 1.5
with pytest.raises(AttributeError, match="Leg1 is of type"):
xcs.fixed_rate = 2.0
with pytest.raises(AttributeError, match="Leg2 is of type"):
xcs.leg2_fixed_rate = 1.5
def test_mtm_dual_validation_raises(self):
with pytest.raises(ValueError, match="`mtm` and `leg2_mtm` must def"):
XCS(
dt(2000, 1, 1),
"6m",
"Q",
pair="eurusd",
mtm=True,
leg2_mtm=True,
)
def test_notional_fixings_mismatch_raises(self):
with pytest.raises(ValueError, match="When `notional` is given, that leg is assumed to be"):
XCS(dt(2000, 1, 1), "6m", "Q", pair="eurusd", mtm=True, fx_fixings=1.10)
with pytest.raises(
ValueError, match="When `leg2_notional` is given, that leg is assumed to be"
):
XCS(
dt(2000, 1, 1),
"6m",
"Q",
pair="usdeur",
mtm=True,
leg2_fx_fixings=1.10,
leg2_notional=10.0,
)
@pytest.mark.parametrize("curves", ["bad-value", ["1", "2", "3"]])
def test_parse_curves_failures(self, curves):
with pytest.raises(ValueError, match="XCS requires 4 curve type"):
XCS(
dt(2000, 1, 1),
"6m",
"Q",
pair="usdeur",
mtm=True,
fx_fixings=1.10,
leg2_notional=10.0,
curves=curves,
)
def test_must_set_one_fixed_rate(self):
with pytest.raises(ValueError, match="At least one leg must have a de"):
XCS(
dt(2000, 1, 1),
"6m",
"Q",
pair="usdeur",
mtm=True,
fx_fixings=1.10,
leg2_notional=10.0,
fixed=True,
leg2_fixed=True,
).npv()
def test_bad_metric_raises(self):
with pytest.raises(ValueError, match="`metric` must be in {'leg1', 'leg2'}"):
XCS(
dt(2000, 1, 1),
"6m",
"Q",
pair="eurusd",
mtm=True,
fx_fixings=1.10,
leg2_notional=10.0,
fixed=True,
leg2_fixed=True,
).rate(metric="bad")
def test_leg1_mtm(self):
# if notional given on leg1 this will error with nd `pair` not given.
XCS(
effective=dt(2000, 1, 1),
termination="6m",
frequency="Q",
currency="eur",
pair="eurusd",
mtm=True,
leg2_notional=10.0,
)
class TestNDXCS:
@pytest.mark.parametrize(
("leg1", "curves"),
[
(True, ["c2", "c", "c", "c"]),
(False, ["c", "c", "c2", "c"]),
],
)
def test_2c_ndxcs_npv(self, curve, curve2, leg1, curves) -> None:
# the EUR reference leg is leg1 if leg1 is True
map_ = {
True: LegMtm.Payment,
False: LegMtm.Initial,
}
fxf = FXForwards(
FXRates({"eurusd": 1.1}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "eurusd": curve2, "eureur": curve2},
)
ndxcs = NDXCS(
dt(2022, 2, 1),
"3M",
"M",
currency="usd",
pair="eurusd",
notional=1e6 if leg1 else NoInput(0),
leg2_notional=1e6 if not leg1 else NoInput(0),
curves=[curve if _ == "c" else curve2 for _ in curves],
)
assert ndxcs.kwargs.leg1["mtm"] is map_[leg1]
assert ndxcs.kwargs.leg2["mtm"] is map_[not leg1]
npv = ndxcs.npv(fx=fxf)
assert abs(npv) < 1e-9
def test_3c_ndxcs_npv(self, curve, curve2) -> None:
curve3 = Curve(
nodes={
dt(2022, 1, 1): 1.00,
dt(2022, 4, 1): 0.981,
dt(2022, 7, 1): 0.973,
dt(2022, 10, 1): 0.955,
},
interpolation="log_linear",
index_base=100.0,
)
fxf = FXForwards(
FXRates({"eurusd": 1.1, "gbpusd": 1.25}, settlement=dt(2022, 1, 3)),
{
"usdusd": curve,
"eurusd": curve2,
"eureur": curve2,
"gbpgbp": curve3,
"gbpusd": curve3,
},
)
ndxcs = NDXCS(
dt(2022, 2, 1),
"3M",
"M",
currency="usd",
pair="eurusd",
notional=1e6,
leg2_notional=-1e6 * float(fxf.rate("eurgbp")),
leg2_pair="gbpusd",
curves=[curve2, curve, curve3, curve],
)
assert ndxcs.kwargs.leg1["mtm"] == LegMtm.Payment
assert ndxcs.kwargs.leg2["mtm"] == LegMtm.Payment
npv = ndxcs.npv(fx=fxf)
rate = ndxcs.rate(fx=fxf)
df = ndxcs.cashflows(fx=fxf)
assert abs(npv) < 1e-9
ndxcs.float_spread = float(rate) + 0.1
assert abs(ndxcs.npv(fx=fxf) + ndxcs.analytic_delta(fx=fxf, leg=1) * 0.1) < 1e-8
for a, b in zip(
df["FX Fix Date"],
[dt(2022, 1, 28), dt(2022, 3, 1), dt(2022, 3, 31), dt(2022, 4, 28), dt(2022, 4, 28)]
* 2,
):
assert a == b
def test_init_default_ccy(self):
defaults.base_currency = "gbp"
ndxcs = NDXCS(dt(2000, 1, 1), "1y", spec="inrusd_ndxcs")
assert ndxcs.leg1.settlement_params.currency == "usd"
assert ndxcs.leg2.settlement_params.currency == "usd"
defaults.reset_defaults()
assert defaults.base_currency == "usd"
class TestFixedFloatXCS:
def test_mtmfixxcs_rate(self, curve, curve2) -> None:
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=True,
leg2_fixed=False,
leg2_mtm=True,
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
)
result = xcs.rate(curves=[curve2, curve2, curve, curve], fx=fxf, metric="leg1")
irs = IRS(dt(2022, 2, 1), "8M", "M", currency="nok", payment_lag=0)
validate = irs.rate(curves=curve2)
assert abs(result - validate) < 1e-4
# alias = xcs.spread([curve2, curve2, curve, curve], NoInput(0), fxf, 2)
def test_mtmfixxcs_rate_reversed(self, curve, curve2) -> None:
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=False,
leg2_fixed=True,
leg2_mtm=True,
payment_lag=0,
currency="usd",
pair="usdnok",
payment_lag_exchange=0,
notional=10e6,
)
result = xcs.rate(curves=[curve, curve, curve2, curve2], fx=fxf, metric="leg2")
irs = IRS(dt(2022, 2, 1), "8M", "M", currency="nok", payment_lag=0)
validate = irs.rate(curves=curve2)
assert abs(result - validate) < 1e-2
alias = xcs.spread(curves=[curve, curve, curve2, curve2], fx=fxf, metric="leg2")
assert abs(result - alias) < 1e-4
class TestFixedFixedXCS:
def test_mtmfixfixxcs_rate(self, curve, curve2) -> None:
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
irs = IRS(dt(2022, 2, 1), "8M", "M", payment_lag=0)
nok_rate = float(irs.rate(curves=curve2))
xcs = XCS(
dt(2022, 2, 1),
"8M",
"M",
fixed=True,
leg2_fixed=True,
leg2_mtm=True,
payment_lag=0,
currency="nok",
pair="nokusd",
payment_lag_exchange=0,
notional=10e6,
fixed_rate=nok_rate,
)
result = xcs.rate(curves=[curve2, curve2, curve, curve], fx=fxf, metric="leg2")
validate = irs.rate(curves=curve)
assert abs(result - validate) < 1e-4
alias = xcs.spread(curves=[curve2, curve2, curve, curve], fx=fxf, metric="leg2")
assert abs(result - alias) < 1e-8
# test reverse
usd_rate = float(irs.rate(curves=curve))
xcs.fixed_rate = NoInput(0)
xcs.leg2_fixed_rate = usd_rate
result = xcs.rate(curves=[curve2, curve2, curve, curve], fx=fxf, metric="leg1")
validate = irs.rate(curves=curve2)
assert abs(result - validate) < 1e-4
alias = xcs.spread(curves=[curve2, curve2, curve, curve], fx=fxf, metric="leg1")
assert abs(result - alias) < 1e-8
class TestFXSwap:
def test_fxswap_rate(self, curve, curve2) -> None:
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
fxs = FXSwap(
dt(2022, 2, 1),
"8M",
pair="usdnok",
notional=1e6,
)
expected = fxf.swap("usdnok", [dt(2022, 2, 1), dt(2022, 10, 1)])
result = fxs.rate(curves=[NoInput(0), curve, NoInput(0), curve2], fx=fxf)
assert abs(result - expected) < 1e-10
assert np.isclose(result.dual, expected.dual)
def test_fxswap_pair_arg(self, curve, curve2) -> None:
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
fxs = FXSwap(
dt(2022, 2, 1),
"8M",
pair="usdnok",
notional=1e6,
)
expected = fxf.swap("usdnok", [dt(2022, 2, 1), dt(2022, 10, 1)])
result = fxs.rate(curves=[NoInput(0), curve, NoInput(0), curve2], fx=fxf)
assert abs(result - expected) < 1e-10
assert np.isclose(result.dual, expected.dual)
def test_currency_arg_pair_overlap(self) -> None:
with pytest.raises(TypeError, match="unexpected keyword argument 'currency'"):
FXSwap(
dt(2022, 2, 1),
"8M",
pair="usdnok",
currency="jpy",
)
def test_fxswap_npv(self, curve, curve2) -> None:
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
fxs = FXSwap(
dt(2022, 2, 1),
"8M",
pair="usdnok",
notional=1e6,
)
assert abs(fxs.npv(curves=[NoInput(0), curve, NoInput(0), curve2], fx=fxf)) < 1e-7
result = fxs.rate(curves=[NoInput(0), curve, NoInput(0), curve2], fx=fxf)
fxs.leg2_fixed_rate = result
assert abs(fxs.npv(curves=[NoInput(0), curve, NoInput(0), curve2], fx=fxf)) < 1e-7
def test_fxswap_points_raises(self) -> None:
msg = "For an FXSwap transaction both `fx_rate` and `points` must be given"
with pytest.raises(ValueError, match=msg):
FXSwap(
dt(2022, 2, 1),
"8M",
pair="usdnok",
notional=1e6,
points=100.0,
)
def test_fxswap_points_warns(self) -> None:
with pytest.raises(
ValueError, match="For an FXSwap transaction both `fx_rate` and `points` must be given"
):
FXSwap(
dt(2022, 2, 1),
"8M",
fx_rate=11.0,
pair="usdnok",
notional=1e6,
)
FXSwap(
dt(2022, 2, 1),
"8M",
fx_rate=11.0,
points=1000.0,
pair="usdnok",
notional=1e6,
split_notional=1e6,
)
@pytest.mark.parametrize(
("fx_rate", "points", "split_notional", "expected"),
[
(NoInput(0), NoInput(0), NoInput(0), Dual(0, ["fx_usdnok"], [0.0])),
(11.0, 1800.0, NoInput(0), Dual(3734.617680, ["fx_usdnok"], [-3027.88203904])),
(
11.0,
1754.5623360395632,
NoInput(0),
Dual(4166.37288388, ["fx_usdnok"], [-3071.05755945]),
),
(
10.032766762996951,
1754.5623360395632,
NoInput(0),
Dual(0, ["fx_usdnok"], [-2654.42027107]),
),
(
10.032766762996951,
1754.5623360395632,
1027365.1574336714,
Dual(0, ["fx_usdnok"], [0.0]),
),
],
)
def test_fxswap_parameter_combinations_off_mids_given(
self,
curve,
curve2,
fx_rate,
points,
split_notional,
expected,
) -> None:
# curve._set_ad_order(1)
# curve2._set_ad_order(1)
# risk sensitivity to curve is checked in:
# test_null_priced_delta_round_trip_one_pricing_param_fx_fix
# the exact values of relevance here are:
# usdnok: 10.032766762996951,
# points: 1754.5623360395632
# split_notional: 1027365.1574336714
# the first test which results in a zero gradient is explained on the documentation pages
# from an FXSwap in the notes section.
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
fxs = FXSwap(
dt(2022, 2, 1),
"8M",
fx_rate=fx_rate,
points=points,
split_notional=split_notional,
pair="usdnok",
notional=1e6,
)
assert fxs.kwargs.meta["points"] == points
result = fxs.npv(curves=[NoInput(0), curve, NoInput(0), curve2], fx=fxf, base="usd")
# rate = fxs.rate(curves=[curve, curve2], fx=fxf)
assert abs(result - expected) < 1e-6
assert np.isclose(result.dual, expected.dual)
def test_direction_fx_swap_notional(self, curve, curve2):
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
fxs = FXSwap(
dt(2022, 2, 1),
"8M",
fx_rate=10.0,
points=1000.0,
pair="usdnok",
notional=1e6,
)
fxs2 = FXSwap(
dt(2022, 2, 1),
"8M",
fx_rate=10.0,
points=1500.0,
pair="usdnok",
notional=-1e6,
)
npv1 = fxs.npv(curves=[curve, curve2], fx=fxf)
npv2 = fxs2.npv(curves=[curve, curve2], fx=fxf)
assert (npv1 - npv2) > 0
@pytest.mark.parametrize("leg", [1, 2])
def test_notional_directions_with_split_notional(self, leg):
fxs = FXSwap(
**{
"effective": dt(2022, 2, 1),
"termination": dt(2022, 4, 1),
f"{'leg2_' if leg == 2 else ''}notional": 1e6,
"split_notional": 1e6,
"pair": "usdnok",
}
)
l1c1_sign = fxs.leg1.periods[0].settlement_params.notional < 0
l1c2_sign = fxs.leg1.periods[1].settlement_params.notional < 0
l2c1_sign = fxs.leg2.periods[0].settlement_params.notional < 0
l2c2_sign = fxs.leg2.periods[1].settlement_params.notional < 0
assert l1c1_sign != l1c2_sign
assert l2c1_sign != l2c2_sign
assert l1c1_sign != l2c1_sign
def test_rate_with_fixed_parameters(self, curve, curve2) -> None:
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
fxs = FXSwap(
dt(2022, 2, 1),
"8M",
fx_rate=10.01,
points=1765,
split_notional=1.01e6,
pair="usdnok",
notional=1e6,
)
result = fxs.rate(curves=[NoInput(0), curve, NoInput(0), curve2], fx=fxf)
expected = 1746.59802
assert abs(result - expected) < 1e-4
# def test_proxy_curve_from_fxf(self, curve, curve2):
# # TODO this needs a solver from which to test the proxy curve (line 92)
# fxf = FXForwards(
# FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
# {"usdusd": curve, "nokusd": curve2, "noknok": curve2}
# )
# fxs = FXSwap(dt(2022, 2, 1), "8M", "M",
# currency="usd", leg2_currency="nok",
# payment_lag_exchange=0, notional=1e6,
# leg2_fixed_rate=-1.0)
# npv_nok =
# fxs.npv([NoInput(0), fxf.curve("usd", "nok"), NoInput(0), curve2], NoInput(0), fxf)
# npv_usd =
# fxs.npv([NoInput(0), curve, NoInput(0), fxf.curve("nok", "usd")], NoInput(0), fxf)
# assert abs(npv_nok-npv_usd) < 1e-7 # npvs are equivalent becasue xcs basis =0
def test_transition_from_dual_to_dual2(self, curve, curve2) -> None:
# Test added for BUG, see PR: XXX
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
fxf._set_ad_order(1)
fxs = FXSwap(
dt(2022, 2, 1),
"8M",
pair="usdnok",
notional=1e6,
)
result = fxs.npv(
curves=[None, fxf.curve("usd", "usd"), None, fxf.curve("nok", "usd")], fx=fxf
)
assert isinstance(result, Dual)
fxf._set_ad_order(2)
result2 = fxs.npv(
curves=[None, fxf.curve("usd", "usd"), None, fxf.curve("nok", "usd")], fx=fxf
)
assert isinstance(result2, Dual2)
def test_transition_from_dual_to_dual2_rate(self, curve, curve2) -> None:
# Test added for BUG, see PR: XXX
fxf = FXForwards(
FXRates({"usdnok": 10}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "nokusd": curve2, "noknok": curve2},
)
fxf._set_ad_order(1)
fxs = FXSwap(
dt(2022, 2, 1),
"8M",
pair="usdnok",
notional=1e6,
)
result = fxs.rate(
curves=[None, fxf.curve("usd", "usd"), None, fxf.curve("nok", "usd")], fx=fxf
)
assert isinstance(result, Dual)
fxf._set_ad_order(2)
result = fxs.rate(
curves=[None, fxf.curve("usd", "usd"), None, fxf.curve("nok", "usd")], fx=fxf
)
assert isinstance(result, Dual2)
@pytest.mark.skip(reason="in v2.5 split notional is not the default and must be set directly")
def test_split_notional_raises(self):
# this is an unpriced FXswap with split notional
fxs = FXSwap(effective=dt(2022, 2, 1), termination="3m", pair="eurusd")
with pytest.raises(
TypeError, match="`curves` have not been supplied correctly. A `disc_curve` is required"
):
fxs.rate()
@pytest.mark.parametrize(
("eom", "expected"),
[
(False, dt(2022, 5, 28)),
(True, dt(2022, 5, 31)),
],
)
def test_eom_dates(self, eom, expected):
fxs = FXSwap(
effective=dt(2022, 2, 28),
termination="3m",
pair="eurusd",
calendar="all",
modifier="mf",
eom=eom,
)
assert fxs.kwargs.leg1["termination"] == expected
@pytest.mark.parametrize(
("roll", "expected"),
[
("imm", dt(2022, 4, 20)),
(19, dt(2022, 4, 19)),
],
)
def test_roll_dates(self, roll, expected):
fxs = FXSwap(
effective=dt(2022, 1, 19),
termination="3m",
pair="eurusd",
calendar="all",
modifier="mf",
roll=roll,
)
assert fxs.kwargs.leg1["termination"] == expected
class TestSTIRFuture:
def test_stir_rate(self, curve, curve2) -> None:
stir = STIRFuture(
effective=dt(2022, 3, 16),
termination=dt(2022, 6, 15),
spec="usd_stir",
)
expected = 95.96254344884888
result = stir.rate(curves=curve, metric="price")
assert abs(100 - result - stir.rate(curves=curve)) < 1e-8
assert abs(result - expected) < 1e-8
def test_stir_no_gamma(self, curve) -> None:
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="usdusd")
ins = [
IRS(dt(2022, 3, 16), dt(2022, 6, 15), "Q", curves="usdusd"),
]
solver = Solver(
curves=[c1],
instruments=ins,
s=[1.2],
id="solver",
instrument_labels=["usd fut"],
)
stir = STIRFuture(
effective=dt(2022, 3, 16),
termination=dt(2022, 6, 15),
spec="usd_stir",
curves="usdusd",
)
result = stir.delta(solver=solver).sum().sum()
assert abs(result + 25.0) < 1e-7
result = stir.gamma(solver=solver).sum().sum()
assert abs(result) < 1e-7
def test_stir_npv(self) -> None:
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="usdusd")
# irs = IRS(dt(2022, 3, 16), dt(2022, 6, 15), "Q", curves="usdusd")
stir = STIRFuture(
effective=dt(2022, 3, 16),
termination=dt(2022, 6, 15),
spec="usd_stir",
curves="usdusd",
price=99.50,
)
result = stir.npv(curves=c1)
expected = (99.5 - (100 - 0.99250894761)) * 2500 * -1.0
assert abs(result - expected) < 1e-7
def test_stir_npv_currency_bug(self) -> None:
# GH653: instantiation without a currency failed to NPV when an fx object provided.
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99})
c2 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98})
c3 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.97})
fxf = FXForwards(
FXRates({"eurusd": 1.1}, dt(2022, 1, 1)), {"eureur": c1, "eurusd": c2, "usdusd": c3}
)
stir = STIRFuture(
effective=dt(2022, 3, 16),
termination=dt(2022, 6, 15),
frequency="Q",
spec="usd_stir",
contracts=-1,
)
result = stir.npv(curves=[None, c1, c2, c1], fx=fxf)
assert abs(result) < 1e-7
def test_stir_npv_fx(self) -> None:
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="usdusd")
# irs = IRS(dt(2022, 3, 16), dt(2022, 6, 15), "Q", curves="usdusd")
stir = STIRFuture(
effective=dt(2022, 3, 16),
termination=dt(2022, 6, 15),
spec="usd_stir",
curves="usdusd",
price=99.50,
)
fxr = FXRates({"usdeur": 0.85})
result = stir.npv(curves=c1, fx=fxr, base="eur")
expected = ((99.5 - (100 - 0.99250894761)) * 2500 * -1.0) * 0.85
assert abs(result - expected) < 1e-7
def test_stir_raises(self) -> None:
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="usdusd")
# irs = IRS(dt(2022, 3, 16), dt(2022, 6, 15), "Q", curves="usdusd")
stir = STIRFuture(
effective=dt(2022, 3, 16),
termination=dt(2022, 6, 15),
spec="usd_stir",
curves="usdusd",
price=99.50,
)
with pytest.raises(ValueError, match="`metric` must be in"):
stir.rate(curves=c1, metric="bad")
def test_analytic_delta(self) -> None:
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="usdusd")
stir = STIRFuture(
effective=dt(2022, 3, 16),
termination=dt(2022, 6, 15),
spec="usd_stir",
curves=c1,
price=99.50,
contracts=100,
)
expected = -2500.0
result = stir.analytic_delta()
assert abs(result - expected) < 1e-10
def test_analytic_delta_fx(self) -> None:
c1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="usdusd")
stir = STIRFuture(
effective=dt(2022, 3, 16),
termination=dt(2022, 6, 15),
spec="usd_stir",
curves=c1,
price=99.50,
contracts=100,
)
expected = -2500.0 * 0.85
fxr = FXRates({"usdeur": 0.85})
result = stir.analytic_delta(fx=fxr, base="eur")
assert abs(result - expected) < 1e-10
def test_fixings_table(self, curve):
stir = STIRFuture(
effective=dt(2022, 3, 16),
termination="3m",
spec="eur_stir3",
contracts=100,
curves=curve,
)
result = stir.local_analytic_rate_fixings()
assert isinstance(result, DataFrame)
value = result.loc[dt(2022, 3, 14), slice(None)].iloc[0]
assert abs(value + 2500.0) < 1e-9
@pytest.mark.parametrize(
("spec", "expected"),
[
("usd_stir", -25.0),
("eur_stir", -25.0),
("eur_stir3", -25.0),
("gbp_stir", -25.0),
],
)
def test_3m_spec_contracts(self, spec, expected, curve):
stir = STIRFuture(get_imm(3, 2022), get_imm(6, 2022), spec=spec)
for leg in [1, 2]:
result = stir.analytic_delta(curves=curve, leg=leg)
assert abs(result - expected * (1.5 - leg) * 2) < 1e-10
@pytest.mark.parametrize(
("spec", "expected"),
[
("usd_stir1", -41.670),
("eur_stir1", -25.0),
],
)
def test_1m_spec_contracts(self, spec, expected, curve):
stir = STIRFuture(dt(2022, 4, 1), dt(2022, 5, 1), spec=spec)
for leg in [1, 2]:
result = stir.analytic_delta(curves=curve, leg=leg)
assert abs(result - expected * (1.5 - leg) * 2) < 1e-10
def test_cashflows(self, curve):
stir = STIRFuture(
effective=dt(2022, 3, 16),
termination=dt(2022, 6, 15),
spec="usd_stir",
price=99.50,
contracts=10,
)
result = stir.cashflows()
assert result["Payment"].iloc[0] is None
result2 = stir.cashflows(curves=curve)
assert result2["Payment"].iloc[0] == dt(2022, 1, 1)
assert result2["DF"].iloc[0] == 1.0
def test_edsp_check(self):
# https://www.fmxfutures.com/wp-content/uploads/2025/09/SOFR-Final-Settlement-Report-9_17_2025.pdf
name = str(hash(os.urandom(3)))
fixings.add(
name + "_1B",
Series(
index=[
dt.strptime(_, "%m/%d/%Y")
for _ in [
"06/30/2025",
"06/27/2025",
"06/26/2025",
"06/25/2025",
"06/24/2025",
"06/23/2025",
"06/20/2025",
"06/18/2025",
"06/17/2025",
"06/16/2025",
"06/13/2025",
"06/12/2025",
"06/11/2025",
"06/10/2025",
"06/09/2025",
"06/06/2025",
"06/05/2025",
"06/04/2025",
"06/03/2025",
"06/02/2025",
"05/30/2025",
"05/29/2025",
"05/28/2025",
"05/27/2025",
"05/23/2025",
"05/22/2025",
"05/21/2025",
"05/20/2025",
"05/19/2025",
"05/16/2025",
"05/15/2025",
"05/14/2025",
"05/13/2025",
"05/12/2025",
"05/09/2025",
"05/08/2025",
"05/07/2025",
"05/06/2025",
"05/05/2025",
"05/02/2025",
"05/01/2025",
"04/30/2025",
"04/29/2025",
"04/28/2025",
"04/25/2025",
"04/24/2025",
"04/23/2025",
"04/22/2025",
"04/21/2025",
"04/17/2025",
"04/16/2025",
"04/15/2025",
"04/14/2025",
"04/11/2025",
"04/10/2025",
"04/09/2025",
"04/08/2025",
"04/07/2025",
"04/04/2025",
"04/03/2025",
"04/02/2025",
"04/01/2025",
"03/31/2025",
"03/28/2025",
"03/27/2025",
"03/26/2025",
"03/25/2025",
"03/24/2025",
"03/21/2025",
"03/20/2025",
"03/19/2025",
"03/18/2025",
"03/17/2025",
"03/14/2025",
"03/13/2025",
"03/12/2025",
"03/11/2025",
"03/10/2025",
"03/07/2025",
"03/06/2025",
"03/05/2025",
"03/04/2025",
"03/03/2025",
]
],
data=[
4.45,
4.39,
4.4,
4.36,
4.3,
4.29,
4.29,
4.28,
4.31,
4.32,
4.28,
4.28,
4.28,
4.28,
4.29,
4.29,
4.29,
4.28,
4.32,
4.35,
4.35,
4.33,
4.33,
4.31,
4.26,
4.26,
4.26,
4.27,
4.29,
4.3,
4.31,
4.29,
4.3,
4.28,
4.28,
4.29,
4.3,
4.32,
4.33,
4.36,
4.39,
4.41,
4.36,
4.36,
4.33,
4.29,
4.28,
4.3,
4.32,
4.32,
4.31,
4.36,
4.33,
4.33,
4.37,
4.42,
4.4,
4.33,
4.35,
4.39,
4.37,
4.39,
4.41,
4.34,
4.36,
4.35,
4.33,
4.31,
4.3,
4.29,
4.29,
4.31,
4.32,
4.3,
4.3,
4.31,
4.32,
4.33,
4.34,
4.35,
4.34,
4.33,
4.33,
],
),
)
inst = STIRFuture(
get_imm(code="H25"),
get_imm(code="M25"),
spec="usd_stir",
leg2_rate_fixings=name,
)
result = inst.rate()
edsp = 100 - round(result, 4)
fixings.pop(name + "_1B")
assert edsp == 95.6577
def test_edsp_check2(self):
# https://www.cmegroup.com/content/dam/cmegroup/education/files/sofr-futures-settlement-calculation-methodologies.pdf
name = str(hash(os.urandom(3)))
fixings.add(
name + "_1B",
Series(
index=[
dt.strptime(_, "%Y-%m-%d")
for _ in [
"2017-06-21",
"2017-06-22",
"2017-06-23",
"2017-06-26",
"2017-06-27",
"2017-06-28",
"2017-06-29",
"2017-06-30",
"2017-07-03",
"2017-07-05",
"2017-07-06",
"2017-07-07",
"2017-07-10",
"2017-07-11",
"2017-07-12",
"2017-07-13",
"2017-07-14",
"2017-07-17",
"2017-07-18",
"2017-07-19",
"2017-07-20",
"2017-07-21",
"2017-07-24",
"2017-07-25",
"2017-07-26",
"2017-07-27",
"2017-07-28",
"2017-07-31",
"2017-08-01",
"2017-08-02",
"2017-08-03",
"2017-08-04",
"2017-08-07",
"2017-08-08",
"2017-08-09",
"2017-08-10",
"2017-08-11",
"2017-08-14",
"2017-08-15",
"2017-08-16",
"2017-08-17",
"2017-08-18",
"2017-08-21",
"2017-08-22",
"2017-08-23",
"2017-08-24",
"2017-08-25",
"2017-08-28",
"2017-08-29",
"2017-08-30",
"2017-08-31",
"2017-09-01",
"2017-09-05",
"2017-09-06",
"2017-09-07",
"2017-09-08",
"2017-09-11",
"2017-09-12",
"2017-09-13",
"2017-09-14",
"2017-09-15",
"2017-09-18",
"2017-09-19",
]
],
data=[
1.02,
1.02,
1.06,
1.05,
1.03,
1.04,
1.07,
1.2,
1.1,
1.05,
1.03,
1.01,
1.01,
1.01,
1.01,
1.02,
1.02,
1.04,
1.02,
1.01,
1.02,
1.01,
1.05,
1.04,
1.04,
1.04,
1.03,
1.08,
1.03,
1.01,
1.01,
1.01,
1.01,
1.01,
1.01,
1.03,
1.05,
1.08,
1.11,
1.09,
1.09,
1.06,
1.04,
1.02,
1.02,
1.08,
1.05,
1.05,
1.03,
1.03,
1.14,
1.09,
1.05,
1.03,
1.04,
1.04,
1.04,
1.05,
1.05,
1.09,
1.1,
1.04,
1.01,
],
),
)
inst = STIRFuture(
get_imm(code="M17"),
get_imm(code="U17"),
spec="usd_stir",
leg2_rate_fixings=name,
)
result = inst.rate()
edsp = 100 - round(result, 4)
fixings.pop(name + "_1B")
assert edsp == 98.9495
class TestPricingMechanism:
def test_value(self, curve) -> None:
ob = Value(dt(2022, 1, 28), curves=curve)
ob.rate()
def test_irs(self, curve) -> None:
ob = IRS(dt(2022, 1, 28), "6m", "Q", curves=curve)
ob.rate()
ob.npv()
ob.cashflows()
ob.spread()
def test_iirs(self, curve) -> None:
i_curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99},
index_base=100.0,
interpolation="linear_index",
index_lag=3,
)
ob = IIRS(dt(2022, 1, 28), "6m", "Q", curves=[i_curve, curve, curve, curve])
ob.rate()
ob.npv()
ob.cashflows()
ob.spread()
def test_sbs(self, curve) -> None:
ob = SBS(dt(2022, 1, 28), "6m", "Q", curves=[curve] * 3)
ob.rate()
ob.npv()
ob.cashflows()
ob.spread()
def test_fra(self, curve) -> None:
ob = FRA(dt(2022, 1, 28), "6m", "S", curves=curve)
ob.rate()
ob.npv()
ob.cashflows()
@pytest.mark.parametrize(
("klass", "kwargs"),
[
(XCS, {"fixed": False, "leg2_fixed": False, "leg2_mtm": False}),
(XCS, {"fixed": True, "leg2_fixed": False, "leg2_mtm": False, "fixed_rate": 2.0}),
(XCS, {"fixed": True, "leg2_fixed": True, "leg2_mtm": False, "fixed_rate": 2.0}),
(XCS, {}), # defaults to fixed:False, leg2_fixed: False, leg2_mtm: True
(XCS, {"fixed": True, "leg2_fixed": False, "leg2_mtm": True, "fixed_rate": 2.0}),
(XCS, {"fixed": False, "leg2_fixed": True, "leg2_mtm": True}),
(XCS, {"fixed": True, "leg2_fixed": True, "leg2_mtm": True, "fixed_rate": 2.0}),
],
)
def test_allxcs(self, klass, kwargs, curve, curve2) -> None:
ob = klass(
dt(2022, 1, 28),
"6m",
"S",
currency="usd",
pair="eurusd",
curves=[curve, NoInput(0), curve2, NoInput(0)],
**kwargs,
)
fxf = FXForwards(
FXRates({"eurusd": 1.1}, settlement=dt(2022, 1, 3)),
{"usdusd": curve, "eurusd": curve2, "eureur": curve2},
)
ob.rate(metric="leg2", fx=fxf)
ob.npv(fx=fxf)
ob.cashflows(fx=fxf)
def test_zcs(self, curve) -> None:
ob = ZCS(dt(2022, 1, 28), "6m", "S", curves=curve)
ob.rate()
ob.npv()
ob.cashflows()
def test_zcis(self, curve) -> None:
i_curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99},
index_base=100.0,
interpolation="linear_index",
index_lag=3,
)
ob = ZCIS(dt(2022, 1, 28), "6m", "S", curves=[curve, curve, i_curve, curve])
ob.rate()
ob.npv()
ob.cashflows()
# TODO FXEXchange and FXSwap
class TestPortfolio:
def test_portfolio_npv(self, curve) -> None:
irs1 = IRS(dt(2022, 1, 1), "6m", "Q", fixed_rate=1.0, curves=curve)
irs2 = IRS(dt(2022, 1, 1), "3m", "Q", fixed_rate=2.0, curves=curve)
pf = Portfolio([irs1, irs2])
assert pf.npv(base="usd") == irs1.npv() + irs2.npv()
pf = Portfolio([irs1] * 5)
assert pf.npv(base="usd") == irs1.npv() * 5
def test_portoflio_npv_pool(self, curve) -> None:
irs1 = IRS(dt(2022, 1, 1), "6m", "Q", fixed_rate=1.0, curves=curve)
pf = Portfolio([irs1] * 5)
with default_context("pool", 2): # also test parallel processing
result = pf.npv(base="usd")
assert result == irs1.npv() * 5
def test_portfolio_npv_local(self, curve) -> None:
irs1 = IRS(dt(2022, 1, 1), "6m", "Q", fixed_rate=1.0, curves=curve, currency="usd")
irs2 = IRS(dt(2022, 1, 1), "3m", "Q", fixed_rate=2.0, curves=curve, currency="eur")
irs3 = IRS(dt(2022, 1, 1), "3m", "Q", fixed_rate=2.0, curves=curve, currency="usd")
pf = Portfolio([irs1, irs2, irs3])
result = pf.npv(local=True)
expected = {
"usd": 20093.295095887483,
"eur": 5048.87332403382,
}
assert result == expected
def test_portfolio_local_parallel(self, curve) -> None:
irs1 = IRS(dt(2022, 1, 1), "6m", "Q", fixed_rate=1.0, curves=curve, currency="usd")
irs2 = IRS(dt(2022, 1, 1), "3m", "Q", fixed_rate=2.0, curves=curve, currency="eur")
irs3 = IRS(dt(2022, 1, 1), "3m", "Q", fixed_rate=2.0, curves=curve, currency="usd")
pf = Portfolio([irs1, irs2, irs3])
expected = {
"usd": 20093.295095887483,
"eur": 5048.87332403382,
}
with default_context("pool", 2): # also test parallel processing
result = pf.npv(local=True)
assert result == expected
def test_portfolio_mixed_currencies(self) -> None:
ll_curve = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2022, 5, 1): 1.0, dt(2022, 9, 3): 1.0},
interpolation="log_linear",
id="sofr",
)
ll_solver = Solver(
curves=[ll_curve],
instruments=[
IRS(dt(2022, 1, 1), "4m", "Q", curves="sofr"),
IRS(dt(2022, 1, 1), "8m", "Q", curves="sofr"),
],
s=[1.85, 2.10],
instrument_labels=["4m", "8m"],
id="sofr",
)
ll_curve = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2022, 4, 1): 1.0, dt(2022, 10, 1): 1.0},
interpolation="log_linear",
id="estr",
)
combined_solver = Solver(
curves=[ll_curve],
instruments=[
IRS(dt(2022, 1, 1), "3m", "Q", curves="estr"),
IRS(dt(2022, 1, 1), "9m", "Q", curves="estr"),
],
s=[0.75, 1.65],
instrument_labels=["3m", "9m"],
pre_solvers=[ll_solver],
id="estr",
)
irs = IRS(
effective=dt(2022, 1, 1),
termination="6m",
frequency="Q",
currency="usd",
notional=500e6,
fixed_rate=2.0,
curves="sofr", # or ["sofr", "sofr"] for forecasting and discounting
)
irs2 = IRS(
effective=dt(2022, 1, 1),
termination="6m",
frequency="Q",
currency="eur",
notional=-300e6,
fixed_rate=1.0,
curves="estr",
)
pf = Portfolio([irs, irs2])
result = pf.npv(solver=combined_solver, local=True)
assert "eur" in result
assert "usd" in result
# the following should execute without warnings
pf.delta(solver=combined_solver)
pf.gamma(solver=combined_solver)
def test_repr(self, curve) -> None:
irs1 = IRS(dt(2022, 1, 1), "6m", "Q", fixed_rate=1.0, curves=curve)
irs2 = IRS(dt(2022, 1, 1), "3m", "Q", fixed_rate=2.0, curves=curve)
pf = Portfolio([irs1, irs2])
expected = f""
assert pf.__repr__() == expected
def test_fixings_table(self, curve, curve2):
curve._id = "c1"
curve2._id = "c2"
irs1 = IRS(dt(2022, 1, 17), "6m", spec="eur_irs3", curves=curve, notional=3e6)
irs2 = IRS(dt(2022, 1, 23), "6m", spec="eur_irs6", curves=curve2, notional=1e6)
irs3 = IRS(dt(2022, 1, 17), "6m", spec="eur_irs3", curves=curve, notional=-2e6)
pf = Portfolio([irs1, irs2, irs3])
result = pf.local_analytic_rate_fixings()
# # irs1 and irs3 are summed over curve c1 notional
# assert abs(result["c1", "notional"][dt(2022, 1, 15)] - 1021994.16) < 1e-2
# irs1 and irs3 are summed over curve c1 risk
assert abs(result["c1", "eur", "eur", "3M"][dt(2022, 1, 13)] - 25.249) < 1e-2
# c1 has no exposure to 22nd Jan
assert isna(result["c1", "eur", "eur", "3M"][dt(2022, 1, 20)])
# # c1 dcf is not summed
# assert abs(result["c1", "dcf"][dt(2022, 1, 15)] - 0.25) < 1e-3
# # irs2 is included
# assert abs(result["c2", "notional"][dt(2022, 1, 22)] - 1005297.17) < 1e-2
# irs1 and irs3 are summed over curve c1 risk
assert abs(result["c2", "eur", "eur", "6M"][dt(2022, 1, 20)] - 48.773) < 1e-3
# c2 has no exposure to 15 Jan
assert isna(result["c2", "eur", "eur", "6M"][dt(2022, 1, 13)])
# # c2 has DCF
# assert abs(result["c2", "dcf"][dt(2022, 1, 22)] - 0.50277) < 1e-3
def test_fixings_table_null_inst(self, curve):
irs = IRS(dt(2022, 1, 15), "6m", spec="eur_irs3", curves=curve)
frb = FixedRateBond(dt(2022, 1, 1), "5y", "A", fixed_rate=2.0, curves=curve)
pf = Portfolio([irs, frb])
table = pf.local_analytic_rate_fixings()
assert isinstance(table, DataFrame)
class TestFly:
@pytest.mark.parametrize("mechanism", [False, True])
def test_fly_npv(self, curve, mechanism) -> None:
mechanism = curve if mechanism else NoInput(0)
inverse = curve if mechanism is NoInput(0) else NoInput(0)
irs1 = IRS(dt(2022, 1, 1), "3m", "Q", fixed_rate=1.0, curves=mechanism)
irs2 = IRS(dt(2022, 1, 1), "4m", "Q", fixed_rate=2.0, curves=mechanism)
irs3 = IRS(dt(2022, 1, 1), "5m", "Q", fixed_rate=1.0, curves=mechanism)
fly = Fly(irs1, irs2, irs3)
assert fly.npv(curves=inverse) == irs1.npv(curves=inverse) + irs2.npv(
curves=inverse
) + irs3.npv(curves=inverse)
@pytest.mark.parametrize("mechanism", [False, True])
def test_fly_rate(self, curve, mechanism) -> None:
mechanism = curve if mechanism else NoInput(0)
inv = curve if mechanism is NoInput(0) else NoInput(0)
irs1 = IRS(dt(2022, 1, 1), "3m", "Q", fixed_rate=1.0, curves=mechanism)
irs2 = IRS(dt(2022, 1, 1), "4m", "Q", fixed_rate=2.0, curves=mechanism)
irs3 = IRS(dt(2022, 1, 1), "5m", "Q", fixed_rate=1.0, curves=mechanism)
fly = Fly(irs1, irs2, irs3)
assert (
fly.rate(curves=inv)
== (-irs1.rate(curves=inv) + 2 * irs2.rate(curves=inv) - irs3.rate(curves=inv)) * 100.0
)
def test_fly_cashflows_executes(self, curve) -> None:
irs1 = IRS(dt(2022, 1, 1), "3m", "Q", fixed_rate=1.0, curves=curve)
irs2 = IRS(dt(2022, 1, 1), "4m", "Q", fixed_rate=2.0, curves=curve)
irs3 = IRS(dt(2022, 1, 1), "5m", "Q", fixed_rate=1.0, curves=curve)
fly = Fly(irs1, irs2, irs3)
fly.cashflows()
def test_local_npv(self, curve) -> None:
irs1 = IRS(dt(2022, 1, 1), "3m", "Q", fixed_rate=1.0, curves=curve, currency="eur")
irs2 = IRS(dt(2022, 1, 1), "4m", "Q", fixed_rate=2.0, curves=curve, currency="usd")
irs3 = IRS(dt(2022, 1, 1), "3m", "Q", fixed_rate=1.0, curves=curve, currency="gbp")
fly = Fly(irs1, irs2, irs3)
result = fly.npv(local=True)
expected = {
"eur": 7523.321141258284,
"usd": 6711.514715925333,
"gbp": 7523.321141258284,
}
assert result == expected
def test_delta(self, simple_solver) -> None:
irs1 = IRS(dt(2022, 1, 1), "6m", "A", fixed_rate=1.0, notional=-3e6, curves="curve")
irs2 = IRS(dt(2022, 1, 1), "1Y", "A", fixed_rate=2.0, notional=3e6, curves="curve")
irs3 = IRS(dt(2022, 1, 1), "18m", "A", fixed_rate=1.0, notional=-1e6, curves="curve")
fly = Fly(irs1, irs2, irs3)
result = fly.delta(solver=simple_solver).to_numpy()
expected = np.array([[102.08919479], [-96.14488074]])
assert np.all(np.isclose(result, expected))
def test_gamma(self, simple_solver) -> None:
irs1 = IRS(dt(2022, 1, 1), "6m", "A", fixed_rate=1.0, notional=-3e6, curves="curve")
irs2 = IRS(dt(2022, 1, 1), "1Y", "A", fixed_rate=2.0, notional=3e6, curves="curve")
irs3 = IRS(dt(2022, 1, 1), "18m", "A", fixed_rate=1.0, notional=-1e6, curves="curve")
fly = Fly(irs1, irs2, irs3)
result = fly.gamma(solver=simple_solver).to_numpy()
expected = np.array([[-0.02944899, 0.009254014565], [0.009254014565, 0.0094239781314]])
assert np.all(np.isclose(result, expected))
def test_repr(self):
irs1 = IRS(dt(2022, 1, 1), "3m", "Q", fixed_rate=1.0)
irs2 = IRS(dt(2022, 1, 1), "4m", "Q", fixed_rate=2.0)
spd = Spread(irs1, irs2)
expected = f""
assert expected == spd.__repr__()
def test_fixings_table(self, curve, curve2):
curve._id = "c1"
curve2._id = "c2"
irs1 = IRS(dt(2022, 1, 17), "6m", spec="eur_irs3", curves=curve, notional=3e6)
irs2 = IRS(dt(2022, 1, 23), "6m", spec="eur_irs6", curves=curve2, notional=1e6)
irs3 = IRS(dt(2022, 1, 17), "6m", spec="eur_irs3", curves=curve, notional=-2e6)
fly = Fly(irs1, irs2, irs3)
result = fly.local_analytic_rate_fixings()
# irs1 and irs3 are summed over curve c1 risk
assert abs(result[("c1", "eur", "eur", "3M")][dt(2022, 1, 13)] - 25.249) < 1e-2
# c1 has no exposure to 22nd Jan
assert isna(result[("c1", "eur", "eur", "3M")][dt(2022, 1, 20)])
# irs1 and irs3 are summed over curve c1 risk
assert abs(result[("c2", "eur", "eur", "6M")][dt(2022, 1, 20)] - 48.773) < 1e-3
# c2 has no exposure to 15 Jan
assert isna(result[("c2", "eur", "eur", "6M")][dt(2022, 1, 13)])
def test_fixings_table_null_inst(self, curve):
irs = IRS(dt(2022, 1, 15), "6m", spec="eur_irs3", curves=curve)
frb = FixedRateBond(dt(2022, 1, 1), "5y", "A", fixed_rate=2.0, curves=curve)
fly = Fly(irs, frb, irs)
assert isinstance(fly.local_analytic_rate_fixings(), DataFrame)
class TestSpread:
@pytest.mark.parametrize("mechanism", [False, True])
def test_spread_npv(self, curve, mechanism) -> None:
mechanism = curve if mechanism else NoInput(0)
inverse = curve if mechanism is NoInput(0) else NoInput(0)
irs1 = IRS(dt(2022, 1, 1), "3m", "Q", fixed_rate=1.0, curves=mechanism)
irs2 = IRS(dt(2022, 1, 1), "4m", "Q", fixed_rate=2.0, curves=mechanism)
spd = Spread(irs1, irs2)
assert spd.npv(curves=inverse) == irs1.npv(curves=inverse) + irs2.npv(curves=inverse)
@pytest.mark.parametrize("mechanism", [False, True])
def test_spread_rate(self, curve, mechanism) -> None:
mechanism = curve if mechanism else NoInput(0)
inverse = curve if mechanism is NoInput(0) else NoInput(0)
irs1 = IRS(dt(2022, 1, 1), "3m", "Q", fixed_rate=1.0, curves=mechanism)
irs2 = IRS(dt(2022, 1, 1), "4m", "Q", fixed_rate=2.0, curves=mechanism)
spd = Spread(irs1, irs2)
assert (
spd.rate(curves=inverse)
== (-irs1.rate(curves=inverse) + irs2.rate(curves=inverse)) * 100.0
)
def test_spread_cashflows_executes(self, curve) -> None:
irs1 = IRS(dt(2022, 1, 1), "3m", "Q", fixed_rate=1.0, curves=curve)
irs2 = IRS(dt(2022, 1, 1), "4m", "Q", fixed_rate=2.0, curves=curve)
spd = Spread(irs1, irs2)
spd.cashflows()
def test_local_npv(self, curve) -> None:
irs1 = IRS(dt(2022, 1, 1), "3m", "Q", fixed_rate=1.0, curves=curve, currency="eur")
irs2 = IRS(dt(2022, 1, 1), "4m", "Q", fixed_rate=2.0, curves=curve, currency="usd")
spd = Spread(irs1, irs2)
result = spd.npv(local=True)
expected = {
"eur": 7523.321141258284,
"usd": 6711.514715925333,
}
assert result == expected
def test_repr(self):
irs1 = IRS(dt(2022, 1, 1), "3m", "Q", fixed_rate=1.0)
irs2 = IRS(dt(2022, 1, 1), "4m", "Q", fixed_rate=2.0)
irs3 = IRS(dt(2022, 1, 1), "5m", "Q", fixed_rate=1.0)
fly = Fly(irs1, irs2, irs3)
expected = f""
assert expected == fly.__repr__()
def test_fixings_table(self, curve, curve2):
curve._id = "c1"
curve2._id = "c2"
irs1 = IRS(dt(2022, 1, 17), "6m", spec="eur_irs3", curves=curve, notional=3e6)
irs2 = IRS(dt(2022, 1, 23), "6m", spec="eur_irs6", curves=curve2, notional=1e6)
irs3 = IRS(dt(2022, 1, 17), "6m", spec="eur_irs3", curves=curve, notional=-2e6)
spd = Spread(irs1, Spread(irs2, irs3))
result = spd.local_analytic_rate_fixings()
# irs1 and irs3 are summed over curve c1 risk
assert abs(result[("c1", "eur", "eur", "3M")][dt(2022, 1, 13)] - 25.249) < 1e-2
# c1 has no exposure to 22nd Jan
assert isna(result[("c1", "eur", "eur", "3M")][dt(2022, 1, 20)])
# irs1 and irs3 are summed over curve c1 risk
assert abs(result[("c2", "eur", "eur", "6M")][dt(2022, 1, 20)] - 48.773) < 1e-3
# c2 has no exposure to 15 Jan
assert isna(result["c2", "eur", "eur", "6M"][dt(2022, 1, 13)])
def test_fixings_table_null_inst(self, curve):
irs = IRS(dt(2022, 1, 15), "6m", spec="eur_irs3", curves=curve)
frb = FixedRateBond(dt(2022, 1, 1), "5y", "A", fixed_rate=2.0, curves=curve)
spd = Spread(irs, frb)
table = spd.local_analytic_rate_fixings()
assert isinstance(table, DataFrame)
def test_cashflows_curve_strings(self):
irs = IRS(dt(2025, 12, 1), dt(2030, 12, 7), spec="gbp_irs", curves=["uk_sonia"])
ukt = FixedRateBond(
dt(2024, 12, 7),
dt(2030, 12, 7),
fixed_rate=4.75,
spec="uk_gb",
curves=["uk_gb"],
metric="ytm",
)
asw = Spread(ukt, irs)
result = asw.cashflows()
assert isinstance(result, DataFrame)
class TestSensitivities:
def test_sensitivity_raises(self) -> None:
irs = IRS(dt(2022, 1, 1), "6m", "Q")
with pytest.raises(ValueError, match="`solver` is required"):
irs.delta()
with pytest.raises(ValueError, match="`solver` is required"):
irs.gamma()
class TestSpec:
def test_spec_overwrites(self) -> None:
irs = IRS(
effective=dt(2022, 1, 1),
termination=dt(2024, 2, 26),
calendar="tgt",
frequency="Q",
notional=250.0,
spec="test",
curves="test",
)
expected = dict(
initial_exchange=False,
final_exchange=False,
leg2_initial_exchange=False,
leg2_final_exchange=False,
pair=NoInput(0),
leg2_pair=NoInput(1),
fx_fixings=NoInput(0),
leg2_fx_fixings=NoInput(1),
leg2_zero_periods=NoInput(0),
mtm=LegMtm.Payment,
leg2_mtm=LegMtm.Payment,
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2024, 2, 26),
frequency="Q",
stub="longfront",
front_stub=NoInput(0),
back_stub=NoInput(0),
roll=NoInput(0),
eom=False,
modifier="p",
calendar="tgt",
payment_lag=4,
payment_lag_exchange=0,
),
leg2_schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2024, 2, 26),
frequency="m",
stub="longback",
front_stub=NoInput(0),
back_stub=NoInput(0),
roll=1,
eom=False,
modifier="mp",
calendar="nyc,tgt,ldn",
payment_lag=3,
payment_lag_exchange=0,
),
notional=250.0,
currency="tes",
amortization=NoInput(0),
convention="yearsmonths",
leg2_notional=-250.0,
leg2_currency="tes",
leg2_convention="one",
leg2_amortization=NoInput(0),
fixed_rate=NoInput(0),
leg2_fixing_method=NoInput(0),
leg2_spread_compound_method=NoInput(0),
leg2_rate_fixings=NoInput(0),
leg2_float_spread=NoInput(0),
leg2_fixing_series=NoInput(0),
leg2_fixing_frequency=NoInput(0),
curves=_Curves(disc_curve="test", leg2_rate_curve="test", leg2_disc_curve="test"),
vol=_Vol(),
)
kwargs = _KWArgs(
user_args=expected,
meta_args=["curves", "vol"],
)
assert irs.kwargs.meta == kwargs.meta
assert irs.kwargs.leg1 == kwargs.leg1
assert irs.kwargs.leg2 == kwargs.leg2
assert irs.kwargs == kwargs
def test_irs(self) -> None:
irs = IRS(
effective=dt(2022, 1, 1),
termination="1Y",
spec="usd_irs",
convention="30e360",
fixed_rate=2.0,
)
assert irs.kwargs.leg1["convention"] == "30e360"
assert irs.kwargs.leg2["convention"] == "30e360"
assert irs.kwargs.leg1["currency"] == "usd"
assert irs.kwargs.leg1["fixed_rate"] == 2.0
def test_stir(self) -> None:
irs = STIRFuture(
effective=dt(2022, 3, 16),
termination=dt(2022, 6, 15),
spec="usd_stir",
convention="30e360",
)
assert irs.kwargs.leg1["convention"] == "30e360"
assert irs.kwargs.leg2["convention"] == "30e360"
assert irs.kwargs.leg1["currency"] == "usd"
assert irs.kwargs.leg1["schedule"].roll == "IMM"
def test_sbs(self) -> None:
inst = SBS(
effective=dt(2022, 1, 1),
termination="1Y",
spec="eur_sbs36",
convention="30e360",
frequency="A",
)
assert inst.kwargs.leg1["convention"] == "30e360"
assert inst.kwargs.leg2["convention"] == "30e360"
assert inst.kwargs.leg1["currency"] == "eur"
assert inst.kwargs.leg2["fixing_method"] == "ibor(2)"
assert inst.kwargs.leg1["schedule"].frequency == "A"
assert inst.kwargs.leg2["schedule"].frequency == "S"
def test_zcis(self) -> None:
inst = ZCIS(
effective=dt(2022, 1, 1),
termination="1Y",
spec="eur_zcis",
leg2_calendar="nyc,tgt",
calendar="nyc,tgt",
)
assert inst.kwargs.leg1["convention"] == "1+"
assert inst.kwargs.leg2["convention"] == "1+"
assert inst.kwargs.leg1["currency"] == "eur"
assert inst.kwargs.leg2["index_method"] == "monthly"
assert inst.kwargs.leg2["schedule"].calendar == NamedCal("nyc,tgt")
def test_zcs(self) -> None:
inst = ZCS(
effective=dt(2022, 1, 1),
termination="5Y",
spec="gbp_zcs",
leg2_calendar="nyc,tgt",
calendar="nyc,tgt",
fixed_rate=3.0,
curves="ish",
)
assert inst.kwargs.leg1["convention"] == "act365f"
assert inst.kwargs.leg1["currency"] == "gbp"
assert inst.kwargs.leg2["schedule"].calendar == NamedCal("nyc,tgt")
assert inst.kwargs.leg2["schedule"].frequency == "A"
assert inst.kwargs.leg1["fixed_rate"] == 3.0
assert inst.kwargs.leg2["spread_compound_method"] == "none_simple"
assert isinstance(inst.kwargs.meta["curves"], _Curves)
def test_iirs(self) -> None:
inst = IIRS(
effective=dt(2022, 1, 1),
termination="1Y",
spec="sek_iirs",
leg2_calendar="nyc,tgt",
calendar="nyc,tgt",
fixed_rate=3.0,
)
assert inst.kwargs.leg1["convention"] == "actacticma"
assert inst.kwargs.leg2["schedule"].frequency == "Q"
assert inst.kwargs.leg1["currency"] == "sek"
assert inst.kwargs.leg2["schedule"].calendar == NamedCal("nyc,tgt")
assert inst.kwargs.leg1["fixed_rate"] == 3.0
assert inst.kwargs.leg2["spread_compound_method"] == "none_simple"
def test_fixedratebond(self) -> None:
bond = FixedRateBond(
effective=dt(2022, 1, 1),
termination="1Y",
spec="us_gb",
calc_mode="ust_31bii",
fixed_rate=2.0,
)
from rateslib.instruments.bonds.conventions import US_GB_TSY
assert bond.kwargs.meta["calc_mode"].kwargs == US_GB_TSY.kwargs
assert bond.kwargs.leg1["convention"] == "actacticma"
assert bond.kwargs.leg1["currency"] == "usd"
assert bond.kwargs.leg1["fixed_rate"] == 2.0
assert bond.kwargs.leg1["schedule"].payment_adjuster3 == Adjuster.BusDaysLagSettle(-1)
def test_indexfixedratebond(self) -> None:
bond = IndexFixedRateBond(
effective=dt(2022, 1, 1),
termination="1Y",
spec="uk_gbi",
calc_mode="ust",
fixed_rate=2.0,
)
assert bond.kwargs.leg1["convention"] == "actacticma"
assert bond.kwargs.leg1["currency"] == "gbp"
assert bond.kwargs.leg1["fixed_rate"] == 2.0
assert bond.kwargs.leg1["schedule"].payment_adjuster3 == Adjuster.BusDaysLagSettle(-7)
assert bond.kwargs.meta["calc_mode"] == US_GB
def test_bill(self) -> None:
bill = Bill(
effective=dt(2022, 1, 1),
termination="3m",
spec="us_gbb",
convention="act365f",
)
from rateslib.instruments.bonds.conventions import US_GBB
assert bill.kwargs.meta["calc_mode"] == US_GBB
assert bill.kwargs.leg1["convention"] == "act365f"
assert bill.kwargs.leg1["currency"] == "usd"
assert bill.kwargs.leg1["fixed_rate"] == 0.0
def test_fra(self) -> None:
from rateslib.enums.parameters import FloatFixingMethod
fra = FRA(
effective=dt(2022, 1, 1),
termination="3m",
spec="eur_fra3",
payment_lag=5,
modifier="F",
fixed_rate=2.0,
)
assert fra.kwargs.leg2["fixing_method"] == "ibor(2)"
assert fra.kwargs.leg1["convention"] == "act360"
assert fra.kwargs.leg1["currency"] == "eur"
assert fra.kwargs.leg2["currency"] == "eur"
assert fra.kwargs.leg1["fixed_rate"] == 2.0
assert fra.kwargs.leg2["schedule"].payment_adjuster == Adjuster.BusDaysLagSettleInAdvance(5)
assert fra.kwargs.leg2["schedule"].modifier == Adjuster.Following()
def test_frn(self) -> None:
frn = FloatRateNote(
effective=dt(2022, 1, 1),
termination="3y",
spec="usd_frn5",
payment_lag=5,
)
assert frn.kwargs.leg1["fixing_method"] == "rfr_observation_shift(5)"
assert frn.kwargs.leg1["convention"] == "act360"
assert frn.kwargs.leg1["currency"] == "usd"
assert frn.kwargs.leg1["schedule"].payment_adjuster == Adjuster.BusDaysLagSettle(5)
assert frn.kwargs.leg1["schedule"].modifier == Adjuster.ModifiedFollowing()
def test_xcs(self) -> None:
xcs = XCS(
effective=dt(2022, 1, 1),
termination="3y",
spec="eurusd_xcs",
payment_lag=5,
calendar="ldn,tgt,nyc",
)
assert xcs.kwargs.leg1["fixing_method"] == "rfr_payment_delay"
assert xcs.kwargs.leg1["convention"] == "act360"
assert xcs.kwargs.leg1["currency"] == "eur"
assert xcs.kwargs.leg1["schedule"].calendar == NamedCal("ldn,tgt,nyc")
assert xcs.kwargs.leg2["schedule"].calendar == NamedCal("ldn,tgt,nyc")
assert xcs.kwargs.leg1["schedule"].payment_adjuster == Adjuster.BusDaysLagSettle(5)
assert xcs.kwargs.leg2["schedule"].payment_adjuster == Adjuster.BusDaysLagSettle(5)
@pytest.mark.parametrize(
("inst", "expected"),
[
(
IRS(
dt(2022, 1, 1),
"9M",
"Q",
currency="eur",
curves=["eureur", "eur_eurusd"],
fixed_rate=4.0,
),
DataFrame(
data=[-3808.80973, -3850.91496, -3893.01546],
index=Index([dt(2022, 4, 3), dt(2022, 7, 3), dt(2022, 10, 3)], name="payment"),
columns=MultiIndex.from_tuples(
tuples=[("EUR", "usd,eur")],
names=["local_ccy", "collateral_ccy"],
),
),
),
(
SBS(
dt(2022, 1, 1),
"9M",
"Q",
leg2_frequency="S",
currency="eur",
curves=["eureur", "eurusd", "eureur"],
),
DataFrame(
[-0.00, -6260.19615, 6299.81823],
index=Index([dt(2022, 4, 3), dt(2022, 7, 3), dt(2022, 10, 3)], name="payment"),
columns=MultiIndex.from_tuples(
[("EUR", "usd")],
names=["local_ccy", "collateral_ccy"],
),
),
),
(
FRA(
dt(2022, 1, 15),
"3M",
"Q",
currency="eur",
curves=["eureur", "eureur"],
fixed_rate=4.0,
),
DataFrame(
[-3785.37376],
index=Index([dt(2022, 1, 15)], name="payment"),
columns=MultiIndex.from_tuples(
[("EUR", "eur")],
names=["local_ccy", "collateral_ccy"],
),
),
),
(
FXForward(
dt(2022, 1, 15),
pair="eurusd",
curves=["eureur", "eureur", "usdusd", "usdeur"],
),
DataFrame(
[[1000000.0, -1101072.93429]],
index=Index([dt(2022, 1, 15)], name="payment"),
columns=MultiIndex.from_tuples(
[("EUR", "eur"), ("USD", "eur")],
names=["local_ccy", "collateral_ccy"],
),
),
),
(
XCS(
dt(2022, 1, 5),
"3M",
"M",
currency="eur",
pair="eurusd",
leg2_mtm=True,
curves=["eureur", "eurusd", "usdusd", "usdusd"],
),
DataFrame(
[
[1000000.0, -1100306.44743],
[0.0, -2377.86409],
[-2128.20822, 4630.97804],
[0.0, -2152.16480],
[-1922.05479, 4191.00596],
[-1000000, 1104836.47633],
[-2128.20822, 4650.04405],
],
index=Index(
[
dt(2022, 1, 5),
dt(2022, 2, 5),
dt(2022, 2, 7),
dt(2022, 3, 5),
dt(2022, 3, 7),
dt(2022, 4, 5),
dt(2022, 4, 7),
],
name="payment",
),
columns=MultiIndex.from_tuples(
[("EUR", "usd"), ("USD", "usd")],
names=["local_ccy", "collateral_ccy"],
),
),
),
(
FXSwap(
dt(2022, 1, 5),
"3M",
pair="eurusd",
curves=["eureur", "eurusd", "usdusd", "usdusd"],
),
DataFrame(
[[-1000000.0, 1100306.44743], [1000000.0, -1107224.13024]],
index=Index([dt(2022, 1, 5), dt(2022, 4, 5)], name="payment"),
columns=MultiIndex.from_tuples(
[("EUR", "usd"), ("USD", "usd")],
names=["local_ccy", "collateral_ccy"],
),
),
),
],
)
def test_fx_settlements_table(inst, expected) -> None:
usdusd = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.95}, id="usdusd")
eureur = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.975}, id="eureur")
eurusd = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.976}, id="eurusd")
fxr = FXRates({"eurusd": 1.1}, settlement=dt(2022, 1, 1))
fxf = FXForwards(
fx_rates=fxr,
fx_curves={
"usdusd": usdusd,
"eureur": eureur,
"eurusd": eurusd,
},
)
usdeur = fxf.curve("usd", "eur", id="usdeur")
eur_eurusd = fxf.curve("eur", ["usd", "eur"], id="eur_eurusd")
solver = Solver(
curves=[usdusd, eureur, eurusd, usdeur, eur_eurusd],
instruments=[
IRS(dt(2022, 1, 1), "1y", "A", curves=usdusd),
IRS(dt(2022, 1, 1), "1y", "A", curves=eureur),
XCS(
dt(2022, 1, 1),
"1y",
"Q",
currency="eur",
pair="eurusd",
curves=[eureur, eurusd, usdusd, usdusd],
),
],
s=[5.0, 2.5, -10],
fx=fxf,
)
assert eureur.meta.collateral == "eur" # collateral tags populated by FXForwards
pf = Portfolio([inst])
result = pf.cashflows_table(solver=solver)
assert_frame_equal(expected, result, atol=1e-4)
result = inst.cashflows_table(solver=solver)
assert_frame_equal(expected, result, atol=1e-4)
def test_fx_settlements_table_no_fxf() -> None:
solver = Solver(
curves=[Curve({dt(2023, 8, 1): 1.0, dt(2024, 8, 1): 1.0}, id="usd")],
instruments=[IRS(dt(2023, 8, 1), "1Y", "Q", curves="usd")],
s=[2.0],
instrument_labels=["1Y"],
id="us_rates",
algorithm="gauss_newton",
)
irs_mkt = IRS(
dt(2023, 8, 1),
"1Y",
"Q",
curves="usd",
fixed_rate=2.0,
notional=999556779.81,
)
result = irs_mkt.cashflows_table(solver=solver)
assert abs(result.iloc[0, 0] - 69.49810) < 1e-5
assert abs(result.iloc[3, 0] - 69.49810) < 1e-5
@pytest.fixture
def fxfo():
# FXForwards for FX Options tests
eureur = Curve(
{dt(2023, 3, 16): 1.0, dt(2023, 9, 16): 0.9851909811629752},
calendar="tgt",
id="eureur",
)
usdusd = Curve(
{dt(2023, 3, 16): 1.0, dt(2023, 9, 16): 0.976009366603271},
calendar="nyc",
id="usdusd",
)
eurusd = Curve({dt(2023, 3, 16): 1.0, dt(2023, 9, 16): 0.987092591908283}, id="eurusd")
fxr = FXRates({"eurusd": 1.0615}, settlement=dt(2023, 3, 20))
fxf = FXForwards(fx_curves={"eureur": eureur, "eurusd": eurusd, "usdusd": usdusd}, fx_rates=fxr)
# fxf.swap("eurusd", [dt(2023, 3, 20), dt(2023, 6, 20)]) = 60.10
return fxf
class TestFXOptions:
# replicate https://quant.stackexchange.com/a/77802/29443
@pytest.mark.parametrize(
("pay", "k", "exp_pts", "exp_prem", "dlty", "exp_dl"),
[
(dt(2023, 3, 20), 1.101, 69.378, 138756.54, "spot", 0.250124),
(dt(2023, 3, 20), 1.101, 69.378, 138756.54, "forward", 0.251754),
(dt(2023, 6, 20), 1.101, 70.226, 140451.53, "spot", 0.250124),
(dt(2023, 6, 20), 1.101, 70.226, 140451.53, "forward", 0.251754),
(dt(2023, 6, 20), 1.10101922, 70.180, 140360.17, "spot", 0.250000),
],
)
@pytest.mark.parametrize("smile", [True, False])
def test_big_usd_pips(self, fxfo, pay, k, exp_pts, exp_prem, dlty, exp_dl, smile) -> None:
vol = FXDeltaVolSmile(
{
0.75: 8.9,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="spot",
id="vol",
ad=1,
)
vol = vol if smile else 8.90
fxc = FXCall(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
strike=k,
payment_lag=pay,
delivery_lag=2,
calendar="tgt",
modifier="mf",
premium_ccy="usd",
delta_type=dlty,
)
result = fxc.rate(
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
fx=fxfo,
vol=vol,
)
assert abs(result - exp_pts) < 1e-3
result = fxc.rate(
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
fx=fxfo,
vol=vol,
metric="premium",
)
assert abs(result - exp_prem) < 1e-2
@pytest.mark.parametrize(
("pay", "k", "exp_pts", "exp_prem", "exp_dl"),
[
(dt(2023, 3, 20), 1.101, 0.6536, 130717.44, 0.245175),
(dt(2023, 6, 20), 1.101, 0.6578, 131569.29, 0.245178),
],
)
@pytest.mark.parametrize(
"vol",
[
8.9,
FXDeltaVolSmile(
nodes={0.5: 8.9},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
),
FXSabrSmile(
nodes={"alpha": 0.089, "beta": 1.0, "rho": 0.0, "nu": 0.0},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
),
],
)
def test_premium_big_eur_pc(self, fxfo, pay, k, exp_pts, exp_prem, exp_dl, vol) -> None:
fxo = FXCall(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery_lag=dt(2023, 6, 20),
payment_lag=pay,
strike=k,
notional=20e6,
delta_type="forward",
premium_ccy="eur",
)
result = fxo.rate(
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
fx=fxfo,
vol=vol,
)
expected = exp_pts
assert abs(result - expected) < 1e-3
result = 20e6 * result / 100
expected = exp_prem
assert abs(result - expected) < 1e-1
@pytest.mark.parametrize(
("pay", "k", "exp_pts", "exp_prem", "exp_dl"),
[
(dt(2023, 3, 20), 1.101, 0.6536, 130717.44, 0.243588),
(dt(2023, 6, 20), 1.101, 0.6578, 131569.29, 0.243548),
],
)
@pytest.mark.parametrize(
"vol",
[
8.9,
FXDeltaVolSmile(
nodes={0.5: 8.9},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="spot",
),
FXSabrSmile(
nodes={"alpha": 0.089, "beta": 1.0, "rho": 0.0, "nu": 0.0},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
),
],
)
def test_premium_big_eur_pc_spot(self, fxfo, pay, k, exp_pts, exp_prem, exp_dl, vol) -> None:
fxo = FXCall(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery_lag=dt(2023, 6, 20),
payment_lag=pay,
strike=k,
notional=20e6,
delta_type="spot",
premium_ccy="eur",
)
result = fxo.rate(
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
fx=fxfo,
vol=vol,
)
expected = exp_pts
assert abs(result - expected) < 1e-3
result = 20e6 * result / 100
expected = exp_prem
assert abs(result - expected) < 1e-1
def test_fx_call_npv_unpriced(self, fxfo) -> None:
fxo = FXCall(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike=1.101,
)
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
result = fxo.npv(curves=curves, fx=fxfo, vol=8.9)
expected = 0.0
assert abs(result - expected) < 1e-6
def test_fx_call_cashflows(self, fxfo) -> None:
fxo = FXCall(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike=1.101,
)
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
result = fxo.cashflows(curves=curves, fx=fxfo, vol=8.9)
assert isinstance(result, DataFrame)
assert result["Type"].iloc[0] == "FXCallPeriod"
assert result["Type"].iloc[1] == "Cashflow"
def test_fx_call_cashflows_table(self, fxfo) -> None:
fxo = FXCall(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike=1.101,
)
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
result = fxo.cashflows_table(curves=curves, fx=fxfo, vol=8.9)
expected = DataFrame(
data=[[0.0]],
index=Index([dt(2023, 6, 20)], name="payment"),
columns=MultiIndex.from_tuples([("USD", "usd")], names=["local_ccy", "collateral_ccy"]),
)
assert_frame_equal(result, expected)
@pytest.mark.parametrize(
("ccy", "exp_rate", "exp_strike"),
[
("usd", 70.180131, 1.10101920113408469),
("eur", 0.680949, 1.099976),
],
)
@pytest.mark.parametrize(
"vol",
[
8.90,
FXDeltaVolSmile(
{
0.75: 8.9,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="spot",
id="vol",
ad=1,
),
FXSabrSmile(
nodes={"alpha": 0.089, "beta": 1.0, "rho": 0.0, "nu": 0.0},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
id="vol",
ad=1,
),
],
)
def test_fx_call_rate_delta_strike(self, fxfo, ccy, exp_rate, exp_strike, vol) -> None:
fxo = FXCall(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike="25d",
delta_type="spot",
premium_ccy=ccy,
)
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
result = fxo.rate(curves=curves, fx=fxfo, vol=vol)
expected = exp_rate
assert abs(result - expected) < 1e-6
assert abs(fxo._option.fx_option_params.strike - exp_strike) < 1e-4
def test_fx_call_rate_expiry_tenor(self, fxfo) -> None:
fxo = FXCall(
pair="eurusd",
expiry="3m",
eval_date=dt(2023, 3, 16),
modifier="mf",
notional=20e6,
delivery_lag=2,
payment_lag=dt(2023, 6, 20),
calendar="tgt",
strike="25d",
delta_type="spot",
)
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
result = fxo.rate(curves=curves, fx=fxfo, vol=8.9)
expected = 70.180131
assert abs(result - expected) < 1e-6
def test_fx_call_plot_payoff(self, fxfo) -> None:
fxc = FXCall(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
strike=1.101,
premium=0.0,
)
result = fxc.plot_payoff(
[1.03, 1.12],
fx=fxfo,
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
)
x, y = result[2][0]._x, result[2][0]._y
assert x[0] == 1.03
assert x[1000] == 1.12
assert y[0] == 0.0
assert y[1000] == (1.12 - 1.101) * 20e6
def test_fx_put_rate(self, fxfo) -> None:
fxo = FXPut(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike="-25d",
delta_type="spot",
)
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
result = fxo.rate(curves=curves, fx=fxfo, vol=10.15)
expected = 83.975596
assert abs(result - expected) < 1e-6
def test_str_tenor_raises(self) -> None:
with pytest.raises(ValueError, match="`expiry` as string tenor requires `eval_date`"):
FXCall(pair="eurusd", expiry="3m", strike=1.0)
def test_premium_ccy_raises(self) -> None:
with pytest.raises(
ValueError,
match="`premium_ccy`: 'chf' must be one of option currency pair",
):
FXCall(
pair="eurusd",
expiry="3m",
eval_date=dt(2023, 3, 16),
premium_ccy="chf",
strike=1.0,
)
@pytest.mark.parametrize("dlty", [("forward")])
def test_call_put_parity_50d(self, fxfo, dlty) -> None:
fxp = FXPut(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike="-50d",
premium_ccy="usd",
delta_type=dlty,
)
fxc = FXCall(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike="50d",
premium_ccy="usd",
delta_type=dlty,
)
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
assert abs(fxc.analytic_greeks(curves, fx=fxfo, vol=10.0)["delta"] - 0.5) < 1e-14
assert abs(fxc._option.fx_option_params.strike - 1.068856) < 1e-6
assert abs(fxp.analytic_greeks(curves, fx=fxfo, vol=10.0)["delta"] + 0.5) < 1e-14
assert abs(fxp._option.fx_option_params.strike - 1.068856) < 1e-6
def test_analytic_vega(self, fxfo) -> None:
fxo = FXCall(
pair="eurusd",
expiry="3m",
eval_date=dt(2023, 3, 16),
modifier="mf",
notional=20e6,
delivery_lag=2,
payment_lag=dt(2023, 3, 16),
calendar="tgt",
strike=1.101,
delta_type="spot",
)
result = fxo.analytic_greeks(
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
fx=fxfo,
vol=8.9,
)["vega"]
# see test_periods/test_analytic_vega
assert abs(result * 20e6 / 100 - 33757.945) < 1e-2
@pytest.mark.skip(
reason="An Option could expire in the past but settle forward, should still price"
)
def test_rate_vol_raises(self, fxfo) -> None:
args = {
"expiry": dt(2009, 6, 16),
"pair": "eurusd",
"curves": [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
"delta_type": "spot",
}
vol = FXDeltaVolSmile(
{0.75: 8.9},
eval_date=dt(2009, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="spot",
id="vol",
ad=1,
)
fxc = FXCall(strike=1.10, **args, notional=100e6, vol=vol)
# the expiry is before the eval date. This still needs to price
with pytest.raises(ValueError, match="The `eval_date` on the FXDeltaVolSmile and the"):
fxc.rate(fx=fxfo)
@pytest.mark.parametrize("phi", [-1.0, 1.0])
@pytest.mark.parametrize("prem_ccy", ["usd", "eur"])
@pytest.mark.parametrize("dt_0", ["spot", "forward"])
@pytest.mark.parametrize("dt_1", ["spot", "forward", "spot_pa", "forward_pa"])
@pytest.mark.parametrize("smile", [True, False])
def test_atm_rates(self, fxfo, phi, prem_ccy, smile, dt_0, dt_1) -> None:
FXOp = FXCall if phi > 0 else FXPut
fxvs = FXDeltaVolSmile(
{0.25: 10.15, 0.5: 7.8, 0.75: 8.9},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type=dt_1,
id="vol",
)
vol = fxvs if smile else 9.50
fxo = FXOp(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery_lag=dt(2023, 6, 20),
payment_lag=dt(2023, 6, 20),
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
delta_type=dt_0,
vol=vol,
premium_ccy=prem_ccy,
strike="atm_delta",
)
result = fxo.analytic_greeks(fx=fxfo)
f_d = fxfo.rate("eurusd", dt(2023, 6, 20))
eta = 0.5 if prem_ccy == "usd" else -0.5
expected = f_d * dual_exp(result["__vol"] ** 2 * fxvs.meta.t_expiry * eta)
assert abs(result["__strike"] - expected) < 1e-8
@pytest.mark.parametrize("phi", [-1.0, 1.0])
@pytest.mark.parametrize("prem_ccy", ["usd", "eur"])
@pytest.mark.parametrize("dt_0", ["spot", "forward"])
def test_atm_rates_sabr(self, fxfo, phi, prem_ccy, dt_0) -> None:
FXOp = FXCall if phi > 0 else FXPut
vol = FXSabrSmile(
{"alpha": 0.072, "beta": 1.0, "rho": -0.1, "nu": 0.80},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
id="vol",
)
fxo = FXOp(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery_lag=dt(2023, 6, 20),
payment_lag=dt(2023, 6, 20),
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
delta_type=dt_0,
vol=vol,
premium_ccy=prem_ccy,
strike="atm_delta",
)
result = fxo.analytic_greeks(fx=fxfo)
f_d = fxfo.rate("eurusd", dt(2023, 6, 20))
eta = 0.5 if prem_ccy == "usd" else -0.5
expected = f_d * dual_exp(result["__vol"] ** 2 * vol.meta.t_expiry * eta)
assert abs(result["__strike"] - expected) < 1e-8
@pytest.mark.parametrize("phi", [1.0, -1.0])
@pytest.mark.parametrize(
("vol_", "expected"),
[
(
FXDeltaVolSmile(
{0.25: 10.15, 0.5: 7.8, 0.75: 8.9},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="spot",
),
8.899854,
),
(
FXSabrSmile(
nodes={"alpha": 0.078, "beta": 1.0, "rho": 0.03, "nu": 0.04},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
),
7.799409,
),
(
FXSabrSurface(
expiries=[dt(2023, 5, 16), dt(2023, 7, 16)],
node_values=[
[0.078, 1.0, 0.03, 0.04],
[0.08, 1.0, 0.04, 0.05],
],
eval_date=dt(2023, 3, 16),
pair="eurusd",
calendar="tgt|fed",
),
7.934473,
),
],
)
def test_traded_option_rate_vol(self, fxfo, phi, vol_, expected) -> None:
FXOp = FXCall if phi > 0 else FXPut
fxo = FXOp(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery_lag=dt(2023, 6, 20),
payment_lag=dt(2023, 6, 20),
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
delta_type="spot",
premium_ccy="usd",
strike=1.05,
premium=100000.0,
)
result = fxo.rate(
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
vol=vol_,
fx=fxfo,
metric="vol",
)
assert abs(result - expected) < 1e-6
def test_option_strike_premium_validation(self) -> None:
with pytest.raises(TypeError, match="missing 1 required positional argument: 'strike'"):
FXCall(
pair="eurusd",
expiry=dt(2023, 6, 16),
)
with pytest.raises(ValueError, match="FXOption with string delta as `strike` cannot be"):
FXCall(pair="eurusd", expiry=dt(2023, 6, 16), strike="25d", premium=0.0)
@pytest.mark.parametrize(
("notn", "expected", "phi"),
[
(1e6, [0.5, 500000], 1.0),
(2e6, [0.5, 1000000], 1.0),
(-2e6, [0.5, 1000000], 1.0),
(1e6, [-0.5, -500000], -1.0),
(2e6, [-0.5, -1000000], -1.0),
(-2e6, [-0.5, -1000000], -1.0),
],
)
def test_greeks_delta_direction(self, fxfo, notn, expected, phi) -> None:
# test the delta and delta_eur are not impacted by a Buy or Sell. Delta is expressed
# relative to a Buy.
FXOp = FXCall if phi > 0 else FXPut
delta = f"{'-' if phi < 0 else ''}50d"
fxo = fxo = FXOp(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery_lag=dt(2023, 6, 20),
payment_lag=dt(2023, 6, 20),
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
delta_type="forward",
premium_ccy="usd",
strike=delta,
notional=notn,
)
fxvs = FXDeltaVolSmile(
{0.25: 10.15, 0.5: 7.8, 0.75: 8.9},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
)
result = fxo.analytic_greeks(
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
vol=fxvs,
fx=fxfo,
)
assert abs(result["delta"] - expected[0]) < 1e-6
assert abs(result["delta_eur"] - expected[1]) < 1e-6
def test_metric_and_period_metric_compatible(self) -> None:
# ensure that vol and pips_or_% can be interchanged
eur = Curve({dt(2024, 6, 20): 1.0, dt(2024, 9, 30): 1.0}, calendar="tgt")
usd = Curve({dt(2024, 6, 20): 1.0, dt(2024, 9, 30): 1.0}, calendar="nyc")
eurusd = Curve({dt(2024, 6, 20): 1.0, dt(2024, 9, 30): 1.0})
fxr = FXRates({"eurusd": 1.0727}, settlement=dt(2024, 6, 24))
fxf = FXForwards(fx_rates=fxr, fx_curves={"eureur": eur, "eurusd": eurusd, "usdusd": usd})
pre_solver = Solver(
curves=[eur, usd, eurusd],
instruments=[
IRS(dt(2024, 6, 24), "3m", spec="eur_irs", curves=eur),
IRS(dt(2024, 6, 24), "3m", spec="usd_irs", curves=usd),
FXForward(
pair="eurusd",
settlement=dt(2024, 9, 24),
curves=[eurusd, usd],
),
],
s=[3.77, 5.51, 1.0775],
fx=fxf,
)
smile = FXDeltaVolSmile(
nodes={0.25: 5.0, 0.50: 5.0, 0.75: 5.0},
eval_date=dt(2024, 6, 20),
expiry=dt(2024, 9, 20),
delta_type="spot",
)
fx_args = dict(
expiry=dt(2024, 9, 20),
pair="eurusd",
delta_type="spot",
metric="vol", # note how the option is pre-configured with a metric as "vol"
curves=[eurusd, usd],
vol=smile,
premium_ccy="eur",
delivery_lag=2,
payment_lag=2,
)
solver = Solver(
pre_solvers=[pre_solver],
curves=[smile],
instruments=[
FXPut(strike=1.0504, **fx_args),
FXCall(strike=1.0728, **fx_args),
FXCall(strike=1.0998, **fx_args),
],
s=[7.621, 6.60, 6.12],
fx=fxf,
)
result = FXCall(strike=1.0728, **fx_args).rate(metric="pips_or_%", solver=solver)
expected = 1.543289 # % of EUR notional
assert abs(result - expected) < 1e-6
result = FXCall(strike=1.0728, **fx_args).rate(solver=solver) # should default to "vol"
expected = 6.60 # vol points
assert abs(result - expected) < 1e-6
@pytest.mark.parametrize(
("evald", "eom", "expected"),
[
(
dt(2024, 4, 26),
True,
dt(2024, 5, 29),
), # 2bd before 31st May (rolled from End of April)
(
dt(2024, 4, 26),
False,
dt(2024, 5, 28),
), # 2bd before 30th May (rolled from 30th April)
],
)
def test_expiry_delivery_tenor_eom(self, evald, eom, expected) -> None:
fxo = FXCall(
pair="eurusd",
expiry="1m",
eval_date=evald,
eom=eom,
calendar="tgt|fed",
modifier="mf",
strike=1.0,
)
assert fxo.kwargs.leg1["expiry"] == expected
def test_single_vol_not_no_input(self, fxfo):
fxo = FXCall(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery_lag=dt(2023, 6, 20),
payment_lag=dt(2023, 6, 20),
curves=[fxfo.curve("eur", "eur"), fxfo.curve("usd", "eur")],
delta_type="forward",
premium_ccy="usd",
strike=1.1,
notional=1e6,
)
with pytest.raises(ValueError, match="`vol` must be supplied. Got"):
fxo.rate(metric="vol", fx=fxfo)
def test_hyper_parameter_setting_and_solver_interaction(self):
# Define the interest rate curves for EUR, USD and X-Ccy basis
usdusd = Curve({dt(2024, 5, 7): 1.0, dt(2024, 5, 30): 1.0}, calendar="nyc", id="usdusd")
eureur = Curve({dt(2024, 5, 7): 1.0, dt(2024, 5, 30): 1.0}, calendar="tgt", id="eureur")
eurusd = Curve({dt(2024, 5, 7): 1.0, dt(2024, 5, 30): 1.0}, id="eurusd")
# Create an FX Forward market with spot FX rate data
fxr = FXRates({"eurusd": 1.0760}, settlement=dt(2024, 5, 9))
fxf = FXForwards(
fx_rates=fxr,
fx_curves={"eureur": eureur, "usdusd": usdusd, "eurusd": eurusd},
)
pre_solver = Solver(
curves=[eureur, eurusd, usdusd],
instruments=[
IRS(dt(2024, 5, 9), "3W", spec="eur_irs", curves="eureur"),
IRS(dt(2024, 5, 9), "3W", spec="usd_irs", curves="usdusd"),
FXSwap(dt(2024, 5, 9), "3W", pair="eurusd", curves=["eurusd", "usdusd"]),
],
s=[3.90, 5.32, 8.85],
fx=fxf,
id="rates_sv",
)
dv_smile = FXDeltaVolSmile(
nodes={
0.10: 10.0,
0.25: 10.0,
0.50: 10.0,
0.75: 10.0,
0.90: 10.0,
},
eval_date=dt(2024, 5, 7),
expiry=dt(2024, 5, 28),
delta_type="spot",
id="eurusd_3w_smile",
)
option_args = dict(
pair="eurusd",
expiry=dt(2024, 5, 28),
calendar="tgt|fed",
delta_type="spot",
curves=["eurusd", "usdusd"],
vol="eurusd_3w_smile",
)
dv_solver = Solver(
pre_solvers=[pre_solver],
curves=[dv_smile],
instruments=[
FXStraddle(strike="atm_delta", **option_args),
FXRiskReversal(strike=("-25d", "25d"), **option_args),
FXRiskReversal(strike=("-10d", "10d"), **option_args),
FXBrokerFly(strike=(("-25d", "25d"), "atm_delta"), **option_args),
FXBrokerFly(strike=(("-10d", "10d"), "atm_delta"), **option_args),
],
s=[5.493, -0.157, -0.289, 0.071, 0.238],
fx=fxf,
id="dv_solver",
)
fc = FXCall(
expiry=dt(2024, 5, 28),
pair="eurusd",
strike=1.07,
notional=100e6,
curves=["eurusd", "usdusd"],
vol="eurusd_3w_smile",
premium=98.216647 * 1e8 / 1e4,
premium_ccy="usd",
delta_type="spot",
)
assert abs(fc.npv(solver=dv_solver, base="usd")) < 1e-2
delta = fc.delta(solver=dv_solver, base="usd").loc[("fx", "fx", "eurusd"), ("all", "usd")]
gamma = fc.gamma(solver=dv_solver, base="usd").loc[
("all", "usd", "fx", "fx", "eurusd"), ("fx", "fx", "eurusd")
]
fxr.update({"eurusd": 1.0761})
pre_solver.iterate()
dv_solver.iterate()
result = fc.npv(solver=dv_solver, base="usd")
expected = delta + 0.5 * gamma
assert abs(result - expected) < 5e-2
fxr.update({"eurusd": 1.0759})
pre_solver.iterate()
dv_solver.iterate()
result = fc.npv(solver=dv_solver, base="usd")
expected = -delta + 0.5 * gamma
assert abs(result - expected) < 5e-2
@pytest.mark.parametrize("k", [1.07, "25d", "atm_delta"])
def test_pricing_with_interpolated_sabr_surface(self, k, fxfo):
surf = FXSabrSurface(
eval_date=dt(2023, 3, 16),
expiries=[dt(2023, 6, 16), dt(2023, 10, 17)],
node_values=[[0.05, 1.0, 0.03, 0.04], [0.055, 1.0, 0.04, 0.05]],
pair="eurusd",
calendar="tgt|fed",
ad=1,
id="v",
)
fxc = FXCall(
expiry=dt(2023, 7, 21),
pair="eurusd",
calendar="tgt|fed",
delta_type="spot",
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
vol=surf,
strike=k,
)
fxc.rate(fx=fxfo)
result = fxc._pricing
assert abs(result.vol - 5.25) < 1e-2
assert np.all(gradient(result.vol, vars=["v_0_0", "v_1_0"]) > 49.2)
assert np.all(gradient(result.vol, vars=["v_0_0", "v_1_0"]) < 50.6)
@pytest.mark.skip(reason="non-deliverability for FXOption instruments not yet implemented")
@pytest.mark.parametrize("ndpair", [FXIndex("usdbrl", "all", 0), FXIndex("brlusd", "all", 0)])
def test_non_deliverable_fx_option_npv_vol_from_delta(self, ndpair):
# see the equivalent test for an FXOptionPeriod with static vol
fxf = FXForwards(
fx_rates=FXRates({"usdbrl": 5.0}, settlement=dt(2000, 1, 1)),
fx_curves={
"usdusd": Curve({dt(2000, 1, 1): 1.0, dt(2000, 6, 1): 0.98}),
"brlusd": Curve({dt(2000, 1, 1): 1.0, dt(2000, 6, 1): 0.983}),
"brlbrl": Curve({dt(2000, 1, 1): 1.0, dt(2000, 6, 1): 0.984}),
},
)
fxv = FXDeltaVolSmile(
nodes={0.4: 10.0, 0.6: 11.0},
eval_date=dt(2000, 1, 1),
expiry=dt(2000, 2, 28),
delta_type="forward",
)
fxo = FXCall(
delivery_lag=dt(2000, 3, 1),
pair="USDBRL",
strike="50d",
delta_type="spot",
expiry=dt(2000, 2, 28),
)
fxond = FXCall(
delivery_lag=dt(2000, 3, 1),
pair="USDBRL",
nd_pair=ndpair,
delta_type="spot",
strike="50d",
expiry=dt(2000, 2, 28),
)
npv = fxo.local_npv(
fx=fxf,
vol=fxv,
curves=[fxf.curve("usd", "usd"), fxf.curve("brl", "usd")],
)
npv_nd = fxond.local_npv(
fx=fxf,
vol=fxv,
curves=[fxf.curve("usd", "usd"), fxf.curve("usd", "usd")],
)
# local NPV should be expressed in USD for ND type
result = npv / 5.0 - npv_nd
assert abs(result) < 1e-9
class TestRiskReversal:
@pytest.mark.parametrize(
("metric", "expected"),
[
("pips_or_%", -13.795465),
("vol", -1.25),
("premium", -27590.930533),
],
)
def test_risk_reversal_rate_metrics(self, fxfo, metric, expected) -> None:
fxo = FXRiskReversal(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike=["-25d", "25d"],
delta_type="spot",
)
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
result = fxo.rate(curves=curves, fx=fxfo, vol=[10.15, 8.9], metric=metric)
assert abs(result - expected) < 1e-6
@pytest.mark.parametrize(
("prem", "prem_ccy", "local", "exp"),
[
((NoInput(0), NoInput(0)), NoInput(0), False, 0.0),
((NoInput(0), NoInput(0)), "eur", False, 0.0),
((-167500.0, 140500.0), "usd", False, -219.590678),
((-167500 / 1.06751, 140500 / 1.06751), "eur", False, -219.590678),
(
(-167500 / 1.06751, 140500 / 1.06751),
"eur",
True,
{"eur": 25121.646, "usd": -26879.673},
),
],
)
def test_risk_reversal_npv(self, fxfo, prem, prem_ccy, local, exp) -> None:
fxo = FXRiskReversal(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike=[1.033, 1.101],
premium=prem,
premium_ccy=prem_ccy,
)
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
result = fxo.npv(curves=curves, fx=fxfo, vol=[10.15, 8.9], local=local)
expected = exp
if not local:
assert abs(result - expected) < 1e-6
else:
for k in expected:
assert abs(result[k] - expected[k]) < 1e-3
@pytest.mark.parametrize("prem_ccy", ["usd", "eur"])
def test_risk_reversal_component_npv(self, fxfo, prem_ccy) -> None:
fxo = FXPut(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike=1.033,
premium=NoInput(0),
premium_ccy=prem_ccy,
)
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
result = fxo.npv(curves=curves, fx=fxfo, vol=10.15, local=False)
expected = 0.0
assert abs(result - expected) < 1e-6
def test_risk_reversal_plot(self, fxfo) -> None:
fxo = FXRiskReversal(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike=[1.033, 1.101],
)
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
result = fxo.plot_payoff([1.03, 1.12], curves, fx=fxfo, vol=[10.15, 8.9])
x, y = result[2][0]._x, result[2][0]._y
assert x[0] == 1.03
assert x[1000] == 1.12
assert abs(y[0] + (1.033 - 1.03) * 20e6) < 1e-5
assert abs(y[1000] - (1.12 - 1.101) * 20e6) < 1e-5
def test_rr_strike_premium_validation(self) -> None:
with pytest.raises(TypeError, match="missing 1 required positional argument: 'strike'"):
FXRiskReversal(
pair="eurusd",
expiry=dt(2023, 6, 16),
)
with pytest.raises(ValueError, match="FXOption with string delta as `strike` cannot be in"):
FXRiskReversal(
pair="eurusd",
expiry=dt(2023, 6, 16),
strike=["25d", "35d"],
premium=[NoInput(0), 1.0],
)
@pytest.mark.parametrize(
("notn", "expected_grks", "expected_ccy"),
[
(1e6, [0.5, -1.329654, -0.035843], [500000, -14194.192533, -358.428628]),
(2e6, [0.5, -1.329654, -0.035843], [1000000, -28388.384, -716.8572]),
(-2e6, [0.5, -1.329654, -0.035843], [1000000, -28388.384, -716.8572]),
],
)
def test_greeks_delta_direction(self, fxfo, notn, expected_grks, expected_ccy) -> None:
# test the delta and delta_eur are not impacted by a Buy or Sell. Delta is expressed
# relative to a Buy.
fxo = FXRiskReversal(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery_lag=dt(2023, 6, 20),
payment_lag=dt(2023, 6, 20),
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
delta_type="forward",
premium_ccy="usd",
strike=["-30d", "20d"],
notional=notn,
)
fxvs = FXDeltaVolSmile(
{0.25: 10.15, 0.5: 7.8, 0.75: 8.9},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
)
result = fxo.analytic_greeks(
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
vol=fxvs,
fx=fxfo,
)
assert abs(result["delta"] - expected_grks[0]) < 1e-6
assert abs(result["gamma"] - expected_grks[1]) < 1e-6
assert abs(result["vega"] - expected_grks[2]) < 1e-6
assert abs(result["delta_eur"] - expected_ccy[0]) < 1e-2
assert abs(result["gamma_eur_1%"] - expected_ccy[1]) < 1e-2
assert abs(result["vega_usd"] - expected_ccy[2]) < 1e-2
def test_repr(self):
fxo = FXRiskReversal(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike=[1.033, 1.101],
)
expected = f""
assert fxo.__repr__() == expected
def test_cashflows(self, fxfo) -> None:
# test the delta and delta_eur are not impacted by a Buy or Sell. Delta is expressed
# relative to a Buy.
fxo = FXRiskReversal(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery_lag=dt(2023, 6, 20),
payment_lag=dt(2023, 6, 20),
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
delta_type="forward",
premium_ccy="usd",
strike=["-30d", "20d"],
)
result = fxo.cashflows()
assert isinstance(result, DataFrame)
@pytest.mark.parametrize("ccy", ["usd", "eur"])
def test_populate_curves_on_init(self, ccy):
fxo = FXRiskReversal(
pair="eurusd",
expiry=dt(2023, 6, 16),
curves=["A", "B"],
premium_ccy=ccy,
strike=["-30d", "20d"],
)
if ccy == "usd":
assert fxo.kwargs.meta["curves"].leg2_disc_curve == "B"
else:
assert fxo.kwargs.meta["curves"].leg2_disc_curve == "A"
def test_populate_all_vols_on_init(self):
# test also validates FXStraddle and FXStrangle
fxo = FXRiskReversal(
pair="eurusd",
expiry=dt(2023, 6, 16),
vol=["a", "b"],
strike=[1.10, 1.12],
)
assert fxo.instruments[0].kwargs.meta["vol"].fx_vol == "a"
assert fxo.instruments[1].kwargs.meta["vol"].fx_vol == "b"
def test_populate_single_vols_on_init(self):
# test also validates FXStraddle and FXStrangle
fxo = FXRiskReversal(
pair="eurusd",
expiry=dt(2023, 6, 16),
vol="myvol",
strike=[1.10, 1.12],
)
assert fxo.instruments[0].kwargs.meta["vol"].fx_vol == "myvol"
assert fxo.instruments[1].kwargs.meta["vol"].fx_vol == "myvol"
class TestFXStraddle:
@pytest.mark.parametrize(
("dlty", "strike", "ccy", "exp"),
[
# ("forward", ["50d", "-50d"], "usd", [1.068856203, 1.068856203]),
# ("spot", ["50d", "-50d"], "usd", [1.06841799, 1.069294591]),
("spot", "atm_forward", "usd", [1.06750999, 1.06750999]),
("spot", "atm_spot", "usd", [1.061500, 1.061500]),
("forward", "atm_delta", "usd", [1.068856203, 1.068856203]),
("spot", "atm_delta", "usd", [1.068856203, 1.068856203]),
("spot", "atm_forward", "eur", [1.06750999, 1.06750999]),
("spot", "atm_spot", "eur", [1.061500, 1.061500]),
("forward", "atm_delta", "eur", [1.06616549, 1.06616549]),
("spot", "atm_delta", "eur", [1.06616549, 1.06616549]),
# ("forward", ["50d", "-50d"], "eur", [1.0660752074, 1.06624508149]), # pa strikes
# ("spot", ["50d", "-50d"], "eur", [1.0656079102, 1.066656812]), # pa strikes
],
)
@pytest.mark.parametrize("smile", [True, False])
def test_straddle_strikes(self, fxfo, dlty, strike, ccy, exp, smile) -> None:
fxvs = FXDeltaVolSmile(
nodes={0.5: 10.0},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
)
vol_ = fxvs if smile else 10.0
fxo = FXStraddle(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike=strike,
premium_ccy=ccy,
delta_type=dlty,
)
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
fxo.npv(curves=curves, fx=fxfo, vol=vol_)
call_k = fxo.instruments[0]._option.fx_option_params.strike
put_k = fxo.instruments[1]._option.fx_option_params.strike
assert abs(call_k - exp[0]) < 1e-7
assert abs(put_k - exp[1]) < 1e-7
@pytest.mark.parametrize(
("metric", "expected"),
[
("pips_or_%", 337.998151),
("vol", 7.9),
("premium", 675996.301147),
],
)
def test_straddle_rate_metrics(self, fxfo, metric, expected) -> None:
fxo = FXStraddle(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike="atm_delta",
delta_type="spot",
)
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
result = fxo.rate(curves=curves, fx=fxfo, vol=7.9, metric=metric)
assert abs(result - expected) < 1e-6
def test_strad_strike_premium_validation(self) -> None:
with pytest.raises(TypeError, match="missing 1 required positional argument: 'strike'"):
FXStraddle(
pair="eurusd",
expiry=dt(2023, 6, 16),
)
with pytest.raises(ValueError, match="FXOption with string delta as `strike` cannot be "):
FXStraddle(
pair="eurusd",
expiry=dt(2023, 6, 16),
strike="25d",
premium=[NoInput(0), 1.0],
)
@pytest.mark.parametrize(
("notn", "expected_grks", "expected_ccy"),
[
(1e6, [0.0, 19.086488, 0.422238], [0, 203750.1688, 4222.379]),
(2e6, [0.0, 19.086488, 0.422238], [0, 407500.336, 8444.758]),
(-2e6, [0.0, 19.086488, 0.422238], [0, 407500.336, 8444.758]),
],
)
def test_greeks_delta_direction(self, fxfo, notn, expected_grks, expected_ccy) -> None:
# test the delta and delta_eur are not impacted by a Buy or Sell. Delta is expressed
# relative to a Buy.
fxo = FXStraddle(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery_lag=dt(2023, 6, 20),
payment_lag=dt(2023, 6, 20),
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
delta_type="forward",
premium_ccy="usd",
strike="atm_delta",
notional=notn,
)
fxvs = FXDeltaVolSmile(
{0.25: 10.15, 0.5: 7.8, 0.75: 8.9},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
)
result = fxo.analytic_greeks(
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
vol=fxvs,
fx=fxfo,
)
assert abs(result["delta"] - expected_grks[0]) < 1e-6
assert abs(result["gamma"] - expected_grks[1]) < 1e-6
assert abs(result["vega"] - expected_grks[2]) < 1e-6
assert abs(result["delta_eur"] - expected_ccy[0]) < 1e-2
assert abs(result["gamma_eur_1%"] - expected_ccy[1]) < 1e-2
assert abs(result["vega_usd"] - expected_ccy[2]) < 1e-2
def test_repr(self):
fxo = FXStraddle(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike=1.0,
)
expected = f""
assert expected == fxo.__repr__()
@pytest.mark.parametrize("ccy", ["usd", "eur"])
def test_populate_curves_on_init(self, ccy):
fxo = FXStraddle(
pair="eurusd",
expiry=dt(2023, 6, 16),
curves=["A", "B"],
premium_ccy=ccy,
strike=1.10,
)
if ccy == "usd":
assert fxo.kwargs.meta["curves"].leg2_disc_curve == "B"
else:
assert fxo.kwargs.meta["curves"].leg2_disc_curve == "A"
class TestFXStrangle:
@pytest.mark.parametrize(
("strike", "ccy"),
[
([1.02, 1.10], "usd"),
(["-20d", "20d"], "usd"),
([1.02, 1.10], "eur"),
(["-20d", "20d"], "eur"),
],
)
@pytest.mark.parametrize(
"vol",
[
FXDeltaVolSmile(
nodes={
0.25: 10.15,
0.50: 7.9,
0.75: 8.9,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
),
FXDeltaVolSmile(
nodes={
0.25: 10.15,
0.50: 7.9,
0.75: 8.9,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="spot_pa",
),
10.0,
FXSabrSmile(
nodes={
"alpha": 0.10,
"beta": 1.0,
"rho": 0.00,
"nu": 0.50,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
),
],
)
def test_strangle_rate_forward(self, fxfo, strike, ccy, vol) -> None:
# test pricing a straddle with vol 10.0 returns 10.0
fxo = FXStrangle(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike=strike,
premium_ccy=ccy,
delta_type="forward",
)
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
result = fxo.rate(curves=curves, fx=fxfo, vol=vol)
premium = fxo.rate(curves=curves, fx=fxfo, vol=result, metric="pips_or_%")
metric = "pips" if ccy == "usd" else "percent"
premium_vol = fxo.instruments[0]._option.rate(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol,
metric=metric,
)
premium_vol += fxo.instruments[1]._option.rate(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol,
metric=metric,
)
assert abs(premium - premium_vol) < 5e-2
@pytest.mark.parametrize(
"vol",
[
FXDeltaVolSmile(
nodes={
0.25: 10.15,
0.50: 7.9,
0.75: 8.9,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="spot",
ad=1,
),
FXSabrSmile(
nodes={
"alpha": 0.079,
"beta": 1.0,
"rho": 0.00,
"nu": 0.50,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
),
],
)
def test_strangle_rate_strike_str(self, fxfo, vol) -> None:
# test pricing a strangle with delta as string that is not a delta percent should fail?
fxo = FXStrangle(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike=["atm_spot", "atm_forward"],
premium_ccy="eur",
delta_type="forward",
)
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
result = fxo.rate(curves=curves, fx=fxfo, vol=vol)
premium = fxo.rate(curves=curves, fx=fxfo, vol=result, metric="pips_or_%")
metric = "percent"
premium_vol = fxo.instruments[0]._option.rate(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol,
metric=metric,
)
premium_vol += fxo.instruments[1]._option.rate(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol,
metric=metric,
)
assert abs(premium - premium_vol) < 5e-2
@pytest.mark.parametrize(
"vol",
[
FXDeltaVolSmile(
nodes={
0.25: 10.15,
0.50: 7.9,
0.75: 8.9,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="spot",
ad=1,
id="vol",
),
FXSabrSmile(
nodes={
"alpha": 0.079,
"beta": 1.0,
"rho": 0.00,
"nu": 0.50,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
ad=1,
id="vol",
),
],
)
def test_strangle_rate_ad(self, fxfo, vol) -> None:
# test pricing a strangle with delta as string that is not a delta percent should fail?
fxo = FXStrangle(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike=["atm_spot", "atm_forward"],
premium_ccy="eur",
delta_type="forward",
)
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
result = fxo.rate(curves=curves, fx=fxfo, vol=vol)
# test fwd diff
v = vol._get_node_vector()
m_ = {
0: [0.001, 0.0, 0.0],
1: [0.0, 0.001, 0.0],
2: [0.0, 0.0, 0.001],
}
for i in range(3):
vol._set_node_vector(v + np.array(m_[i]), ad=1)
result2 = fxo.rate(curves=curves, fx=fxfo, vol=vol)
fwd_diff = (result2 - result) * 1000.0
assert abs(fwd_diff - gradient(result, [f"vol{i}"])[0]) < 2e-4
@pytest.mark.parametrize(
"vol",
[
FXDeltaVolSmile(
nodes={
0.25: 10.15,
0.50: 7.9,
0.75: 8.9,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="spot",
ad=2,
id="vol",
),
FXSabrSmile(
nodes={
"alpha": 0.079,
"beta": 1.0,
"rho": 0.00,
"nu": 0.50,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
ad=2,
id="vol",
),
],
)
def test_strangle_rate_ad2(self, fxfo, vol) -> None:
# test pricing a strangle with delta as string that is not a delta percent should fail?
fxo = FXStrangle(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike=["atm_spot", "atm_forward"],
premium_ccy="eur",
delta_type="forward",
)
fxfo._set_ad_order(2)
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
result = fxo.rate(curves=curves, fx=fxfo, vol=vol)
# test fwd diff
m_ = {
0: [0.001, 0.0, 0.0],
1: [0.0, 0.001, 0.0],
2: [0.0, 0.0, 0.001],
}
n_ = {
0: [-0.001, 0.0, 0.0],
1: [0.0, -0.001, 0.0],
2: [0.0, 0.0, -0.001],
}
v = vol._get_node_vector()
for i in range(3):
vol._set_node_vector(v + np.array(m_[i]), ad=2)
result_plus = fxo.rate(curves=curves, fx=fxfo, vol=vol)
vol._set_node_vector(v + np.array(n_[i]), ad=2)
result_min = fxo.rate(curves=curves, fx=fxfo, vol=vol)
fwd_diff = (result_plus + result_min - 2 * result) * 1000000.0
assert abs(fwd_diff - gradient(result, [f"vol{i}"], order=2)[0][0]) < 1e-4
def test_strangle_rate_2vols(self, fxfo) -> None:
# test pricing a straddle with vol [8.0, 10.0] returns a valid value close to 9.0
fxo = FXStrangle(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=20e6,
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike=["-25d", "25d"],
premium_ccy="usd",
delta_type="forward",
)
vol = [8.0, 10.0]
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
result = fxo.rate(curves=curves, fx=fxfo, vol=vol)
premium = fxo.rate(curves=curves, fx=fxfo, vol=result, metric="pips_or_%")
premium_vol = fxo.instruments[0]._option.rate(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol[0],
)
premium_vol += fxo.instruments[1]._option.rate(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol[1],
)
assert abs(premium - premium_vol) < 5e-2
@pytest.mark.parametrize(
("notn", "expected_grks", "expected_ccy"),
[
(1e6, [-0.026421, 10.217368, 0.294605], [-26421.408, 109071.429, 2946.046]),
(2e6, [-0.026421, 10.217368, 0.294605], [-52842.816, 218142.858, 5892.092]),
(-2e6, [-0.026421, 10.217368, 0.294605], [-52842.816, 218142.858, 5892.092]),
],
)
@pytest.mark.parametrize("strikes", [("-20d", "20d"), (1.0238746345527665, 1.1159199351325004)])
def test_greeks_delta_direction(self, fxfo, notn, expected_grks, expected_ccy, strikes) -> None:
# test the delta and delta_eur are not impacted by a Buy or Sell. Delta is expressed
# relative to a Buy.
fxo = FXStrangle(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery_lag=dt(2023, 6, 20),
payment_lag=dt(2023, 6, 20),
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
delta_type="forward",
premium_ccy="usd",
strike=strikes,
notional=notn,
)
fxvs = FXDeltaVolSmile(
{0.25: 10.15, 0.5: 7.8, 0.75: 8.9},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
)
result = fxo.analytic_greeks(
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
vol=fxvs,
fx=fxfo,
)
assert abs(result["delta"] - expected_grks[0]) < 1e-6
assert abs(result["gamma"] - expected_grks[1]) < 1e-6
assert abs(result["vega"] - expected_grks[2]) < 1e-6
assert abs(result["delta_eur"] - expected_ccy[0]) < 1e-1
assert abs(result["gamma_eur_1%"] - expected_ccy[1]) < 1e-1
assert abs(result["vega_usd"] - expected_ccy[2]) < 1e-1
def test_strang_strike_premium_validation(self) -> None:
# with pytest.raises(ValueError, match="`strike` for FXStrangle must be set"):
# FXStrangle(
# pair="eurusd",
# expiry=dt(2023, 6, 16),
# strike=["25d", NoInput(0)],
# )
with pytest.raises(
ValueError, match="FXOption with string delta as `strike` cannot be initialised"
):
FXStrangle(
pair="eurusd",
expiry=dt(2023, 6, 16),
strike=["25d", "35d"],
premium=[NoInput(0), 1.0],
)
def test_repr(self):
fxo = FXStrangle(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery_lag=dt(2023, 6, 20),
payment_lag=dt(2023, 6, 20),
delta_type="forward",
premium_ccy="usd",
strike=[1.0, 1.1],
)
expected = f""
assert expected == fxo.__repr__()
@pytest.mark.parametrize("ccy", ["usd", "eur"])
def test_populate_curves_on_init(self, ccy):
fxo = FXStrangle(
pair="eurusd",
expiry=dt(2023, 6, 16),
curves=["A", "B"],
premium_ccy=ccy,
strike=[1.10, 1.12],
)
if ccy == "usd":
assert fxo.kwargs.meta["curves"].leg2_disc_curve == "B"
else:
assert fxo.kwargs.meta["curves"].leg2_disc_curve == "A"
class TestFXBrokerFly:
@pytest.mark.parametrize(
("strike", "ccy"),
[
([[1.024, 1.116], 1.0683], "usd"),
([["-20d", "20d"], "atm_delta"], "usd"),
([[1.024, 1.116], 1.0683], "eur"),
([["-20d", "20d"], "atm_delta"], "eur"),
],
)
@pytest.mark.parametrize(
("vol", "expected"),
[
(
FXDeltaVolSmile(
nodes={
0.25: 10.15,
0.50: 7.9,
0.75: 8.9,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
),
2.225,
),
(
FXDeltaVolSmile(
nodes={
0.25: 10.15,
0.50: 7.9,
0.75: 8.9,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="spot_pa",
),
2.39,
),
(9.5, 0.0),
(
FXSabrSmile(
nodes={
"alpha": 0.071,
"beta": 1.0,
"rho": 0.00,
"nu": 2.5,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
),
2.065,
),
],
)
def test_fxbf_rate(self, fxfo, strike, ccy, vol, expected) -> None:
# test pricing a straddle with vol 10.0 returns 10.0
fxo = FXBrokerFly(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=[20e6, NoInput(0)],
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike=strike,
premium_ccy=ccy,
delta_type="forward",
)
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
result = fxo.rate(curves=curves, fx=fxfo, vol=vol)
assert abs(result - expected) < 3e-2
@pytest.mark.parametrize(
("strike", "ccy"),
[
([[1.024, 1.116], 1.0683], "usd"),
([["-20d", "20d"], "atm_delta"], "usd"),
([[1.0228, 1.1147], 1.0683], "eur"),
([["-20d", "20d"], "atm_delta"], "eur"),
],
)
@pytest.mark.parametrize("smile", [True])
def test_fxbf_rate_pips(self, fxfo, strike, ccy, smile) -> None:
fxo = FXBrokerFly(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=[20e6, NoInput(0)],
delivery_lag=2,
payment_lag=2,
calendar="tgt",
strike=strike,
premium_ccy=ccy,
delta_type="forward",
metric="pips_or_%",
)
fxvs = FXDeltaVolSmile(
nodes={
0.25: 10.15,
0.50: 7.8,
0.75: 8.9,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="spot",
)
vol = fxvs if smile else 9.5
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
result = fxo.rate(curves=curves, fx=fxfo, vol=vol)
expected = (-111.2, 0.1) if ccy == "usd" else (-1.041, 0.02)
assert abs(result - expected[0]) < expected[1]
@pytest.mark.parametrize(
("strike", "ccy"),
[
([[1.024, 1.116], 1.0683], "usd"),
([["-20d", "20d"], "atm_delta"], "usd"),
([[1.024, 1.116], 1.06668], "eur"),
([["-20d", "20d"], "atm_delta"], "eur"),
],
)
@pytest.mark.parametrize(
("vol", "expected"),
[
(
FXDeltaVolSmile(
nodes={
0.25: 10.15,
0.50: 7.8,
0.75: 8.9,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
),
(-221743, -210350),
),
(
FXSabrSmile(
nodes={
"alpha": 0.071,
"beta": 1.0,
"rho": 0.00,
"nu": 2.5,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
),
(-240740, -225500),
),
],
)
def test_fxbf_rate_premium(self, fxfo, strike, ccy, vol, expected) -> None:
fxo = FXBrokerFly(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=[20e6, NoInput(0)],
delivery_lag=dt(2023, 6, 20),
payment_lag=dt(2023, 6, 20),
strike=strike,
premium_ccy=ccy,
delta_type="forward",
metric="premium",
)
curves = [fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")]
result = fxo.rate(curves=curves, fx=fxfo, vol=vol)
tolerance = 300 if ccy == "usd" else 800
expected = expected[0] if ccy == "usd" else expected[1]
assert abs(result - expected) < tolerance
def test_bf_rate_vols_list(self, fxfo) -> None:
fxbf = FXBrokerFly(
pair="eurusd",
expiry=dt(2023, 6, 16),
notional=[20e6, -13.5e6],
strike=(("-20d", "20d"), "atm_delta"),
payment_lag=2,
delivery_lag=2,
calendar="tgt",
premium_ccy="usd",
delta_type="spot",
)
result = fxbf.rate(
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
fx=fxfo,
vol=[[10.15, 8.9], 1.0],
)
expected = 8.539499
assert abs(result - expected) < 1e-6
result = fxbf.rate(
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
fx=fxfo,
vol=[[10.15, 8.9], 7.8],
metric="pips_or_%",
)
expected = -110.098920
assert abs(result - expected) < 1e-6
@pytest.mark.parametrize(
("notn", "expected_grks", "expected_ccy"),
[
([1e6, NoInput(0)], [-0.026421, -3.099693, 0.000000], [-26421.408, -33089.534, 0.000]),
([2e6, NoInput(0)], [-0.026421, -3.099693, 0.000000], [-52842.816, -66179.068, 0.000]),
([-2e6, NoInput(0)], [-0.026421, -3.099693, 0.000000], [-52842.816, -66179.068, 0.000]),
([1e6, -600e3], [-0.026421, -1.234524, 0.041262], [-26421.408, -13178.672, 412.619]),
],
)
@pytest.mark.parametrize(
"strikes",
[
(("-20d", "20d"), "atm_delta"),
((1.0238746345527665, 1.1159199351325004), 1.0683288279019205),
],
)
def test_greeks_delta_direction(self, fxfo, notn, expected_grks, expected_ccy, strikes) -> None:
# test the delta and delta_eur are not impacted by a Buy or Sell. Delta is expressed
# relative to a Buy.
fxo = FXBrokerFly(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery_lag=dt(2023, 6, 20),
payment_lag=dt(2023, 6, 20),
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
delta_type="forward",
premium_ccy="usd",
strike=strikes,
notional=notn,
)
fxvs = FXDeltaVolSmile(
{0.25: 10.15, 0.5: 7.8, 0.75: 8.9},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
)
result = fxo.analytic_greeks(
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
vol=fxvs,
fx=fxfo,
)
assert abs(result["delta"] - expected_grks[0]) < 1e-6
assert abs(result["gamma"] - expected_grks[1]) < 1e-4
assert abs(result["vega"] - expected_grks[2]) < 1e-5
assert abs(result["delta_eur"] - expected_ccy[0]) < 1e-1
assert abs(result["gamma_eur_1%"] - expected_ccy[1]) < 1.5
assert abs(result["vega_usd"] - expected_ccy[2]) < 1e-1
def test_single_vol_definition(self, fxfo) -> None:
# test the metric of the rate can be input as "single_vol" and a result returned.
fxvs = FXDeltaVolSmile(
nodes={
0.25: 10.15,
0.50: 7.9,
0.75: 8.9,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
)
fxo = FXBrokerFly(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery_lag=dt(2023, 6, 20),
payment_lag=dt(2023, 6, 20),
curves=[fxfo.curve("eur", "usd"), fxfo.curve("usd", "usd")],
delta_type="forward",
premium_ccy="usd",
strike=[["-20d", "20d"], "atm_delta"],
vol=fxvs,
)
result = fxo.rate(metric="single_vol", fx=fxfo)
expected = 10.147423 - 7.90
assert (result - expected) < 1e-6
def test_repr(self):
fxo = FXBrokerFly(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery_lag=dt(2023, 6, 20),
payment_lag=dt(2023, 6, 20),
delta_type="forward",
premium_ccy="usd",
strike=[["-20d", "20d"], "atm_delta"],
)
expected = f""
assert expected == fxo.__repr__()
@pytest.mark.parametrize("ccy", ["usd", "eur"])
def test_populate_curves_on_init(self, ccy):
fxo = FXBrokerFly(
pair="eurusd",
expiry=dt(2023, 6, 16),
curves=["A", "B"],
premium_ccy=ccy,
strike=[[1.10, 1.12], 1.11],
)
if ccy == "usd":
assert fxo.kwargs.meta["curves"].leg2_disc_curve == "B"
assert fxo.instruments[0].kwargs.meta["curves"].leg2_disc_curve == "B"
assert fxo.instruments[1].kwargs.meta["curves"].leg2_disc_curve == "B"
assert fxo.instruments[0].instruments[0].kwargs.meta["curves"].leg2_disc_curve == "B"
assert fxo.instruments[0].instruments[1].kwargs.meta["curves"].leg2_disc_curve == "B"
assert fxo.instruments[1].instruments[0].kwargs.meta["curves"].leg2_disc_curve == "B"
assert fxo.instruments[1].instruments[1].kwargs.meta["curves"].leg2_disc_curve == "B"
else:
assert fxo.kwargs.meta["curves"].leg2_disc_curve == "A"
assert fxo.instruments[0].kwargs.meta["curves"].leg2_disc_curve == "A"
assert fxo.instruments[1].kwargs.meta["curves"].leg2_disc_curve == "A"
assert fxo.instruments[0].instruments[0].kwargs.meta["curves"].leg2_disc_curve == "A"
assert fxo.instruments[0].instruments[1].kwargs.meta["curves"].leg2_disc_curve == "A"
assert fxo.instruments[1].instruments[0].kwargs.meta["curves"].leg2_disc_curve == "A"
assert fxo.instruments[1].instruments[1].kwargs.meta["curves"].leg2_disc_curve == "A"
def test_populate_all_vols_on_init(self):
# test also validates FXStraddle and FXStrangle
fxo = FXBrokerFly(
pair="eurusd",
expiry=dt(2023, 6, 16),
vol=[["a", "b"], ["c", "d"]],
strike=[[1.10, 1.12], 1.11],
)
assert fxo.instruments[0].instruments[0].kwargs.meta["vol"] == _Vol(fx_vol="a")
assert fxo.instruments[0].instruments[1].kwargs.meta["vol"] == _Vol(fx_vol="b")
assert fxo.instruments[1].instruments[0].kwargs.meta["vol"] == _Vol(fx_vol="c")
assert fxo.instruments[1].instruments[1].kwargs.meta["vol"] == _Vol(fx_vol="d")
assert fxo.instruments[0].kwargs.meta["vol"] == (_Vol(fx_vol="a"), _Vol(fx_vol="b"))
assert fxo.instruments[1].kwargs.meta["vol"] == (_Vol(fx_vol="c"), _Vol(fx_vol="d"))
assert fxo.kwargs.meta["vol"] == (
(_Vol(fx_vol="a"), _Vol(fx_vol="b")),
(_Vol(fx_vol="c"), _Vol(fx_vol="d")),
)
def test_populate_single_vol_on_init(self):
# test also validates FXStraddle and FXStrangle
fxo = FXBrokerFly(
pair="eurusd",
expiry=dt(2023, 6, 16),
vol="myvol",
strike=[[1.10, 1.12], 1.11],
)
_ = _Vol(fx_vol="myvol")
assert fxo.kwargs.meta["vol"] == ((_, _), (_, _))
assert fxo.instruments[0].kwargs.meta["vol"] == (_, _)
assert fxo.instruments[1].kwargs.meta["vol"] == (_, _)
assert fxo.instruments[0].instruments[0].kwargs.meta["vol"] == _
assert fxo.instruments[0].instruments[1].kwargs.meta["vol"] == _
assert fxo.instruments[1].instruments[0].kwargs.meta["vol"] == _
assert fxo.instruments[1].instruments[1].kwargs.meta["vol"] == _
@pytest.mark.parametrize(
"inst",
[
FXCall(
spec="eurusd_call",
expiry=dt(2023, 6, 16),
strike=1.10,
vol="smile",
curves=["eurusd", "usdusd"],
),
FXStraddle(
spec="eurusd_call",
expiry=dt(2023, 6, 16),
strike=1.10,
vol="smile",
curves=["eurusd", "usdusd"],
),
FXStrangle(
spec="eurusd_call",
expiry=dt(2023, 6, 16),
strike=[1.10, 1.11],
vol="smile",
curves=["eurusd", "usdusd"],
),
FXRiskReversal(
spec="eurusd_call",
expiry=dt(2023, 6, 16),
strike=[1.10, 1.11],
vol="smile",
curves=["eurusd", "usdusd"],
),
FXBrokerFly(
spec="eurusd_call",
expiry=dt(2023, 6, 16),
strike=[[1.10, 1.13], 1.11],
vol="smile",
curves=["eurusd", "usdusd"],
),
],
)
def test_str_vol_price_from_solver(self, inst, fxfo):
smile = FXDeltaVolSmile(
nodes={0.5: 10.0},
expiry=dt(2023, 6, 16),
eval_date=dt(2023, 3, 16),
delta_type="forward",
id="smile",
)
solver = Solver(
curves=[
smile,
fxfo.curve("eur", "eur"),
fxfo.curve("eur", "usd"),
fxfo.curve("usd", "usd"),
],
instruments=[
FXVolValue(index_value=0.5, vol="smile"),
IRS(dt(2023, 3, 20), "1b", spec="eur_irs", curves="eureur"),
IRS(dt(2023, 3, 20), "1b", spec="eur_irs", curves="eurusd"),
IRS(dt(2023, 3, 20), "1b", spec="usd_irs", curves="usdusd"),
],
s=[9.5, 2.4, 2.5, 4.5],
fx=fxfo,
)
result = inst.rate(solver=solver)
assert isinstance(result, Dual)
@pytest.mark.parametrize(
("inst", "strike", "exp"),
[
(FXRiskReversal, [1.10, 1.12], 0.0),
(FXStraddle, "atm_delta", 11.0),
(FXStrangle, ["-20d", "20d"], 11.0),
(FXBrokerFly, [["-25d", "25d"], "atm_delta"], 0.0),
],
)
@pytest.mark.parametrize(
("vol_meta"), [NoInput(0), 11.0, "smile", [11.0, 11.0], ["smile", "smile"]]
)
@pytest.mark.parametrize(("vol"), [NoInput(0), 11.0, "smile", [11.0, 11.0], ["smile", "smile"]])
def test_vol_input_combinations(self, fxfo, inst, strike, exp, vol_meta, vol):
if isinstance(vol_meta, NoInput) and isinstance(vol, NoInput):
pytest.skip("Invalid parameter combinations")
obj = inst(
spec="eurusd_call",
expiry=dt(2023, 6, 16),
strike=strike,
vol=vol_meta,
curves=["eurusd", "usdusd"],
)
smile = FXDeltaVolSmile(
nodes={0.5: 10.0},
expiry=dt(2023, 6, 16),
eval_date=dt(2023, 3, 16),
delta_type="forward",
id="smile",
)
solver = Solver(
curves=[
smile,
fxfo.curve("eur", "eur"),
fxfo.curve("eur", "usd"),
fxfo.curve("usd", "usd"),
],
instruments=[
FXVolValue(index_value=0.5, vol="smile"),
IRS(dt(2023, 3, 20), "1b", spec="eur_irs", curves="eureur"),
IRS(dt(2023, 3, 20), "1b", spec="eur_irs", curves="eurusd"),
IRS(dt(2023, 3, 20), "1b", spec="usd_irs", curves="usdusd"),
],
s=[11.0, 2.4, 2.5, 4.5],
fx=fxfo,
)
result = obj.rate(solver=solver, vol=vol, metric="single_vol")
assert abs(result - exp) < 1e-6
class TestFXVolValue:
def test_solver_passthrough(self) -> None:
smile = FXDeltaVolSmile(
nodes={0.25: 10.0, 0.5: 10.0, 0.75: 10.0},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
id="VolSmile",
)
instruments = [
FXVolValue(0.25, vol=smile),
FXVolValue(0.5, vol="VolSmile"),
FXVolValue(0.75, vol="VolSmile"),
]
Solver(curves=[smile], instruments=instruments, s=[8.9, 8.2, 9.1])
assert abs(smile[0.25] - 8.9) < 5e-7
assert abs(smile[0.5] - 8.2) < 5e-7
assert abs(smile[0.75] - 9.1) < 5e-7
def test_solver_passthrough_sabr(self) -> None:
smile = FXSabrSmile(
nodes={"alpha": 0.01, "beta": 1.0, "rho": 0.01, "nu": 0.01},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delivery_lag=2,
calendar="tgt|fed",
pair="eurusd",
id="VolSmile",
)
fxf = FXForwards(
fx_curves={
"eureur": Curve({dt(2023, 3, 16): 1.0, dt(2025, 6, 9): 0.95}),
"eurusd": Curve({dt(2023, 3, 16): 1.0, dt(2025, 6, 9): 0.95}),
"usdusd": Curve({dt(2023, 3, 16): 1.0, dt(2025, 6, 9): 0.93}),
},
fx_rates=FXRates({"eurusd": 1.10}, settlement=dt(2023, 3, 18)),
)
instruments = [
FXVolValue(1.0, vol=smile),
FXVolValue(1.10, vol="VolSmile"),
FXVolValue(1.20, vol="VolSmile"),
]
Solver(curves=[smile], instruments=instruments, s=[8.9, 8.2, 9.1], fx=fxf)
assert abs(smile.get_from_strike(1.0, fxf.rate("eurusd", dt(2023, 6, 20)))[1] - 8.9) < 5e-7
assert abs(smile.get_from_strike(1.10, fxf.rate("eurusd", dt(2023, 6, 20)))[1] - 8.2) < 5e-7
assert abs(smile.get_from_strike(1.20, fxf.rate("eurusd", dt(2023, 6, 20)))[1] - 9.1) < 5e-7
def test_solver_surface_passthrough(self) -> None:
surface = FXDeltaVolSurface(
delta_indexes=[0.5],
expiries=[dt(2000, 1, 1), dt(2001, 1, 1)],
node_values=[[1.0], [1.0]],
eval_date=dt(1999, 12, 1),
delta_type="forward",
id="VolSurf",
)
instruments = [
FXVolValue(0.25, dt(2000, 1, 1), vol=surface),
FXVolValue(0.5, dt(2001, 1, 1), vol="VolSurf"),
]
Solver(surfaces=[surface], instruments=instruments, s=[8.9, 8.2], func_tol=1e-14)
assert abs(surface._get_index(0.5, dt(2000, 1, 1)) - 8.9) < 5e-7
assert abs(surface._get_index(0.5, dt(2001, 1, 1)) - 8.2) < 5e-7
def test_no_solver_vol_value(self) -> None:
vv = FXVolValue(0.25, vol="string_id")
with pytest.raises(ValueError, match="`fx_vol` must contain FXVol object, not str, if"):
vv.rate()
def test_repr(self):
v = FXVolValue(0.25)
expected = f""
assert v.__repr__() == expected
def test_sabr_surface(self):
fxss = FXSabrSurface(
expiries=[dt(2000, 6, 1), dt(2000, 9, 1)],
node_values=[[0.1, 1.0, 0.01, 0.01], [0.11, 1.0, 0.01, 0.01]],
eval_date=dt(2000, 1, 1),
pair="eurusd",
)
fxvv = FXVolValue(index_value=1.1, expiry=dt(2000, 8, 1))
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.15}, settlement=dt(2000, 1, 4)),
fx_curves={
"eureur": Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.95}),
"eurusd": Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.951}),
"usdusd": Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.94}),
},
)
result = fxvv.rate(vol=fxss, fx=fxf)
assert abs(result - 10.767884) < 1e-5
@pytest.mark.parametrize(
"inst",
[
IRS(dt(2000, 1, 1), "1y", spec="usd_irs", curves="sofr"),
SBS(dt(2000, 1, 1), "1y", spec="eur_sbs36", curves=["eur", "eur", "eur", "eur"]),
STIRFuture(dt(2020, 1, 1), "1m", spec="usd_stir1", curves=["sofr"]),
XCS(dt(2000, 1, 1), "1y", spec="eurusd_xcs", curves=["a", "b", "c", "d"]),
CDS(dt(2000, 3, 20), "2y", spec="us_ig_cds", curves=["a", "b"]),
ZCS(dt(2000, 1, 1), "5y", spec="gbp_zcs", curves=["sonia"]),
ZCIS(dt(2000, 1, 1), "2y", spec="gbp_zcis", curves=["index", "sonia"]),
IIRS(dt(2000, 1, 1), "1y", spec="usd_irs", curves=["index", "sonia", "rate", "sonia"]),
FRA(dt(2000, 1, 1), "3m", spec="eur_fra3", curves=["eur"]),
NDF(dt(2000, 1, 1), pair="eurusd", curves=["usd"]),
FixedRateBond(
dt(2000, 1, 1), "2y", spec="uk_gb", curves=["uk"], fixed_rate=1.2, metric="ytm"
),
],
)
def test_unpriced_cashflows_string_id(inst):
result = inst.cashflows()
assert isinstance(result, DataFrame)
@pytest.mark.parametrize(
("inst", "curves"),
[
(IRS(dt(2022, 2, 1), "1m", spec="usd_irs", fixed_rate=2.0), ["c"]),
(
SBS(dt(2022, 2, 1), "2m", frequency="2M", leg2_frequency="1M", float_spread=2.0),
["c", "c", "c2", "c"],
),
(STIRFuture(dt(2022, 2, 1), "1m", spec="usd_stir1", price=99.0), ["c"]),
(CDS(dt(2022, 2, 1), "1m", frequency="1M", fixed_rate=1.0), ["c2", "c"]),
(ZCS(dt(2022, 1, 1), "2m", frequency="3M", fixed_rate=2.0), ["c"]),
(
ZCIS(
dt(2022, 1, 1),
"2M",
frequency="3M",
fixed_rate=2.0,
leg2_index_base=99.0,
leg2_index_lag=0,
),
["c2", "c"],
),
(
IIRS(
dt(2022, 1, 1), "2M", spec="usd_irs", fixed_rate=2.0, index_base=99.0, index_lag=0
),
["c2", "c", "c", "c"],
),
(FRA(dt(2022, 2, 1), "1m", fixed_rate=1.0, frequency="1M"), ["c"]),
],
)
def test_forward_npv_argument(curve, curve2, inst, curves):
c_ = {"c": curve, "c2": curve2}
npv = inst.npv(curves=[c_[v] for v in curves])
forward_npv = inst.npv(
curves=[c_[v] for v in curves],
forward=dt(2022, 3, 15),
)
assert abs(forward_npv - npv / curve[dt(2022, 3, 15)]) < 1e-10
@pytest.mark.parametrize(
("inst", "curves"),
[
(
XCS(
dt(2022, 1, 1),
"2m",
mtm=True,
fixed=True,
leg2_fixed=True,
fixed_rate=2.0,
leg2_fixed_rate=3.0,
frequency="1M",
fx_fixings=2.0,
currency="eur",
pair="eurusd",
leg2_notional=10e6,
),
["c", "c", "c2", "c2"],
),
(
XCS(
dt(2022, 1, 1),
"2m",
mtm=True,
fixed=True,
fixed_rate=2.0,
frequency="1M",
fx_fixings=2.0,
currency="eur",
pair="eurusd",
leg2_notional=10e6,
),
["c", "c", "c2", "c2"],
),
(
XCS(
dt(2022, 1, 1),
"2m",
mtm=True,
leg2_fixed=True,
leg2_fixed_rate=2.0,
float_spread=0.0,
frequency="1M",
fx_fixings=2.0,
currency="eur",
pair="eurusd",
leg2_notional=10e6,
),
["c", "c", "c2", "c2"],
),
(
XCS(
dt(2022, 1, 1),
"2m",
mtm=True,
float_spread=0.0,
leg2_float_spread=0.0,
frequency="1M",
fx_fixings=2.0,
currency="eur",
pair="eurusd",
leg2_notional=10e6,
),
["c", "c", "c2", "c2"],
),
(
XCS(
dt(2022, 1, 1),
"2m",
fixed=True,
leg2_fixed=True,
fixed_rate=2.0,
leg2_fixed_rate=2.5,
frequency="1M",
leg2_fx_fixings=2.0,
currency="usd",
pair="eurusd",
),
["c2", "c2", "c", "c"],
),
(
XCS(
dt(2022, 1, 1),
"2m",
fixed=True,
fixed_rate=2.0,
frequency="1M",
leg2_fx_fixings=2.0,
currency="usd",
pair="usdeur",
),
["c2", "c2", "c", "c"],
),
(
XCS(
dt(2022, 1, 1),
"2m",
leg2_fixed=True,
leg2_fixed_rate=2.0,
float_spread=0.0,
frequency="1M",
leg2_fx_fixings=2.0,
currency="usd",
pair="usdeur",
),
["c2", "c2", "c", "c"],
),
(
XCS(
dt(2022, 1, 1),
"2m",
float_spread=0.0,
leg2_float_spread=0.0,
frequency="1M",
leg2_fx_fixings=2.0,
currency="usd",
pair="usdeur",
),
["c2", "c2", "c", "c"],
),
(NDF(dt(2022, 2, 15), pair="eurusd", fx_rate=1.15), ["c"]),
(
FXSwap(dt(2022, 2, 15), "1m", pair="eurusd", fx_rate=1.15, points=56.5),
["c", "c2"],
),
(FXForward(dt(2022, 2, 16), "eurusd", fx_rate=1.15), ["c", "c2"]),
],
)
def test_forward_npv_argument_with_fx(curve, curve2, inst, curves):
fxr = FXRates({"eurusd": 1.12}, settlement=dt(2022, 1, 3))
fxf = FXForwards(fx_rates=fxr, fx_curves={"eureur": curve, "eurusd": curve, "usdusd": curve2})
c_ = {"c": curve, "c2": curve2}
npv = inst.npv(curves=[c_[v] for v in curves], fx=fxf, base="eur")
forward_npv = inst.npv(
curves=[c_[v] for v in curves], forward=dt(2022, 3, 15), fx=fxf, base="eur"
)
result = npv / curve[dt(2022, 3, 15)] - forward_npv
assert abs(result) < 1e-7
class TestFixings:
def test_local_fixings_rate_and_fx(self):
fixings.add("wmr_eurusd", Series(index=[dt(1999, 1, 1)], data=[100.0]))
fixings.add("rpi", Series(index=[dt(1999, 1, 1)], data=[100.0]))
fixings.add("ibor_1M", Series(index=[dt(1999, 1, 1)], data=[100.0]))
curve = Curve({dt(2000, 1, 1): 1.0, dt(2000, 2, 15): 0.999, dt(2005, 1, 1): 0.9})
curve2 = Curve({dt(2000, 1, 1): 1.0, dt(2005, 1, 1): 0.95})
fxf = FXForwards(
fx_curves={"eurusd": curve2, "usdusd": curve, "eureur": curve2},
fx_rates=FXRates({"eurusd": 1.10}, settlement=dt(2000, 1, 1)),
)
irs = IRS(
dt(2000, 1, 1),
dt(2000, 4, 1),
"M",
currency="usd",
pair="eurusd",
fx_fixings="wmr",
leg2_fixing_method="ibor(0)",
leg2_rate_fixings="ibor",
payment_lag=0,
curves=[curve],
fixed_rate=2.07,
)
# cf = irs.cashflows(fx=fxf)
cft = irs.cashflows_table(fx=fxf)
result = irs.local_fixings(
identifiers=[
(
"wmr_eurusd",
Series(
index=[dt(2000, 1, 28), dt(2000, 2, 28), dt(2000, 3, 30)],
data=[1.0998008124280523, 1.1002139078693074, 1.101254251708383],
),
),
(
"ibor_1m",
Series(
index=[dt(2000, 1, 1), dt(2000, 2, 1), dt(2000, 3, 1)],
data=[0.8006761616124619, 1.4777702977501797, 2.110198054725164],
),
),
],
scalars=(1.0, 0.01),
fx=fxf,
)
expected_rate_fixings = irs.local_analytic_rate_fixings(fx=fxf)
for i in range(3):
assert (
abs(result[("usd", "ibor_1m")].iloc[i * 2] - expected_rate_fixings.iloc[i, 0])
< 1e-8
)
expected_fx_fixings = [
cft.iloc[0, 0] / 1.0998008124280523 * curve[dt(2000, 2, 1)],
cft.iloc[1, 0] / 1.1002139078693074 * curve[dt(2000, 3, 1)],
cft.iloc[2, 0] / 1.101254251708383 * curve[dt(2000, 4, 1)],
]
for i in range(3):
assert (
abs(result[("usd", "wmr_eurusd")].iloc[i * 2 + 1] - expected_fx_fixings[i]) < 1e-6
)
def test_wmr_crosses_not_allowed_standard_instruments():
sek = Curve({dt(2000, 1, 1): 1.0, dt(2005, 1, 1): 0.8})
cad = Curve({dt(2000, 1, 1): 1.0, dt(2005, 1, 1): 0.85})
fxf = FXForwards(
fx_rates=FXRates({"cadsek": 8.0}, settlement=dt(2000, 1, 3)),
fx_curves={"cadcad": cad, "sekcad": sek, "seksek": sek},
)
fxvs = FXDeltaVolSmile(
eval_date=dt(2000, 1, 1),
expiry=dt(2000, 6, 2),
nodes={0.5: 10.0},
delta_type="forward",
)
instruments = [
FXForward(dt(2000, 6, 1), FXIndex("cadsek", "tro,stk", 2), curves=[cad, sek]),
FXForward(dt(2000, 6, 1), FXIndex("cadsek", "tro,stk", 2), fx_rate=8.1, curves=[cad, sek]),
FXSwap(dt(2000, 6, 2), dt(2000, 7, 2), FXIndex("cadsek", "tro,stk", 2), curves=[cad, sek]),
FXSwap(
dt(2000, 6, 2),
dt(2000, 7, 2),
FXIndex("cadsek", "tro,stk", 2),
fx_rate=8.0,
points=100.0,
curves=[cad, sek],
),
FXCall(
expiry=dt(2000, 6, 2),
strike=8.0,
pair=FXIndex("cadsek", "tro,stk", 2),
curves=[cad, sek],
),
]
for inst in instruments:
inst.npv(vol=fxvs, fx=fxf)
class TestSwaptions:
def test_npv_no_set_premium(self):
curve = Curve(
nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.941024343401225}, calendar="tgt"
)
irsw = IRSCall(
expiry=dt(2027, 2, 16),
tenor="6m",
strike=3.020383,
irs_series="usd_irs",
)
result = irsw.npv(curves=curve, vol=25.16)
expected = 0.0
assert abs(result - expected) < 1e-6
def test_npv_with_set_premium(self):
curve = Curve(
nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.941024343401225}, calendar="tgt"
)
irsw = IRSCall(
expiry=dt(2027, 2, 16),
tenor="6m",
strike=3.020383,
irs_series="usd_irs",
premium=10000.0,
)
result = irsw.npv(curves=curve, vol=25.16)
expected = -8246.831212232395
assert abs(result - expected) < 1e-6
def test_npv_local(self):
curve = Curve(
nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.941024343401225}, calendar="tgt"
)
irsw = IRSCall(
expiry=dt(2027, 2, 16),
tenor="6m",
strike=3.020383,
irs_series="usd_irs",
premium=10000.0,
)
result = irsw.npv(curves=curve, vol=25.16, local=True)
expected = -8246.831212232395
assert abs(result["usd"] - expected) < 1e-6
def test_default_payment_date(self):
irsw = IRSCall(
expiry=dt(2027, 2, 16),
tenor="6m",
strike=3.020383,
irs_series="usd_irs",
premium=10000.0,
)
assert irsw.leg2.periods[0].settlement_params.payment == dt(2027, 2, 18)
@pytest.mark.parametrize(
("metric", "expected"),
[
("BlackVolShift_0", 25.16),
("Premium", 149725.796514),
("NormalVol", 75.792872),
("Black_vol_shift_100", 18.880156),
("Black_vol_shift_200", 15.111396),
("Black_vol_shift_300", 12.597702),
("PercentNotional", 0.149725),
],
)
def test_rate(self, metric, expected):
# if we know that the exercise will occur (from the fixing_value) value the cashflow
curve = Curve(
nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.941024343401225}, calendar="nyc"
)
irsw = IRSCall(
expiry=dt(2027, 2, 16),
tenor="6m",
strike=3.020383,
notional=100e6,
irs_series="usd_irs",
premium=10000.0,
)
result = irsw.rate(
curves=[curve],
vol=25.16,
metric=metric,
)
assert abs(result - expected) < 1e-5
@pytest.mark.parametrize(
("metric", "expected"),
[("Premium", 149725.796514), ("PercentNotional", 0.149725)],
)
@pytest.mark.parametrize("date", [dt(2027, 1, 3), dt(2027, 3, 19)])
def test_rate_unconventional_payment_date(self, metric, expected, date):
# if we know that the exercise will occur (from the fixing_value) value the cashflow
curve = Curve(
nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.941024343401225}, calendar="nyc"
)
alt_curve = Curve(nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.91}, calendar="nyc")
irsw = IRSCall(
expiry=dt(2027, 2, 16),
tenor="6m",
strike=3.020383,
notional=100e6,
irs_series="usd_irs",
premium=10000.0,
payment_lag=date,
)
result = irsw.rate(
curves=[curve, alt_curve, curve],
vol=25.16,
metric=metric,
)
expected = expected * alt_curve[date] / alt_curve[dt(2027, 2, 18)]
assert abs(result - expected) < 1e-5
def test_cashflows(self):
# if we know that the exercise will occur (from the fixing_value) value the cashflow
curve = Curve(
nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.941024343401225}, calendar="nyc"
)
irsw = IRSCall(
expiry=dt(2027, 2, 16),
tenor="6m",
strike=3.020383,
notional=100e6,
irs_series="usd_irs",
premium=10000.0,
)
result = irsw.cashflows(
curves=[curve],
vol=25.16,
)
assert len(result.index) == 2
assert abs(result.loc["leg1", "DF"].iloc[0] - 0.969902553602701) < 1e-8
assert abs(result.loc["leg1", "Cashflow"].iloc[0] - 149725.7965143448) < 1e-8
assert abs(result.loc["leg1", "NPV"].iloc[0] - 145219.43237946142) < 1e-8
assert result.loc["leg1", "Ccy"].iloc[0] == "USD"
assert result.loc["leg1", "Type"].iloc[0] == "IRSCallPeriod"
@pytest.mark.parametrize(
("metric", "weights"),
[
("PercentNotional", [1.0, 1.0]),
("Premium", [1.0, 1.0]),
("NormalVol", [0.5, 0.5]),
("BlackVolShift_0", [0.5, 0.5]),
("BlackVolShift_100", [0.5, 0.5]),
("BlackVolShift_200", [0.5, 0.5]),
("BlackVolShift_300", [0.5, 0.5]),
],
)
def test_straddle_rate(self, metric, weights):
curve = Curve(
nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.941024343401225}, calendar="tgt"
)
irss = IRSabrSmile(
eval_date=dt(2026, 2, 16),
expiry=dt(2026, 8, 16),
tenor="6m",
nodes={
"alpha": 0.2,
"rho": -0.05,
"nu": 0.6,
},
beta=0.5,
irs_series="usd_irs",
)
irsc = IRSCall(
irs_series="usd_irs",
expiry=dt(2026, 8, 16),
tenor="6m",
strike=2.90,
metric=metric,
)
irsp = IRSPut(
irs_series="usd_irs",
expiry=dt(2026, 8, 16),
tenor="6m",
strike=2.90,
metric=metric,
)
irstr = IRSStraddle(
irs_series="usd_irs",
expiry=dt(2026, 8, 16),
tenor="6m",
strike=2.90,
metric=metric,
)
r1 = irsc.rate(vol=irss, curves=curve)
r2 = irsp.rate(vol=irss, curves=curve)
r3 = irstr.rate(vol=irss, curves=curve)
assert abs(r3 - r1 * weights[0] - r2 * weights[1]) < 1e-5
def test_straddle_npv(self):
curve = Curve(
nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.941024343401225}, calendar="tgt"
)
irss = IRSabrSmile(
eval_date=dt(2026, 2, 16),
expiry=dt(2026, 8, 16),
tenor="6m",
nodes={
"alpha": 0.2,
"rho": -0.05,
"nu": 0.6,
},
beta=0.5,
irs_series="usd_irs",
)
irsc = IRSCall(
irs_series="usd_irs",
expiry=dt(2026, 8, 16),
tenor="6m",
strike=2.90,
)
irsp = IRSPut(
irs_series="usd_irs",
expiry=dt(2026, 8, 16),
tenor="6m",
strike=2.90,
)
irstr = IRSStraddle(
irs_series="usd_irs",
expiry=dt(2026, 8, 16),
tenor="6m",
strike=2.90,
)
r1 = irsc.npv(vol=irss, curves=curve)
r2 = irsp.npv(vol=irss, curves=curve)
r3 = irstr.npv(vol=irss, curves=curve)
assert abs(r3 - r1 - r2) < 1e-5
def test_delta_rate_scalar(self):
smile = IRSabrSmile(
eval_date=dt(2000, 1, 1),
expiry=dt(2000, 7, 1),
tenor="1y",
irs_series="usd_irs",
nodes={
"alpha": 0.20,
"rho": -0.05,
"nu": 0.65,
},
beta=0.5,
id="sofr_vol",
)
curve = Curve(nodes={dt(2000, 1, 1): 1.0, dt(2003, 1, 1): 0.90}, id="sofr")
option_args = dict(
expiry=dt(2000, 7, 1),
tenor="1y",
irs_series="usd_irs",
metric="NormalVol",
curves="sofr",
vol="sofr_vol",
)
solver = Solver(
curves=[curve, smile],
instruments=[
IRS(dt(2000, 1, 1), "1y", spec="usd_irs", curves="sofr"),
IRSCall(strike="-20bps", **option_args),
IRSCall(strike="atm", **option_args),
IRSCall(strike="+20bps", **option_args),
],
s=[3.0, 50.0, 45.0, 49.0],
instrument_labels=["1Y IRS", "-20bps Vol", "ATM Vol", "+20bps Vol"],
)
irc = IRSCall(strike=3.05, premium=0.0, **option_args)
delta = irc.delta(solver=solver)
before = irc.npv(solver=solver)
solver.s = [3.0, 50.0, 46.0, 49.0]
solver.iterate()
after = irc.npv(solver=solver)
finite_diff = after - before
assert abs(delta.iloc[2, 0] - finite_diff) < 1e-1
@pytest.mark.parametrize(("strike", "expected"), [(3.99, 5558.52), ("+0bps", -48193.65)])
def test_npv_from_normal_vol_object(self, strike, expected, curve):
smile = IRSplineSmile(
nodes={-100: 100.0, 0: 95.0, 100: 100.0},
eval_date=dt(2022, 1, 1),
expiry=dt(2023, 1, 3),
tenor="1y",
irs_series="usd_irs",
)
iro = IRSCall(
eval_date=dt(2022, 1, 1),
expiry="1y",
tenor="1y",
strike=strike,
irs_series="usd_irs",
curves=curve,
vol=smile,
notional=100e6,
premium=420000.0,
)
result = iro.npv()
assert abs(result - expected) < 1e-2
class TestIRVolValue:
@pytest.mark.parametrize(
"vol",
[
IRSabrSmile(
nodes={
"alpha": 0.17431060,
"rho": -0.11268306,
"nu": 0.81694072,
},
beta=0.75,
eval_date=dt(2001, 1, 1),
expiry="1y",
irs_series="eur_irs6",
tenor="1y",
id="VolSmile",
),
IRSabrCube(
eval_date=dt(2001, 1, 1),
expiries=["1y"],
irs_series="eur_irs6",
tenors=["1y"],
alpha=0.17,
beta=0.75,
rho=-0.11,
nu=0.817,
id="VolSmile",
),
],
)
def test_solver_passthrough(self, vol) -> None:
instruments = [
IRVolValue(
strike=1.0,
expiry="1y",
tenor="1y",
irs_series="eur_irs6",
eval_date=dt(2001, 1, 1),
vol=vol,
metric="alpha",
),
IRVolValue(
strike=1.0,
expiry="1y",
tenor="1y",
irs_series="eur_irs6",
eval_date=dt(2001, 1, 1),
vol="VolSmile",
metric="rho",
),
IRVolValue(
strike=1.0,
expiry="1y",
tenor="1y",
irs_series="eur_irs6",
eval_date=dt(2001, 1, 1),
vol="VolSmile",
metric="nu",
),
]
Solver(curves=[vol], instruments=instruments, s=[0.25, -0.04, 0.75])
for param, expected in zip(["alpha", "rho", "nu"], [0.25, -0.04, 0.75]):
if isinstance(vol, IRSabrCube):
result = getattr(
vol.get_smile(expiry=dt(2002, 1, 2), tenor=dt(2003, 1, 4)).nodes, param
)
else:
result = getattr(vol.nodes, param)
assert abs(result - expected) < 1e-6
v = IRVolValue(
strike=9.0,
expiry="1y",
tenor="1y",
irs_series="eur_irs6",
eval_date=dt(2001, 1, 1),
vol=vol,
metric="black_vol_shift_0",
)
result = v.rate(vol=vol, curves=Curve({dt(2001, 1, 1): 1.0, dt(2005, 1, 1): 0.7}))
expected = 15.170743310759043
assert abs(result - expected) < 1e-6
def test_no_solver_vol_value(self) -> None:
vv = IRVolValue(
strike=1.0,
irs_series="eur_irs6",
expiry="1y",
tenor="1y",
eval_date=dt(2000, 1, 1),
vol="string_id",
)
with pytest.raises(ValueError, match="`vol` must contain IRVol object, not str,"):
vv.rate()
def test_repr(self):
v = IRVolValue(
strike=0.25,
expiry="1y",
tenor="1y",
eval_date=dt(2000, 1, 1),
irs_series="usd_irs",
)
expected = f""
assert v.__repr__() == expected
@pytest.mark.parametrize(
"vol",
[
# IRSabrSmile(
# nodes={
# "alpha": 0.17431060,
# "rho": -0.11268306,
# "nu": 0.81694072,
# },
# beta=1.0,
# eval_date=dt(2001, 1, 1),
# expiry="1y",
# irs_series="eur_irs6",
# tenor="1y",
# id="vol",
# ),
IRSabrCube(
eval_date=dt(2001, 1, 1),
expiries=["1y"],
tenors=["1Y", "2y"],
irs_series="usd_irs",
beta=1.0,
alpha=np.array([[0.17431060, 0.2]]),
rho=np.array([[-0.11268306, 0.2]]),
nu=np.array([[0.81694072, 0.2]]),
),
],
)
@pytest.mark.parametrize("metric", ["alpha", "beta", "rho", "nu"])
def test_sabr_param(self, vol, metric):
v = IRVolValue(
strike=0.25,
expiry="1y",
tenor="1y",
eval_date=dt(2001, 1, 1),
irs_series="usd_irs",
metric=metric,
)
expected = {
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
}
assert v.rate(vol=vol) == expected[metric]
class TestFee:
# init
def test_date_and_attributes(self):
fee = Fee(dt(2022, 1, 1), 2e6, calendar="tgt", payment_lag=0, ex_div=2, currency="EUR")
assert fee.settlement_params.payment == dt(2022, 1, 3)
assert fee.settlement_params.notional == 2e6
assert fee.settlement_params.ex_dividend == dt(2021, 12, 30)
assert fee.settlement_params.currency == "eur"
# protocols
def test_npv(self, curve):
fee = Fee(dt(2022, 3, 1), 2e6)
result = fee.npv(curves=curve)
assert abs(result + 1986866.2068519176) < 1e-7
@pytest.mark.parametrize(("metric", "exp"), [("npv", -1986866.20), ("payment", -2e6)])
def test_rate(self, curve, metric, exp):
fee = Fee(dt(2022, 3, 1), 2e6)
result = fee.rate(curves=curve, metric=metric)
assert abs(result - exp) < 1e-2
def test_analytic_delta(self, curve):
fee = Fee(dt(2022, 3, 1), 2e6)
result = fee.analytic_delta(curves=curve)
assert abs(result - 0.0) < 1e-2
def test_cashflows(self, curve):
fee = Fee(dt(2022, 3, 1), 2e6)
result = fee.cashflows(curves=curve)
assert isinstance(result, DataFrame)
def test_fixings(self, curve):
fee = Fee(dt(2022, 3, 1), 2e6)
result = fee.local_analytic_rate_fixings(curves=curve)
assert isinstance(result, DataFrame)
def test_non_deliverable(self, curve):
name = str(hash(os.urandom(2)))
fixings.add(name + "_eurusd", Series(index=[dt(2022, 2, 25)], data=[1.50]))
fee = Fee(
effective=dt(2022, 3, 1), notional=2e6, currency="usd", pair="eurusd", fx_fixings=name
)
result = fee.npv(curves=curve)
fixings.pop(name + "_eurusd")
assert abs(result + curve[dt(2022, 3, 1)] * 2e6 * 1.5) < 1e-7
def test_indexation(self, curve):
name = str(hash(os.urandom(2)))
fixings.add(name, Series(index=[dt(2022, 2, 1), dt(2022, 3, 1)], data=[1.10, 1.50]))
fee = Fee(
effective=dt(2022, 3, 1),
notional=2e6,
currency="usd",
index_fixings=name,
index_lag=0,
index_base_date=dt(2022, 2, 1),
)
result = fee.npv(curves=curve)
fixings.pop(name)
assert abs(result + curve[dt(2022, 3, 1)] * 2e6 * 1.5 / 1.1) < 1e-7
class TestLoan:
# init
def test_date_and_attributes(self):
loan = Loan(
dt(2022, 1, 1),
"1y",
"Q",
notional=2e6,
calendar="tgt",
payment_lag=0,
ex_div=2,
currency="EUR",
)
assert loan.settlement_params.notional == 2e6
assert loan.settlement_params.ex_dividend == dt(2022, 3, 30)
assert loan.settlement_params.currency == "eur"
assert isinstance(loan.leg1.periods[0], Cashflow)
assert isinstance(loan.leg1.periods[-1], Cashflow)
# protocols
def test_npv(self, curve):
loan = Loan(
dt(2022, 1, 1),
"1y",
"Q",
notional=2e6,
calendar="tgt",
payment_lag=0,
ex_div=2,
currency="EUR",
fixed_rate=10.0,
)
result = loan.npv(curves=curve)
assert abs(result + 117558.44166647314) < 1e-7
@pytest.mark.parametrize(("metric", "exp"), [("npv", 0.0)])
def test_rate(self, curve, metric, exp):
loan = Loan(
dt(2022, 1, 1),
"1y",
"Q",
notional=2e6,
calendar="tgt",
payment_lag=0,
ex_div=2,
currency="EUR",
fixed=False,
)
result = loan.rate(curves=curve, metric=metric)
assert abs(result - exp) < 1e-2
def test_analytic_delta(self, curve):
loan = Loan(
dt(2022, 1, 1),
"1y",
"Q",
notional=10e6,
calendar="tgt",
payment_lag=0,
ex_div=2,
currency="EUR",
)
result = loan.analytic_delta(curves=curve)
assert abs(result - 985.608939) < 1e-2
def test_cashflows(self, curve):
loan = Loan(
dt(2022, 1, 1),
"1y",
"Q",
notional=10e6,
calendar="tgt",
payment_lag=0,
ex_div=2,
currency="EUR",
fixed_rate=10.0,
)
result = loan.cashflows(curves=curve)
assert isinstance(result, DataFrame)
def test_fixings(self, curve):
loan = Loan(
dt(2022, 1, 1),
"1y",
"Q",
notional=10e6,
calendar="tgt",
payment_lag=0,
ex_div=2,
currency="EUR",
fixed=False,
)
result = loan.local_analytic_rate_fixings(curves=curve)
assert isinstance(result, DataFrame)
def test_non_deliverable(self, curve):
name = str(hash(os.urandom(2)))
fixings.add(name + "_eurusd", Series(index=[dt(2021, 12, 30)], data=[1.50]))
loan = Loan(
dt(2022, 1, 1),
"3m",
"Q",
notional=1e6,
calendar="all",
payment_lag=0,
ex_div=2,
currency="usd",
pair="eurusd",
fx_fixings=name,
fixed_rate=0.0,
)
result = loan.npv(curves=curve)
fixings.pop(name + "_eurusd")
assert abs(result + curve[dt(2022, 4, 1)] * 1e6 * 1.5 - 1.5e6) < 1e-7
assert loan.settlement_params.currency == "usd"
assert loan.settlement_params.notional_currency == "eur"
def test_indexation(self, curve):
name = str(hash(os.urandom(2)))
fixings.add(name, Series(index=[dt(2022, 2, 1), dt(2022, 3, 1)], data=[1.10, 1.50]))
loan = Loan(
dt(2022, 2, 1),
"1m",
"Q",
notional=2e6,
calendar="tgt",
payment_lag=0,
ex_div=0,
currency="EUR",
index_lag=0,
index_method="monthly",
index_fixings=name,
fixed_rate=0.0,
)
result = loan.npv(curves=curve)
expected = 2e6 * (curve[dt(2022, 2, 1)] - curve[dt(2022, 3, 1)] * 1.5 / 1.1)
fixings.pop(name)
assert abs(result - expected) < 1e-7
@pytest.mark.skip(reason="metric not implemented")
@pytest.mark.parametrize(
("settlement", "exp"),
[(NoInput(0), 4.058910928323769), (dt(2022, 4, 5), 4.058910928323769)],
)
def test_metric_fixed_rate(self, settlement, exp, curve):
loan = Loan(
dt(2022, 1, 1),
"1y",
"Q",
notional=2e6,
calendar="tgt",
payment_lag=0,
ex_div=0,
currency="EUR",
)
result = loan.rate(curves=curve, metric="fixed_rate")
assert abs(result - exp) < 1e-7
@pytest.mark.skip(reason="metric not implemented")
@pytest.mark.parametrize(
("settlement", "exp"),
[
(NoInput(0), 4.058910928323769),
# (dt(2022, 4, 5), 4.058910928323769)
],
)
def test_metric_float_spread(self, settlement, exp, curve):
disc_curve = curve.shift(0.0)
loan = Loan(
dt(2022, 1, 3),
"1y",
"Q",
notional=2e6,
calendar="tgt",
convention="act360",
payment_lag=0,
ex_div=0,
currency="EUR",
fixed=False,
spread_compound_method="isda_compounding",
)
_pv = loan.npv(curves=curve)
result = loan.rate(curves=[curve, disc_curve], metric="float_spread")
assert abs(result - 0.0) < 1e-7
================================================
FILE: python/tests/legs/test_analytic_delta.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
import pytest
from rateslib.curves import Curve
from rateslib.legs import FixedLeg
from rateslib.scheduling import Schedule
@pytest.fixture
def curve():
nodes = {
dt(2022, 1, 1): 1.00,
dt(2022, 4, 1): 0.99,
dt(2022, 7, 1): 0.98,
dt(2022, 10, 1): 0.97,
}
return Curve(nodes=nodes, interpolation="log_linear")
def test_analytic_delta_protocol_local(curve):
leg = FixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 4, 1),
frequency="M",
),
fixed_rate=1.0,
)
result = leg.analytic_delta(disc_curve=curve, local=True)
expected = {"usd": 24.827510962072353}
assert result == expected
def test_forward_settlement(curve):
# tset that the analytic delta reacts to the settlement/ex-div constraint
leg = FixedLeg(
schedule=Schedule(
effective=dt(2021, 12, 2),
termination=dt(2022, 4, 2),
frequency="M",
payment_lag=0,
),
fixed_rate=1.0,
notional=1e9,
)
result = leg.analytic_delta(disc_curve=curve, local=False)
result2 = leg.analytic_delta(disc_curve=curve, local=False, settlement=dt(2022, 1, 3))
assert result2 < (result - 5000)
def test_forward(curve):
# tset that the analytic delta reacts to the forward argument
leg = FixedLeg(
schedule=Schedule(
effective=dt(2021, 12, 2),
termination=dt(2022, 4, 2),
frequency="M",
payment_lag=0,
),
fixed_rate=1.0,
notional=1e9,
)
result = leg.analytic_delta(disc_curve=curve, local=False)
result2 = leg.analytic_delta(disc_curve=curve, local=False, forward=dt(2022, 3, 15))
expected = result / curve[dt(2022, 3, 15)]
assert abs(result2 - expected) < 1e-6
================================================
FILE: python/tests/legs/test_init.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
import pytest
from rateslib.legs.fixed import FixedLeg
from rateslib.scheduling import Schedule
class TestFixedLeg:
def test_init(self):
FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2001, 1, 1),
frequency="3M",
payment_lag=2,
payment_lag_exchange=0,
extra_lag=-1,
),
notional=1000000.0,
amortization=1000.0,
currency="USD",
final_exchange=True,
initial_exchange=True,
)
================================================
FILE: python/tests/legs/test_leg_fixings.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
import pytest
from pandas import Series
from rateslib import fixings
from rateslib.curves import Curve
from rateslib.enums.generics import NoInput
from rateslib.enums.parameters import FloatFixingMethod
from rateslib.legs import FixedLeg, FloatLeg
from rateslib.scheduling import Schedule
class TestFixedLeg:
def test_populated_resets(self):
fixings.add(
name="index",
series=Series(
index=[dt(2000, 1, 1), dt(2000, 7, 1), dt(2001, 1, 1)], data=[1.0, 1.1, 1.2]
),
state=100,
)
fixings.add(
name="fx_eurusd", series=Series(index=[dt(1999, 12, 30)], data=[2.0]), state=100
)
fl = FixedLeg(
schedule=Schedule(dt(2000, 1, 1), "1y", "S"),
index_fixings="index",
index_lag=0,
index_method="monthly",
pair="eurusd",
fx_fixings="fx",
)
assert fl.periods[0].non_deliverable_params.fx_fixing.value == 2.0
assert fl.periods[1].non_deliverable_params.fx_fixing.value == 2.0
assert fl.periods[0].index_params.index_fixing.value == 1.1
assert fl.periods[0].index_params.index_base.value == 1.0
assert fl.periods[1].index_params.index_fixing.value == 1.2
assert fl.periods[1].index_params.index_base.value == 1.0
fixings.pop("index")
fixings.pop("fx_eurusd")
fl.reset_fixings(100)
assert fl.periods[0].non_deliverable_params.fx_fixing._value == NoInput(0)
assert fl.periods[1].non_deliverable_params.fx_fixing._value == NoInput(0)
assert fl.periods[0].index_params.index_fixing._value == NoInput(0)
assert fl.periods[0].index_params.index_base._value == NoInput(0)
assert fl.periods[1].index_params.index_fixing._value == NoInput(0)
assert fl.periods[1].index_params.index_base._value == NoInput(0)
def test_populated_at_init_no_reset(self):
fixings.add(
name="index",
series=Series(
index=[dt(2000, 1, 1), dt(2000, 7, 1), dt(2001, 1, 1)], data=[1.0, 1.1, 1.2]
),
state=100,
)
fixings.add(
name="fx_eurusd", series=Series(index=[dt(1999, 12, 30)], data=[2.0]), state=100
)
fl = FixedLeg(
schedule=Schedule(dt(2000, 1, 1), "1y", "S"),
index_fixings="index",
index_lag=0,
index_method="monthly",
pair="eurusd",
fx_fixings="fx",
)
assert fl.periods[0].non_deliverable_params.fx_fixing.value == 2.0
assert fl.periods[1].non_deliverable_params.fx_fixing.value == 2.0
assert fl.periods[0].index_params.index_fixing.value == 1.1
assert fl.periods[0].index_params.index_base.value == 1.0
assert fl.periods[1].index_params.index_fixing.value == 1.2
assert fl.periods[1].index_params.index_base.value == 1.0
fixings.pop("index")
fixings.pop("fx_eurusd")
fl.reset_fixings(666)
assert fl.periods[0].non_deliverable_params.fx_fixing.value == 2.0
assert fl.periods[1].non_deliverable_params.fx_fixing.value == 2.0
assert fl.periods[0].index_params.index_fixing.value == 1.1
assert fl.periods[0].index_params.index_base.value == 1.0
assert fl.periods[1].index_params.index_fixing.value == 1.2
assert fl.periods[1].index_params.index_base.value == 1.0
def test_populated_resets_notional_exchanges(self):
fixings.add(
name="index",
series=Series(
index=[dt(2000, 1, 1), dt(2000, 7, 1), dt(2001, 1, 1)], data=[1.0, 1.1, 1.2]
),
state=100,
)
fixings.add(
name="fx_eurusd", series=Series(index=[dt(1999, 12, 30)], data=[2.0]), state=100
)
fl = FixedLeg(
schedule=Schedule(dt(2000, 1, 1), "1y", "S"),
index_fixings="index",
index_lag=0,
index_method="monthly",
pair="eurusd",
fx_fixings="fx",
initial_exchange=True,
)
assert fl.periods[0].non_deliverable_params.fx_fixing.value == 2.0
assert fl.periods[-1].non_deliverable_params.fx_fixing.value == 2.0
assert fl.periods[0].index_params.index_fixing.value == 1.0
assert fl.periods[0].index_params.index_base.value == 1.0
assert fl.periods[-1].index_params.index_fixing.value == 1.2
assert fl.periods[-1].index_params.index_base.value == 1.0
fixings.pop("index")
fixings.pop("fx_eurusd")
fl.reset_fixings(100)
assert fl.periods[0].non_deliverable_params.fx_fixing._value == NoInput(0)
assert fl.periods[-1].non_deliverable_params.fx_fixing._value == NoInput(0)
assert fl.periods[0].index_params.index_fixing._value == NoInput(0)
assert fl.periods[0].index_params.index_base._value == NoInput(0)
assert fl.periods[-1].index_params.index_fixing._value == NoInput(0)
assert fl.periods[-1].index_params.index_base._value == NoInput(0)
class TestFloatLeg:
def test_populated_resets_ibor(self):
fixings.add(
name="index",
series=Series(
index=[dt(2000, 1, 1), dt(2000, 3, 1), dt(2000, 6, 1)], data=[1.0, 1.1, 1.2]
),
state=100,
)
fixings.add(
name="fx_eurusd", series=Series(index=[dt(1999, 12, 30)], data=[2.0]), state=100
)
fixings.add(
name="ibor_1M",
series=Series(index=[dt(2000, 1, 1), dt(2000, 3, 1)], data=[1.0, 2.0]),
state=100,
)
fixings.add(
name="ibor_3M",
series=Series(index=[dt(2000, 1, 1), dt(2000, 3, 1)], data=[1.1, 2.1]),
state=100,
)
fl = FloatLeg(
schedule=Schedule(dt(2000, 1, 1), "5m", "Q"),
index_fixings="index",
index_lag=0,
index_method="monthly",
pair="eurusd",
fx_fixings="fx",
fixing_method="ibor(0)",
rate_fixings="ibor",
)
assert fl.periods[0].rate_params.rate_fixing.value == 1.0483333333333333
assert fl.periods[1].rate_params.rate_fixing.value == 2.1
assert fl.periods[0].non_deliverable_params.fx_fixing.value == 2.0
assert fl.periods[1].non_deliverable_params.fx_fixing.value == 2.0
assert fl.periods[0].index_params.index_fixing.value == 1.1
assert fl.periods[0].index_params.index_base.value == 1.0
assert fl.periods[1].index_params.index_fixing.value == 1.2
assert fl.periods[1].index_params.index_base.value == 1.0
assert fl.periods[1].index_params.index_fixing.value == 1.2
assert fl.periods[1].index_params.index_base.value == 1.0
fixings.pop("index")
fixings.pop("fx_eurusd")
fixings.pop("ibor_1M")
fixings.pop("ibor_3M")
fixings.add(name="ibor_1M", series=Series(index=[dt(1999, 1, 1)], data=[99.0]), state=100)
fixings.add(
name="ibor_3M",
series=Series(
index=[
dt(1999, 1, 1),
],
data=[99.0],
),
state=100,
)
fl.reset_fixings(100)
assert fl.periods[0].rate_params.rate_fixing.value == NoInput(0)
assert fl.periods[1].rate_params.rate_fixing.value == NoInput(0)
assert fl.periods[0].non_deliverable_params.fx_fixing._value == NoInput(0)
assert fl.periods[1].non_deliverable_params.fx_fixing._value == NoInput(0)
assert fl.periods[0].index_params.index_fixing._value == NoInput(0)
assert fl.periods[0].index_params.index_base._value == NoInput(0)
assert fl.periods[1].index_params.index_fixing._value == NoInput(0)
assert fl.periods[1].index_params.index_base._value == NoInput(0)
def test_populated_at_init_no_reset(self):
fixings.add(
name="index",
series=Series(
index=[dt(2000, 1, 1), dt(2000, 3, 1), dt(2000, 6, 1)], data=[1.0, 1.1, 1.2]
),
state=100,
)
fixings.add(
name="fx_eurusd", series=Series(index=[dt(1999, 12, 30)], data=[2.0]), state=100
)
fixings.add(
name="ibor_1M",
series=Series(index=[dt(2000, 1, 1), dt(2000, 3, 1)], data=[1.0, 2.0]),
state=100,
)
fixings.add(
name="ibor_3M",
series=Series(index=[dt(2000, 1, 1), dt(2000, 3, 1)], data=[1.1, 2.1]),
state=100,
)
fl = FloatLeg(
schedule=Schedule(dt(2000, 1, 1), "5m", "Q"),
index_fixings="index",
index_lag=0,
index_method="monthly",
pair="eurusd",
fx_fixings="fx",
fixing_method=FloatFixingMethod.IBOR(0),
rate_fixings="ibor",
)
assert fl.periods[0].rate_params.rate_fixing.value == 1.0483333333333333
assert fl.periods[1].rate_params.rate_fixing.value == 2.1
assert fl.periods[0].non_deliverable_params.fx_fixing.value == 2.0
assert fl.periods[1].non_deliverable_params.fx_fixing.value == 2.0
assert fl.periods[0].index_params.index_fixing.value == 1.1
assert fl.periods[0].index_params.index_base.value == 1.0
assert fl.periods[1].index_params.index_fixing.value == 1.2
assert fl.periods[1].index_params.index_base.value == 1.0
fixings.pop("index")
fixings.pop("fx_eurusd")
fixings.pop("ibor_1M")
fixings.pop("ibor_3M")
fixings.add(name="ibor_1M", series=Series(index=[dt(1999, 1, 1)], data=[99.0]), state=100)
fixings.add(
name="ibor_3M",
series=Series(
index=[
dt(1999, 1, 1),
],
data=[99.0],
),
state=100,
)
fl.reset_fixings(666)
assert fl.periods[0].rate_params.rate_fixing.value == 1.0483333333333333
assert fl.periods[1].rate_params.rate_fixing.value == 2.1
assert fl.periods[0].non_deliverable_params.fx_fixing.value == 2.0
assert fl.periods[1].non_deliverable_params.fx_fixing.value == 2.0
assert fl.periods[0].index_params.index_fixing.value == 1.1
assert fl.periods[0].index_params.index_base.value == 1.0
assert fl.periods[1].index_params.index_fixing.value == 1.2
assert fl.periods[1].index_params.index_base.value == 1.0
================================================
FILE: python/tests/legs/test_legs_legacy.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
import os
from datetime import datetime as dt
import numpy as np
import pytest
import rateslib.errors as err
from pandas import DataFrame, Index, MultiIndex, Series, date_range, isna
from pandas.testing import assert_frame_equal, assert_series_equal
from rateslib import default_context, defaults, fixings
from rateslib.curves import Curve
from rateslib.data.fixings import FloatRateSeries, FXIndex
from rateslib.default import NoInput
from rateslib.dual import Dual
from rateslib.enums import SpreadCompoundMethod
from rateslib.enums.generics import _drb
from rateslib.enums.parameters import LegMtm
from rateslib.fx import FXForwards, FXRates
from rateslib.legs import (
Amortization,
CreditPremiumLeg,
CreditProtectionLeg,
CustomLeg,
FixedLeg,
FloatLeg,
ZeroFixedLeg,
ZeroFloatLeg,
)
from rateslib.legs.amortization import _AmortizationType
from rateslib.periods import (
Cashflow,
CreditPremiumPeriod,
CreditProtectionPeriod,
FixedPeriod,
FloatPeriod,
ZeroFloatPeriod,
)
from rateslib.rs import LegIndexBase
from rateslib.scheduling import Frequency, Schedule, get_calendar
@pytest.fixture
def curve():
nodes = {
dt(2022, 1, 1): 1.00,
dt(2022, 4, 1): 0.99,
dt(2022, 7, 1): 0.98,
dt(2022, 10, 1): 0.97,
}
return Curve(nodes=nodes, interpolation="log_linear")
@pytest.fixture
def hazard_curve():
nodes = {
dt(2022, 1, 1): 1.00,
dt(2022, 4, 1): 0.999,
dt(2022, 7, 1): 0.997,
dt(2022, 10, 1): 0.991,
}
return Curve(nodes=nodes, interpolation="log_linear", id="hazard_fixture")
@pytest.mark.parametrize(
"Leg",
[
FloatLeg,
FixedLeg,
ZeroFixedLeg,
ZeroFloatLeg,
],
)
def test_repr(Leg):
leg = Leg(schedule=Schedule(dt(2022, 1, 1), "1y", "Q"))
result = leg.__repr__()
expected = f""
assert result == expected
@pytest.mark.parametrize("Leg", [FixedLeg, FloatLeg])
def test_repr_mtm(Leg):
leg = Leg(
schedule=Schedule(dt(2022, 1, 1), "1y", "Q"),
currency="usd",
pair="eurusd",
mtm="xcs",
initial_exchange=True,
)
result = leg.__repr__()
expected = f""
assert result == expected
def test_repr_custom():
period = FixedPeriod(
start=dt(2000, 1, 1),
end=dt(2000, 2, 1),
payment=dt(2000, 2, 1),
frequency=Frequency.Months(1, None),
)
leg = CustomLeg([period])
assert leg.__repr__() == f""
class TestFloatLeg:
@pytest.mark.parametrize(
"obj",
[
(
FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=0,
frequency="Q",
),
notional=1e9,
convention="Act360",
fixing_method="rfr_payment_delay",
spread_compound_method="none_simple",
currency="nok",
)
),
(
FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=0,
payment_lag_exchange=0,
frequency="Q",
),
notional=1e9,
convention="Act360",
fixing_method="rfr_payment_delay",
spread_compound_method="none_simple",
currency="nok",
initial_exchange=True,
final_exchange=True,
)
),
],
)
def test_float_leg_analytic_delta_with_npv(self, curve, obj) -> None:
result = 5 * obj.analytic_delta(rate_curve=curve, disc_curve=curve)
before_npv = -obj.npv(rate_curve=curve, disc_curve=curve)
obj.float_spread = 5
after_npv = -obj.npv(rate_curve=curve, disc_curve=curve)
expected = after_npv - before_npv
assert abs(result - expected) < 1e-7
def test_float_leg_analytic_delta_with_npv_mtm_exchange(self, curve) -> None:
obj = FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=0,
payment_lag_exchange=0,
frequency="Q",
),
convention="Act360",
fixing_method="rfr_payment_delay",
spread_compound_method="none_simple",
currency="nok",
pair=FXIndex("usdnok", "osl|fed", 2, "osl", -2),
notional=1e8,
mtm="xcs",
initial_exchange=True,
)
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 1.0})
fxf = FXForwards(
fx_curves={"usdusd": curve, "usdnok": curve, "noknok": curve},
fx_rates=FXRates({"usdnok": 1.0}, settlement=dt(2022, 1, 1)),
)
result = 5 * obj.analytic_delta(rate_curve=curve, disc_curve=curve, fx=fxf)
before_npv = -obj.npv(rate_curve=curve, disc_curve=curve, fx=fxf)
obj.float_spread = 5
after_npv = -obj.npv(rate_curve=curve, disc_curve=curve, fx=fxf)
expected = after_npv - before_npv
assert abs(result - expected) < 1e-7
def test_float_leg_analytic_delta(self, curve) -> None:
float_leg = FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=1e9,
convention="Act360",
)
result = float_leg.analytic_delta(rate_curve=curve)
assert abs(result - 41400.42965267) < 1e-7
def test_float_leg_cashflows(self, curve) -> None:
float_leg = FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
float_spread=NoInput(0),
notional=1e9,
convention="Act360",
)
result = float_leg.cashflows(rate_curve=curve)
# test a couple of return elements
assert abs(result.loc[0, defaults.headers["cashflow"]] + 6610305.76834) < 1e-4
assert abs(result.loc[1, defaults.headers["df"]] - 0.98307) < 1e-4
assert abs(result.loc[1, defaults.headers["notional"]] - 1e9) < 1e-7
def test_float_leg_npv(self, curve) -> None:
float_leg = FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
float_spread=NoInput(0),
notional=1e9,
convention="Act360",
)
result = float_leg.npv(rate_curve=curve)
assert abs(result + 16710777.50089434) < 1e-7
def test_float_leg_fixings(self, curve) -> None:
float_leg = FloatLeg(
schedule=Schedule(
effective=dt(2022, 2, 1),
termination="9M",
frequency="Q",
payment_lag=0,
),
rate_fixings=[10.0, 20.0],
)
assert float_leg.periods[0].rate_params.rate_fixing.value == 10
assert float_leg.periods[1].rate_params.rate_fixing.value == 20
assert float_leg.periods[2].rate_params.rate_fixing.value is NoInput(0)
def test_float_leg_fixings2(self, curve) -> None:
name = str(hash(os.urandom(8)))
fixings.add(name + "_3M", Series(index=[dt(2022, 2, 1), dt(2022, 5, 1)], data=[10.0, 20.0]))
float_leg = FloatLeg(
schedule=Schedule(
effective=dt(2022, 2, 1),
termination="9M",
frequency="Q",
payment_lag=0,
),
rate_fixings=name,
fixing_method="IBOR(0)",
)
assert float_leg.periods[0].rate_params.rate_fixing.value == 10
assert float_leg.periods[1].rate_params.rate_fixing.value == 20
assert float_leg.periods[2].rate_params.rate_fixing.value is NoInput(0)
def test_float_leg_fixings_series(self, curve) -> None:
fixings = Series(0.5, index=date_range(dt(2021, 11, 1), dt(2022, 2, 15)))
float_leg = FloatLeg(
schedule=Schedule(dt(2021, 12, 1), "9M", "M", payment_lag=0), rate_fixings=fixings
)
assert float_leg.periods[0].rate_params.rate_fixing.value != NoInput(0) # december fixings
assert float_leg.periods[1].rate_params.rate_fixing.value != NoInput(0) # january fixings
assert float_leg.periods[2].rate_params.rate_fixing.value == NoInput(0) # february fixings
assert float_leg.periods[4].rate_params.rate_fixing.value == NoInput(0) # no march fixings
def test_float_leg_fixings_scalar(self, curve) -> None:
float_leg = FloatLeg(
schedule=Schedule(dt(2022, 2, 1), "9M", "Q", payment_lag=0), rate_fixings=5.0
)
assert float_leg.periods[0].rate_params.rate_fixing.value == 5.0
assert float_leg.periods[1].rate_params.rate_fixing.value is NoInput(0)
assert float_leg.periods[2].rate_params.rate_fixing.value is NoInput(0)
@pytest.mark.parametrize(
("method"),
[
"rfr_payment_delay",
"rfr_lockout(1)",
"rfr_observation_shift(0)",
],
)
def test_float_leg_rfr_fixings_table(self, method, curve) -> None:
name = str(hash(os.urandom(8)))
fixings.add(
f"{name}_1B",
Series(
[1.19, 1.19, -8.81],
index=[dt(2022, 12, 28), dt(2022, 12, 29), dt(2022, 12, 30)],
),
)
curve._set_ad_order(order=1)
float_leg = FloatLeg(
schedule=Schedule(
effective=dt(2022, 12, 28),
termination="2M",
frequency="M",
payment_lag=0,
),
rate_fixings=name,
currency="SEK",
fixing_method=method,
)
result = float_leg.local_analytic_rate_fixings(rate_curve=curve)
result = result[dt(2022, 12, 28) : dt(2023, 1, 1)]
assert isinstance(result.iloc[0, 0], Dual)
data = [_.real for _ in result.iloc[0:5, 0]]
expected = [0, 0, 0, -0.266647, -0.266647]
for x, y in zip(data, expected):
assert abs(x - y) < 1e-6
fixings.pop(f"{name}_1B")
@pytest.mark.skip(reason="Unclear what this does: maybe tests an IRS fixing table?")
def test_rfr_with_fixings_fixings_table_issue(self) -> None:
from rateslib import IRS
instruments = [
IRS(dt(2024, 1, 15), dt(2024, 3, 20), spec="eur_irs", curves="estr"),
IRS(dt(2024, 3, 20), dt(2024, 6, 19), spec="eur_irs", curves="estr"),
IRS(dt(2024, 6, 19), dt(2024, 9, 18), spec="eur_irs", curves="estr"),
]
curve = Curve(
nodes={
dt(2024, 1, 11): 1.0,
dt(2024, 3, 20): 1.0,
dt(2024, 6, 19): 1.0,
dt(2024, 9, 18): 1.0,
},
calendar="tgt",
convention="act360",
id="estr",
)
from rateslib import Solver
Solver(
curves=[curve],
instruments=instruments,
s=[
3.89800324,
3.63414284,
3.16864932,
],
id="eur",
)
fixings = Series(
data=[
3.904,
3.904,
3.904,
3.905,
3.902,
3.904,
3.906,
3.882,
3.9,
3.9,
3.899,
3.899,
3.901,
3.901,
],
index=[
dt(2024, 1, 10),
dt(2024, 1, 9),
dt(2024, 1, 8),
dt(2024, 1, 5),
dt(2024, 1, 4),
dt(2024, 1, 3),
dt(2024, 1, 2),
dt(2023, 12, 29),
dt(2023, 12, 28),
dt(2023, 12, 27),
dt(2023, 12, 22),
dt(2023, 12, 21),
dt(2023, 12, 20),
dt(2023, 12, 19),
],
)
swap = IRS(
dt(2023, 12, 20),
dt(2024, 1, 31),
spec="eur_irs",
curves="estr",
leg2_fixings=fixings,
notional=3e9,
fixed_rate=3.922,
)
result = swap.leg2.local_rate_fixings(rate_curve=curve)
assert result.loc[dt(2024, 1, 10), (curve.id, "notional")] == 0.0
assert abs(result.loc[dt(2024, 1, 11), (curve.id, "notional")] - 3006829846) < 1.0
assert abs(result.loc[dt(2023, 12, 20), (curve.id, "rates")] - 3.901) < 0.001
def test_float_leg_set_float_spread(self, curve) -> None:
float_leg = FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=-1e9,
convention="Act360",
)
assert float_leg.float_spread == 0.0
assert float_leg.periods[0].rate_params.float_spread == 0
float_leg.float_spread = 2.0
assert float_leg.float_spread == 2.0
assert float_leg.periods[0].rate_params.float_spread == 2.0
@pytest.mark.parametrize(
("method", "spread_method", "expected"),
[
("ibor(2)", NoInput(0), True),
("rfr_payment_delay", "none_simple", True),
("rfr_payment_delay", "isda_compounding", False),
("rfr_payment_delay", "isda_flat_compounding", False),
],
)
def test_is_linear(self, method, spread_method, expected) -> None:
float_leg = FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=-1e9,
convention="Act360",
fixing_method=method,
spread_compound_method=spread_method,
)
assert float_leg._is_linear is expected
@pytest.mark.parametrize(
("method", "settlement", "forward", "expected"),
[
("ISDA_compounding", NoInput(0), NoInput(0), 357.7019143401966),
("ISDA_compounding", dt(2022, 4, 6), dt(2022, 4, 6), 580.3895480501503),
("ISDA_flat_compounding", NoInput(0), NoInput(0), 360.65913016465225),
("ISDA_flat_compounding", dt(2022, 4, 6), dt(2022, 4, 6), 587.64160672647),
("NONE_Simple", NoInput(0), NoInput(0), 362.2342162),
("NONE_Simple", NoInput(0), dt(2022, 2, 1), 360.98240826375957),
("NONE_Simple", dt(2022, 4, 6), dt(2022, 4, 6), 590.6350781908598),
],
)
def test_float_leg_spread_calculation(
self, method, settlement, forward, expected, curve
) -> None:
leg = FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=0,
frequency="Q",
),
notional=1e9,
convention="Act360",
fixing_method="rfr_payment_delay",
spread_compound_method=method,
currency="nok",
float_spread=0,
)
base_npv = leg.npv(
rate_curve=curve, disc_curve=curve, forward=forward, settlement=settlement
)
result = leg.spread(
target_npv=-15000000 + base_npv,
rate_curve=curve,
disc_curve=curve,
settlement=settlement,
forward=forward,
)
assert abs(result - expected) < 1e-3
leg.float_spread = result
assert (
abs(
leg.npv(rate_curve=curve, disc_curve=curve, forward=forward, settlement=settlement)
- base_npv
+ 15000000
)
< 2e2
)
def test_fixing_method_raises(self) -> None:
with pytest.raises(ValueError, match="`fixing_method`"):
FloatLeg(schedule=Schedule(dt(2022, 2, 1), "9M", "Q"), fixing_method="bad")
@pytest.mark.parametrize(
("eff", "term", "freq", "stub", "expected"),
[
(
dt(2022, 1, 1),
dt(2022, 6, 15),
"Q",
"ShortFront",
[dt(2022, 1, 1), dt(2022, 3, 15), dt(2022, 6, 15)],
),
(
dt(2022, 1, 1),
dt(2022, 6, 15),
"Q",
"ShortBack",
[dt(2022, 1, 1), dt(2022, 4, 1), dt(2022, 6, 15)],
),
(
dt(2022, 1, 1),
dt(2022, 9, 15),
"Q",
"LongFront",
[dt(2022, 1, 1), dt(2022, 6, 15), dt(2022, 9, 15)],
),
(
dt(2022, 1, 1),
dt(2022, 9, 15),
"Q",
"LongBack",
[dt(2022, 1, 1), dt(2022, 4, 1), dt(2022, 9, 15)],
),
],
)
def test_leg_periods_unadj_dates(self, eff, term, freq, stub, expected) -> None:
leg = FloatLeg(
schedule=Schedule(effective=eff, termination=term, frequency=freq, stub=stub)
)
assert leg.schedule.uschedule == expected
@pytest.mark.parametrize(
("eff", "term", "freq", "stub", "expected"),
[
(
dt(2022, 1, 1),
dt(2022, 6, 15),
"Q",
"ShortFront",
[dt(2022, 1, 3), dt(2022, 3, 15), dt(2022, 6, 15)],
),
(
dt(2022, 1, 1),
dt(2022, 6, 15),
"Q",
"ShortBack",
[dt(2022, 1, 3), dt(2022, 4, 1), dt(2022, 6, 15)],
),
(
dt(2022, 1, 1),
dt(2022, 9, 15),
"Q",
"LongFront",
[dt(2022, 1, 3), dt(2022, 6, 15), dt(2022, 9, 15)],
),
(
dt(2022, 1, 1),
dt(2022, 9, 15),
"Q",
"LongBack",
[dt(2022, 1, 3), dt(2022, 4, 1), dt(2022, 9, 15)],
),
],
)
def test_leg_periods_adj_dates(self, eff, term, freq, stub, expected) -> None:
leg = FloatLeg(
schedule=Schedule(
effective=eff, termination=term, frequency=freq, stub=stub, calendar="bus"
)
)
assert leg.schedule.aschedule == expected
@pytest.mark.parametrize(
("eff", "term", "freq", "stub", "expected"),
[
(
dt(2022, 1, 1),
dt(2022, 6, 15),
"Q",
"ShortFront",
[
FloatPeriod(
start=dt(2022, 1, 3),
end=dt(2022, 3, 15),
payment=dt(2022, 3, 17),
frequency=Frequency.Months(3, None),
notional=defaults.notional,
convention=defaults.convention,
termination=dt(2022, 6, 15),
),
FloatPeriod(
start=dt(2022, 3, 15),
end=dt(2022, 6, 15),
payment=dt(2022, 6, 17),
frequency=Frequency.Months(3, None),
notional=defaults.notional,
convention=defaults.convention,
termination=dt(2022, 6, 15),
),
],
),
],
)
def test_leg_periods_adj_dates2(self, eff, term, freq, stub, expected) -> None:
# as of v2.5 rateslib no longer puts details of the period into the str REPR.
leg = FloatLeg(
schedule=Schedule(
effective=eff,
termination=term,
frequency=freq,
stub=stub,
payment_lag=2,
calendar="bus",
)
)
for i in range(2):
assert leg.periods[i].__str__()[:19] == expected[i].__str__()[:19]
def test_spread_compound_method_raises(self) -> None:
with pytest.raises(ValueError, match="`spread_compound_method`"):
FloatLeg(
schedule=Schedule(
dt(2022, 2, 1),
"9M",
"Q",
),
spread_compound_method="bad",
)
def test_leg_fixings_as_2_tuple(self) -> None:
name = str(hash(os.urandom(8)))
fixings.add(f"{name}_1M", Series([2.0, 3.0], index=[dt(2022, 6, 2), dt(2022, 7, 4)]))
float_leg = FloatLeg(
schedule=Schedule(
effective=dt(2022, 5, 2),
termination="4M",
frequency="M",
calendar="stk",
),
rate_fixings=(1.5, name),
currency="SEK",
fixing_method="ibor(0)",
)
assert float_leg.periods[0].rate_params.rate_fixing.value == 1.5
assert float_leg.periods[1].rate_params.rate_fixing.value == 2.0
assert float_leg.periods[2].rate_params.rate_fixing.value == 3.0
assert float_leg.periods[3].rate_params.rate_fixing.value == NoInput.blank
assert float_leg.periods[3].rate_params.rate_fixing.identifier == f"{name}_1M"
def test_ex_div(self):
leg = FloatLeg(schedule=Schedule(dt(2000, 1, 1), dt(2001, 1, 1), "Q", extra_lag=-3))
assert not leg.ex_div(dt(2000, 3, 29))
assert leg.ex_div(dt(2000, 3, 30))
assert leg.ex_div(dt(2000, 4, 1))
def test_mtm_xcs_type_type_sets_fx_fixing_start_initially(self):
fixings.add(
"EURUSD_1600",
Series(
index=[dt(2000, 4, 1), dt(2000, 4, 2), dt(2000, 7, 2)], data=[1.268, 1.27, 1.29]
),
)
leg = FloatLeg(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2000, 7, 1),
frequency="Q",
payment_lag=1,
payment_lag_exchange=0,
),
currency="usd",
pair="eurusd",
initial_exchange=True,
mtm="xcs",
notional=5e6,
fx_fixings=(1.25, "EURUSD_1600"),
)
assert leg.periods[2].mtm_params.fx_fixing_start.value == 1.25
fixings.pop("EURUSD_1600")
## 4 types of non-deliverability
@pytest.mark.parametrize(
("fx_fixings", "expected"),
[
("ABCD", 1.10),
(1.5, 1.5),
((1.2, "ABCD"), 1.2),
],
)
def test_non_mtm_xcs_type(self, fx_fixings, expected):
fixings.add("ABCD_EURUSD", Series(index=[dt(1999, 12, 30)], data=[1.10]))
fl = FloatLeg(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2000, 3, 1),
frequency="M",
payment_lag=2,
payment_lag_exchange=1,
calendar="all",
),
currency="usd",
pair="eurusd",
mtm="initial",
initial_exchange=True,
final_exchange=True,
fx_fixings=fx_fixings,
)
# this leg has 4 periods with only one initial fixing date
assert fl.periods[0].non_deliverable_params.fx_fixing.date == dt(1999, 12, 30)
assert fl.periods[1].non_deliverable_params.fx_fixing.date == dt(1999, 12, 30)
assert fl.periods[2].non_deliverable_params.fx_fixing.date == dt(1999, 12, 30)
assert fl.periods[3].non_deliverable_params.fx_fixing.date == dt(1999, 12, 30)
assert fl.periods[0].non_deliverable_params.fx_fixing.value == expected
assert fl.periods[1].non_deliverable_params.fx_fixing.value == expected
assert fl.periods[2].non_deliverable_params.fx_fixing.value == expected
assert fl.periods[3].non_deliverable_params.fx_fixing.value == expected
fixings.pop("ABCD_EURUSD")
@pytest.mark.parametrize(
("fx_fixings", "expected"),
[
("ABCDE", [1.21, 1.31]),
(1.5, [1.5, NoInput(0)]), # this is bad practice: should just supply str ID
((1.5, "ABCDE"), [1.5, 1.31]), # this is bad practice: should just supply str ID
],
)
def test_irs_nd_type(self, fx_fixings, expected):
fixings.add(
"ABCDE_EURUSD",
Series(
index=[
dt(1999, 12, 30),
dt(2000, 1, 31),
dt(2000, 2, 1),
dt(2000, 2, 29),
dt(2000, 3, 1),
],
data=[1.10, 1.20, 1.21, 1.30, 1.31],
),
)
fl = FloatLeg(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2000, 3, 1),
frequency="M",
payment_lag=2,
payment_lag_exchange=1,
calendar="all",
),
currency="usd",
pair="eurusd",
mtm="payment",
initial_exchange=False,
final_exchange=False,
fx_fixings=fx_fixings,
)
# this leg has 2 periods and only 2 relevant fixings dates
assert fl.periods[0].non_deliverable_params.fx_fixing.date == dt(2000, 2, 1)
assert fl.periods[1].non_deliverable_params.fx_fixing.date == dt(2000, 3, 1)
assert fl.periods[0].non_deliverable_params.fx_fixing.value == expected[0]
assert fl.periods[1].non_deliverable_params.fx_fixing.value == expected[1]
fixings.pop("ABCDE_EURUSD")
@pytest.mark.parametrize(
("fx_fixings", "expected"),
[
("ADE", [1.10, 1.10, 1.20, 1.20, 1.20]),
(
1.5,
[1.5, 1.5, NoInput(0), NoInput(0), NoInput(0)],
), # this is bad practice: should just supply str ID
(
(1.5, "ADE"),
[1.5, 1.5, 1.20, 1.20, 1.20],
), # this is bad practice: should just supply str ID
],
)
def test_mtm_xcs_nd_type(self, fx_fixings, expected):
fixings.add(
"ADE_EURUSD",
Series(
index=[
dt(1999, 12, 30),
dt(2000, 1, 31),
dt(2000, 2, 1),
dt(2000, 2, 29),
dt(2000, 3, 1),
],
data=[1.10, 1.20, 1.21, 1.30, 1.31],
),
)
fl = FloatLeg(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2000, 3, 1),
frequency="M",
payment_lag=2,
payment_lag_exchange=1,
calendar="all",
),
currency="usd",
pair="eurusd",
mtm=LegMtm.XCS,
initial_exchange=True,
final_exchange=True,
fx_fixings=fx_fixings,
)
# this leg has 5 periods with only two relevant fixing dates
assert fl.periods[0].non_deliverable_params.fx_fixing.date == dt(1999, 12, 30)
assert fl.periods[1].non_deliverable_params.fx_fixing.date == dt(1999, 12, 30)
assert fl.periods[2].mtm_params.fx_fixing_end.date == dt(2000, 1, 31)
assert fl.periods[3].non_deliverable_params.fx_fixing.date == dt(2000, 1, 31)
assert fl.periods[4].non_deliverable_params.fx_fixing.date == dt(2000, 1, 31)
assert fl.periods[0].non_deliverable_params.fx_fixing.value == expected[0]
assert fl.periods[1].non_deliverable_params.fx_fixing.value == expected[1]
assert fl.periods[2].mtm_params.fx_fixing_end.value == expected[2]
assert fl.periods[3].non_deliverable_params.fx_fixing.value == expected[3]
assert fl.periods[4].non_deliverable_params.fx_fixing.value == expected[4]
fixings.pop("ADE_EURUSD")
@pytest.mark.parametrize(
("fx_fixings", "expected"),
[
("AXDE", [1.10, 1.21, 1.31, 1.30]),
(
1.5,
[1.5, NoInput(0), NoInput(0), NoInput(0)],
), # this is bad practice: should just supply str ID
(
(1.5, "AXDE"),
[1.5, 1.21, 1.31, 1.30],
), # this is bad practice: should just supply str ID
],
)
def test_non_mtm_xcs_nd_type(self, fx_fixings, expected):
fixings.add(
"AXDE_EURUSD",
Series(
index=[
dt(1999, 12, 30),
dt(2000, 1, 31),
dt(2000, 2, 1),
dt(2000, 2, 29),
dt(2000, 3, 1),
],
data=[1.10, 1.20, 1.21, 1.30, 1.31],
),
)
fl = FloatLeg(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2000, 3, 1),
frequency="M",
payment_lag=2,
payment_lag_exchange=1,
calendar="all",
),
currency="usd",
pair="eurusd",
mtm="payment",
initial_exchange=True,
final_exchange=True,
fx_fixings=fx_fixings,
)
# this leg has 4 periods with 3 or 4 (if lag exchange is different) relevant fixing dates.
assert fl.periods[0].non_deliverable_params.fx_fixing.date == dt(1999, 12, 30)
assert fl.periods[1].non_deliverable_params.fx_fixing.date == dt(2000, 2, 1)
assert fl.periods[2].non_deliverable_params.fx_fixing.date == dt(2000, 3, 1)
assert fl.periods[3].non_deliverable_params.fx_fixing.date == dt(2000, 2, 29)
assert fl.periods[0].non_deliverable_params.fx_fixing.value == expected[0]
assert fl.periods[1].non_deliverable_params.fx_fixing.value == expected[1]
assert fl.periods[2].non_deliverable_params.fx_fixing.value == expected[2]
assert fl.periods[3].non_deliverable_params.fx_fixing.value == expected[3]
fixings.pop("AXDE_EURUSD")
def test_sub_zero(self):
# test that a Leg with a zero flag can be composed of multiple ZeroFloatPeriods
# e.g. quarterly payments on 7d
# this tests specifically a 1Y CNY IRS with Quarterly payments to CNRR007 7d rate.
fl = FloatLeg(
schedule=Schedule(
effective=dt(2026, 1, 21),
termination=dt(2027, 1, 21),
frequency="Q",
calendar="all",
),
fixing_frequency="7d",
fixing_method="ibor(1)",
zero_periods=True,
)
curve = Curve({dt(2026, 1, 20): 1.0, dt(2027, 10, 1): 0.95})
# ensure all periods have rates
for zero_period in fl._regular_periods:
for float_period in zero_period.float_periods:
_ = float_period.rate(rate_curve=curve)
result = fl.local_analytic_rate_fixings(rate_curve=curve)
# first 4 fixings are regular: back stubs.
assert [
dt(2026, 1, 20),
dt(2026, 1, 27),
dt(2026, 2, 3),
dt(2026, 2, 10),
] == result.index.to_list()[:4]
# around the July Payment date
assert dt(2026, 7, 13) in result.index
assert dt(2026, 7, 20) in result.index
assert dt(2026, 7, 27) in result.index
# around the October Payment date with stubs
assert dt(2026, 10, 12) in result.index
assert dt(2026, 10, 19) in result.index
assert dt(2026, 10, 20) in result.index
assert dt(2026, 10, 27) in result.index
# final fixings
assert dt(2027, 1, 12) in result.index
assert dt(2027, 1, 19) in result.index
assert isinstance(fl._regular_periods[0], ZeroFloatPeriod)
def test_sub_zero_bjs_calendar(self):
# test that a Leg with a zero flag can be composed of multiple ZeroFloatPeriods
# e.g. quarterly payments on 7d
# this tests specifically a 1Y CNY IRS with Quarterly payments to CNRR007 7d rate.
fl = FloatLeg(
schedule=Schedule(
effective=dt(2026, 1, 21),
termination=dt(2027, 1, 21),
frequency="Q",
calendar="bjs",
),
fixing_frequency="7d",
fixing_method="ibor(1)",
fixing_series=FloatRateSeries(
lag=1,
convention="Act365F",
calendar="bjs",
tenors=["7D"],
zero_period_stub="shortback",
modifier="F",
eom=False,
),
zero_periods=True,
)
curve = Curve(
nodes={dt(2026, 1, 20): 1.0, dt(2027, 10, 1): 0.95},
convention="act365f",
)
# ensure all periods have rates
for zero_period in fl._regular_periods:
for float_period in zero_period.float_periods:
_ = float_period.rate(rate_curve=curve)
result = fl.local_analytic_rate_fixings(rate_curve=curve)
# first 4 fixings are regular: back stubs.
assert [
dt(2026, 1, 20),
dt(2026, 1, 27),
dt(2026, 2, 3),
dt(2026, 2, 10),
] == result.index.to_list()[:4]
# around the July Payment date
assert dt(2026, 7, 13) in result.index
assert dt(2026, 7, 20) in result.index
assert dt(2026, 7, 27) in result.index
# around the October Payment date with stubs
assert dt(2026, 10, 12) in result.index
assert dt(2026, 10, 19) in result.index
assert dt(2026, 10, 20) in result.index
assert dt(2026, 10, 27) in result.index
# final fixings
assert dt(2027, 1, 12) in result.index
assert dt(2027, 1, 19) in result.index
assert isinstance(fl._regular_periods[0], ZeroFloatPeriod)
def test_sub_zero_equivalence_with_rfr_type_rate(self):
# test the two representations of an object yield the same data.
curve = Curve(
nodes={dt(2026, 1, 20): 1.0, dt(2026, 3, 20): 0.99, dt(2026, 5, 20): 0.984},
calendar="nyc",
convention="act360",
)
regular = FloatLeg(
schedule=Schedule(
effective=dt(2026, 1, 20),
termination=dt(2026, 2, 3),
frequency="7d",
calendar="nyc",
modifier="F",
),
fixing_series="usd_rfr",
fixing_frequency="1b",
fixing_method="rfr_payment_delay",
)
zero_type = FloatLeg(
schedule=Schedule(
effective=dt(2026, 1, 20),
termination=dt(2026, 2, 3),
frequency="7d",
calendar="nyc",
modifier="F",
),
fixing_series="usd_rfr",
fixing_frequency="1b",
fixing_method="rfr_payment_delay",
zero_periods=True,
)
rates = [
curve.rate(dt(2026, 1, 20), dt(2026, 1, 21)),
curve.rate(dt(2026, 1, 21), dt(2026, 1, 22)),
curve.rate(dt(2026, 1, 22), dt(2026, 1, 23)),
curve.rate(dt(2026, 1, 23), dt(2026, 1, 26)),
curve.rate(dt(2026, 1, 26), dt(2026, 1, 27)),
]
from math import prod
rate = prod(
[
1 + r / 100 * d
for (r, d) in zip(rates, [1 / 360, 1 / 360, 1 / 360, 3 / 360, 1 / 360])
]
)
rate = (rate - 1) * 36000 / 7
rate1 = regular.periods[0].rate(rate_curve=curve)
rate2 = zero_type.periods[0].rate(rate_curve=curve)
assert abs(rate1 - rate) < 1e-8
assert abs(rate2 - rate) < 1e-8
rates2 = [_.rate(rate_curve=curve) for _ in zero_type.periods[0].float_periods]
assert all(abs(x - y) < 1e-10 for (x, y) in zip(rates, rates2))
def test_sub_zero_equivalence_with_rfr_type_rate_with_fixings(self):
# test the two representations of an object yield the same data.
name = str(hash(os.urandom(3)))
fixings.add(
name + "_1B", Series(index=[dt(2026, 1, 20), dt(2026, 1, 21)], data=[10.0, 12.0])
)
curve = Curve(
nodes={dt(2026, 1, 20): 1.0, dt(2026, 3, 20): 0.99, dt(2026, 5, 20): 0.984},
calendar="nyc",
convention="act360",
)
regular = FloatLeg(
schedule=Schedule(
effective=dt(2026, 1, 20),
termination=dt(2026, 2, 3),
frequency="7d",
calendar="nyc",
modifier="F",
),
fixing_series="usd_rfr",
fixing_frequency="1b",
fixing_method="rfr_payment_delay",
rate_fixings=name,
)
zero_type = FloatLeg(
schedule=Schedule(
effective=dt(2026, 1, 20),
termination=dt(2026, 2, 3),
frequency="7d",
calendar="nyc",
modifier="F",
),
fixing_series="usd_rfr",
fixing_frequency="1b",
fixing_method="rfr_payment_delay",
zero_periods=True,
rate_fixings=name,
)
rates = [
# curve.rate(dt(2026, 1, 20), dt(2026, 1, 21)),
# curve.rate(dt(2026, 1, 21), dt(2026, 1, 22)),
10.0,
12.0,
curve.rate(dt(2026, 1, 22), dt(2026, 1, 23)),
curve.rate(dt(2026, 1, 23), dt(2026, 1, 26)),
curve.rate(dt(2026, 1, 26), dt(2026, 1, 27)),
]
from math import prod
rate = prod(
[
1 + r / 100 * d
for (r, d) in zip(rates, [1 / 360, 1 / 360, 1 / 360, 3 / 360, 1 / 360])
]
)
rate = (rate - 1) * 36000 / 7
rate1 = regular.periods[0].rate(rate_curve=curve)
rate2 = zero_type.periods[0].rate(rate_curve=curve)
assert abs(rate1 - rate) < 1e-8
assert abs(rate2 - rate) < 1e-8
rates2 = [_.rate(rate_curve=curve) for _ in zero_type.periods[0].float_periods]
assert all(abs(x - y) < 1e-10 for (x, y) in zip(rates, rates2))
fixings.pop(name + "_1B")
def test_sub_zero_index_dates(self):
fl = FloatLeg(
schedule=Schedule(
effective=dt(2026, 1, 20),
termination=dt(2026, 2, 3),
frequency="7d",
calendar="nyc",
modifier="F",
),
fixing_series="usd_rfr",
fixing_frequency="1b",
fixing_method="rfr_payment_delay",
zero_periods=True,
index_base=300.0,
)
assert len(fl.periods) == 2
assert fl.periods[0].index_params.index_fixing.date == dt(2026, 1, 27)
assert fl.periods[1].index_params.index_fixing.date == dt(2026, 2, 3)
assert fl.periods[0].index_params.index_base.date == dt(2026, 1, 20)
assert fl.periods[1].index_params.index_base.date == dt(2026, 1, 20)
def test_sub_zero_spread_compounding(self):
# test that a spread under `zero_periods` is added to eahc rate individually prior to
# compounding. The spread compound method only operates at the Period level which
# is specific for a ZeroFloatPeriod.
fl = FloatLeg(
schedule=Schedule(
effective=dt(2026, 1, 20),
termination=dt(2027, 1, 20),
frequency="A",
calendar="all",
modifier="F",
),
fixing_frequency="S",
fixing_method="ibor(0)",
rate_fixings=[[5.0, 5.5]],
float_spread=50.0,
zero_periods=True,
spread_compound_method=SpreadCompoundMethod.NoneSimple,
)
result = fl.periods[0].rate()
expected = (
((1 + 181 / 36000 * (5.0 + 0.5)) * (1 + 184 / 36000 * (5.5 + 0.5)) - 1) * 36000 / 365
)
assert abs(result - expected) < 1e-10
def test_leg_index_base_period_on_period(self):
fl = FloatLeg(
schedule=Schedule(
effective=dt(2000, 1, 7),
termination=dt(2000, 3, 7),
frequency="M",
calendar="all",
),
index_fixings="some",
index_lag=0,
index_base_type=LegIndexBase.PeriodOnPeriod,
)
assert fl.periods[0].index_params.index_base.date == dt(2000, 1, 7)
assert fl.periods[1].index_params.index_base.date == dt(2000, 2, 7)
def test_index_only_all_periods(self, curve):
name = str(hash(os.urandom(2)))
fixings.add(
name,
Series(index=[dt(2022, 1, 1), dt(2022, 2, 1), dt(2022, 3, 1)], data=[1.0, 1.1, 1.3]),
)
fl = FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 7),
termination=dt(2022, 3, 7),
frequency="M",
calendar="all",
),
index_fixings=name,
index_lag=0,
index_method="monthly",
index_base_type=LegIndexBase.PeriodOnPeriod,
initial_exchange=True,
index_only=True,
)
result = fl.cashflows(rate_curve=curve, disc_curve=curve)
# the rates are approximately 4% and in each period inflation increases around 10% and 20%.
# this means the `index only` amount of each cashflows are approximately below:
expected = [0.0, -346.7781, -569.3935, -181818.1818]
for i in range(4):
assert abs(result.loc[i, "Cashflow"] - expected[i]) < 1e-2
def test_period_on_period_zero_periods(self):
fl = FloatLeg(
schedule=Schedule(
effective=dt(2000, 1, 7),
termination=dt(2000, 3, 7),
frequency="M",
calendar="all",
),
zero_periods=True,
fixing_frequency="7d",
index_base_type=LegIndexBase.PeriodOnPeriod,
index_lag=2,
)
assert fl.periods[0].index_params.index_base.date == dt(2000, 1, 7)
assert fl.periods[1].index_params.index_base.date == dt(2000, 2, 7)
class TestZeroFloatLeg:
def test_zero_float_leg_set_float_spread(self, curve) -> None:
float_leg = ZeroFloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=-1e9,
convention="Act360",
)
assert float_leg.float_spread == 0.0
assert float_leg.periods[0].float_spread == 0.0
float_leg.float_spread = 2.0
assert float_leg.float_spread == 2.0
assert float_leg.periods[0].float_spread == 2.0
def test_with_fixings(self):
name = str(hash(os.urandom(8)))
fixings.add(
f"{name}_3m",
Series(
index=[dt(2022, 1, 1), dt(2022, 2, 1), dt(2022, 5, 1)],
data=[1.0, 2.0, 3.0],
),
)
fixings.add(
f"{name}_1m",
Series(
index=[dt(2022, 1, 1), dt(2022, 2, 1), dt(2022, 5, 1)],
data=[5.0, 0.0, 0.0],
),
)
leg = ZeroFloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 8, 1),
front_stub=dt(2022, 2, 1),
frequency="Q",
calendar="all",
),
fixing_method="ibor(0)",
rate_fixings=name,
)
expected = [5.0, 2.0, 3.0]
for i, period in enumerate(leg.periods[0]._float_periods):
assert period.rate_params.rate_fixing.value == expected[i]
result = leg.periods[0].rate()
assert abs(result - 2.8743158337825925) < 1e-8
def test_zero_float_leg_dcf(self) -> None:
ftl = ZeroFloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=-1e9,
convention="Act360",
)
p = ftl.periods[0]
result = p.dcf
expected = p._float_periods[0].period_params.dcf + p._float_periods[1].period_params.dcf
assert result == expected
def test_zero_float_leg_cashflow(self, curve) -> None:
ftl = ZeroFloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=-1e9,
convention="Act360",
float_spread=500,
)
p = ftl.periods[0]
result = p.try_unindexed_reference_cashflow(rate_curve=curve).unwrap()
expected = (
1
+ p._float_periods[0].period_params.dcf
* p._float_periods[0].rate(rate_curve=curve)
/ 100
)
expected *= (
1
+ p._float_periods[1].period_params.dcf
* p._float_periods[1].rate(rate_curve=curve)
/ 100
)
expected = (expected - 1) * 1e9
assert abs(result - expected) < 1e-9
def test_zero_float_leg_cashflows(self, curve) -> None:
ftl = ZeroFloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=-1e9,
convention="Act360",
float_spread=500,
)
result = ftl.cashflows(rate_curve=curve)
expected = DataFrame(
{
"Type": ["ZeroFloatPeriod"],
"Acc Start": [dt(2022, 1, 1)],
"Acc End": [dt(2022, 6, 1)],
"DCF": [0.419444444444444],
"Spread": [500.0],
},
)
assert_frame_equal(result[["Type", "Acc Start", "Acc End", "DCF", "Spread"]], expected)
def test_zero_float_leg_npv(self, curve) -> None:
ftl = ZeroFloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=-1e9,
convention="Act360",
)
result = ftl.npv(rate_curve=curve)
expected = 16710778.891147703
assert abs(result - expected) < 1e-2
result2 = ftl.npv(rate_curve=curve, local=True)
assert abs(result2["usd"] - expected) < 1e-2
def test_cashflows_none(self) -> None:
ftl = ZeroFloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=-1e9,
convention="Act360",
)
result = ftl.cashflows()
assert result.iloc[0].to_dict()[defaults.headers["npv"]] is None
assert result.iloc[0].to_dict()[defaults.headers["npv_fx"]] is None
def test_amortization_raises(self) -> None:
with pytest.raises(TypeError, match="unexpected keyword argument"):
ZeroFloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=-1e9,
convention="Act360",
amortization=1.0,
)
def test_rfr_fixings_table(self, curve) -> None:
zfl = ZeroFloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 10, 1),
payment_lag=0,
frequency="Q",
),
notional=-1e9,
convention="Act360",
)
# fl = FloatLeg(
# effective=dt(2022, 1, 1),
# termination=dt(2022, 10, 1),
# payment_lag=0,
# notional=-1e9,
# convention="Act360",
# frequency="Q",
# )
result = zfl.local_analytic_rate_fixings(rate_curve=curve)
# compare = fl.fixings_table(curve)
for i in range(len(result.index)):
# consistent risk throught the compounded leg
assert abs(result.iloc[i, 0] - 277.75) < 1e-1
def test_ibor_fixings_table(self, curve) -> None:
zfl = ZeroFloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 10, 1),
payment_lag=0,
frequency="Q",
),
notional=-1e9,
convention="Act360",
fixing_method="ibor(0)",
)
result = zfl.local_analytic_rate_fixings(rate_curve=curve)
assert abs(result.iloc[0, 0] - 24750) < 1e-3
assert abs(result.iloc[1, 0] - 25022.4466) < 1e-2
assert abs(result.iloc[2, 0] - 25294.7845) < 1e-2
def test_ibor_stub_fixings_table(self, curve) -> None:
curve2 = curve.copy()
curve2._id = "3mIBOR"
curve._id = "1mIBOR"
zfl = ZeroFloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 9, 1),
payment_lag=0,
frequency="Q",
),
notional=-1e9,
convention="Act360",
fixing_method="ibor(0)",
)
result = zfl.local_analytic_rate_fixings(
rate_curve={"1m": curve, "3m": curve2}, disc_curve=curve
)
assert abs(result.iloc[0, 0] - 8554.562) < 1e-2
assert abs(result.iloc[0, 1] - 7726.701) < 1e-2
assert isna(result.iloc[1, 0])
assert abs(result.iloc[2, 1] - 25294.7235) < 1e-3
@pytest.mark.parametrize(
"fixings", [[2.0, 2.5], Series([2.0, 2.5], index=[dt(2021, 7, 1), dt(2021, 10, 1)])]
)
def test_ibor_fixings_table_after_known_fixings(self, curve, fixings) -> None:
curve2 = curve.copy()
curve2._id = "3mIBOR"
curve._id = "1mIBOR"
zfl = ZeroFloatLeg(
schedule=Schedule(
effective=dt(2021, 7, 1),
termination=dt(2022, 9, 1),
payment_lag=0,
frequency="Q",
stub="shortBack",
),
notional=-1e9,
convention="Act360",
fixing_method="ibor(0)",
rate_fixings=fixings,
)
result = zfl.local_analytic_rate_fixings(
rate_curve={"1m": curve, "3m": curve2}, disc_curve=curve
)
assert abs(result.iloc[0, 0] - 0) < 1e-2
assert abs(result.iloc[1, 0] - 0) < 1e-2
assert isna(result.iloc[0, 1])
assert abs(result.iloc[4, 0] - 8792.231) < 1e-2
assert abs(result.iloc[4, 1] - 8508.6111) < 1e-3
def test_frequency_raises(self) -> None:
with pytest.raises(ValueError, match="`frequency` for a ZeroFloatLeg should not be 'Z'"):
ZeroFloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination="5y",
payment_lag=0,
frequency="Z",
),
notional=-1e8,
convention="ActActISDA",
)
def test_zero_float_leg_analytic_delta(self, curve) -> None:
zfl = ZeroFloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination="5y",
payment_lag=0,
frequency="A",
),
notional=-1e8,
convention="ActActISDA",
float_spread=1.0,
fixing_series=FloatRateSeries(
lag=0,
calendar="all",
modifier="f",
convention="act360",
eom=False,
),
)
result = zfl.analytic_delta(rate_curve=curve)
expected = -47914.3660
assert abs(result - expected) < 1e-3
@pytest.mark.parametrize(
("settlement", "forward", "exp"),
[
(NoInput(0), NoInput(0), 0.71008),
(NoInput(0), dt(2023, 1, 1), -0.11739),
(dt(2026, 1, 1), dt(2026, 1, 1), -2.40765),
],
)
def test_zero_float_spread_calc(self, settlement, forward, exp, curve) -> None:
rate_curve = curve.shift(25)
zfl = ZeroFloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination="5y",
payment_lag=0,
frequency="A",
),
notional=-1e8,
convention="Act360",
fixing_method="ibor(2)",
)
tgt_npv = 25000000 * curve[dt(2027, 1, 1)]
result = zfl.spread(
target_npv=tgt_npv,
rate_curve=rate_curve,
disc_curve=curve,
settlement=settlement,
forward=forward,
)
zfl.float_spread = result
tested = zfl.local_npv(
rate_curve=rate_curve,
disc_curve=curve,
settlement=settlement,
forward=forward,
)
assert abs(result / 100 - exp) < 1e-3
assert abs(tgt_npv - tested) < 1e-3
class TestZeroFixedLeg:
@pytest.mark.parametrize(
("freq", "cash", "rate"),
[
("A", 13140821.29, 2.50),
("S", 13227083.80, 2.50),
("A", None, NoInput(0)),
],
)
def test_zero_fixed_leg_cashflows(self, freq, cash, rate, curve) -> None:
zfl = ZeroFixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination="5y",
payment_lag=0,
frequency=freq,
),
notional=-1e8,
convention="ActActISDA",
fixed_rate=rate,
)
result = zfl.cashflows(disc_curve=curve)
expected = DataFrame(
{
"Type": ["ZeroFixedPeriod"],
"Acc Start": [dt(2022, 1, 1)],
"Acc End": [dt(2027, 1, 1)],
"DCF": [5.0],
"Rate": [_drb(None, rate)],
"Cashflow": [cash],
},
)
assert_frame_equal(
result[["Type", "Acc Start", "Acc End", "DCF", "Rate", "Cashflow"]],
expected,
rtol=1e-3,
)
def test_zero_fixed_leg_cashflows_cal(self, curve) -> None:
# assert stated cashflows accrual dates are adjusted according to calendar
# GH561/562
zfl = ZeroFixedLeg(
schedule=Schedule(
effective=dt(2024, 12, 15),
termination="5y",
payment_lag=0,
calendar="tgt",
modifier="mf",
frequency="A",
),
notional=-1e8,
convention="ActActISDA",
fixed_rate=2.0,
)
result = zfl.cashflows(disc_curve=curve)
expected = DataFrame(
{
"Type": ["ZeroFixedPeriod"],
"Acc Start": [dt(2024, 12, 16)],
"Acc End": [dt(2029, 12, 17)],
"DCF": [5.0],
"Rate": [2.0],
},
)
assert_frame_equal(
result[["Type", "Acc Start", "Acc End", "DCF", "Rate"]],
expected,
rtol=1e-3,
)
def test_zero_fixed_leg_npv(self, curve) -> None:
zfl = ZeroFixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination="5y",
payment_lag=0,
frequency="A",
),
notional=-1e8,
convention="ActActISDA",
fixed_rate=2.5,
)
result = zfl.npv(disc_curve=curve)
expected = 13140821.29 * curve[dt(2027, 1, 1)]
assert abs(result - expected) < 1e-2
result2 = zfl.npv(disc_curve=curve, local=True)
assert abs(result2["usd"] - expected) < 1e-2
def test_zero_fixed_leg_analytic_delta(self, curve) -> None:
zfl = ZeroFixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination="5y",
payment_lag=0,
frequency="A",
),
notional=-1e8,
convention="ActActISDA",
fixed_rate=2.5,
)
result2 = zfl.analytic_delta(disc_curve=curve)
assert abs(result2 + 45024.1974) < 1e-3
@pytest.mark.parametrize(
("settlement", "forward", "exp"),
[
(NoInput(0), NoInput(0), 2.50),
(NoInput(0), dt(2023, 1, 1), 2.404826),
(dt(2026, 1, 1), NoInput(0), 2.139550),
],
)
def test_zero_fixed_spread(self, settlement, forward, exp, curve) -> None:
zfl = ZeroFixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination="5y",
payment_lag=0,
frequency="A",
),
notional=-1e8,
convention="ActActISDA",
fixed_rate=NoInput(0),
)
result = zfl.spread(
target_npv=13140821.29 * curve[dt(2027, 1, 1)],
rate_curve=NoInput(0),
disc_curve=curve,
settlement=settlement,
forward=forward,
)
assert abs(result / 100 - exp) < 1e-3
@pytest.mark.parametrize("final_exchange", [False, True])
def test_zero_fixed_spread_exchanges(self, curve, final_exchange) -> None:
zfl = ZeroFixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 5),
termination="8m",
payment_lag=0,
frequency="M",
),
notional=-1e8,
convention="ActActISDA",
final_exchange=final_exchange,
fixed_rate=NoInput(0),
)
result = zfl.spread(
target_npv=50000.0 + 1e8 * curve[dt(2022, 9, 5)] * final_exchange, rate_curve=curve
)
expected = 7.718420018560934 # bps
assert abs(result - expected) < 1e-8
zfl.fixed_rate = expected / 100.0
result = zfl.npv(rate_curve=curve)
assert abs(result - (50000.0 + 1e8 * curve[dt(2022, 9, 5)] * final_exchange)) < 1e-7
def test_zero_fixed_spread_raises_settlement(self, curve) -> None:
zfl = ZeroFixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination="5y",
payment_lag=0,
frequency="A",
),
notional=-1e8,
convention="ActActISDA",
fixed_rate=NoInput(0),
)
with pytest.raises(ZeroDivisionError):
zfl.spread(
target_npv=13140821.29 * curve[dt(2027, 1, 1)],
rate_curve=NoInput(0),
disc_curve=curve,
settlement=dt(2029, 1, 1),
forward=NoInput(0),
)
@pytest.mark.parametrize("final_exchange", [False, True])
def test_zero_fixed_spread_indexed(self, curve, final_exchange) -> None:
zfl = ZeroFixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination="5y",
payment_lag=0,
frequency="A",
),
notional=-1e8,
convention="ActActISDA",
fixed_rate=NoInput(0),
final_exchange=final_exchange,
index_base=100.0,
index_fixings=110.0,
)
target_npv = (13140821.29 + 1e8 * 1.1 * final_exchange) * curve[dt(2027, 1, 1)]
result = zfl.spread(
target_npv=target_npv,
rate_curve=NoInput(0),
disc_curve=curve,
)
assert abs(result / 100 - 2.2826266057484057) < 1e-3
zfl.fixed_rate = result / 100.0
result = zfl.npv(rate_curve=curve)
assert abs(result - target_npv) < 1e-7
@pytest.mark.parametrize("final_exchange", [False, True])
def test_zero_fixed_spread_non_deliverable(self, curve, final_exchange) -> None:
zfl = ZeroFixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination="5y",
payment_lag=0,
frequency="A",
),
notional=-1e8,
convention="ActActISDA",
fixed_rate=NoInput(0),
currency="usd",
final_exchange=final_exchange,
pair="eurusd",
fx_fixings=2.0,
)
target_npv = (13140821.29 + 1e8 * 2.0 * final_exchange) * curve[dt(2027, 1, 1)]
result = zfl.spread(
target_npv=target_npv,
rate_curve=NoInput(0),
disc_curve=curve,
)
assert abs(result / 100 - 1.2808477472765924) < 1e-3
zfl.fixed_rate = result / 100.0
result = zfl.npv(rate_curve=curve)
assert abs(result - target_npv) < 1e-7
def test_amortization_raises(self) -> None:
with pytest.raises(TypeError, match="unexpected keyword argument 'amortization'"):
ZeroFixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination="5y",
payment_lag=0,
frequency="A",
),
notional=-1e8,
convention="ActActISDA",
fixed_rate=NoInput(0),
amortization=1.0,
)
def test_frequency_raises(self) -> None:
with pytest.raises(ValueError, match="`frequency` for a ZeroFixedLeg should not be 'Z'"):
ZeroFixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination="5y",
payment_lag=0,
frequency="Z",
),
notional=-1e8,
convention="ActActISDA",
fixed_rate=NoInput(0),
)
def test_analytic_delta_no_fixed_rate(self, curve) -> None:
zfl = ZeroFixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination="5y",
payment_lag=0,
frequency="A",
),
notional=-1e8,
convention="ActActISDA",
fixed_rate=NoInput(0),
)
with pytest.raises(ValueError, match="A `fixed_rate` must be set for a "):
zfl.analytic_delta(disc_curve=curve)
class TestZeroIndexLeg:
@pytest.mark.parametrize(
("index_base", "index_fixings", "meth", "exp"),
[
(NoInput(0), NoInput(0), "monthly", -61855.670),
(NoInput(0), NoInput(0), "daily", -61782.379),
(100.0, NoInput(0), "monthly", -61855.670),
(NoInput(0), 110.0, "monthly", -100000.0),
(NoInput(0), 110.0, "daily", -98696.645),
(100.0, 110.0, "monthly", -100000.0),
(100.0, 110.0, "daily", -100000.0),
],
)
def test_zero_index_cashflow(self, index_base, index_fixings, meth, exp) -> None:
index_curve = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.97,
},
index_base=100.0,
index_lag=3,
interpolation="linear_index",
)
zil = ZeroFixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 15),
termination="2Y",
frequency="A",
),
fixed_rate=0.0,
convention="1+",
index_base=index_base,
index_fixings=index_fixings,
index_method=meth,
final_exchange=True,
index_only=True,
)
result = zil.cashflows(index_curve=index_curve).loc[1, "Cashflow"]
assert abs(result - exp) < 1e-3
@pytest.mark.skip(reason="v2.2 no longer permits fixing setting")
def test_set_index_leg_after_init(self) -> None:
leg = ZeroFixedLeg(
schedule=Schedule(
effective=dt(2022, 3, 15),
termination="9M",
frequency="Q",
payment_lag=0,
),
convention="1+",
notional=40e6,
index_base=None,
)
for period in leg.periods[:1]:
assert period.index_base is None
leg.index_base = 205.0
for period in leg.periods[:1]:
assert period.index_base == 205.0
# The following test no longer passes after ZeroIndexLeg removed from use.
# def test_zero_analytic_delta(self, curve) -> None:
# zil = ZeroFixedLeg(
# schedule=Schedule(
# effective=dt(2022, 1, 15),
# termination="2Y",
# frequency="A",
# ),
# convention="1+",
# index_lag=0,
# index_base=100.0,
# index_fixings=110.0,
# index_only=True,
# final_exchange=True,
# fixed_rate=0.0,
# )
# assert zil.analytic_delta(disc_curve=curve) == 0.0
def test_cashflows(self) -> None:
index_curve = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.97,
},
index_base=100.0,
index_lag=3,
interpolation="linear_index",
)
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.97})
zil = ZeroFixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 15),
termination="2Y",
frequency="A",
payment_lag=0,
payment_lag_exchange=0,
),
convention="1+",
index_lag=3,
index_method="curve",
index_only=True,
fixed_rate=0.0,
final_exchange=True,
)
result = zil.cashflows(index_curve=index_curve, disc_curve=curve)
expected = DataFrame(
{
"Type": ["ZeroFixedPeriod", "Cashflow"], # ["ZeroIndexLeg"],
"Notional": [1000000.0, 1000000.0],
"Unindexed Cashflow": [-0.0, -1000000.0],
"Index Base": [100.11863, 100.11863],
"Index Ratio": [1.06178, 1.06178],
"Cashflow": [0.0, -61782.379],
"NPV": [0.0, -58063.1659], # [-58053.47605],
},
)
assert_frame_equal(
result[
[
"Type",
"Notional",
"Unindexed Cashflow",
"Index Base",
"Index Ratio",
"Cashflow",
"NPV",
]
],
expected,
rtol=1e-3,
)
@pytest.mark.parametrize("only", [True, False])
def test_three_ways(self, only):
# A Zero Index Legs can also be created in three ways.
one = ZeroFixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination="2Y",
frequency="A",
payment_lag=0,
payment_lag_exchange=0,
),
fixed_rate=0.0,
index_base=100.0,
index_fixings=110.0,
index_only=only,
final_exchange=True,
)
result1 = one.cashflows().loc[1, "Cashflow"]
two = Cashflow(
payment=dt(2024, 1, 1),
notional=1e6,
index_base=100.0,
index_fixings=110.0,
index_only=only,
)
result2 = two.cashflows()["Cashflow"]
three = FixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination="2Y",
frequency="Z",
payment_lag=0,
payment_lag_exchange=0,
),
fixed_rate=0.0,
index_base=100.0,
index_fixings=110.0,
index_only=only,
final_exchange=True,
)
result3 = three.cashflows().loc[1, "Cashflow"]
# four = ZeroIndexLeg(
# schedule=Schedule(
# effective=dt(2022, 1, 1),
# termination="2Y",
# frequency="Z",
# payment_lag=0,
# payment_lag_exchange=0,
# ),
# index_base=100.0,
# index_fixings=110.0,
# final_exchange=not only,
# )
# result4 = four.cashflows().loc[0, "Cashflow"]
assert abs(result1 - result2) < 1e-8
assert abs(result1 - result3) < 1e-8
# assert abs(result1 - result4) < 1e-8
@pytest.mark.parametrize(
("ini", "final", "mtm", "lenn", "nd_dt", "cf"),
[
(False, False, "initial", 2, dt(2000, 1, 1), 500e3 * 2.0),
(False, False, "payment", 2, dt(2001, 1, 1), 500e3 * 3.0),
(False, True, "initial", 2, dt(2000, 1, 1), 1.5e6 * 2.0),
(False, True, "payment", 2, dt(2001, 1, 1), 1.5e6 * 3.0),
# (True, False, False, 2, dt(2000, 1, 1)), # final exch True by default
# (True, False, True, 2, dt(2000, 1, 1)), # final exch True by default
(True, True, "initial", 3, dt(2000, 1, 1), 1.5e6 * 2.0),
(True, True, "payment", 3, dt(2001, 1, 1), 1.5e6 * 3.0),
],
)
def test_attributes(self, ini, final, mtm, lenn, nd_dt, cf) -> None:
name = str(hash(os.urandom(8)))
fixings.add(name, Series(index=[dt(2000, 1, 1), dt(2001, 1, 1)], data=[10.0, 15.0]))
fixings.add(
name + "fx_EURUSD", Series(index=[dt(1999, 12, 30), dt(2000, 12, 28)], data=[2.0, 3.0])
)
leg = ZeroFixedLeg(
schedule=Schedule(effective=dt(2000, 1, 1), termination=dt(2001, 1, 1), frequency="A"),
currency="usd",
initial_exchange=ini,
final_exchange=True,
pair="eurusd",
mtm=mtm,
fx_fixings=name + "fx",
index_lag=0,
index_fixings=name,
notional=-1e6,
index_only=not final,
fixed_rate=0.0,
)
assert len(leg.periods) == lenn
assert leg.periods[-1].non_deliverable_params.delivery == nd_dt
assert leg.periods[-1].non_deliverable_params.publication == get_calendar(
"ldn"
).lag_bus_days(nd_dt, -2, True)
assert leg.periods[-1].cashflow() == cf
fixings.pop(name)
fixings.pop(name + "fx_EURUSD")
class TestFloatLegExchange:
@pytest.mark.skip(reason="v 2.2 removed ability to mutate notional")
def test_float_leg_exchange_notional_setter(self) -> None:
float_leg_exc = FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=-1e9,
convention="Act360",
initial_exchange=True,
final_exchange=True,
)
float_leg_exc.notional = 200
assert float_leg_exc.notional == 200
@pytest.mark.skip(reason="v 2.2 removed ability to mutate amortisation.")
def test_float_leg_exchange_amortization_setter(self) -> None:
float_leg_exc = FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 10, 1),
payment_lag=2,
frequency="Q",
),
notional=-1000,
convention="Act360",
initial_exchange=True,
final_exchange=True,
)
float_leg_exc.amortization = -200
cashflows = [2, 4, 6]
cash_notionals = [None, -200, None, -200, None, -600]
fixed_notionals = [None, -1000, None, -800, None, -600]
for i in cashflows:
assert isinstance(float_leg_exc.periods[i], Cashflow)
assert float_leg_exc.periods[i].notional == cash_notionals[i - 1]
assert isinstance(float_leg_exc.periods[i - 1], FloatPeriod)
assert float_leg_exc.periods[i - 1].notional == fixed_notionals[i - 1]
def test_float_leg_exchange_set_float_spread(self) -> None:
float_leg_exc = FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 10, 1),
payment_lag=2,
frequency="Q",
),
notional=-1000,
convention="Act360",
initial_exchange=True,
final_exchange=True,
)
assert float_leg_exc.float_spread == 0.0
float_leg_exc.float_spread = 2.0
assert float_leg_exc.float_spread == 2.0
for period in float_leg_exc.periods:
if isinstance(period, FloatPeriod):
period.rate_params.float_spread == 2.0
def test_float_leg_exchange_amortization(self, curve) -> None:
leg = FloatLeg(
schedule=Schedule(
dt(2022, 1, 1),
dt(2023, 1, 1),
"Q",
payment_lag=0,
),
notional=5e6,
amortization=1e6,
initial_exchange=True,
final_exchange=True,
)
assert len(leg.periods) == 9
for i in [0, 2, 4, 6, 8]:
assert type(leg.periods[i]) is Cashflow
for i in [1, 3, 5, 7]:
assert type(leg.periods[i]) is FloatPeriod
assert leg.periods[1].settlement_params.notional == 5e6
assert leg.periods[7].settlement_params.notional == 2e6
assert leg.periods[8].settlement_params.notional == 2e6
assert abs(leg.npv(rate_curve=curve).real) < 1e-9
def test_float_leg_exchange_npv(self, curve) -> None:
fle = FloatLeg(
schedule=Schedule(
dt(2022, 2, 1),
"6M",
"Q",
payment_lag=0,
),
initial_exchange=True,
final_exchange=True,
)
result = fle.npv(rate_curve=curve)
assert abs(result) < 1e-9
def test_float_leg_exchange_fixings_table(self, curve) -> None:
fle = FloatLeg(
schedule=Schedule(
dt(2022, 2, 1),
"6M",
"Q",
payment_lag=0,
),
initial_exchange=True,
final_exchange=True,
)
result = fle.local_analytic_rate_fixings(rate_curve=curve)
expected = DataFrame(
data=[-0.2767869527597316, -0.27405055522733884],
index=Index([dt(2022, 4, 30), dt(2022, 5, 1)], name="obs_dates"),
columns=MultiIndex.from_tuples(
[(curve.id, "usd", "usd", "1B")],
names=["identifier", "local_ccy", "display_ccy", "frequency"],
),
)
assert_frame_equal(result[dt(2022, 4, 30) : dt(2022, 5, 1)], expected)
class TestFixedLeg:
def test_fixed_leg_analytic_delta(self, curve) -> None:
fixed_leg = FixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=1e9,
convention="Act360",
)
result = fixed_leg.analytic_delta(rate_curve=curve)
assert abs(result - 41400.42965267) < 1e-7
def test_fixed_leg_npv(self, curve) -> None:
fixed_leg = FixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=1e9,
convention="Act360",
fixed_rate=4.00,
)
result = fixed_leg.npv(disc_curve=curve)
assert abs(result + 400 * fixed_leg.analytic_delta(disc_curve=curve)) < 1e-7
def test_fixed_leg_cashflows(self, curve) -> None:
fixed_leg = FixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
fixed_rate=4.00,
notional=-1e9,
convention="Act360",
)
result = fixed_leg.cashflows(disc_curve=curve)
# test a couple of return elements
assert abs(result.loc[0, defaults.headers["cashflow"]] - 6555555.55555) < 1e-4
assert abs(result.loc[1, defaults.headers["df"]] - 0.98307) < 1e-4
assert abs(result.loc[1, defaults.headers["notional"]] + 1e9) < 1e-7
def test_fixed_leg_set_fixed(self, curve) -> None:
fixed_leg = FixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=-1e9,
convention="Act360",
)
assert fixed_leg.fixed_rate is NoInput(0)
assert fixed_leg.periods[0].rate_params.fixed_rate is NoInput(0)
fixed_leg.fixed_rate = 2.0
assert fixed_leg.fixed_rate == 2.0
assert fixed_leg.periods[0].rate_params.fixed_rate == 2.0
def test_fixed_leg_final_exchange_custom_amort(self):
leg = FixedLeg(
schedule=Schedule(dt(2000, 1, 1), dt(2000, 5, 1), "M"),
notional=100,
amortization=Amortization(4, 100, [0, 50.0, 0]),
final_exchange=True,
)
result = leg.cashflows()
assert result["Notional"].tolist() == [100.0, 0.0, 100.0, 50.0, 50.0, 0.0, 50.0, 50.0]
def test_non_deliverable(self, curve):
fxf = FXForwards(
fx_curves={"usdusd": curve, "brlusd": curve, "brlbrl": curve},
fx_rates=FXRates({"usdbrl": 25.0}, settlement=dt(2022, 1, 3)),
)
fixed_leg = FixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
payment_lag_exchange=1,
frequency="Q",
),
notional=1e9,
convention="Act360",
fixed_rate=4.00,
currency="usd",
pair=FXIndex("usdbrl", "all", 0),
)
cf = fixed_leg.cashflows(disc_curve=curve, fx=fxf)
assert fixed_leg.periods[0].non_deliverable_params.fx_fixing.date == dt(2022, 1, 2)
assert fixed_leg.periods[1].non_deliverable_params.fx_fixing.date == dt(2022, 1, 2)
assert abs(cf.loc[1, "Cashflow"] + 408888.8888) < 1e-4
assert cf.loc[0, "Reference Ccy"] == "BRL"
# v2.5
@pytest.mark.parametrize(
("settlement", "forward", "exp"),
[
(NoInput(0), NoInput(0), 403.9491881327746),
(dt(2022, 3, 30), dt(2022, 3, 30), 399.9990223763462),
(dt(2022, 4, 6), dt(2022, 4, 6), 799.0147512470912),
],
)
def test_fixed_leg_spread(self, settlement, forward, exp, curve) -> None:
fixed_leg = FixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 7, 1),
payment_lag=2,
payment_lag_exchange=1,
frequency="Q",
),
notional=-1e9,
convention="Act360",
fixed_rate=4.00,
currency="usd",
)
result = fixed_leg.spread(
target_npv=20000000,
disc_curve=curve,
rate_curve=curve,
index_curve=curve,
settlement=settlement,
forward=forward,
)
assert abs(result - exp) < 1e-6
@pytest.mark.parametrize("initial", [True, False])
@pytest.mark.parametrize("final", [True, False])
@pytest.mark.parametrize("amortization", [True, False])
def test_construction_of_relevant_periods(self, initial, final, amortization):
# test construction cases:
#
# - Regular periods only; no amortization, no exchanges
# - Regular with different exchanges: final and initial
# - Regular with Amortization, but no exchanges.
# - Regular with Amortization and with exchanges.
#
fl = FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2000, 7, 1),
frequency="Q",
),
initial_exchange=initial,
final_exchange=final,
amortization=250000.0 if amortization else NoInput(0),
)
assert len(fl._regular_periods) == 2
assert (fl._exchange_periods[0] is None) is not initial
assert (fl._exchange_periods[1] is None) is not (final or initial)
if not amortization:
assert fl.amortization._type == _AmortizationType.NoAmortization
assert fl._amortization_exchange_periods is None
else:
assert fl.amortization._type == _AmortizationType.ConstantPeriod
if not (final or initial): # initial sets final to True
assert fl._amortization_exchange_periods is None
else:
assert len(fl._amortization_exchange_periods) == 1
@pytest.mark.parametrize("initial", [True, False])
@pytest.mark.parametrize("final", [True, False])
@pytest.mark.parametrize("amortization", [True, False])
def test_construction_of_relevant_periods_non_deliverable(self, initial, final, amortization):
# when the leg is ND but not MTM the same construction as in the regular deliverable
# case should be permitted. All FXFixings should beb determined by a single rate of
# exchange. This test builds on the above test for non-deliverability.
fl = FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2000, 7, 1),
frequency="Q",
payment_lag_exchange=1,
),
fixed_rate=10.0,
currency="usd",
pair="eurusd", # the notional of this leg is expressed in BRL but payments made in USD
initial_exchange=initial,
final_exchange=final,
amortization=250000.0 if amortization else NoInput(0),
fx_fixings=2.0, # this should not impact the reference currency notional and amortiz
)
for rp in fl._regular_periods:
assert rp.non_deliverable_params.fx_fixing.date == dt(1999, 12, 30)
assert rp.non_deliverable_params.fx_fixing.value == 2.0
if initial:
assert fl._exchange_periods[0].non_deliverable_params.fx_fixing.date == dt(1999, 12, 30)
assert fl._exchange_periods[0].non_deliverable_params.fx_fixing.value == 2.0
if final:
assert fl._exchange_periods[1].non_deliverable_params.fx_fixing.date == dt(1999, 12, 30)
assert fl._exchange_periods[1].non_deliverable_params.fx_fixing.value == 2.0
if amortization and final:
assert fl._amortization_exchange_periods[0].non_deliverable_params.fx_fixing.date == dt(
1999, 12, 30
)
assert (
fl._amortization_exchange_periods[0].non_deliverable_params.fx_fixing.value == 2.0
)
assert fl.amortization.amortization == (250000.0,)
cf = fl.cashflows()
if initial:
assert abs(cf.loc[0, "Cashflow"] - 2000000.0) < 1e-4 # ini exchange
assert abs(cf.loc[1, "Cashflow"] + 50555.55555) < 1e-4 # fixed rate
assert abs(cf.loc[2, "Cashflow"] + 500000.0) < 1e-4 # amort exchange
assert abs(cf.loc[3, "Cashflow"] + 37916.66666) < 1e-4 # fixed rate
assert abs(cf.loc[4, "Cashflow"] + 1500000.0) < 1e-4 # final exchange
def test_construction_index_fixings(self):
# test that amortization index_value date is correctly applied to each period.
name = str(hash(os.urandom(8)))
fixings.add(name, Series(index=[dt(2000, 1, 1)], data=[101.0]))
leg = FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2000, 7, 1),
frequency="Q",
payment_lag_exchange=1,
payment_lag=2,
),
fixed_rate=2.0,
convention="Act360",
notional=5000000,
amortization=1000000,
final_exchange=True,
index_fixings=name,
index_lag=0,
index_method="monthly",
)
assert leg._regular_periods[0].index_params.index_fixing.date == leg.schedule.aschedule[1]
assert leg._regular_periods[1].index_params.index_fixing.date == leg.schedule.aschedule[2]
assert (
leg._amortization_exchange_periods[0].index_params.index_fixing.date
== leg.schedule.aschedule[1]
)
assert leg._exchange_periods[1].index_params.index_fixing.date == leg.schedule.aschedule[2]
assert leg._regular_periods[0].index_params.index_base.value == 101.0
assert leg._regular_periods[1].index_params.index_base.value == 101.0
assert leg._amortization_exchange_periods[0].index_params.index_base.value == 101.0
assert leg._exchange_periods[1].index_params.index_base.value == 101.0
fixings.pop(name)
@pytest.mark.parametrize("amortization", [True, False])
def test_construction_of_relevant_periods_non_deliverable_mtm(self, amortization):
# when the leg is ND and MTM the FXFixings should be determined by their appropriate
# payment dates deriving fixing date. This test excludes notional exchanges,
# designed for ND-IRS
name = str(hash(os.urandom(8)))
fixings.add(
name + "_EURUSD",
Series(
index=[
dt(1999, 12, 24),
dt(1999, 12, 29),
dt(2000, 3, 29),
dt(2000, 3, 30),
dt(2000, 6, 28),
dt(2000, 6, 29),
],
data=[1.1, 2.2, 3.3, 4.4, 5.5, 6.6],
),
)
fl = FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 5),
termination=dt(2000, 7, 5),
frequency="Q",
payment_lag_exchange=1,
payment_lag=0,
),
fixed_rate=10.0,
currency="usd",
pair=FXIndex("eurusd", "tgt|fed", 2, "ldn", -5),
mtm="payment",
initial_exchange=False,
final_exchange=False,
amortization=250000.0 if amortization else NoInput(0),
fx_fixings=name, # this should not impact the reference currency notional and amortiz
)
expected = [3.3, 5.5]
for i, rp in enumerate(fl._regular_periods):
# every regular period in a typical leg has an FX fixing date equal to coupon payment dt
assert rp.non_deliverable_params.fx_fixing.date == (
get_calendar("ldn").lag_bus_days(fl.schedule.pschedule[i + 1], -5, True)
)
assert rp.non_deliverable_params.fx_fixing.value == expected[i]
fixings.pop(name + "_EURUSD")
def test_construction_of_relevant_periods_non_deliverable_mtm_exchange(self):
# when the leg is ND and MTM the FXFixings should be determined at the start of a period.
# MTM cashflows are generated with notional exchanges between FX fixings at start and end.
name = str(hash(os.urandom(8)))
fixings.add(
name + "_EURUSD",
Series(
index=[
dt(1999, 12, 24),
dt(1999, 12, 29),
dt(2000, 3, 29),
dt(2000, 3, 30),
dt(2000, 6, 28),
dt(2000, 6, 29),
],
data=[1.1, 2.2, 3.3, 4.4, 5.5, 6.6],
),
)
fl = FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 5),
termination=dt(2000, 7, 5),
frequency="Q",
payment_lag_exchange=1,
payment_lag=0,
),
fixed_rate=10.0,
currency="usd",
pair=FXIndex("eurusd", "tgt|fed", 2, "ldn", -5),
mtm=LegMtm.XCS,
initial_exchange=True,
final_exchange=True,
amortization=NoInput(0),
fx_fixings=name, # this should not impact the reference currency notional and amortiz
)
expected = [2.2, 4.4]
for i, rp in enumerate(fl._regular_periods):
assert rp.non_deliverable_params.fx_fixing.date == (
get_calendar("ldn").lag_bus_days(fl.schedule.pschedule2[i], -5, True)
)
assert rp.non_deliverable_params.fx_fixing.value == expected[i]
# there should be 1 MTM cashflow exchanges:
assert len(fl._mtm_exchange_periods) == 1
assert fl._mtm_exchange_periods[0].mtm_params.fx_fixing_start.date == (
get_calendar("ldn").lag_bus_days(dt(2000, 1, 6), -5, True)
)
assert fl._mtm_exchange_periods[0].mtm_params.fx_fixing_end.date == (
get_calendar("ldn").lag_bus_days(dt(2000, 4, 6), -5, True)
)
fixings.pop(name + "_EURUSD")
def test_construction_of_relevant_periods_non_deliverable_mtm_exchange_amortization(self):
# when the leg is ND and MTM the FXFixings should be determined at the start of a period.
# MTM cashflows are generated with notional exchanges between FX fixings at start and end.
# Amortization has interim cashflows.
usd = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.95})
eur = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.075})
fxf = FXForwards(
fx_curves={"eureur": eur, "usdusd": usd, "eurusd": eur},
fx_rates=FXRates({"eurusd": 1.1}, settlement=dt(2000, 1, 1)),
)
fl = FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 5),
termination=dt(2000, 10, 5),
frequency="Q",
payment_lag=1,
payment_lag_exchange=0,
),
convention="actacticma",
fixed_rate=1.0,
currency="usd",
pair=FXIndex("eurusd", "tgt|fed", 2, "ldn", -5),
initial_exchange=True,
mtm=LegMtm.XCS,
notional=-1e6,
amortization=-2e5,
fx_fixings=Series(
index=[
dt(1999, 12, 24),
dt(1999, 12, 29),
dt(2000, 3, 29),
dt(2000, 3, 30),
dt(2000, 6, 28),
dt(2000, 6, 29),
dt(2000, 9, 28),
],
data=[1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7],
),
)
d1, d2, d3 = dt(1999, 12, 24), dt(2000, 3, 29), dt(2000, 6, 28)
expected = DataFrame(
{
"Type": [
"Cashflow",
"FixedPeriod",
"MtmCashflow",
"Cashflow",
"FixedPeriod",
"MtmCashflow",
"Cashflow",
"FixedPeriod",
"Cashflow",
],
"Notional": [1e6, -1e6, 1e6, -2e5, -8e5, 8e5, -2e5, -6e5, -6e5],
"Cashflow": [-1.1e6, 2750, -2e5, 2.6e5, 2600, -1.6e5, 3e5, 2250, 9e5],
"FX Fix Date": [d1, d1, d2, d2, d2, d3, d3, d3, d3],
}
)
result = fl.cashflows(fx=fxf)[["Type", "Notional", "Cashflow", "FX Fix Date"]]
assert_frame_equal(result, expected)
def test_ex_div(self):
leg = FixedLeg(schedule=Schedule(dt(2000, 1, 1), dt(2001, 1, 1), "Q", extra_lag=-3))
assert not leg.ex_div(dt(2000, 3, 29))
assert leg.ex_div(dt(2000, 3, 30))
assert leg.ex_div(dt(2000, 4, 1))
def test_mtm_xcs_type_type_sets_fx_fixing_start_initially(self):
fixings.add(
"EURUSD_1600",
Series(
index=[dt(2000, 4, 1), dt(2000, 4, 2), dt(2000, 7, 2)], data=[1.268, 1.27, 1.29]
),
)
leg = FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2000, 7, 1),
frequency="Q",
payment_lag=1,
payment_lag_exchange=0,
),
fixed_rate=1.0,
currency="usd",
pair="eurusd",
initial_exchange=True,
mtm="xcs",
notional=5e6,
fx_fixings=(1.25, "EURUSD_1600"),
)
assert leg.periods[2].mtm_params.fx_fixing_start.value == 1.25
fixings.pop("EURUSD_1600")
## 4 types of non-deliverability
@pytest.mark.parametrize(
("fx_fixings", "expected"),
[
("ABCD", 1.10),
(1.5, 1.5),
((1.2, "ABCD"), 1.2),
],
)
def test_non_mtm_xcs_type(self, fx_fixings, expected):
fixings.add("ABCD_EURUSD", Series(index=[dt(1999, 12, 30)], data=[1.10]))
fl = FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2000, 3, 1),
frequency="M",
payment_lag=2,
payment_lag_exchange=1,
calendar="all",
),
currency="usd",
pair="eurusd",
mtm="initial",
initial_exchange=True,
final_exchange=True,
fx_fixings=fx_fixings,
)
# this leg has 4 periods with only one initial fixing date
assert fl.periods[0].non_deliverable_params.fx_fixing.date == dt(1999, 12, 30)
assert fl.periods[1].non_deliverable_params.fx_fixing.date == dt(1999, 12, 30)
assert fl.periods[2].non_deliverable_params.fx_fixing.date == dt(1999, 12, 30)
assert fl.periods[3].non_deliverable_params.fx_fixing.date == dt(1999, 12, 30)
assert fl.periods[0].non_deliverable_params.fx_fixing.value == expected
assert fl.periods[1].non_deliverable_params.fx_fixing.value == expected
assert fl.periods[2].non_deliverable_params.fx_fixing.value == expected
assert fl.periods[3].non_deliverable_params.fx_fixing.value == expected
fixings.pop("ABCD_EURUSD")
@pytest.mark.parametrize(
("fx_fixings", "expected"),
[
("ABCDE", [1.20, 1.30]),
(1.5, [1.5, NoInput(0)]), # this is bad practice: should just supply str ID
((1.5, "ABCDE"), [1.5, 1.30]), # this is bad practice: should just supply str ID
],
)
def test_irs_nd_type(self, fx_fixings, expected):
fixings.add(
"ABCDE_EURUSD",
Series(
index=[
dt(2000, 1, 5),
dt(2000, 2, 3),
dt(2000, 2, 4),
dt(2000, 3, 3),
dt(2000, 3, 6),
],
data=[1.10, 1.20, 1.21, 1.30, 1.31],
),
)
fl = FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 7),
termination=dt(2000, 3, 7),
frequency="M",
payment_lag=0,
payment_lag_exchange=1,
calendar="all",
),
currency="usd",
pair="eurusd",
mtm="payment",
initial_exchange=False,
final_exchange=False,
fx_fixings=fx_fixings,
)
# this leg has 2 periods and only 2 relevant fixings dates
assert fl.periods[0].non_deliverable_params.fx_fixing.date == dt(2000, 2, 3)
assert fl.periods[1].non_deliverable_params.fx_fixing.date == dt(2000, 3, 3)
assert fl.periods[0].non_deliverable_params.fx_fixing.value == expected[0]
assert fl.periods[1].non_deliverable_params.fx_fixing.value == expected[1]
fixings.pop("ABCDE_EURUSD")
@pytest.mark.parametrize(
("fx_fixings", "expected"),
[
("ADE", [1.10, 1.10, 1.20, 1.20, 1.20]),
(
1.5,
[1.5, 1.5, NoInput(0), NoInput(0), NoInput(0)],
), # this is bad practice: should just supply str ID
(
(1.5, "ADE"),
[1.5, 1.5, 1.20, 1.20, 1.20],
), # this is bad practice: should just supply str ID
],
)
def test_mtm_xcs_nd_type(self, fx_fixings, expected):
fixings.add(
"ADE_EURUSD",
Series(
index=[
dt(2000, 1, 6),
dt(2000, 2, 4),
dt(2000, 2, 8),
dt(2000, 3, 7),
dt(2000, 3, 8),
],
data=[1.10, 1.20, 1.21, 1.30, 1.31],
),
)
fl = FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 7),
termination=dt(2000, 3, 7),
frequency="M",
payment_lag=2,
payment_lag_exchange=1,
calendar="all",
),
currency="usd",
pair="eurusd",
mtm=LegMtm.XCS,
initial_exchange=True,
final_exchange=True,
fx_fixings=fx_fixings,
)
# this leg has 5 periods with only two relevant fixing dates
assert fl.periods[0].non_deliverable_params.fx_fixing.date == dt(2000, 1, 6)
assert fl.periods[1].non_deliverable_params.fx_fixing.date == dt(2000, 1, 6)
assert fl.periods[2].mtm_params.fx_fixing_end.date == dt(2000, 2, 4)
assert fl.periods[3].non_deliverable_params.fx_fixing.date == dt(2000, 2, 4)
assert fl.periods[4].non_deliverable_params.fx_fixing.date == dt(2000, 2, 4)
assert fl.periods[0].non_deliverable_params.fx_fixing.value == expected[0]
assert fl.periods[1].non_deliverable_params.fx_fixing.value == expected[1]
assert fl.periods[2].mtm_params.fx_fixing_end.value == expected[2]
assert fl.periods[3].non_deliverable_params.fx_fixing.value == expected[3]
assert fl.periods[4].non_deliverable_params.fx_fixing.value == expected[4]
fixings.pop("ADE_EURUSD")
@pytest.mark.parametrize(
("fx_fixings", "expected"),
[
("AXDE", [1.10, 1.21, 1.31, 1.30]),
(
1.5,
[1.5, NoInput(0), NoInput(0), NoInput(0)],
), # this is bad practice: should just supply str ID
(
(1.5, "AXDE"),
[1.5, 1.21, 1.31, 1.30],
), # this is bad practice: should just supply str ID
],
)
def test_non_mtm_xcs_nd_type(self, fx_fixings, expected):
fixings.add(
"AXDE_EURUSD",
Series(
index=[
dt(2000, 1, 5),
dt(2000, 2, 3),
dt(2000, 2, 4),
dt(2000, 3, 3),
dt(2000, 3, 6),
],
data=[1.10, 1.20, 1.21, 1.30, 1.31],
),
)
fl = FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 7),
termination=dt(2000, 3, 7),
frequency="M",
payment_lag=1,
payment_lag_exchange=0,
calendar="all",
),
currency="usd",
pair="eurusd",
mtm="payment",
initial_exchange=True,
final_exchange=True,
fx_fixings=fx_fixings,
)
# this leg has 4 periods with 3 or 4 (if lag exchange is different) relevant fixing dates.
assert fl.periods[0].non_deliverable_params.fx_fixing.date == dt(2000, 1, 5)
assert fl.periods[1].non_deliverable_params.fx_fixing.date == dt(2000, 2, 4)
assert fl.periods[2].non_deliverable_params.fx_fixing.date == dt(2000, 3, 6)
assert fl.periods[3].non_deliverable_params.fx_fixing.date == dt(2000, 3, 3)
assert fl.periods[0].non_deliverable_params.fx_fixing.value == expected[0]
assert fl.periods[1].non_deliverable_params.fx_fixing.value == expected[1]
assert fl.periods[2].non_deliverable_params.fx_fixing.value == expected[2]
assert fl.periods[3].non_deliverable_params.fx_fixing.value == expected[3]
fixings.pop("AXDE_EURUSD")
def test_leg_index_base(self):
fl = FixedLeg(
schedule=Schedule(
effective=dt(2000, 1, 7),
termination=dt(2000, 3, 7),
frequency="M",
calendar="all",
),
index_fixings="some",
index_lag=0,
index_base_type=LegIndexBase.PeriodOnPeriod,
)
assert fl.periods[0].index_params.index_base.date == dt(2000, 1, 7)
assert fl.periods[1].index_params.index_base.date == dt(2000, 2, 7)
class TestCreditPremiumLeg:
@pytest.mark.parametrize(
("premium_accrued", "exp"), [(True, 41357.455568685626), (False, 41330.94188109829)]
)
def test_premium_leg_analytic_delta(self, hazard_curve, curve, premium_accrued, exp) -> None:
leg = CreditPremiumLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=1e9,
convention="Act360",
premium_accrued=premium_accrued,
)
result = leg.analytic_delta(rate_curve=hazard_curve, disc_curve=curve)
assert abs(result - exp) < 1e-7
@pytest.mark.parametrize(("premium_accrued"), [True, False])
def test_premium_leg_npv(self, hazard_curve, curve, premium_accrued) -> None:
leg = CreditPremiumLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=1e9,
convention="Act360",
premium_accrued=premium_accrued,
fixed_rate=4.00,
)
result = leg.npv(rate_curve=hazard_curve, disc_curve=curve)
assert (
abs(result + 400 * leg.analytic_delta(rate_curve=hazard_curve, disc_curve=curve)) < 1e-7
)
def test_premium_leg_cashflows(self, hazard_curve, curve) -> None:
leg = CreditPremiumLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=-1e9,
convention="Act360",
fixed_rate=4.00,
)
result = leg.cashflows(rate_curve=hazard_curve, disc_curve=curve)
# test a couple of return elements
assert abs(result.loc[0, defaults.headers["cashflow"]] - 6555555.55555) < 1e-4
assert abs(result.loc[1, defaults.headers["df"]] - 0.98307) < 1e-4
assert abs(result.loc[1, defaults.headers["notional"]] + 1e9) < 1e-7
def test_premium_leg_set_fixed_rate(self, curve) -> None:
leg = CreditPremiumLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=-1e9,
convention="Act360",
)
assert leg.fixed_rate is NoInput(0)
assert leg.periods[0].rate_params.fixed_rate is NoInput(0)
leg.fixed_rate = 2.0
assert leg.fixed_rate == 2.0
assert leg.periods[0].rate_params.fixed_rate == 2.0
@pytest.mark.parametrize(
("date", "exp"),
[
(dt(2022, 2, 1), 1e9 * 0.02 * 0.25 * 31 / 90),
(dt(2022, 3, 1), 0.0),
(dt(2022, 6, 1), 0.0),
],
)
def test_premium_leg_accrued(self, date, exp):
leg = CreditPremiumLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=-1e9,
convention="ActActICMA",
fixed_rate=2.0,
)
result = leg.accrued(date)
assert abs(result - exp) < 1e-6
@pytest.mark.parametrize("final", [True, False])
def test_exchanges_raises(self, final):
with pytest.raises(TypeError, match="unexpected keyword argument"):
CreditPremiumLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=-1e9,
convention="ActActICMA",
fixed_rate=2.0,
initial_exchange=final,
final_exchange=not final,
)
@pytest.mark.parametrize(
("settlement", "forward", "exp"),
[
(NoInput(0), NoInput(0), 408.02994815795125),
(dt(2022, 3, 30), dt(2022, 3, 30), 404.03987718823055),
(dt(2022, 4, 6), dt(2022, 4, 6), 811.1815703665554),
],
)
def test_fixed_leg_spread(self, settlement, forward, exp, curve) -> None:
fixed_leg = CreditPremiumLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 7, 1),
payment_lag=2,
payment_lag_exchange=1,
frequency="Q",
),
notional=-1e9,
convention="Act360",
fixed_rate=4.00,
currency="usd",
)
result = fixed_leg.spread(
target_npv=20000000,
disc_curve=curve,
rate_curve=curve,
index_curve=curve,
settlement=settlement,
forward=forward,
)
assert abs(result - exp) < 1e-6
def test_ex_div(self):
leg = CreditPremiumLeg(schedule=Schedule(dt(2000, 1, 1), dt(2001, 1, 1), "Q", extra_lag=-3))
assert not leg.ex_div(dt(2000, 3, 29))
assert leg.ex_div(dt(2000, 3, 30))
assert leg.ex_div(dt(2000, 4, 1))
class TestCreditProtectionLeg:
def test_leg_analytic_delta(self, hazard_curve, curve) -> None:
leg = CreditProtectionLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=1e9,
)
result = leg.analytic_delta(rate_curve=hazard_curve, disc_curve=curve)
assert abs(result) < 1e-7
def test_leg_analytic_rec_risk(self, hazard_curve, curve) -> None:
leg = CreditProtectionLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2027, 1, 1),
payment_lag=2,
frequency="Q",
),
notional=1e7,
)
result = leg.analytic_rec_risk(rate_curve=hazard_curve, disc_curve=curve)
pv0 = leg.npv(rate_curve=hazard_curve, disc_curve=curve)
hazard_curve.update_meta("credit_recovery_rate", 0.41)
pv1 = leg.npv(rate_curve=hazard_curve, disc_curve=curve)
expected = pv1 - pv0
assert abs(result - expected) < 1e-7
@pytest.mark.parametrize(("premium_accrued"), [True, False])
def test_leg_npv(self, hazard_curve, curve, premium_accrued) -> None:
leg = CreditProtectionLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Z",
),
notional=1e9,
)
result = leg.npv(rate_curve=hazard_curve, disc_curve=curve)
expected = -1390922.0390295777 # with 1 cds_discretization this is -1390906.242843
assert abs(result - expected) < 1e-7
def test_leg_cashflows(self, hazard_curve, curve) -> None:
leg = CreditProtectionLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
frequency="Q",
),
notional=-1e9,
# convention="Act360",
)
result = leg.cashflows(rate_curve=hazard_curve, disc_curve=curve)
# test a couple of return elements
assert abs(result.loc[0, defaults.headers["cashflow"]] - 600e6) < 1e-4
assert abs(result.loc[1, defaults.headers["df"]] - 0.98307) < 1e-4
assert abs(result.loc[1, defaults.headers["notional"]] + 1e9) < 1e-7
def test_leg_zero_sched(self):
leg = CreditProtectionLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2024, 6, 1),
frequency="Z",
),
notional=-1e9,
# convention="Act360",
)
assert len(leg.periods) == 1
assert leg.periods[0].period_params.end == dt(2024, 6, 1)
class TestIndexFixedLegExchange:
@pytest.mark.parametrize(
"i_fixings",
[
NoInput(0),
# [210, 220, 230], # list not supported in v2.0
# 210, # dualtypes is not supported as of v2.2
Series(
[210.0, 220.0, 230.0],
index=[dt(2022, 6, 15), dt(2022, 9, 15), dt(2022, 12, 15)],
),
],
)
def test_idx_leg_cashflows(self, i_fixings) -> None:
leg = FixedLeg(
schedule=Schedule(
effective=dt(2022, 3, 15),
termination="9M",
frequency="Q",
payment_lag=0,
),
convention="ActActICMA",
notional=40e6,
fixed_rate=5.0,
index_base=200.0,
index_lag=0,
index_fixings=i_fixings,
initial_exchange=False,
final_exchange=True,
index_method="curve",
)
index_curve = Curve(
nodes={
dt(2022, 3, 15): 1.0,
dt(2022, 6, 15): 1.0 / 1.05,
dt(2022, 9, 15): 1.0 / 1.10,
dt(2022, 12, 15): 1.0 / 1.15,
},
index_base=200.0,
interpolation="linear_index",
index_lag=0,
)
disc_curve = Curve({dt(2022, 3, 15): 1.0, dt(2022, 12, 15): 1.0})
flows = leg.cashflows(index_curve=index_curve, disc_curve=disc_curve)
def equals_with_tol(a, b):
if isinstance(a, str):
return a == b
else:
return abs(a - b) < 1e-7
expected = {
"Type": "FixedPeriod",
"DCF": 0.250,
"Notional": 40e6,
"Rate": 5.0,
"Unindexed Cashflow": -500e3,
"Index Val": 210.0,
"Index Ratio": 1.05,
"Cashflow": -525000,
}
flow = flows.iloc[0].to_dict()
for key in set(expected.keys()) & set(flow.keys()):
assert equals_with_tol(expected[key], flow[key])
final_flow = flows.iloc[3].to_dict()
expected = {
"Type": "Cashflow",
"Notional": 40e6,
"Unindexed Cashflow": -40e6,
"Index Val": 230.0,
"Index Ratio": 1.15,
"Cashflow": -46e6,
}
for key in set(expected.keys()) & set(final_flow.keys()):
assert equals_with_tol(expected[key], final_flow[key])
def test_args_raises(self) -> None:
with pytest.raises(ValueError, match="`index_method` as string: 'BAD' is not "):
FixedLeg(
schedule=Schedule(
effective=dt(2022, 3, 15),
termination="9M",
frequency="Q",
),
index_base=200.0,
index_method="BAD",
initial_exchange=True,
final_exchange=True,
)
@pytest.mark.skip(reason="v2.2 removed the ability to mutate `index_base` at period level.")
def test_set_index_leg_after_init(self) -> None:
leg = FixedLeg(
schedule=Schedule(
effective=dt(2022, 3, 15),
termination="9M",
frequency="Q",
payment_lag=0,
),
convention="ActActICMA",
notional=40e6,
fixed_rate=5.0,
index_base=None,
initial_exchange=False,
final_exchange=True,
)
for period in leg.periods:
assert period.index_base is None
leg.index_base = 205.0
for period in leg.periods:
assert period.index_base == 205.0
def test_npv(self) -> None:
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98})
index_curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99},
index_base=100.0,
interpolation="linear_index",
index_lag=3,
)
index_leg_exch = FixedLeg(
schedule=Schedule(
dt(2022, 1, 1),
"9M",
"Q",
payment_lag=2,
payment_lag_exchange=0,
),
notional=1000000,
amortization=200000,
index_base=100.0,
initial_exchange=False,
fixed_rate=1.0,
final_exchange=True,
index_lag=3,
)
result = index_leg_exch.npv(index_curve=index_curve, disc_curve=curve)
expected = -999993.7970219046
assert abs(result - expected) < 1e-4
def test_index_lag_on_periods(self):
index_leg_exch = FixedLeg(
schedule=Schedule(
dt(2022, 1, 1),
"6M",
"Q",
),
notional=1000000,
amortization=200000,
index_base=100.0,
fixed_rate=1.0,
final_exchange=True,
index_lag=4,
)
for period in index_leg_exch.periods:
assert period.index_params.index_lag == 4
class TestIndexFixedLeg:
@pytest.mark.parametrize(
("i_fixings", "meth"),
[
(NoInput(0), "daily"),
# ([210, 220, 230], "daily"), # list unsupported in v2.0
# (210, "daily"), # dualtypes unsupported as of v2.2
(
Series(
[210.0, 210, 220, 220, 230, 230],
index=[
dt(2022, 6, 1),
dt(2022, 7, 1),
dt(2022, 9, 1),
dt(2022, 10, 1),
dt(2022, 12, 1),
dt(2023, 1, 1),
],
),
"daily",
),
(
Series(
[210.0, 220, 230],
index=[dt(2022, 6, 1), dt(2022, 9, 1), dt(2022, 12, 1)],
),
"monthly",
),
],
)
def test_idx_leg_cashflows(self, i_fixings, meth) -> None:
leg = FixedLeg(
schedule=Schedule(
effective=dt(2022, 3, 15),
termination="9M",
frequency="Q",
payment_lag=0,
),
convention="ActActICMA",
notional=40e6,
fixed_rate=5.0,
index_base=200.0,
index_fixings=i_fixings,
index_method=meth,
index_lag=0,
)
index_curve = Curve(
nodes={
dt(2022, 3, 15): 1.0,
dt(2022, 6, 15): 1.0 / 1.05,
dt(2022, 9, 15): 1.0 / 1.10,
dt(2022, 12, 15): 1.0 / 1.15,
},
index_base=200.0,
interpolation="linear_index",
index_lag=0,
)
disc_curve = Curve({dt(2022, 3, 15): 1.0, dt(2022, 12, 15): 1.0})
flows = leg.cashflows(index_curve=index_curve, disc_curve=disc_curve)
def equals_with_tol(a, b):
if isinstance(a, str):
return a == b
else:
return abs(a - b) < 1e-7
expected = {
"Type": "FixedPeriod",
"DCF": 0.250,
"Notional": 40e6,
"Rate": 5.0,
"Unindexed Cashflow": -500e3,
"Index Val": 210.0,
"Index Ratio": 1.05,
"Cashflow": -525000,
}
flow = flows.iloc[0].to_dict()
for key in set(expected.keys()) & set(flow.keys()):
assert equals_with_tol(expected[key], flow[key])
@pytest.mark.parametrize(("meth", "exp"), [("daily", 230.0), ("monthly", 227.91208)])
def test_missing_fixings(self, meth, exp) -> None:
i_fixings = Series(
[210.0, 210, 220, 220],
index=[dt(2022, 6, 1), dt(2022, 7, 1), dt(2022, 9, 1), dt(2022, 10, 1)],
)
leg = FixedLeg(
schedule=Schedule(
effective=dt(2022, 3, 20),
termination="9M",
frequency="Q",
payment_lag=0,
),
convention="ActActICMA",
notional=40e6,
fixed_rate=5.0,
index_base=200.0,
index_fixings=i_fixings,
index_method=meth,
index_lag=0,
)
index_curve = Curve(
nodes={
dt(2022, 3, 20): 1.0,
dt(2022, 6, 20): 1.0 / 1.05,
dt(2022, 9, 20): 1.0 / 1.10,
dt(2022, 12, 20): 1.0 / 1.15,
},
index_base=200.0,
interpolation="linear_index",
index_lag=0,
)
cashflows = leg.cashflows(index_curve=index_curve)
result = cashflows.iloc[2]["Index Val"]
assert abs(result - exp) < 1e-3
@pytest.mark.skip(reason="v2.2 removed the ability to mutate `index_base` at period level.")
def test_set_index_leg_after_init(self) -> None:
leg = FixedLeg(
schedule=Schedule(
effective=dt(2022, 3, 15),
termination="9M",
frequency="Q",
payment_lag=0,
),
convention="ActActICMA",
notional=40e6,
fixed_rate=5.0,
index_base=None,
)
for period in leg.periods:
assert period.index_params.index_base is None
leg.index_base = 205.0
for period in leg.periods:
assert period.index_params.index_base == 205.0
@pytest.mark.skip(reason="v2.2 removed the ability to mutate `index_base` at period level.")
@pytest.mark.parametrize(
"i_base",
[
200.0,
Series([199.0, 201.0], index=[dt(2022, 4, 1), dt(2022, 5, 1)]),
],
)
def test_set_index_base(self, curve, i_base) -> None:
leg = FixedLeg(
schedule=Schedule(
effective=dt(2022, 4, 16),
termination=dt(2022, 5, 1),
payment_lag=2,
frequency="Q",
),
notional=-1e9,
convention="Act360",
index_method="daily",
index_lag=0,
)
assert leg.periods[0].index_params.index_base == NoInput(0)
leg.index_base = i_base
assert leg.periods[0].index_base == 200.0
@pytest.mark.parametrize(
("i_base", "exp"),
[
(NoInput(0), NoInput(0)),
(110.0, 110.0),
],
)
def test_initialise_index_base(self, i_base, exp) -> None:
leg = FixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=-1e9,
convention="Act360",
index_base=i_base,
index_lag=0,
)
assert leg.periods[-1].index_params.index_base.value == exp
@pytest.mark.parametrize(
("i_base", "exp"),
[
(Series([199.0, 200.0], index=[dt(2021, 12, 31), dt(2022, 1, 1)]), 200.0),
(Series([1.0, 2.0], index=[dt(2000, 1, 1), dt(2000, 12, 1)]), NoInput(0)),
],
)
def test_initialise_index_base2(self, i_base, exp) -> None:
leg = FixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 6, 1),
payment_lag=2,
frequency="Q",
),
notional=-1e9,
convention="Act360",
index_fixings=i_base,
index_lag=0,
)
assert leg.periods[-1].index_params.index_base.value == exp
@pytest.mark.skip(reason="fixings as list removed in v2.0")
def test_index_fixings_as_list(self) -> None:
leg = FixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 10, 1),
payment_lag=2,
frequency="Q",
),
convention="Act360",
notional=1e6,
amortization=250e3,
index_base=NoInput(0),
index_fixings=[100.0, 200.0],
)
assert leg.periods[0].index_fixings == 100.0
assert leg.periods[1].index_fixings == 200.0
assert leg.periods[2].index_fixings == NoInput(0)
@pytest.mark.skip(reason="fixings as list removed in v2.0")
def test_index_fixings_as_list_final_exchange(self) -> None:
leg = FixedLeg(
effective=dt(2022, 1, 1),
termination=dt(2022, 10, 1),
payment_lag=2,
convention="Act360",
frequency="Q",
notional=1e6,
amortization=250e3,
index_base=NoInput(0),
index_fixings=[100.0, 100.0, 200.0, 199.0],
final_exchange=True,
)
assert leg.periods[0].index_fixings == 100.0
assert leg.periods[1].index_fixings == 100.0
assert leg.periods[2].index_fixings == 200.0
assert leg.periods[3].index_fixings == 199.0
assert leg.periods[4].index_fixings == NoInput(0)
assert leg.periods[5].index_fixings == NoInput(0)
@pytest.mark.skip(reason="v2.2 refactor fixings, + input as Series was stated as bad practice")
@pytest.mark.parametrize(
"index_fixings",
[
Series([1, 2, 3], index=[dt(2000, 1, 1), dt(1999, 1, 1), dt(2001, 1, 1)]),
Series([1, 2, 3], index=[dt(2000, 1, 1), dt(2000, 1, 1), dt(2001, 1, 1)]),
],
)
def test_index_as_series_invalid(self, index_fixings):
with pytest.raises(ValueError, match="`index_fixings` as Series must be"):
FixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 10, 1),
frequency="Q",
),
index_base=NoInput(0),
index_fixings=index_fixings,
)
@pytest.mark.skip(reason="v2.2 refactor fixings, + input as Series was stated as bad practice")
def test_index_reverse_monotonic_decreasing_series(self):
s = Series([1, 2, 3], index=[dt(2000, 1, 1), dt(1999, 1, 1), dt(1998, 1, 1)])
assert s.index.is_monotonic_decreasing
leg = FixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 1),
termination=dt(2022, 10, 1),
frequency="Q",
),
index_base=NoInput(0),
index_fixings=s,
)
assert leg.index_fixings.index.is_monotonic_increasing
class TestFloatLegExchangeMtm:
@pytest.mark.parametrize(
("fx_fixings", "exp"),
[
(NoInput(0), [NoInput(0), NoInput(0), NoInput(0)]),
([1.5], [1.5, NoInput(0), NoInput(0)]),
(1.25, [1.25, NoInput(0), NoInput(0)]),
([1.25, 1.35], [1.25, 1.35, NoInput(0)]),
(Series([1.25, 1.3], index=[dt(2022, 1, 4), dt(2022, 4, 4)]), [1.25, 1.3, NoInput(0)]),
(Series([1.25], index=[dt(2022, 1, 4)]), [1.25, NoInput(0), NoInput(0)]),
],
)
def test_float_leg_exchange_mtm(self, fx_fixings, exp) -> None:
float_leg_exch = FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 3),
termination=dt(2022, 7, 3),
frequency="Q",
payment_lag_exchange=3,
),
float_spread=5.0,
currency="usd",
pair="eurusd",
notional=10e6,
fx_fixings=fx_fixings,
mtm="xcs",
initial_exchange=True,
)
fxr = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxf = FXForwards(
fxr,
{
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.965}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.985}),
"eurusd": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.987}),
},
)
d = [
dt(2022, 1, 6),
dt(2022, 4, 6),
dt(2022, 7, 6),
] # payment_lag_exchange is 3 days.
rate = [_ if _ is not NoInput(0) else fxf.rate("eurusd", d[i]) for i, _ in enumerate(exp)]
float_leg_exch.cashflows(
rate_curve=fxf.curve("usd", "usd"), disc_curve=fxf.curve("usd", "usd"), fx=fxf
)
assert (
float(float_leg_exch.periods[0].try_cashflow(fx=fxf).unwrap() - 10e6 * rate[0]) < 1e-6
)
assert (
float(
float_leg_exch.periods[2].try_cashflow(fx=fxf).unwrap() - 10e6 * (rate[1] - rate[0])
)
< 1e-6
)
assert (
float(
float_leg_exch.periods[4].try_cashflow(fx=fxf).unwrap() - 10e6 * (rate[2] - rate[1])
)
< 1e-6
)
assert float_leg_exch.periods[4].settlement_params.payment == d[-1]
assert float_leg_exch.periods[1].settlement_params.notional == 10e6
assert float_leg_exch.periods[1].non_deliverable_params.fx_fixing.value == exp[0]
assert float_leg_exch.periods[1].non_deliverable_params.fx_fixing.date == dt(2022, 1, 4)
assert type(float_leg_exch.periods[1]) is FloatPeriod
assert float_leg_exch.periods[3].settlement_params.notional == 10e6
assert float_leg_exch.periods[3].non_deliverable_params.fx_fixing.value == exp[1]
assert float_leg_exch.periods[3].non_deliverable_params.fx_fixing.date == dt(2022, 4, 4)
assert type(float_leg_exch.periods[3]) is FloatPeriod
assert float_leg_exch.periods[-1].settlement_params.notional == 10e6
assert float_leg_exch.periods[-1].non_deliverable_params.fx_fixing.value == exp[1]
assert float_leg_exch.periods[-1].non_deliverable_params.fx_fixing.date == dt(2022, 4, 4)
def test_float_leg_exchange_fixings_table(self) -> None:
float_leg_exch = FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 3),
termination=dt(2022, 7, 3),
frequency="Q",
payment_lag_exchange=3,
),
float_spread=5.0,
currency="usd",
pair="eurusd",
notional=10e6,
fixing_method="ibor(0)",
mtm="xcs",
initial_exchange=True,
)
fxr = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxf = FXForwards(
fxr,
{
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.965}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.985}),
"eurusd": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.987}),
},
)
result = float_leg_exch.local_analytic_rate_fixings(
rate_curve=fxf.curve("usd", "usd"), fx=fxf
)
assert isinstance(result, DataFrame)
assert isinstance(result.iloc[0, 0], Dual)
assert abs(result.iloc[0, 0] + 260.1507) < 1e-3
assert abs(result.iloc[1, 0] + 262.1683) < 1
def test_float_leg_exchange_fixings_table_rfr(self) -> None:
float_leg_exch = FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 3),
termination=dt(2022, 7, 3),
frequency="Q",
payment_lag_exchange=0,
),
float_spread=5.0,
currency="usd",
pair="eurusd",
notional=10e6,
mtm="xcs",
initial_exchange=True,
)
fxr = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxf = FXForwards(
fxr,
{
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.965}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.985}),
"eurusd": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.987}),
},
)
result = float_leg_exch.local_analytic_rate_fixings(
rate_curve=fxf.curve("usd", "usd"), disc_curve=fxf.curve("usd", "usd"), fx=fxf
)
assert isinstance(result, DataFrame)
assert isinstance(result.iloc[0, 0], Dual) # Dual is converted to float for fixings table
assert result.columns.values[0] == (fxf.curve("usd", "usd").id, "usd", "usd", "1B")
def test_mtm_leg_exchange_spread(self) -> None:
leg = FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 3),
termination=dt(2022, 7, 3),
frequency="Q",
payment_lag=0,
payment_lag_exchange=0,
),
currency="usd",
pair="eurusd",
notional=1e9,
fixing_method="rfr_payment_delay",
spread_compound_method="isda_compounding",
float_spread=0.0,
mtm="xcs",
initial_exchange=True,
)
fxr = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxf = FXForwards(
fxr,
{
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.965}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.985}),
"eurusd": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.987}),
},
)
npv = leg.npv(
rate_curve=fxf.curve("usd", "usd"), disc_curve=fxf.curve("usd", "usd"), fx=fxf
)
# a_delta = leg.analytic_delta(fxf.curve("usd", "usd"), fxf.curve("usd", "usd"), fxf)
result = leg.spread(
target_npv=100,
rate_curve=fxf.curve("usd", "usd"),
disc_curve=fxf.curve("usd", "usd"),
fx=fxf,
)
leg.float_spread = result
npv2 = leg.npv(
rate_curve=fxf.curve("usd", "usd"), disc_curve=fxf.curve("usd", "usd"), fx=fxf
)
assert abs(npv2 - npv - 100) < 0.01
@pytest.mark.parametrize(
("fx_fixings", "exp"),
[
(NoInput(0), [NoInput(0), NoInput(0), NoInput(0)]),
([1.5], [1.5, NoInput(0), NoInput(0)]),
(1.25, [1.25, NoInput(0), NoInput(0)]),
],
)
def test_mtm_leg_fx_fixings_warn_raise(self, curve, fx_fixings, exp) -> None:
float_leg_exch = FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 3),
termination=dt(2022, 7, 3),
frequency="Q",
payment_lag_exchange=3,
),
float_spread=5.0,
currency="usd",
pair="eurusd",
notional=10e6,
fx_fixings=fx_fixings,
mtm="xcs",
initial_exchange=True,
)
with pytest.raises(ValueError, match="Must provide `fx` argument to forecast FXFixing."):
float_leg_exch.npv(rate_curve=curve)
def test_mtm_leg_fx_fixings_series_raises(self, curve) -> None:
fl = FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 3),
termination=dt(2022, 7, 3),
frequency="Q",
payment_lag_exchange=3,
),
float_spread=5.0,
currency="usd",
pair="eurusd",
notional=10e6,
fx_fixings=Series([1.25], index=[dt(2022, 2, 6)]),
mtm="xcs",
initial_exchange=True,
)
with pytest.raises(ValueError, match="Must provide `fx` argument to forecast FXFixing."):
fl.npv(rate_curve=curve)
# assert False # TODO: this test should possibly fail if the FX is before the series range.
# although a FixingsRangeError is detected and the ixing value accepted is NoInput
def test_mtm_raises_alt(self) -> None:
with pytest.raises(ValueError, match="A non-deliverable pair must contain the settlement "):
FloatLeg(
schedule=Schedule(
effective=dt(2022, 1, 3),
termination=dt(2022, 7, 3),
frequency="Q",
payment_lag_exchange=3,
),
float_spread=5.0,
currency="usd",
pair=FXIndex("eursek", "tgt,stk|fed", 2),
notional=10e6,
)
class TestCustomLeg:
@pytest.mark.parametrize(
"period",
[
FixedPeriod(
start=dt(2022, 1, 1),
end=dt(2023, 1, 1),
payment=dt(2023, 1, 9),
frequency=Frequency.Months(12, None),
fixed_rate=1.0,
),
FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
float_spread=10.0,
),
CreditPremiumPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
fixed_rate=4.0,
currency="usd",
),
CreditProtectionPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
# convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
currency="usd",
),
Cashflow(notional=1e9, payment=dt(2022, 4, 3)),
],
)
def test_init(self, curve, period) -> None:
CustomLeg(periods=[period, period])
def test_npv(self, curve) -> None:
cl = CustomLeg(
periods=[
FixedPeriod(
start=dt(2022, 1, 1),
end=dt(2023, 1, 1),
payment=dt(2023, 1, 9),
frequency=Frequency.Months(12, None),
fixed_rate=1.0,
),
FixedPeriod(
start=dt(2022, 2, 1),
end=dt(2023, 2, 1),
payment=dt(2023, 2, 9),
frequency=Frequency.Months(12, None),
fixed_rate=2.0,
),
],
)
result = cl.npv(rate_curve=curve)
expected = -29109.962157023772
assert abs(result - expected) < 1e-6
def test_cashflows(self, curve) -> None:
cl = CustomLeg(
periods=[
FixedPeriod(
start=dt(2022, 1, 1),
end=dt(2023, 1, 1),
payment=dt(2023, 1, 9),
frequency=Frequency.Months(12, None),
fixed_rate=1.0,
),
FixedPeriod(
start=dt(2022, 2, 1),
end=dt(2023, 2, 1),
payment=dt(2023, 2, 9),
frequency=Frequency.Months(12, None),
fixed_rate=2.0,
),
],
)
result = cl.cashflows(rate_curve=curve)
assert isinstance(result, DataFrame)
assert len(result.index) == 2
def test_analytic_delta(self, curve) -> None:
cl = CustomLeg(
periods=[
FixedPeriod(
start=dt(2022, 1, 1),
end=dt(2023, 1, 1),
payment=dt(2023, 1, 9),
frequency=Frequency.Months(12, None),
fixed_rate=1.0,
),
FixedPeriod(
start=dt(2022, 2, 1),
end=dt(2023, 2, 1),
payment=dt(2023, 2, 9),
frequency=Frequency.Months(12, None),
fixed_rate=2.0,
),
],
)
result = cl.analytic_delta(rate_curve=curve)
expected = 194.1782607729773
assert abs(result - expected) < 1e-6
class TestNonDeliverableFixedLeg:
def test_set_periods(self):
leg = FixedLeg(
schedule=Schedule(dt(2000, 1, 1), dt(2000, 3, 1), "M"),
fixed_rate=2.0,
currency="usd",
pair=FXIndex("brlusd", "all", 0),
)
assert len(leg.periods) == 2
def test_npv(self):
fxr = FXRates({"usdbrl": 9.50}, settlement=dt(2022, 1, 3))
fxf = FXForwards(
fxr,
{
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.965}),
"brlbrl": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.985}),
"brlusd": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.987}),
},
)
leg = FixedLeg(
schedule=Schedule(dt(2022, 1, 1), dt(2022, 3, 1), "M"),
fixed_rate=2.0,
currency="usd",
pair=FXIndex("brlusd", "all", 0),
notional=1e6, # 1mm BRL
mtm="payment",
)
result = leg.npv(disc_curve=fxf.curve("usd", "brl"), fx=fxf)
expected = -344.326093 # 2.0% * 1mm * (2 / 12) / 9.5
assert abs(result - expected) < 1e-6
result = leg.npv(disc_curve=fxf.curve("usd", "brl"), fx=fxf, base="brl")
expected = -344.326093 * fxf.rate("usdbrl") # 2.0% * 1mm * (2 / 12) / 9.5
assert abs(result - expected) < 1e-5
@pytest.mark.parametrize("fixings", [[1.66], 1.66, Series(data=[1.66], index=[dt(2022, 2, 3)])])
def test_set_fixings(self, fixings):
leg = FixedLeg(
schedule=Schedule(dt(2022, 1, 1), dt(2022, 3, 1), "M"),
fixed_rate=2.0,
currency="usd",
pair=FXIndex("brlusd", "all", 0),
notional=1e6, # 1mm BRL
fx_fixings=fixings,
mtm="payment",
)
assert leg.periods[0].non_deliverable_params.fx_fixing.value == 1.66
assert leg.periods[1].non_deliverable_params.fx_fixing.value == NoInput(0)
class TestAmortization:
def test_percent(self):
a = Amortization(4, 100.0, "20%")
assert a.outstanding == (100.0, 80.0, 64.0, 51.2)
assert a.amortization == (20.0, 16.0, 12.8)
assert a._type == _AmortizationType.CustomSchedule
def test_to_zero(self):
a = Amortization(4, 100.0, "to_zero")
assert a.outstanding == (100.0, 75.0, 50.0, 25.0)
assert a.amortization == (25.0, 25.0, 25.0)
assert a._type == _AmortizationType.ConstantPeriod
def test_custom(self):
a = Amortization(4, 100.0, [10.0, 20.0, 30.0])
assert a.outstanding == (100.0, 90.0, 70.0, 40.0)
assert a.amortization == (10.0, 20.0, 30.0)
assert a._type == _AmortizationType.CustomSchedule
def test_leg_amortization() -> None:
fixed_leg = FixedLeg(
schedule=Schedule(
dt(2022, 1, 1),
dt(2022, 10, 1),
frequency="Q",
),
notional=1e6,
amortization=250e3,
fixed_rate=2.0,
)
for i, period in enumerate(fixed_leg.periods):
assert period.settlement_params.notional == 1e6 - 250e3 * i
float_leg = FloatLeg(
schedule=Schedule(
dt(2022, 1, 1),
dt(2022, 10, 1),
frequency="Q",
),
notional=1e6,
amortization=250e3,
float_spread=2.0,
)
for i, period in enumerate(float_leg.periods):
assert period.settlement_params.notional == 1e6 - 250e3 * i
index_leg = FixedLeg(
schedule=Schedule(
dt(2022, 1, 1),
dt(2022, 10, 1),
frequency="Q",
),
notional=1e6,
amortization=250e3,
fixed_rate=2.0,
index_base=100.0,
)
for i, period in enumerate(index_leg.periods):
assert period.settlement_params.notional == 1e6 - 250e3 * i
index_leg_exchange = FixedLeg(
schedule=Schedule(
dt(2022, 1, 1),
dt(2022, 10, 1),
frequency="Q",
),
notional=1e6,
amortization=250e3,
fixed_rate=2.0,
index_base=100.0,
initial_exchange=False,
final_exchange=True,
)
for i, period in enumerate(index_leg_exchange.periods[0::2]):
assert period.settlement_params.notional == 1e6 - 250e3 * i
for i, period in enumerate(index_leg_exchange.periods[1:4:2]):
assert period.settlement_params.notional == 250e3
def test_custom_leg_raises() -> None:
with pytest.raises(ValueError):
_ = CustomLeg(periods=["bad_period"])
def test_custom_leg() -> None:
float_leg = FloatLeg(
schedule=Schedule(effective=dt(2022, 1, 1), termination=dt(2023, 1, 1), frequency="S"),
)
custom_leg = CustomLeg(periods=float_leg.periods)
for i, period in enumerate(custom_leg.periods):
assert period == float_leg.periods[i]
@pytest.mark.parametrize(
("fx_fixings", "exp"),
[
(NoInput(0), [NoInput(0), NoInput(0), NoInput(0)]),
([1.5], [1.5, NoInput(0), NoInput(0)]),
(1.25, [1.25, NoInput(0), NoInput(0)]),
((1.25, Series([1.5], index=[dt(2022, 4, 4)])), [1.25, 1.5, NoInput(0)]),
],
)
def test_fixed_leg_exchange_mtm(fx_fixings, exp) -> None:
fixed_leg_exch = FixedLeg(
schedule=Schedule(
effective=dt(2022, 1, 3),
termination=dt(2022, 7, 3),
frequency="Q",
payment_lag_exchange=3,
),
fixed_rate=5.0,
currency="usd",
pair="eurusd",
notional=10e6,
fx_fixings=fx_fixings,
mtm="xcs",
initial_exchange=True,
)
fxr = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxf = FXForwards(
fxr,
{
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.965}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.985}),
"eurusd": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.987}),
},
)
d = [
dt(2022, 1, 6),
dt(2022, 4, 6),
dt(2022, 7, 6),
] # payment_lag_exchange is 3 days.
rate = [_ if _ is not NoInput(0) else fxf.rate("eurusd", d[i]) for i, _ in enumerate(exp)]
fixed_leg_exch.cashflows(
rate_curve=fxf.curve("usd", "usd"), disc_curve=fxf.curve("usd", "usd"), fx=fxf
)
assert float(fixed_leg_exch.periods[0].try_cashflow(fx=fxf).unwrap() - 10e6 * rate[0]) < 1e-6
assert (
float(fixed_leg_exch.periods[2].try_cashflow(fx=fxf).unwrap() - 10e6 * (rate[1] - rate[0]))
< 1e-6
)
assert (
float(fixed_leg_exch.periods[4].try_cashflow(fx=fxf).unwrap() - 10e6 * (rate[2] - rate[1]))
< 1e-6
)
assert fixed_leg_exch.periods[4].settlement_params.payment == dt(2022, 7, 6)
assert fixed_leg_exch.periods[1].settlement_params.notional == 10e6
assert fixed_leg_exch.periods[1].non_deliverable_params.fx_fixing.value == exp[0]
assert fixed_leg_exch.periods[1].non_deliverable_params.fx_fixing.date == dt(2022, 1, 4)
assert type(fixed_leg_exch.periods[1]) is FixedPeriod
assert fixed_leg_exch.periods[3].settlement_params.notional == 10e6
assert fixed_leg_exch.periods[3].non_deliverable_params.fx_fixing.value == exp[1]
assert fixed_leg_exch.periods[3].non_deliverable_params.fx_fixing.date == dt(2022, 4, 4)
assert type(fixed_leg_exch.periods[3]) is FixedPeriod
assert fixed_leg_exch.periods[-1].settlement_params.notional == 10e6
assert fixed_leg_exch.periods[-1].non_deliverable_params.fx_fixing.value == exp[1]
assert fixed_leg_exch.periods[-1].non_deliverable_params.fx_fixing.date == dt(2022, 4, 4)
@pytest.mark.parametrize(
("type_", "expected", "kw"),
[
(FloatLeg, [522.324262, 522.324262], {"float_spread": 1.0}),
(FixedLeg, [522.324262, 53772.226595], {"fixed_rate": 2.5}),
],
)
def test_mtm_leg_exchange_metrics(type_, expected, kw) -> None:
leg = type_(
schedule=Schedule(
effective=dt(2022, 1, 3),
termination=dt(2022, 7, 3),
frequency="Q",
payment_lag=0,
payment_lag_exchange=0,
),
currency="usd",
pair="eurusd",
notional=10e6,
initial_exchange=True,
mtm="xcs",
**kw,
)
fxr = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxf = FXForwards(
fxr,
{
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.965}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.985}),
"eurusd": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.987}),
},
)
# d = [
# dt(2022, 1, 6),
# dt(2022, 4, 6),
# dt(2022, 7, 6),
# ] # payment_lag_exchange is 3 days.
# rate = [fxf.rate("eurusd", d[i]) for i in range(3)]
result = leg.analytic_delta(
rate_curve=fxf.curve("usd", "usd"), disc_curve=fxf.curve("usd", "usd"), fx=fxf
)
assert float(result - expected[0]) < 1e-6
result = leg.npv(rate_curve=fxf.curve("usd", "usd"), disc_curve=fxf.curve("usd", "usd"), fx=fxf)
assert float(result - expected[1]) < 1e-6
@pytest.mark.parametrize(
("klass", "kwargs", "expected"),
[
(FixedLeg, {}, [200.0, 300.0, 400.0]),
(
FixedLeg,
{"initial_exchange": False, "final_exchange": True},
[200.0, 300.0, 400.0, 400.0],
),
(ZeroFixedLeg, {}, [400.0]),
],
)
def test_set_index_fixings_series_leg_types(klass, kwargs, expected) -> None:
index_fixings = Series(
[100.0, 200.0, 300, 400.0, 500.0],
index=[dt(2022, 1, 1), dt(2022, 2, 1), dt(2022, 5, 1), dt(2022, 8, 1), dt(2022, 11, 1)],
)
obj = klass(
schedule=Schedule(
effective=dt(2022, 2, 5),
termination="9M",
frequency="Q",
),
index_fixings=index_fixings,
index_base=100.0,
index_lag=3,
index_method="monthly",
**kwargs,
)
for i, period in enumerate(obj.periods):
if type(period) is Cashflow:
continue
assert period.index_params.index_fixing.value == expected[i]
@pytest.mark.skip(reason="fixings as a list removed in v2.0")
@pytest.mark.parametrize(
("klass", "kwargs", "expected"),
[
(FixedLeg, {"index_fixings": [200.0, 300.0, 400.0]}, [200.0, 300.0, 400.0]),
(
FixedLeg,
{
"initial_exchange": False,
"final_exchange": True,
"index_fixings": [200.0, 300.0, 400.0, 400.0],
},
[200.0, 300.0, 400.0, 400.0],
),
(ZeroFixedLeg, {"index_fixings": [400.0]}, [400.0]),
],
)
def test_set_index_fixings_list_leg_types(klass, kwargs, expected) -> None:
obj = klass(
schedule=Schedule(
effective=dt(2022, 2, 5),
termination="9M",
frequency="Q",
),
index_base=100.0,
index_lag=3,
index_method="monthly",
**kwargs,
)
for i, period in enumerate(obj.periods):
if type(period) is Cashflow:
continue
assert period.index_fixings == expected[i]
@pytest.mark.skip(reason="v2.2 refactored fixings. Fixing as dualtype is not allowed.")
@pytest.mark.parametrize(
("klass", "kwargs", "expected"),
[
(FixedLeg, {"index_fixings": 200.0}, [200.0, NoInput(0), NoInput(0)]),
(
FixedLeg,
{"initial_exchange": False, "final_exchange": True, "index_fixings": 200.0},
[200.0, NoInput(0), NoInput(0), NoInput(0)],
),
(ZeroFixedLeg, {"index_fixings": 400.0}, [400.0]),
],
)
def test_set_index_fixings_float_leg_types(klass, kwargs, expected) -> None:
obj = klass(
schedule=Schedule(
effective=dt(2022, 2, 5),
termination="9M",
frequency="Q",
),
index_base=100.0,
index_lag=3,
index_method="monthly",
**kwargs,
)
for i, period in enumerate(obj.periods):
if type(period) is Cashflow:
continue
assert period.index_fixings == expected[i]
================================================
FILE: python/tests/periods/test_fixings_exposure.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
import os
from datetime import datetime as dt
import numpy as np
import pandas as pd
import pytest
from rateslib import fixings
from rateslib.curves import Curve
from rateslib.data.fixings import FXIndex
from rateslib.enums import FloatFixingMethod, SpreadCompoundMethod
from rateslib.enums.generics import NoInput
from rateslib.fx import FXForwards, FXRates
from rateslib.instruments import IRS
from rateslib.periods import FixedPeriod, FloatPeriod, FXCallPeriod, MtmCashflow, ZeroFloatPeriod
from rateslib.scheduling import Schedule
from rateslib.solver import Solver
@pytest.fixture
def curve():
nodes = {
dt(2022, 1, 1): 1.00,
dt(2022, 4, 1): 0.99,
dt(2022, 7, 1): 0.98,
dt(2022, 10, 1): 0.97,
}
return Curve(nodes=nodes, interpolation="log_linear", id="curve_fixture")
class TestFloatPeriod:
@pytest.mark.parametrize(
("method"),
[
FloatFixingMethod.RFRPaymentDelay(),
FloatFixingMethod.RFRObservationShift(3),
FloatFixingMethod.RFRLockout(2),
FloatFixingMethod.RFRLookback(3),
FloatFixingMethod.RFRLockoutAverage(2),
FloatFixingMethod.RFRPaymentDelayAverage(),
FloatFixingMethod.RFRObservationShiftAverage(3),
FloatFixingMethod.RFRLookbackAverage(3),
],
)
@pytest.mark.parametrize(
("scm", "spread"),
[
(SpreadCompoundMethod.NoneSimple, 0.0),
(SpreadCompoundMethod.NoneSimple, 500.0),
(SpreadCompoundMethod.ISDACompounding, 0.0),
(SpreadCompoundMethod.ISDACompounding, 500.0),
(SpreadCompoundMethod.ISDAFlatCompounding, 0.0),
(SpreadCompoundMethod.ISDAFlatCompounding, 500.0),
],
)
def test_baseline_versus_solver_fixings_sensitivity(self, method, scm, spread, curve):
# the Solver can make fixings exposure calculations independently from analytical
# calculations and approximations. This tests validates the analytical calculations
# against the Solver
if type(method) in [
FloatFixingMethod.RFRLockoutAverage,
FloatFixingMethod.RFRPaymentDelayAverage,
FloatFixingMethod.RFRObservationShiftAverage,
FloatFixingMethod.RFRLookbackAverage,
] and scm in [
SpreadCompoundMethod.ISDAFlatCompounding,
SpreadCompoundMethod.ISDACompounding,
]:
pytest.skip(reason="Impossible combination raises ValueError on initialisation.")
# let us construct baseline instruments
rate_curve = Curve(
nodes={
dt(2022, 1, 1): 1.00,
dt(2022, 1, 31): 0.99,
dt(2022, 2, 1): 0.99,
dt(2022, 2, 2): 0.99,
dt(2022, 2, 3): 0.99,
dt(2022, 2, 4): 0.99,
dt(2022, 2, 7): 0.99,
dt(2022, 2, 8): 0.99,
dt(2022, 2, 9): 0.99,
dt(2022, 2, 10): 0.98,
dt(2029, 2, 1): 0.97,
},
interpolation="log_linear",
calendar="nyc",
id="curve",
)
solver = Solver(
curves=[rate_curve],
instruments=[
IRS(
dt(2022, 1, 4), "1b", spec="usd_irs", payment_lag=0, curves=[rate_curve, curve]
),
IRS(
dt(2022, 1, 31), "1b", spec="usd_irs", payment_lag=0, curves=[rate_curve, curve]
),
IRS(
dt(2022, 2, 1), "1b", spec="usd_irs", payment_lag=0, curves=[rate_curve, curve]
),
IRS(
dt(2022, 2, 2), "1b", spec="usd_irs", payment_lag=0, curves=[rate_curve, curve]
),
IRS(
dt(2022, 2, 3), "1b", spec="usd_irs", payment_lag=0, curves=[rate_curve, curve]
),
IRS(
dt(2022, 2, 4), "1b", spec="usd_irs", payment_lag=0, curves=[rate_curve, curve]
),
IRS(
dt(2022, 2, 7), "1b", spec="usd_irs", payment_lag=0, curves=[rate_curve, curve]
),
IRS(
dt(2022, 2, 8), "1b", spec="usd_irs", payment_lag=0, curves=[rate_curve, curve]
),
IRS(
dt(2022, 2, 9), "1b", spec="usd_irs", payment_lag=0, curves=[rate_curve, curve]
),
IRS(
dt(2022, 2, 10), "1b", spec="usd_irs", payment_lag=0, curves=[rate_curve, curve]
),
],
s=[4.03] * 10,
)
p = FloatPeriod(
notional=-10e6,
fixing_series="usd_rfr",
fixing_method=method,
frequency="A",
start=dt(2022, 2, 3),
end=dt(2022, 2, 10),
float_spread=spread,
payment=dt(2022, 2, 10),
convention="act360",
spread_compound_method=scm,
)
risk = solver.delta(npv=p.npv(rate_curve=rate_curve, disc_curve=curve, local=True))
fixings_ = p.local_analytic_rate_fixings(rate_curve=rate_curve, disc_curve=curve)
fixings_ = fixings_.reindex(
[
dt(2022, 1, 30),
dt(2022, 1, 31),
dt(2022, 2, 1),
dt(2022, 2, 2),
dt(2022, 2, 3),
dt(2022, 2, 4),
dt(2022, 2, 7),
dt(2022, 2, 8),
dt(2022, 2, 9),
dt(2022, 2, 10),
],
fill_value=np.nan,
)
risk_compare = fixings_[("curve", "usd", "usd", "1B")].astype(float).fillna(0.0).to_numpy()
risk_array = risk.to_numpy()[:, 0].copy()
_diff = np.max(np.abs(risk_compare - risk_array))
if scm == SpreadCompoundMethod.ISDAFlatCompounding and spread > 100.0:
atol = 1e-2
else:
atol = 1e-12
assert np.all(np.isclose(risk_array, risk_compare, atol=atol))
# now add some fixings
name = str(hash(os.urandom(8)))
fixings.add(
f"{name}_1B",
pd.Series(
index=[dt(2022, 1, 31), dt(2022, 2, 1), dt(2022, 2, 2), dt(2022, 2, 3)],
data=[4.03, 4.03, 4.03, 4.03],
),
)
p = FloatPeriod(
notional=-10e6,
fixing_series="usd_rfr",
fixing_method=method,
frequency="A",
start=dt(2022, 2, 3),
end=dt(2022, 2, 10),
float_spread=spread,
payment=dt(2022, 2, 10),
convention="act360",
spread_compound_method=scm,
rate_fixings=name,
)
fixings_ = p.local_analytic_rate_fixings(rate_curve=rate_curve, disc_curve=curve)
fixings_ = fixings_.reindex(
[
dt(2022, 1, 30),
dt(2022, 1, 31),
dt(2022, 2, 1),
dt(2022, 2, 2),
dt(2022, 2, 3),
dt(2022, 2, 4),
dt(2022, 2, 7),
dt(2022, 2, 8),
dt(2022, 2, 9),
dt(2022, 2, 10),
],
fill_value=np.nan,
)
risk_array[:5] = 0.0
risk_compare = fixings_[("curve", "usd", "usd", "1B")].astype(float).fillna(0.0).to_numpy()
assert np.all(np.isclose(risk_array, risk_compare, atol=atol))
def test_ibor_curve_example_book(self, curve):
p = FloatPeriod(
notional=-10e6,
fixing_series="eur_ibor",
fixing_method="ibor(2)",
frequency="Q",
start=dt(2025, 10, 8),
end=dt(2026, 1, 8),
float_spread=100.0,
payment=dt(2026, 1, 8),
convention="act360",
calendar="tgt",
)
result = p.try_unindexed_reference_cashflow_analytic_rate_fixings(rate_curve=curve).unwrap()
assert abs(result.iloc[0, 0] - 10e2 * 92 / 360) < 1e-12
assert result.index[0] == dt(2025, 10, 6)
def test_ibor_stub_curve_example_book(self, curve):
p = FloatPeriod(
notional=-10e6,
fixing_method=FloatFixingMethod.IBOR(2),
frequency="Q",
start=dt(2025, 10, 8),
end=dt(2025, 12, 16),
float_spread=100.0,
payment=dt(2025, 12, 16),
convention="act360",
calendar="tgt",
stub=True,
)
result = p.try_unindexed_reference_cashflow_analytic_rate_fixings(
rate_curve={"2m": curve, "3m": curve, "6m": curve}
).unwrap()
alpha = 23 / 31.0
assert abs(result.iloc[0, 0] - 10e2 * 69 / 360 * alpha) < 1e-12
assert abs(result.iloc[0, 1] - 10e2 * 69 / 360 * (1 - alpha)) < 1e-12
assert result.index[0] == dt(2025, 10, 6)
def test_ibor_fixing_set(self, curve):
p = FloatPeriod(
notional=-10e6,
fixing_series="eur_ibor",
fixing_method="ibor(2)",
rate_fixings=2.0,
frequency="Q",
start=dt(2025, 10, 8),
end=dt(2026, 1, 8),
float_spread=100.0,
payment=dt(2026, 1, 8),
convention="act360",
calendar="tgt",
)
result = p.try_unindexed_reference_cashflow_analytic_rate_fixings(rate_curve=curve).unwrap()
assert abs(result.iloc[0, 0]) < 1e-12
assert result.index[0] == dt(2025, 10, 6)
def test_ibor_stub_curve_fixings_set(self, curve):
p = FloatPeriod(
notional=-10e6,
fixing_method="ibor(2)",
frequency="Q",
start=dt(2025, 10, 8),
end=dt(2025, 12, 16),
float_spread=100.0,
payment=dt(2025, 12, 16),
convention="act360",
calendar="tgt",
stub=True,
rate_fixings=2.0,
)
result = p.try_unindexed_reference_cashflow_analytic_rate_fixings(
rate_curve={"2m": curve, "3m": curve, "6m": curve}
).unwrap()
assert abs(result.iloc[0, 0]) < 1e-12
assert abs(result.iloc[0, 1]) < 1e-12
assert result.index[0] == dt(2025, 10, 6)
@pytest.mark.parametrize(
("method", "expected"),
[
(FloatFixingMethod.RFRPaymentDelay(), [0, 0, 0, 0, 277, 830, 277, 277, 277, 0]),
(FloatFixingMethod.RFRLockout(2), [0, 0, 0, 0, 277, 830, 830, 0, 0, 0]),
(FloatFixingMethod.RFRLookback(3), [0, 277, 830, 277, 277, 277, 0, 0, 0, 0]),
(FloatFixingMethod.RFRObservationShift(3), [0, 277, 277, 277, 277, 830, 0, 0, 0, 0]),
],
)
def test_rfr_curve_book(self, method, expected, curve):
p = FloatPeriod(
notional=-1e6,
fixing_series="usd_rfr",
fixing_method=method,
frequency="Q",
start=dt(2022, 2, 3),
end=dt(2022, 2, 10),
float_spread=0.0,
payment=dt(2022, 2, 10),
)
result = p.local_analytic_rate_fixings(rate_curve=curve)
result = result.reindex(
pd.Index(
data=[
dt(2022, 1, 30),
dt(2022, 1, 31),
dt(2022, 2, 1),
dt(2022, 2, 2),
dt(2022, 2, 3),
dt(2022, 2, 4),
dt(2022, 2, 7),
dt(2022, 2, 8),
dt(2022, 2, 9),
dt(2022, 2, 10),
]
),
fill_value=0.0,
)
for i in range(10):
assert abs(expected[i] - result.iloc[i, 0] * 1000) < 5e-1
def test_doc_reset(self):
fp = FloatPeriod(
start=dt(2026, 1, 12),
end=dt(2026, 1, 16),
payment=dt(2026, 1, 16),
frequency="M",
fixing_method="rfr_payment_delay",
rate_fixings="sofr",
)
fixings.add(
name="sofr_1B",
series=pd.Series(
index=[dt(2026, 1, 12), dt(2026, 1, 13), dt(2026, 1, 14), dt(2026, 1, 15)],
data=[3.1, 3.2, 3.3, 3.4],
),
)
# value is populated from given data
assert 3.245 < fp.rate_params.rate_fixing.value < 3.255
fp.reset_fixings()
# private data related to fixing is removed and requires new data lookup
assert fp.rate_params.rate_fixing._value == NoInput(0)
assert fp.rate_params.rate_fixing._populated.empty
fixings.pop("sofr_1B")
class TestFixedPeriod:
def test_immediate_fixing_sensitivity(self, curve):
p = FixedPeriod(
fixed_rate=2.0,
start=dt(2022, 1, 1),
end=dt(2022, 2, 1),
payment=dt(2022, 2, 1),
frequency="M",
notional=2e6,
currency="usd",
convention="act360",
)
result = p.try_immediate_analytic_rate_fixings(disc_curve=curve).unwrap()
assert isinstance(result, pd.DataFrame)
assert result.empty
class TestMtmCashflow:
def test_local_fixings(self):
curve1 = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98})
curve2 = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98})
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.10}, dt(2000, 1, 1)),
fx_curves={"eureur": curve2, "eurusd": curve2, "usdusd": curve1},
)
fixings.add("wmr12_eurusd", pd.Series(index=[dt(1999, 1, 1)], data=[1.15]))
mc = MtmCashflow(
currency="usd",
notional=2e6,
pair="eurusd",
payment=dt(2000, 2, 15),
start=dt(2000, 1, 10),
end=dt(2000, 2, 15),
fx_fixings_start="wmr12",
fx_fixings_end="wmr12",
)
result = mc.local_fixings(
disc_curve=curve1,
fx=fxf,
identifiers=[
(
"wmr12_eurusd",
pd.Series(
index=[dt(2000, 1, 6), dt(2000, 2, 11)],
data=[
fxf.rate("eurusd", dt(2000, 1, 10)),
fxf.rate("eurusd", dt(2000, 2, 15)),
],
),
)
],
)
assert abs(result.iloc[0, 0] - 2e6 * 1.0 * curve1[dt(2000, 2, 15)]) < 1e-6
assert abs(result.iloc[1, 0] + 2e6 * 1.0 * curve1[dt(2000, 2, 15)]) < 1e-6
fixings.pop("wmr12_eurusd")
class TestFXCallPeriod:
@pytest.mark.parametrize(("fixing", "itm"), [(1.15, True), (1.05, False)])
def test_itm_otm_fixing(self, fixing, itm):
curve1 = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98})
# curve2 = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98})
# fxf = FXForwards(
# fx_rates=FXRates({"eurusd": 1.10}, dt(2000, 1, 1)),
# fx_curves={"eureur": curve2, "eurusd": curve2, "usdusd": curve1},
# )
fixings.add("wmr13_eurusd", pd.Series(index=[dt(1999, 1, 1)], data=[1.15]))
fxo = FXCallPeriod(
delivery=dt(2000, 3, 1),
pair="eurusd",
expiry=dt(2000, 2, 28),
strike=1.10,
delta_type="forward",
notional=1e6,
option_fixings="wmr13",
)
result = fxo.local_fixings(
identifiers=[
("wmr13_eurusd", pd.Series(index=[dt(2000, 2, 28)], data=[fixing])),
],
disc_curve=curve1,
)
assert abs(result.iloc[0, 0] - itm * 1e6 * 1.0 * curve1[dt(2000, 3, 1)]) < 1e-6
fixings.pop("wmr13_eurusd")
class TestZeroFloatPeriod:
def test_multiple_sub_periods(self):
fixings.add("MY_RATE_INDEX_6M", pd.Series(index=[dt(1999, 1, 1)], data=[1.15]))
period = ZeroFloatPeriod(
schedule=Schedule(dt(2000, 1, 1), "2Y", "S"),
fixing_method=FloatFixingMethod.IBOR(0),
rate_fixings="MY_RATE_INDEX",
convention="Act360",
notional=1e6,
)
rc = Curve({dt(2000, 1, 1): 1.0, dt(2003, 1, 1): 0.95})
from rateslib.legs import CustomLeg
# cf = CustomLeg(periods=period.float_periods).cashflows(rate_curve=rc)
result = period.local_fixings(
identifiers=[
(
"MY_RATE_INDEX_6M",
pd.Series(index=[dt(2000, 1, 1), dt(2000, 7, 1)], data=[1.692, 1.692]),
)
],
scalars=[0.01],
rate_curve=rc,
)
expected = period.local_analytic_rate_fixings(rate_curve=rc)
assert abs(result.iloc[0, 0] - expected.iloc[0, 0]) < 1e-4
assert abs(result.iloc[1, 0] - expected.iloc[1, 0]) < 1e-4
assert period.float_periods[0].rate_params.rate_fixing.value == NoInput(0)
fixings.pop("MY_RATE_INDEX_6M")
def test_local_fixings_raises_scalars():
curve1 = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98})
fixings.add("wmr12_eurusd", pd.Series(index=[dt(1999, 1, 1)], data=[1.15]))
mc = MtmCashflow(
currency="usd",
notional=2e6,
pair=FXIndex("eurusd", "tgt|fed", 2, "all", 0),
payment=dt(2000, 2, 15),
start=dt(2000, 1, 10),
end=dt(2000, 2, 15),
fx_fixings_start="wmr12",
fx_fixings_end="wmr12",
)
with pytest.raises(ValueError, match="If given, ``scalars`` must be same length as"):
mc.local_fixings(
identifiers=[
(
"wmr12_eurusd",
pd.Series(index=[dt(2000, 1, 10), dt(2000, 2, 15)], data=[1.1, 1.1]),
)
],
scalars=[1.0, 2.0],
disc_curve=curve1,
)
fixings.pop("wmr12_eurusd")
================================================
FILE: python/tests/periods/test_fixings_load.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
import os
from datetime import datetime as dt
import pytest
import rateslib.errors as err
from pandas import Series
from rateslib import fixings
from rateslib.data.fixings import (
FloatRateIndex,
FloatRateSeries,
FXFixing,
FXIndex,
IBORFixing,
IBORStubFixing,
RFRFixing,
)
from rateslib.data.loader import FixingMissingDataError
from rateslib.enums import FloatFixingMethod, SpreadCompoundMethod
from rateslib.enums.generics import NoInput
from rateslib.enums.parameters import IndexMethod
from rateslib.errors import VE_INDEX_BASE_NO_STR
from rateslib.periods import Cashflow, FloatPeriod
from rateslib.scheduling.frequency import Frequency
class TestIndexParams:
def test_index_lookup_and_populate_from_str_fixings(self):
rpi = Series(index=[dt(2000, 1, 1), dt(2000, 1, 2)], data=[101.0, 103.0])
name = str(hash(os.urandom(8)))
fixings.add(name, rpi)
c = Cashflow(
payment=dt(2000, 1, 2),
notional=1e6,
index_fixings=name,
index_method=IndexMethod.Curve,
index_base_date=dt(2000, 1, 1),
index_lag=0,
)
assert c.index_params.index_fixing.value == 103.0
assert c.index_params.index_base.value == 101.0
fixings.pop(name)
def test_lookup_and_populate_from_series_fixings(self):
rpi = Series(index=[dt(2000, 1, 1), dt(2000, 1, 2)], data=[101.0, 103.0])
with pytest.warns(FutureWarning, match=err.FW_FIXINGS_AS_SERIES[:25]):
c = Cashflow(
payment=dt(2000, 1, 2),
notional=1e6,
index_fixings=rpi,
index_method=IndexMethod.Curve,
index_base_date=dt(2000, 1, 1),
index_lag=0,
)
assert c.index_params.index_fixing.value == 103.0
assert c.index_params.index_base.value == 101.0
def test_immutable_index_fixings(self):
c = Cashflow(
payment=dt(2000, 1, 2),
notional=1e6,
index_fixings=0.0,
index_method=IndexMethod.Curve,
index_base_date=dt(2000, 1, 1),
index_lag=0,
)
with pytest.raises(ValueError, match=err.VE_ATTRIBUTE_IS_IMMUTABLE.format("index_fixing")):
c.index_params.index_fixing = 2.0
def test_index_fixings_determined_once(self):
# a change in the datastore will not affect an already loaded fixing for the period
c = Cashflow(
payment=dt(2000, 1, 2),
notional=1e6,
index_fixings="rpi",
index_method=IndexMethod.Curve,
index_base_date=dt(2000, 1, 1),
index_lag=0,
)
rpi = Series(index=[dt(2000, 1, 1), dt(2000, 1, 2)], data=[101.0, 103.0])
fixings.add("rpi", rpi)
before1 = c.index_params.index_fixing.value
before2 = c.index_params.index_base.value
fixings.pop("rpi")
rpi2 = Series(index=[dt(2000, 1, 1), dt(2000, 1, 2)], data=[201.0, 203.0])
fixings.add("rpi", rpi2)
assert c.index_params.index_fixing.value == before1
assert c.index_params.index_base.value == before2
@pytest.mark.parametrize("int_or_float", [3, 3.0])
def test_index_fixings_as_scalar(self, int_or_float):
# a scalar value for `index_fixings` will only impact `index_fixing` and not `index_base`
c = Cashflow(
payment=dt(2000, 1, 2),
notional=1e6,
index_fixings=int_or_float,
index_method=IndexMethod.Curve,
index_base_date=dt(2000, 1, 1),
index_lag=0,
)
assert c.index_params.index_fixing.value == int_or_float
assert c.index_params.index_base.value == NoInput(0)
def test_index_base_as_str_raises(self):
# index base as string series identifier will not work
with pytest.raises(ValueError, match=VE_INDEX_BASE_NO_STR):
Cashflow(
payment=dt(2000, 1, 2),
notional=1e6,
index_fixings=0.0,
index_method=IndexMethod.Curve,
index_base_date=dt(2000, 1, 1),
index_base="str",
index_lag=0,
)
def test_index_realtime_updates(self):
# test that the first series contains no data and an update adds new data
rpi = Series(index=[dt(2000, 1, 1), dt(2000, 1, 2)], data=[101.0, 103.0])
name = str(hash(os.urandom(8)))
fixings.add(name, rpi)
c = Cashflow(
payment=dt(2000, 1, 3),
notional=1e6,
index_fixings=name,
index_method=IndexMethod.Curve,
index_base_date=dt(2000, 1, 3),
index_lag=0,
)
assert c.index_params.index_fixing.value == NoInput(0)
assert c.index_params.index_base.value == NoInput(0)
fixings.pop(name)
rpi = Series(index=[dt(2000, 1, 1), dt(2000, 1, 3)], data=[101.0, 105.0])
fixings.add(name, rpi)
assert c.index_params.index_fixing.value == 105.0
assert c.index_params.index_base.value == 105.0
class TestSettlementParams:
def test_fx_fixings_no_input(
self,
):
c = Cashflow(currency="usd", pair="eurusd", payment=dt(2000, 1, 2), notional=2.0)
assert isinstance(c.non_deliverable_params.fx_fixing, FXFixing)
assert c.non_deliverable_params.fx_fixing.value is NoInput(0)
def test_fx_fixings_scalar_input(self):
c = Cashflow(
currency="usd", pair="eurusd", payment=dt(2000, 1, 2), notional=2.0, fx_fixings=2.0
)
assert c.non_deliverable_params.fx_fixing.value == 2.0
assert c.non_deliverable_params.fx_fixing._state == 0
def test_fx_fixings_series_input(self):
s = Series(index=[dt(1999, 12, 29), dt(1999, 12, 30)], data=[1.1, 2.1])
c = Cashflow(
currency="usd", pair="eurusd", payment=dt(2000, 1, 2), notional=2.0, fx_fixings=s
)
assert c.non_deliverable_params.fx_fixing._state == 0
assert c.non_deliverable_params.fx_fixing.value == 2.1
def test_fx_fixings_str_input(self):
s = Series(index=[dt(1999, 12, 29), dt(1999, 12, 30)], data=[1.1, 2.1])
name = str(hash(os.urandom(8)))
fixings.add(name + "_eurusd", s)
c = Cashflow(
currency="usd", pair="eurusd", payment=dt(2000, 1, 2), notional=2.0, fx_fixings=name
)
assert c.non_deliverable_params.fx_fixing.value == 2.1
assert isinstance(c.non_deliverable_params.fx_fixing.identifier, str)
assert c.non_deliverable_params.fx_fixing._state == hash(fixings[name + "_eurusd"][0])
fixings.pop(name + "_eurusd")
def test_fx_fixings_str_state_cache(self):
s = Series(index=[dt(2000, 1, 1), dt(2000, 1, 2)], data=[1.1, 2.1])
name = str(hash(os.urandom(8)))
fixings.add(name + "_eurusd", s)
c = Cashflow(
currency="usd",
pair="eurusd",
payment=dt(2000, 1, 3), # <- not in Series
notional=2.0,
fx_fixings=name,
)
assert c.non_deliverable_params.fx_fixing.value is NoInput(0)
assert isinstance(c.non_deliverable_params.fx_fixing.identifier, str)
# states match the hash because the FXFixing uses composite FXFixingMajors
assert c.non_deliverable_params.fx_fixing._state == hash(fixings[name + "_eurusd"][0])
assert c.non_deliverable_params.fx_fixing.value is NoInput(0)
assert c.non_deliverable_params.fx_fixing._state == hash(fixings[name + "_eurusd"][0])
fixings.pop(name + "_eurusd")
def test_fx_fixing_cashflow(self):
s = Series(index=[dt(1999, 12, 29), dt(1999, 12, 30)], data=[1.1, 2.1])
name = str(hash(os.urandom(8)))
fixings.add(name + "_eurusd", s)
c = Cashflow(
notional=100,
payment=dt(2000, 1, 2),
currency="usd",
pair="eurusd",
fx_fixings=name,
)
cf = c.cashflows()
assert cf["FX Fixing"] == 2.1
fix = c.non_deliverable_params.fx_fixing.value
assert fix == 2.1
fixings.pop(name + "_eurusd")
def test_immutable_fx_fixing(self):
c = Cashflow(
payment=dt(2000, 1, 2),
notional=1e6,
currency="usd",
pair="eurusd",
fx_fixings=0.0,
)
with pytest.raises(ValueError, match=err.VE_ATTRIBUTE_IS_IMMUTABLE.format("fx_fixing")):
c.non_deliverable_params.fx_fixing = 2.0
def test_fx_missing_data_raises(self):
s = Series(index=[dt(1999, 12, 29), dt(2000, 1, 1)], data=[1.1, 2.1])
name = str(hash(os.urandom(8)))
fixings.add(name + "_eurusd", s)
c = Cashflow(
notional=100,
payment=dt(2000, 1, 2),
currency="usd",
pair="eurusd",
fx_fixings=name,
)
with pytest.raises(FixingMissingDataError, match="Fixing lookup for date "):
c.non_deliverable_params.fx_fixing.value
fixings.pop(name + "_eurusd")
def test_fx_missing_data_raises_cross(self):
s = Series(index=[dt(1999, 12, 29), dt(1999, 12, 30)], data=[1.1, 2.1])
s2 = Series(index=[dt(1999, 12, 29), dt(2000, 1, 1)], data=[1.1, 2.1])
name = str(hash(os.urandom(8)))
fixings.add(name + "_usdinr", s)
fixings.add(name + "_usdrub", s2)
c = Cashflow(
notional=100,
payment=dt(2000, 1, 2),
currency="inr",
pair=FXIndex("inrrub", "mum|fed", 2, "mum", -2),
fx_fixings=name,
)
with pytest.raises(FixingMissingDataError, match="Fixing lookup for date "):
c.non_deliverable_params.fx_fixing.value
fixings.pop(name + "_usdinr")
fixings.pop(name + "_usdrub")
class TestRateParams:
def test_rate_fixings_input_as_str_out_of_range(
self,
):
s = Series(index=[dt(1999, 1, 1), dt(1999, 1, 2)], data=[1.1, 2.1])
fixings.add("IBOR123dfgs_1M", s)
c = FloatPeriod(
start=dt(2000, 1, 1),
end=dt(2000, 2, 1),
payment=dt(2000, 2, 1),
notional=2.0,
frequency="M",
fixing_series="usd_ibor",
fixing_method="IBOR(2)",
rate_fixings="IBOR123dfgs",
)
assert c.rate_params.rate_fixing.value == NoInput(0)
assert c.rate_params.rate_fixing.value == NoInput(0)
assert c.rate_params.rate_fixing.identifier == "IBOR123dfgs_1M".upper()
assert c.rate_params.rate_fixing._state == fixings["IBOR123dfgs_1M"][0]
fixings.pop("IBOR123dfgs_1M")
def test_rate_fixings_no_input(
self,
):
c = FloatPeriod(
start=dt(2000, 1, 1),
end=dt(2000, 2, 1),
payment=dt(2000, 2, 1),
notional=2.0,
frequency="M",
fixing_method=FloatFixingMethod.IBOR(2),
fixing_series="usd_ibor",
rate_fixings=NoInput(0),
)
assert c.rate_params.rate_fixing.value == NoInput(0)
assert c.rate_params.rate_fixing.value == NoInput(0)
assert c.rate_params.rate_fixing._state == 0
def test_rate_fixings_scalar(
self,
):
c = FloatPeriod(
start=dt(2000, 1, 1),
end=dt(2000, 2, 1),
payment=dt(2000, 2, 1),
notional=2.0,
frequency="M",
fixing_method="IBOR(2)",
fixing_series="usd_ibor",
rate_fixings=2.5,
)
assert c.rate_params.rate_fixing.value == 2.5
assert c.rate_params.rate_fixing.value == 2.5
assert c.rate_params.rate_fixing._state == 0
def test_ibor_fixing_load(self):
name = str(hash(os.urandom(8)))
fixings.add(f"{name}_3M", Series(index=[dt(2022, 1, 3)], data=[55.0]))
f = IBORFixing(
accrual_start=dt(2022, 1, 5),
rate_index=FloatRateIndex(
frequency=Frequency.Months(3, None),
series="eur_ibor",
),
identifier=f"{name}_3M",
)
assert f.value == 55.0
assert f._state == fixings[f"{name}_3M"][0]
def test_stub_ibor_fixing_load(self):
name = str(hash(os.urandom(8)))
fixings.add(f"{name}_3M", Series(index=[dt(2022, 1, 3)], data=[55.0]))
fixings.add(f"{name}_6M", Series(index=[dt(2022, 1, 3)], data=[65.0]))
index_series = FloatRateIndex(
frequency=Frequency.Months(3, None),
series="eur_ibor",
).series
f = IBORStubFixing(
accrual_start=dt(2022, 1, 5),
accrual_end=dt(2022, 5, 21),
rate_series=index_series,
identifier=name,
)
assert f.value == 55 * 45 / 91 + 65 * 46 / 91
fixings.pop(f"{name}_3M")
fixings.pop(f"{name}_6M")
def test_rfr_fixings_load(self):
name = str(hash(os.urandom(8)))
fixings.add(
f"{name}_1B",
Series(
index=[dt(2023, 2, 8), dt(2023, 2, 9), dt(2023, 2, 10), dt(2023, 2, 13)],
data=[1.0, 2.0, 3.0, 4.0],
),
)
rate_index = FloatRateIndex(
frequency="1B",
series="usd_rfr",
)
f = RFRFixing(
accrual_start=dt(2023, 2, 8),
accrual_end=dt(2023, 2, 13),
rate_index=rate_index,
fixing_method=FloatFixingMethod.RFRPaymentDelay(),
spread_compound_method=SpreadCompoundMethod.NoneSimple,
identifier=f"{name}_1B",
float_spread=0.0,
)
result = f.value
expected = ((1 + 1 / 36000) * (1 + 2 / 36000) * (1 + 3 * 3 / 36000) - 1) * 36000 / 5
assert abs(result - expected) < 1e-10
f = RFRFixing(
accrual_start=dt(2023, 2, 8),
accrual_end=dt(2023, 2, 17),
rate_index=rate_index,
fixing_method=FloatFixingMethod.RFRPaymentDelay(),
spread_compound_method=SpreadCompoundMethod.NoneSimple,
identifier=f"{name}_1B",
float_spread=0.0,
)
result = f.value
assert result == NoInput(0)
# # test is removed because a `fixing_series` with no tenors now
# # defaults to [1w, 1M, 3M, 6M, 12M]
# def test_stub_ibor_warns_no_series(self):
# with pytest.warns(UserWarning, match=err.UW_NO_TENORS[:15]):
# fix = IBORStubFixing(
# accrual_start=dt(2022, 1, 5),
# accrual_end=dt(2022, 5, 21),
# rate_series=FloatRateSeries(
# lag=0,
# calendar="tgt",
# convention="act360",
# modifier="mf",
# eom=False,
# ),
# identifier="NOT_AVAILABLE",
# )
# assert isinstance(fix.value, NoInput)
def test_rfr_fixing_identifier(self):
p = FloatPeriod(
start=dt(2000, 1, 1),
end=dt(2000, 4, 1),
frequency=Frequency.Months(3, None),
payment=dt(2000, 1, 4),
fixing_method=FloatFixingMethod.RFRPaymentDelay(),
rate_fixings="TEST",
)
assert p.rate_params.fixing_identifier == "TEST"
assert p.rate_params.rate_fixing.identifier == "TEST_1B"
def test_ibor_fixing_identifier(self):
p = FloatPeriod(
start=dt(2000, 1, 1),
end=dt(2000, 4, 1),
frequency=Frequency.Months(3, None),
payment=dt(2000, 1, 4),
fixing_method=FloatFixingMethod.IBOR(2),
rate_fixings="TEST",
)
assert p.rate_params.fixing_identifier == "TEST"
assert p.rate_params.rate_fixing.identifier == "TEST_3M"
def test_ibor12M_fixing_identifier(self):
p = FloatPeriod(
start=dt(2000, 1, 1),
end=dt(2001, 1, 1),
frequency=Frequency.Months(12, None),
payment=dt(2000, 1, 4),
fixing_method=FloatFixingMethod.IBOR(2),
rate_fixings="TEST",
)
assert p.rate_params.fixing_identifier == "TEST"
assert p.rate_params.rate_fixing.identifier == "TEST_12M"
def test_ibor_stub_fixing_identifier(self):
# these tenors are derived from the default tenors [1W, 1M, 3M, 6M, 12M]
p = FloatPeriod(
start=dt(2000, 1, 1),
end=dt(2000, 3, 1),
frequency=Frequency.Months(3, None),
payment=dt(2000, 1, 4),
fixing_method=FloatFixingMethod.IBOR(2),
stub=True,
rate_fixings="TEST",
)
assert p.rate_params.rate_fixing.fixing1.identifier == "TEST_1M"
assert p.rate_params.rate_fixing.fixing2.identifier == "TEST_3M"
================================================
FILE: python/tests/periods/test_float_rate.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
import os
from datetime import datetime as dt
import pytest
import rateslib.errors as err
from pandas import NA, Series
from pandas.testing import assert_series_equal
from rateslib import fixings
from rateslib.curves import Curve, LineCurve
from rateslib.data.fixings import FloatRateIndex, FloatRateSeries, _RFRRate
from rateslib.data.loader import FixingMissingForecasterError
from rateslib.default import NoInput
from rateslib.enums.parameters import FloatFixingMethod, SpreadCompoundMethod
from rateslib.periods.float_rate import rate_value
from rateslib.scheduling import Adjuster, Convention, Frequency, NamedCal
@pytest.fixture
def curve():
return Curve(
nodes={
dt(2000, 1, 3): 1.00,
dt(2000, 4, 3): 1.00 / (1.0 + 0.02 * 91 / 360),
},
convention="Act360",
calendar="bus",
)
@pytest.fixture
def curve2():
return Curve(
nodes={
dt(2000, 1, 3): 1.00,
dt(2000, 7, 3): 1.00 / (1.0 + 0.03 * 182 / 360),
},
convention="Act360",
calendar="bus",
)
@pytest.fixture
def line_curve():
return LineCurve(
nodes={
dt(1999, 12, 30): 2.00,
dt(2000, 3, 31): 10.0,
},
convention="Act360",
calendar="bus",
)
@pytest.fixture
def line_curve2():
return LineCurve(
nodes={
dt(1999, 12, 30): 3.00,
dt(2000, 3, 31): 10.0,
},
convention="Act360",
calendar="bus",
)
D = 1 / 360.0
class TestFloatRateIndex:
def test_init_attributes(self):
s = FloatRateSeries(
lag=1,
calendar="bus",
convention="Act360",
modifier="mf",
eom=False,
)
assert s.calendar == NamedCal("bus")
assert isinstance(s.calendar, NamedCal)
assert s.convention == Convention.Act360
assert s.modifier == Adjuster.ModifiedFollowing()
assert not s.eom
assert s.lag == 1
def test_init_index_attrbutes(self):
s = FloatRateIndex(
frequency="Q",
series="usd_ibor",
)
assert s.calendar == NamedCal("nyc")
assert isinstance(s.calendar, NamedCal)
assert s.convention == Convention.Act360
assert s.modifier == Adjuster.ModifiedFollowing()
assert not s.eom
assert s.lag == 2
assert s.frequency == Frequency.Months(3, None)
class TestIBORRate:
def test_tenor_rate_from_curve(self, curve, line_curve):
# test an IBOR rate is calculated correctly from a forecast curve
for rate_curve in [curve, line_curve]:
result = rate_value(
rate_curve=rate_curve,
rate_fixings=NoInput(0),
start=dt(2000, 1, 3),
end=dt(2000, 4, 3),
stub=False,
frequency="3M",
fixing_method="IBOR(2)",
float_spread=18.0,
)
assert abs(result - 2.18) < 1e-12
def test_tenor_rate_from_curve_fail_from_history(self, curve, line_curve):
# test an IBOR rate cannot be forecast in the past
for rate_curve in [curve, line_curve]:
with pytest.raises(ValueError, match="`effective` date for rate period is before the"):
rate_value(
rate_curve=rate_curve,
rate_fixings=NoInput(0),
start=dt(1980, 1, 3),
end=dt(1980, 4, 3),
stub=False,
frequency="3M",
fixing_method="IBOR(2)",
float_spread=18.0,
)
def test_tenor_rate_from_dict_curve(self, curve, curve2, line_curve, line_curve2):
# test an IBOR rate is calculated correctly from a dict of forecast curves
for rate_curve in [{"3m": curve, "6m": curve2}, {"3m": line_curve, "6m": line_curve2}]:
result = rate_value(
rate_curve=rate_curve,
rate_fixings=NoInput(0),
start=dt(2000, 1, 3),
end=dt(2000, 4, 3),
stub=False,
frequency="3M",
fixing_method="IBOR(2)",
float_spread=18.0,
)
assert abs(result - 2.18) < 1e-12
def test_tenor_rate_from_scalar_fixing(self, curve, curve2, line_curve, line_curve2):
# test an IBOR rate is calculated correctly from a direct scalar fixing
for rate_curve in [
curve,
line_curve,
{"3m": curve, "6m": curve2},
{"3m": line_curve, "6m": line_curve2},
]:
result = rate_value(
rate_curve=rate_curve,
rate_fixings=1.5,
start=dt(2000, 1, 3),
end=dt(2000, 4, 3),
stub=False,
frequency="3M",
fixing_method="IBOR(2)",
float_spread=18.0,
)
assert abs(result - 1.68) < 1e-12
def test_tenor_rate_from_fixing_str(self, curve, line_curve, curve2, line_curve2):
# test an IBOR rate is calculated correctly from a fixing series
fixings.add("TEST_VALUES_3M", Series(index=[dt(1999, 12, 30)], data=[1.2]))
for rate_curve in [
curve,
line_curve,
{"3m": curve, "6m": curve2},
{"3m": line_curve, "6m": line_curve2},
]:
result = rate_value(
rate_curve=rate_curve,
rate_fixings="TEST_VALUES_3M",
start=dt(2000, 1, 3),
end=dt(2000, 4, 3),
stub=False,
frequency="3M",
fixing_method="IBOR(2)",
float_spread=18.0,
)
assert abs(result - 1.38) < 1e-12
fixings.pop("TEST_VALUES_3M")
def test_tenor_rate_from_fixing_str_fallback(self, curve, line_curve, curve2, line_curve2):
# test an IBOR rate is calculated correctly from a curve when no fixing date exists
name = str(hash(os.urandom(8)))
fixings.add(f"{name}_3M", Series(index=[dt(2001, 1, 1)], data=[1.2]))
for rate_curve in [
curve,
line_curve,
{"3m": curve, "6m": curve2},
{"3m": line_curve, "6m": line_curve2},
]:
with pytest.warns(UserWarning, match=f"Fixings are provided in series: '{name}_3M',"):
result = rate_value(
rate_curve=rate_curve,
rate_fixings=f"{name}_3M",
start=dt(2000, 1, 3),
end=dt(2000, 4, 3),
stub=False,
frequency="3M",
fixing_method="IBOR(2)",
float_spread=18.0,
)
assert abs(result - 2.18) < 1e-12
fixings.pop(f"{name}_3M")
def test_stub_rate_from_fixing_dict(self, curve, line_curve, curve2, line_curve2):
# test an IBOR rate is calculated correctly from a fixing series
fixings.add("TEST_VALUES_3M", Series(index=[dt(1999, 12, 30)], data=[1.2]))
fixings.add("TEST_VALUES_6M", Series(index=[dt(1999, 12, 30)], data=[2.2]))
for rate_curve in [
curve,
line_curve,
{"3m": curve, "6m": curve2},
{"3m": line_curve, "6m": line_curve2},
]:
result = rate_value(
rate_curve=rate_curve,
rate_fixings="TEST_VALUES",
start=dt(2000, 1, 3),
end=dt(2000, 5, 18),
stub=True,
frequency="3M",
fixing_method="IBOR(2)",
float_spread=18.0,
)
expected = 1.2 + 1.0 * 45 / 91 + 0.18
assert abs(result - expected) < 1e-12
fixings.pop("TEST_VALUES_3M")
fixings.pop("TEST_VALUES_6M")
def test_stub_rate_from_fixing_dict_missing_data(self, curve, line_curve, curve2, line_curve2):
# test an IBOR rate is calculated correctly from a fixing series
fixings.add("TEST_VALUES_3M", Series(index=[dt(1999, 12, 1)], data=[1.2]))
fixings.add("TEST_VALUES_6M", Series(index=[dt(1999, 12, 1)], data=[2.2]))
for rate_curve, expected in [
(curve, 2.18249787441),
(line_curve, 2.180),
({"3m": curve, "6m": curve2}, 2.674505494505512),
({"3m": line_curve, "6m": line_curve2}, 2.6745054945054947),
]:
result = rate_value(
rate_curve=rate_curve,
rate_fixings="TEST_VALUES",
start=dt(2000, 1, 3),
end=dt(2000, 5, 18),
stub=True,
frequency="3M",
fixing_method="IBOR(2)",
float_spread=18.0,
)
# expected = 1.2 + 1.0 * 45 / 91 + 0.18
assert abs(result - expected) < 1e-11
fixings.pop("TEST_VALUES_3M")
fixings.pop("TEST_VALUES_6M")
def test_stub_rate_from_fixing_dict_1tenor(self, curve, line_curve, curve2, line_curve2):
# test an IBOR rate is calculated correctly from a fixing series
fixings.add("TEST_VALUES_6M", Series(index=[dt(1999, 12, 30)], data=[4.1]))
for rate_curve in [
curve,
line_curve,
{"3m": curve, "6m": curve2},
{"3m": line_curve, "6m": line_curve2},
]:
result = rate_value(
rate_curve=rate_curve,
rate_fixings="TEST_VALUES",
start=dt(2000, 1, 3),
end=dt(2000, 5, 18),
stub=True,
frequency="3M",
fixing_method="IBOR(2)",
float_spread=18.0,
)
expected = 4.1 + 0.18
assert abs(result - expected) < 1e-12
fixings.pop("TEST_VALUES_6M")
def test_stub_rate_from_scalar_fixing(self, curve, line_curve, curve2, line_curve2):
# test an IBOR stub rate is calculated correctly from a fixing scalar
for rate_curve in [
curve,
line_curve,
{"3m": curve, "6m": curve2},
{"3m": line_curve, "6m": line_curve2},
]:
result = rate_value(
rate_curve=rate_curve,
rate_fixings=9.9,
start=dt(2000, 1, 3),
end=dt(2000, 5, 18),
stub=True,
frequency="3M",
fixing_method="IBOR(2)",
float_spread=18.0,
)
expected = 9.9 + 0.18
assert abs(result - expected) < 1e-12
def test_stub_rate_from_dict_curve(self, curve, curve2, line_curve, line_curve2):
# test an IBOR stub rate is calculated correctly from a dict of forecast curves
for rate_curve in [{"3m": curve, "6m": curve2}, {"3m": line_curve, "6m": line_curve2}]:
result = rate_value(
rate_curve=rate_curve,
rate_fixings=NoInput(0),
start=dt(2000, 1, 3),
end=dt(2000, 5, 18),
stub=True,
frequency="3M",
fixing_method="IBOR(2)",
float_spread=18.0,
)
expected = 2.0 * 46 / 91 + 3.0 * 45 / 91 + 0.18
assert abs(result - expected) < 1e-12
def test_stub_rate_from_dict_curve_long_curves(self, curve, curve2, line_curve, line_curve2):
# test an IBOR stub rate is calculated correctly from a dict of forecast curves
for rate_curve in [{"9m": curve, "6m": curve2}, {"9m": line_curve, "6m": line_curve2}]:
with pytest.warns(UserWarning, match="Interpolated stub period has a length shorter"):
result = rate_value(
rate_curve=rate_curve,
rate_fixings=NoInput(0),
start=dt(2000, 1, 3),
end=dt(2000, 5, 18),
stub=True,
frequency="3M",
fixing_method="IBOR(2)",
float_spread=18.0,
)
expected = 3.0 + 0.18 # just the 6m curve
assert abs(result - expected) < 1e-12
def test_stub_rate_from_dict_curve_short_curves(self, curve, curve2, line_curve, line_curve2):
# test an IBOR stub rate is calculated correctly from a dict of forecast curves
for rate_curve in [{"3m": curve, "1m": curve2}, {"3m": line_curve, "1m": line_curve2}]:
with pytest.warns(UserWarning, match="Interpolated stub period has a length longer"):
result = rate_value(
rate_curve=rate_curve,
rate_fixings=NoInput(0),
start=dt(2000, 1, 3),
end=dt(2000, 5, 18),
stub=True,
frequency="3M",
fixing_method="IBOR(2)",
float_spread=18.0,
)
expected = 2.0 + 0.18 # just the 3m curve
assert abs(result - expected) < 1e-12
def test_stub_rate_from_single_curve(self, curve, curve2, line_curve, line_curve2):
# test an IBOR stub rate is calculated from a single forecast curve
for rate_curve in [curve, line_curve]:
result = rate_value(
rate_curve=rate_curve,
rate_fixings=NoInput(0),
start=dt(2000, 1, 3),
end=dt(2000, 5, 18),
stub=True,
frequency="3M",
fixing_method="IBOR(2)",
float_spread=18.0,
)
expected = 2.0 + 0.18
assert abs(result - expected) < 3e-3
def test_stub_rate_from_dict_curve_on_fixing_fail(self, curve, curve2, line_curve, line_curve2):
# test an IBOR stub rate is calculated from curve when no fixing is found
for rate_curve in [{"3m": curve, "6m": curve2}, {"3m": line_curve, "6m": line_curve2}]:
result = rate_value(
rate_curve=rate_curve,
rate_fixings="NO_DATA",
start=dt(2000, 1, 3),
end=dt(2000, 5, 18),
stub=True,
frequency="3M",
fixing_method="IBOR(2)",
float_spread=18.0,
)
expected = 2.0 * 46 / 91 + 3.0 * 45 / 91 + 0.18
assert abs(result - expected) < 1e-12
class TestRFRRate:
def test_pandas_series_update_mechanism(self):
# rateslib relies on the following mechanism. Test this for compatibility.
a = Series(index=[3, 4, 5, 6, 7], data=[NA, NA, NA, NA, NA])
b = Series(index=[1, 2, 3, 4, 5], data=[2, 4, 6, 8, 10])
a.update(b)
assert a.index.to_list() == [3, 4, 5, 6, 7]
assert a.to_list() == [6, 8, 10, NA, NA]
def test_populate_rates_from_rate_fixings(self):
fixing_rates = Series(
index=[dt(2000, 1, 1), dt(2000, 1, 2), dt(2000, 1, 3), dt(2000, 1, 4)], data=NA
)
fixings.add(
"USD_SOFR_1B",
Series(index=[dt(1999, 1, 1), dt(2000, 1, 1), dt(2000, 1, 2)], data=[1.0, 2.0, 3.0]),
)
result, _, _ = _RFRRate._push_rate_fixings_as_series_to_fixing_rates(
fixing_rates, "USD_SOFR_1B", FloatFixingMethod.RFRPaymentDelay()
)
assert_series_equal(
result,
Series(
index=[dt(2000, 1, 1), dt(2000, 1, 2), dt(2000, 1, 3), dt(2000, 1, 4)],
data=[2.0, 3.0, NA, NA],
),
)
fixings.pop("USD_SOFR_1B")
def test_populate_rates_from_rate_fixings_all_filled(self):
fixing_rates = Series(index=[dt(2000, 1, 1), dt(2000, 1, 2), dt(2000, 1, 3)], data=NA)
fixings.add(
"USD_SOFR_1B",
Series(
index=[
dt(1999, 1, 1),
dt(2000, 1, 1),
dt(2000, 1, 2),
dt(2000, 1, 3),
dt(2000, 1, 4),
],
data=[1.0, 2.0, 3.0, 4.0, 5.0],
),
)
result, _, _ = _RFRRate._push_rate_fixings_as_series_to_fixing_rates(
fixing_rates, "USD_SOFR_1B", FloatFixingMethod.RFRPaymentDelay()
)
assert_series_equal(
result,
Series(
index=[dt(2000, 1, 1), dt(2000, 1, 2), dt(2000, 1, 3)],
data=[2.0, 3.0, 4.0],
dtype=object,
),
)
fixings.pop("USD_SOFR_1B")
def test_populate_rates_from_rate_fixings_none_filled(self):
fixing_rates = Series(index=[dt(2000, 1, 1), dt(2000, 1, 2)], data=NA)
fixings.add(
"USD_SOFR_1B",
Series(index=[dt(1999, 1, 1)], data=[1.0]),
)
result, _, _ = _RFRRate._push_rate_fixings_as_series_to_fixing_rates(
fixing_rates, "USD_SOFR_1B", FloatFixingMethod.RFRPaymentDelay()
)
assert_series_equal(
result,
Series(index=[dt(2000, 1, 1), dt(2000, 1, 2)], data=[NA, NA], dtype=object),
)
fixings.pop("USD_SOFR_1B")
def test_populate_rates_from_rate_fixings_missing_fixing(self):
fixing_rates = Series(
index=[dt(2000, 1, 1), dt(2000, 1, 2), dt(2000, 1, 3), dt(2000, 1, 4)], data=NA
)
fixings.add("USD_SOFR_1B", Series(index=[dt(1999, 1, 1), dt(2000, 1, 2)], data=[1.0, 3.0]))
with pytest.raises(ValueError, match="The fixings series 'USD_SOFR_1B' for the RFR 1B rat"):
_RFRRate._push_rate_fixings_as_series_to_fixing_rates(
fixing_rates, "USD_SOFR_1B", FloatFixingMethod.RFRPaymentDelay()
)
fixings.pop("USD_SOFR_1B")
@pytest.mark.skip(reason="Not expecting the most recent fixing is an allowed oversight.")
def test_populate_rates_from_rate_fixings_extra_fixing(self):
# this test will fail becuase of the validation that is applied. The missing fixing
# is right at the end of the series and is not detected at the populated/unpopulated
# crossover point.
fixing_rates = Series(
index=[dt(2000, 1, 1), dt(2000, 1, 2), dt(2000, 1, 4), dt(2000, 1, 5)], data=NA
)
fixings.add(
"USD_SOFR_1B",
Series(index=[dt(2000, 1, 1), dt(2000, 1, 2), dt(2000, 1, 3)], data=[1.0, 2.0, 3.0]),
)
with pytest.warns(UserWarning, match="The fixings series 'USD_SOFR' for the RFR 1B rates"):
_RFRRate._push_rate_fixings_as_series_to_fixing_rates(fixing_rates, "USD_SOFR_1B")
fixings.pop("USD_SOFR_1B")
def test_populate_rates_from_rate_fixings_extra_fixing2(self):
# the lengths of the expected fixings in the return and fixing series is different and
# detected.
fixing_rates = Series(
index=[dt(2000, 1, 1), dt(2000, 1, 2), dt(2000, 1, 4), dt(2000, 1, 5)], data=NA
)
fixings.add(
"USD_SOFR_1B",
Series(
index=[dt(2000, 1, 1), dt(2000, 1, 2), dt(2000, 1, 3), dt(2000, 1, 4)],
data=[1.0, 2.0, 3.0, 4.0],
),
)
with pytest.warns(UserWarning, match="The fixings series 'USD_SOFR_1B' for the RFR 1B rat"):
_RFRRate._push_rate_fixings_as_series_to_fixing_rates(
fixing_rates, "USD_SOFR_1B", FloatFixingMethod.RFRPaymentDelay()
)
fixings.pop("USD_SOFR_1B")
@pytest.mark.parametrize(
("fixing_method"),
[FloatFixingMethod.RFRPaymentDelay(), FloatFixingMethod.RFRObservationShift(1)],
)
@pytest.mark.parametrize(
("spread_compound_method", "float_spread"),
[
(SpreadCompoundMethod.NoneSimple, 10.0),
(SpreadCompoundMethod.ISDACompounding, 0.0),
(SpreadCompoundMethod.ISDAFlatCompounding, 0.0),
],
)
def test_efficient_calc(self, curve, fixing_method, spread_compound_method, float_spread):
# rates
r0 = curve._rate_with_raise(dt(2000, 1, 3), dt(2000, 1, 4))
r1 = curve._rate_with_raise(dt(2000, 1, 4), dt(2000, 1, 5))
r2 = curve._rate_with_raise(dt(2000, 1, 5), dt(2000, 1, 6))
r3 = curve._rate_with_raise(dt(2000, 1, 6), dt(2000, 1, 7))
result = rate_value(
start=dt(2000, 1, 4),
end=dt(2000, 1, 7),
rate_curve=curve,
spread_compound_method=spread_compound_method,
float_spread=float_spread,
)
if isinstance(fixing_method, FloatFixingMethod.RFRObservationShift):
expected = (
(1 + r0 / 36000) * (1 + r1 / 36000) * (1 + r2 / 36000) - 1
) * 36000 / 3.0 + float_spread / 100.0
else:
expected = (
(1 + r1 / 36000) * (1 + r2 / 36000) * (1 + r3 / 36000) - 1
) * 36000 / 3.0 + float_spread / 100.0
assert abs(result - expected) < 1e-10
def test_semi_inefficient_calc_with_populated_fixings(self, curve):
fixings.add("USD_SOFR_1B", Series(index=[dt(2000, 1, 3), dt(2000, 1, 4)], data=[1.5, 1.7]))
r2 = curve._rate_with_raise(dt(2000, 1, 5), dt(2000, 1, 6))
r3 = curve._rate_with_raise(dt(2000, 1, 6), dt(2000, 1, 7))
result = rate_value(
start=dt(2000, 1, 3),
end=dt(2000, 1, 7),
rate_curve=curve,
spread_compound_method=SpreadCompoundMethod.NoneSimple,
float_spread=10.0,
rate_fixings="USD_SOFR_1B",
)
expected = (
(1 + 0.015 / 360) * (1 + 0.017 / 360) * (1 + r2 / 36000) * (1 + r3 / 36000) - 1
) * 36000 / 4 + 0.1
fixings.pop("USD_SOFR_1B")
assert abs(result - expected) < 1e-10
def test_inefficient_calc_with_populated_fixings_no_curve_raises(self, curve):
fixings.add("USD_SOFR_1B", Series(index=[dt(2000, 1, 3), dt(2000, 1, 4)], data=[1.5, 1.7]))
with pytest.raises(
FixingMissingForecasterError, match=err.VE_NEEDS_RATE_POPULATE_FIXINGS[:25]
):
rate_value(
start=dt(2000, 1, 3),
end=dt(2000, 1, 7),
rate_curve=NoInput(0),
spread_compound_method=SpreadCompoundMethod.ISDACompounding,
float_spread=10.0,
rate_fixings="USD_SOFR_1B",
rate_series="usd_rfr",
)
fixings.pop("USD_SOFR_1B")
def test_inefficient_calc_with_lockout_too_long_raises(self, curve):
# the lockout param is invalid
with pytest.raises(ValueError, match=err.VE_LOCKOUT_METHOD_PARAM[:25]):
rate_value(
start=dt(2000, 1, 3),
end=dt(2000, 1, 7),
rate_curve=curve,
spread_compound_method=SpreadCompoundMethod.ISDACompounding,
float_spread=10.0,
fixing_method=FloatFixingMethod.RFRLockout(9),
)
@pytest.mark.parametrize("curve_type", ["values", "dfs"])
def test_inefficient_calc_with_populated_fixings(self, curve_type, curve, line_curve):
rate_curve = curve if curve_type == "dfs" else line_curve
fixings.add("USD_SOFR_1B", Series(index=[dt(2000, 1, 3), dt(2000, 1, 4)], data=[1.5, 1.7]))
r2 = rate_curve._rate_with_raise(dt(2000, 1, 5), dt(2000, 1, 6))
r3 = rate_curve._rate_with_raise(dt(2000, 1, 6), dt(2000, 1, 7))
result = rate_value(
start=dt(2000, 1, 3),
end=dt(2000, 1, 7),
rate_curve=rate_curve,
spread_compound_method=SpreadCompoundMethod.NoneSimple,
float_spread=10.0,
fixing_method=FloatFixingMethod.RFRLookback(0),
rate_fixings="USD_SOFR_1B",
)
expected = (
(1 + 0.015 / 360) * (1 + 0.017 / 360) * (1 + r2 / 36000) * (1 + r3 / 36000) - 1
) * 36000 / 4 + 0.1
fixings.pop("USD_SOFR_1B")
assert abs(result - expected) < 1e-10
def test_inefficient_calc_with_non_overlapping_fixings(self, curve):
fixings.add("USD_SOFR_1B", Series(index=[dt(2001, 1, 1)], data=[100.0]))
rate_value(
start=dt(2000, 1, 4),
end=dt(2000, 1, 7),
rate_curve=curve,
spread_compound_method=SpreadCompoundMethod.NoneSimple,
float_spread=0.0,
rate_fixings="USD_SOFR_1B",
)
fixings.pop("USD_SOFR_1B")
@pytest.mark.parametrize(
("fixing_method", "expected"),
[
(
FloatFixingMethod.RFRPaymentDelay(),
((1 + 0.04 * D) * (1 + 0.05 * D) * (1 + 0.06 * D) * (1 + 0.07 * D) - 1)
* 100
/ (4 * D),
),
(
FloatFixingMethod.RFRObservationShift(2),
((1 + 0.02 * D) * (1 + 0.03 * D) * (1 + 0.04 * D) * (1 + 0.05 * D) - 1)
* 100
/ (4 * D),
),
(
FloatFixingMethod.RFRLockout(2),
((1 + 0.04 * D) * (1 + 0.05 * D) * (1 + 0.05 * D) * (1 + 0.05 * D) - 1)
* 100
/ (4 * D),
),
(
FloatFixingMethod.RFRLookback(2),
((1 + 0.02 * D) * (1 + 0.03 * D) * (1 + 0.04 * D) * (1 + 0.05 * D) - 1)
* 100
/ (4 * D),
),
(
FloatFixingMethod.RFRPaymentDelayAverage(),
(4 + 5 + 6 + 7) / 4,
),
(
FloatFixingMethod.RFRObservationShiftAverage(2),
(2 + 3 + 4 + 5) / 4,
),
(
FloatFixingMethod.RFRLockoutAverage(2),
(4 + 5 + 5 + 5) / 4,
),
(
FloatFixingMethod.RFRLookbackAverage(2),
(2 + 3 + 4 + 5) / 4,
),
],
)
def test_fixing_methods(self, fixing_method, expected):
rate_curve = LineCurve(
nodes={
dt(2000, 1, 1): 2.0,
dt(2000, 1, 2): 3.0,
dt(2000, 1, 3): 4.0,
dt(2000, 1, 4): 5.0,
dt(2000, 1, 5): 6.0,
dt(2000, 1, 6): 7.0,
dt(2000, 1, 7): 8.0,
},
convention="act360",
calendar="all",
)
result = rate_value(
start=dt(2000, 1, 3),
end=dt(2000, 1, 7),
rate_curve=rate_curve,
spread_compound_method=SpreadCompoundMethod.NoneSimple,
float_spread=0.0,
fixing_method=fixing_method,
)
assert abs(result - expected) < 1e-10
@pytest.mark.parametrize(
"fixing_method", [FloatFixingMethod.RFRPaymentDelay(), FloatFixingMethod.RFRLockout(0)]
)
def test_bus252_convention(self, fixing_method):
rate_curve = Curve(
nodes={
dt(2000, 1, 3): 1.0,
dt(2000, 1, 17): 0.999,
},
convention="bus252",
calendar="bus",
)
result = rate_value(
start=dt(2000, 1, 6),
end=dt(2000, 1, 11),
rate_curve=rate_curve,
spread_compound_method=SpreadCompoundMethod.NoneSimple,
float_spread=0.0,
fixing_method=fixing_method,
)
r1 = rate_curve._rate_with_raise(dt(2000, 1, 6), "1b")
r2 = rate_curve._rate_with_raise(dt(2000, 1, 7), "1b")
r3 = rate_curve._rate_with_raise(dt(2000, 1, 10), "1b")
expected = ((1 + r1 / 25200) * (1 + r2 / 25200) * (1 + r3 / 25200) - 1) * 25200 / 3
assert abs(result - expected) < 1e-10
================================================
FILE: python/tests/periods/test_periods_init.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
import pytest
import rateslib.errors as err
from rateslib.periods import (
Cashflow,
CreditPremiumPeriod,
CreditProtectionPeriod,
FixedPeriod,
FloatPeriod,
# IndexCashflow,
# IndexFixedPeriod,
# IndexFloatPeriod,
# NonDeliverableCashflow,
# NonDeliverableFixedPeriod,
# NonDeliverableFloatPeriod,
# NonDeliverableIndexCashflow,
# NonDeliverableIndexFixedPeriod,
# NonDeliverableIndexFloatPeriod,
ZeroFixedPeriod,
)
from rateslib.periods.cashflow import MtmCashflow
from rateslib.scheduling import Schedule
class TestCashflow:
def test_init(self):
Cashflow(currency="usd", notional=2e6, payment=dt(2000, 1, 1))
pass
class TestFixedPeriod:
def test_init(self):
FixedPeriod(
start=dt(2000, 1, 1),
end=dt(2000, 2, 1),
payment=dt(2000, 2, 1),
frequency="M",
notional=2e6,
currency="usd",
convention="act365f",
calendar="tgt",
adjuster="mf",
)
pass
class TestFloatPeriod:
def test_init(self):
FloatPeriod(
start=dt(2000, 1, 1),
end=dt(2000, 2, 1),
payment=dt(2000, 2, 1),
frequency="M",
notional=2e6,
currency="usd",
convention="act365f",
calendar="tgt",
adjuster="mf",
)
pass
# @pytest.mark.skip(reason="Deprecated objects.")
# class TestIndexFixedPeriod:
# def test_init(self):
# IndexFixedPeriod(
# start=dt(2000, 1, 1),
# end=dt(2000, 2, 1),
# payment=dt(2000, 2, 1),
# frequency="M",
# notional=2e6,
# currency="usd",
# convention="act365f",
# calendar="tgt",
# adjuster="mf",
# index_base=100.0,
# )
# pass
#
# def test_errors(self):
# with pytest.raises(ValueError, match=err.VE_NEEDS_INDEX_PARAMS[:15]):
# IndexFixedPeriod(
# start=dt(2000, 1, 1),
# end=dt(2000, 2, 1),
# payment=dt(2000, 2, 1),
# frequency="M",
# notional=2e6,
# currency="usd",
# convention="act365f",
# calendar="tgt",
# adjuster="mf",
# )
#
# with pytest.raises(ValueError, match=err.VE_HAS_ND_CURRENCY_PARAMS[:15]):
# IndexFixedPeriod(
# start=dt(2000, 1, 1),
# end=dt(2000, 2, 1),
# payment=dt(2000, 2, 1),
# frequency="M",
# notional=2e6,
# currency="usd",
# convention="act365f",
# calendar="tgt",
# adjuster="mf",
# index_base=100.0,
# pair="eurusd",
# )
#
#
# @pytest.mark.skip(reason="Deprecated objects.")
# class TestNonDeliverableIndexFixedPeriod:
# def test_init(self):
# NonDeliverableIndexFixedPeriod(
# pair="eurusd",
# start=dt(2000, 1, 1),
# end=dt(2000, 2, 1),
# payment=dt(2000, 2, 1),
# frequency="M",
# notional=2e6,
# currency="usd",
# convention="act365f",
# calendar="tgt",
# adjuster="mf",
# index_base=100.0,
# )
# pass
#
# def test_errors(self):
# with pytest.raises(ValueError, match=err.VE_NEEDS_ND_CURRENCY_PARAMS[:15]):
# NonDeliverableIndexFixedPeriod(
# start=dt(2000, 1, 1),
# end=dt(2000, 2, 1),
# payment=dt(2000, 2, 1),
# frequency="M",
# notional=2e6,
# currency="usd",
# convention="act365f",
# calendar="tgt",
# adjuster="mf",
# index_base=100.0,
# )
#
# with pytest.raises(ValueError, match=err.VE_NEEDS_INDEX_PARAMS[:15]):
# NonDeliverableIndexFixedPeriod(
# pair="eurusd",
# start=dt(2000, 1, 1),
# end=dt(2000, 2, 1),
# payment=dt(2000, 2, 1),
# frequency="M",
# notional=2e6,
# currency="usd",
# convention="act365f",
# calendar="tgt",
# adjuster="mf",
# )
#
#
# @pytest.mark.skip(reason="Deprecated objects.")
# class TestNonDeliverableCashflow:
# def test_init(self):
# NonDeliverableCashflow(
# currency="usd", pair="brlusd", notional=2e6, payment=dt(2000, 1, 1)
# )
# pass
#
# def test_errors(self):
# with pytest.raises(ValueError, match=err.VE_NEEDS_ND_CURRENCY_PARAMS[:15]):
# NonDeliverableCashflow(currency="usd", notional=2e6, payment=dt(2000, 1, 1))
#
# with pytest.raises(ValueError, match=err.VE_HAS_INDEX_PARAMS[:15]):
# NonDeliverableCashflow(
# currency="usd",
# pair="eurusd",
# notional=2e6,
# payment=dt(2000, 1, 1),
# index_base=100.0,
# )
#
# def test_undefined_currencies(self):
# with pytest.raises(ValueError, match=err.VE_MISMATCHED_ND_PAIR[:15]):
# NonDeliverableCashflow(
# pair="eurbrl",
# payment=dt(2000, 1, 1),
# notional=2e6,
# )
#
#
# @pytest.mark.skip(reason="Deprecated objects.")
# class TestIndexCashflow:
# def test_init(self):
# IndexCashflow(currency="usd", notional=2e6, payment=dt(2000, 1, 1), index_base=100.0)
# pass
#
# def test_errors(self):
# with pytest.raises(ValueError, match=err.VE_NEEDS_INDEX_PARAMS[:15]):
# IndexCashflow(currency="usd", notional=2e6, payment=dt(2000, 1, 1))
#
# with pytest.raises(ValueError, match=err.VE_HAS_ND_CURRENCY_PARAMS[:15]):
# IndexCashflow(
# currency="usd",
# pair="eurusd",
# notional=2e6,
# payment=dt(2000, 1, 1),
# index_base=100.0,
# )
#
#
# @pytest.mark.skip(reason="Deprecated objects.")
# class TestNonDeliverableIndexCashflow:
# def test_init(self):
# NonDeliverableIndexCashflow(
# currency="usd", pair="eurusd", notional=2e6, payment=dt(2000, 1, 1), index_base=100.0
# )
# pass
#
# def test_errors(self):
# with pytest.raises(ValueError, match=err.VE_NEEDS_INDEX_PARAMS[:15]):
# NonDeliverableIndexCashflow(currency="usd", notional=2e6, payment=dt(2000, 1, 1))
#
# with pytest.raises(ValueError, match=err.VE_NEEDS_ND_CURRENCY_PARAMS[:15]):
# NonDeliverableIndexCashflow(
# currency="usd",
# notional=2e6,
# payment=dt(2000, 1, 1),
# index_base=100.0,
# )
class TestMtmCashflow:
def test_init(self):
MtmCashflow(
currency="usd",
notional=2e6,
payment=dt(2000, 1, 10),
pair="eurusd",
fx_fixings_start=2.0,
fx_fixings_end=3.0,
start=dt(2000, 1, 1),
end=dt(2000, 1, 10),
)
# @pytest.mark.skip(reason="Deprecated objects.")
# class TestNonDeliverableFixedPeriod:
# def test_init(self):
# NonDeliverableFixedPeriod(
# start=dt(2000, 1, 1),
# end=dt(2000, 2, 1),
# payment=dt(2000, 2, 1),
# frequency="M",
# notional=2e6,
# currency="usd",
# convention="act365f",
# calendar="tgt",
# adjuster="mf",
# pair="brlusd",
# )
# pass
#
# def test_errors(self):
# with pytest.raises(ValueError, match=err.VE_NEEDS_ND_CURRENCY_PARAMS[:15]):
# NonDeliverableFixedPeriod(
# start=dt(2000, 1, 1),
# end=dt(2000, 2, 1),
# payment=dt(2000, 2, 1),
# frequency="M",
# notional=2e6,
# currency="usd",
# convention="act365f",
# calendar="tgt",
# adjuster="mf",
# )
#
# with pytest.raises(ValueError, match=err.VE_HAS_INDEX_PARAMS[:15]):
# NonDeliverableFixedPeriod(
# start=dt(2000, 1, 1),
# end=dt(2000, 2, 1),
# payment=dt(2000, 2, 1),
# frequency="M",
# notional=2e6,
# currency="usd",
# convention="act365f",
# calendar="tgt",
# adjuster="mf",
# pair="brlusd",
# index_base=100.0,
# )
#
#
# @pytest.mark.skip(reason="Deprecated objects.")
# class TestNonDeliverableFloatPeriod:
# def test_init(self):
# NonDeliverableFloatPeriod(
# start=dt(2000, 1, 1),
# end=dt(2000, 2, 1),
# payment=dt(2000, 2, 1),
# frequency="M",
# notional=2e6,
# currency="usd",
# convention="act365f",
# calendar="tgt",
# adjuster="mf",
# pair="brlusd",
# )
# pass
#
# def test_errors(self):
# with pytest.raises(ValueError, match=err.VE_NEEDS_ND_CURRENCY_PARAMS[:15]):
# NonDeliverableFloatPeriod(
# start=dt(2000, 1, 1),
# end=dt(2000, 2, 1),
# payment=dt(2000, 2, 1),
# frequency="M",
# notional=2e6,
# currency="usd",
# convention="act365f",
# calendar="tgt",
# adjuster="mf",
# )
#
# with pytest.raises(ValueError, match=err.VE_HAS_INDEX_PARAMS[:15]):
# NonDeliverableFloatPeriod(
# start=dt(2000, 1, 1),
# end=dt(2000, 2, 1),
# payment=dt(2000, 2, 1),
# frequency="M",
# notional=2e6,
# currency="usd",
# convention="act365f",
# calendar="tgt",
# adjuster="mf",
# pair="brlusd",
# index_base=100.0,
# )
#
#
# @pytest.mark.skip(reason="Deprecated objects.")
# class TestIndexFloatPeriod:
# def test_init(self):
# IndexFloatPeriod(
# start=dt(2000, 1, 1),
# end=dt(2000, 2, 1),
# payment=dt(2000, 2, 1),
# frequency="M",
# notional=2e6,
# currency="usd",
# convention="act365f",
# calendar="tgt",
# adjuster="mf",
# index_base=100.0,
# )
# pass
#
# def test_errors(self):
# with pytest.raises(ValueError, match=err.VE_NEEDS_INDEX_PARAMS[:15]):
# IndexFloatPeriod(
# start=dt(2000, 1, 1),
# end=dt(2000, 2, 1),
# payment=dt(2000, 2, 1),
# frequency="M",
# notional=2e6,
# currency="usd",
# convention="act365f",
# calendar="tgt",
# adjuster="mf",
# )
#
# with pytest.raises(ValueError, match=err.VE_HAS_ND_CURRENCY_PARAMS[:15]):
# IndexFloatPeriod(
# start=dt(2000, 1, 1),
# end=dt(2000, 2, 1),
# payment=dt(2000, 2, 1),
# frequency="M",
# notional=2e6,
# currency="usd",
# convention="act365f",
# calendar="tgt",
# adjuster="mf",
# index_base=100.0,
# pair="eurusd",
# )
#
#
# @pytest.mark.skip(reason="Deprecated objects.")
# class TestNonDeliverableIndexFloatPeriod:
# def test_init(self):
# NonDeliverableIndexFloatPeriod(
# start=dt(2000, 1, 1),
# end=dt(2000, 2, 1),
# payment=dt(2000, 2, 1),
# frequency="M",
# notional=2e6,
# currency="usd",
# convention="act365f",
# calendar="tgt",
# adjuster="mf",
# index_base=100.0,
# pair="eurusd",
# )
# pass
#
# def test_errors(self):
# with pytest.raises(ValueError, match=err.VE_NEEDS_INDEX_PARAMS[:15]):
# NonDeliverableIndexFloatPeriod(
# start=dt(2000, 1, 1),
# end=dt(2000, 2, 1),
# payment=dt(2000, 2, 1),
# frequency="M",
# notional=2e6,
# currency="usd",
# convention="act365f",
# calendar="tgt",
# adjuster="mf",
# pair="eurusd",
# )
#
# with pytest.raises(ValueError, match=err.VE_NEEDS_ND_CURRENCY_PARAMS[:15]):
# NonDeliverableIndexFloatPeriod(
# start=dt(2000, 1, 1),
# end=dt(2000, 2, 1),
# payment=dt(2000, 2, 1),
# frequency="M",
# notional=2e6,
# currency="usd",
# convention="act365f",
# calendar="tgt",
# adjuster="mf",
# index_base=100.0,
# )
class TestCreditPremiumPeriod:
def test_init(self):
CreditPremiumPeriod(
start=dt(2000, 1, 1),
end=dt(2000, 2, 1),
payment=dt(2000, 2, 1),
frequency="M",
notional=2e6,
premium_accrued=False,
)
class TestCreditProtectionPeriod:
def test_init(self):
CreditProtectionPeriod(
start=dt(2000, 1, 1),
end=dt(2000, 2, 1),
payment=dt(2000, 2, 1),
frequency="M",
notional=2e6,
)
class TestZeroFixedPeriod:
def test_init(self):
ZeroFixedPeriod(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2000, 9, 1),
frequency="M",
),
convention="act365f",
)
================================================
FILE: python/tests/periods/test_periods_legacy.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
import os
import re
from dataclasses import replace
from datetime import datetime as dt
from datetime import timedelta
import numpy as np
import pytest
import rateslib.errors as err
from pandas import DataFrame, Index, MultiIndex, Series, date_range
from pandas.testing import assert_frame_equal
from rateslib import defaults, fixings
from rateslib.curves import CompositeCurve, Curve, LineCurve
from rateslib.curves.curves import _try_index_value
from rateslib.data.fixings import FloatRateSeries, FXIndex
from rateslib.data.loader import FixingMissingForecasterError
from rateslib.default import NoInput, _drb
from rateslib.dual import Dual, gradient
from rateslib.enums import Err, FloatFixingMethod, Ok, OptionPricingModel
from rateslib.enums.parameters import FXDeltaMethod, IndexMethod, SpreadCompoundMethod
from rateslib.fx import FXForwards, FXRates
from rateslib.periods import (
Cashflow,
CreditPremiumPeriod,
CreditProtectionPeriod,
FixedPeriod,
FloatPeriod,
FXCallPeriod,
FXPutPeriod,
IRSCallPeriod,
IRSPutPeriod,
MtmCashflow,
ZeroFixedPeriod,
)
from rateslib.periods.float_rate import rate_value
from rateslib.scheduling import Cal, Frequency, RollDay, Schedule
from rateslib.volatility import (
FXDeltaVolSmile,
FXSabrSmile,
FXSabrSurface,
IRSabrCube,
IRSabrSmile,
IRSplineCube,
IRSplineSmile,
)
from rateslib.volatility.utils import _OptionModelBlack76
@pytest.fixture
def curve():
nodes = {
dt(2022, 1, 1): 1.00,
dt(2022, 4, 1): 0.99,
dt(2022, 7, 1): 0.98,
dt(2022, 10, 1): 0.97,
}
return Curve(nodes=nodes, interpolation="log_linear", id="curve_fixture")
@pytest.fixture
def hazard_curve():
nodes = {
dt(2022, 1, 1): 1.00,
dt(2022, 4, 1): 0.999,
dt(2022, 7, 1): 0.997,
dt(2022, 10, 1): 0.991,
}
return Curve(nodes=nodes, interpolation="log_linear", id="hazard_fixture")
@pytest.fixture
def fxr():
return FXRates({"usdnok": 10.0})
@pytest.fixture
def rfr_curve():
v1 = 1 / (1 + 0.01 / 365)
v2 = v1 / (1 + 0.02 / 365)
v3 = v2 / (1 + 0.03 / 365)
v4 = v3 / (1 + 0.04 / 365)
nodes = {
dt(2022, 1, 1): 1.00,
dt(2022, 1, 2): v1,
dt(2022, 1, 3): v2,
dt(2022, 1, 4): v3,
dt(2022, 1, 5): v4,
}
return Curve(nodes=nodes, interpolation="log_linear", convention="act365f")
@pytest.fixture
def line_curve():
nodes = {
dt(2022, 1, 1): 1.00,
dt(2022, 1, 2): 2.00,
dt(2022, 1, 3): 3.00,
dt(2022, 1, 4): 4.00,
dt(2022, 1, 5): 5.00,
}
return LineCurve(nodes=nodes, interpolation="linear", convention="act365f")
@pytest.mark.parametrize(
"obj",
[
FixedPeriod(
start=dt(2000, 1, 1),
end=dt(2000, 2, 1),
payment=dt(2000, 2, 1),
frequency=Frequency.Months(1, None),
fixed_rate=2.0,
),
Cashflow(notional=1e6, payment=dt(2022, 1, 1), currency="usd"),
# IndexCashflow(notional=1e6, payment=dt(2022, 1, 1), currency="usd", index_base=100.0),
# IndexFixedPeriod(
# start=dt(2000, 1, 1),
# end=dt(2000, 2, 1),
# payment=dt(2000, 2, 1),
# frequency=Frequency.Months(1, None),
# fixed_rate=2.0,
# index_base=1.0,
# ),
FloatPeriod(
start=dt(2000, 1, 1),
end=dt(2000, 2, 1),
payment=dt(2000, 2, 1),
frequency=Frequency.Months(1, None),
),
FXCallPeriod(
pair="eurusd",
expiry=dt(2000, 1, 1),
delivery=dt(2000, 1, 1),
),
FXPutPeriod(
pair="eurusd",
expiry=dt(2000, 1, 1),
delivery=dt(2000, 1, 1),
),
],
)
def test_repr(obj):
result = obj.__repr__()
expected = f""
assert result == expected
class TestFXandBase:
def test_fx_and_base_raise(self) -> None:
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.96}, id="curve")
per = FixedPeriod(
start=dt(2022, 2, 1),
end=dt(2022, 3, 1),
payment=dt(2022, 3, 1),
frequency=Frequency.Months(12, None),
fixed_rate=2,
currency="usd",
)
with pytest.raises(ValueError, match="`base` "):
per.npv(rate_curve=curve, base="eur")
def test_fx_and_base_warn1(self) -> None:
# base and numeric fx given.
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.96}, id="curve")
per = FixedPeriod(
start=dt(2022, 2, 1),
end=dt(2022, 3, 1),
payment=dt(2022, 3, 1),
frequency=Frequency.Months(12, None),
fixed_rate=2.0,
currency="usd",
)
with pytest.warns(DeprecationWarning, match="`base` "):
per.npv(rate_curve=curve, disc_curve=curve, fx=1.1, base="eur")
def test_fx_and_base_warn2(self) -> None:
# base is none and numeric fx given.
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.96}, id="curve")
per = FixedPeriod(
start=dt(2022, 2, 1),
end=dt(2022, 3, 1),
payment=dt(2022, 3, 1),
frequency=Frequency.Months(12, None),
fixed_rate=2.0,
currency="usd",
)
with pytest.warns(UserWarning, match="It is not best practice to provide"):
per.npv(rate_curve=curve, fx=1.1)
class TestFloatPeriod:
def test_none_cashflow(self) -> None:
float_period = FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
)
assert float_period.try_cashflow(rate_curve=None).is_err
@pytest.mark.parametrize(
("spread_method", "float_spread", "expected"),
[
("none_simple", 100.0, 24744.478172244584),
("isda_compounding", 0.0, 24744.478172244584),
("isda_compounding", 100.0, 25053.484941157145),
("isda_flat_compounding", 100.0, 24867.852396116967),
],
)
def test_float_period_analytic_delta(
self,
curve,
spread_method,
float_spread,
expected,
) -> None:
float_period = FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
float_spread=float_spread,
spread_compound_method=spread_method,
)
result = float_period.analytic_delta(rate_curve=curve)
assert abs(result - expected) < 1e-7
@pytest.mark.parametrize(
("spread", "crv", "fx"),
[
(4.00, True, 2.0),
(NoInput(0), False, 2.0),
(4.00, True, 10.0),
(NoInput(0), False, 10.0),
],
)
def test_float_period_cashflows(self, curve, fxr, spread, crv, fx) -> None:
float_period = FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
float_spread=spread,
)
curve = curve if crv else None
rate = None if curve is None else float(float_period.rate(curve))
cashflow = None if rate is None else rate * -1e9 * float_period.period_params.dcf / 100
expected = {
defaults.headers["base"]: "UNSPECIFIED" if fx == 2.0 else "NOK",
defaults.headers["type"]: "FloatPeriod",
defaults.headers["stub_type"]: "Regular",
defaults.headers["a_acc_start"]: dt(2022, 1, 1),
defaults.headers["a_acc_end"]: dt(2022, 4, 1),
defaults.headers["payment"]: dt(2022, 4, 3),
defaults.headers["notional"]: 1e9,
defaults.headers["currency"]: "USD",
defaults.headers["convention"]: "Act360",
defaults.headers["dcf"]: float_period.period_params.dcf,
defaults.headers["df"]: 0.9897791268897856 if crv else None,
defaults.headers["rate"]: rate,
defaults.headers["spread"]: 0.0 if spread is NoInput.blank else spread,
defaults.headers["npv"]: -10096746.871171726 if crv else None,
defaults.headers["cashflow"]: cashflow,
defaults.headers["fx"]: fx,
defaults.headers["npv_fx"]: -10096746.871171726 * fx if crv else None,
defaults.headers["collateral"]: None,
}
if fx == 2.0:
with pytest.warns(UserWarning):
# It is not best practice to provide `fx` as numeric
result = float_period.cashflows(
rate_curve=curve if crv else NoInput(0),
fx=2.0,
base=NoInput(0),
)
else:
result = float_period.cashflows(
rate_curve=curve if crv else NoInput(0),
fx=fxr,
base="nok",
)
assert result == expected
def test_spread_compound_raises(self) -> None:
with pytest.raises(ValueError, match="`spread_compound_method`"):
FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
frequency=Frequency.Months(3, None),
spread_compound_method="bad_vibes",
)
def test_spread_compound_calc_raises(self) -> None:
with pytest.raises(ValueError, match="`spread_compound_method` as string: 'bad_input'"):
rate_value(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
spread_compound_method="bad_input",
float_spread=1,
)
@pytest.mark.parametrize(
"scm",
[
SpreadCompoundMethod.ISDACompounding,
SpreadCompoundMethod.ISDAFlatCompounding,
SpreadCompoundMethod.NoneSimple,
],
)
@pytest.mark.parametrize(
("meth"),
[
FloatFixingMethod.RFRObservationShift(2),
FloatFixingMethod.RFRPaymentDelay(),
FloatFixingMethod.RFRLockout(2),
FloatFixingMethod.RFRLookback(2),
],
)
def test_spread_compound_with_fixing_method_allowed(self, scm, meth):
FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 1),
frequency="Q",
float_spread=1.0,
spread_compound_method=scm,
fixing_method=meth,
)
def test_rfr_lockout_too_few_dates(self, curve) -> None:
period = FloatPeriod(
start=dt(2022, 1, 10),
end=dt(2022, 1, 15),
payment=dt(2022, 1, 15),
frequency=Frequency.Months(1, None),
fixing_method="rfr_lockout(6)",
)
with pytest.raises(ValueError, match="The `method_param` for an RFR Lockout type `fixing_"):
period.rate(curve)
def test_fixing_method_raises(self) -> None:
with pytest.raises(ValueError, match="`fixing_method`"):
FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
frequency=Frequency.Months(3, None),
fixing_method="bad_vibes",
)
def test_float_period_npv(self, curve) -> None:
float_period = FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
)
result = float_period.npv(rate_curve=curve)
assert abs(result + 9997768.95848275) < 1e-7
@pytest.mark.parametrize(
"scm", [SpreadCompoundMethod.ISDACompounding, SpreadCompoundMethod.ISDAFlatCompounding]
)
@pytest.mark.parametrize(
"fm",
[
FloatFixingMethod.RFRObservationShiftAverage(2),
FloatFixingMethod.RFRPaymentDelayAverage(),
FloatFixingMethod.RFRLockoutAverage(2),
FloatFixingMethod.RFRLookbackAverage(2),
FloatFixingMethod.IBOR(2),
],
)
def test_rfr_avg_method_raises(self, scm, fm, curve) -> None:
msg = "is not compatible"
with pytest.raises(ValueError, match=msg):
FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 1, 4),
payment=dt(2022, 1, 4),
frequency=Frequency.Months(3, None),
fixing_method=fm,
spread_compound_method=scm,
)
@pytest.mark.parametrize("curve_type", ["curve", "line_curve"])
def test_rfr_payment_delay_method(self, curve_type, rfr_curve, line_curve) -> None:
curve = rfr_curve if curve_type == "curve" else line_curve
period = FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 1, 4),
payment=dt(2022, 1, 4),
frequency=Frequency.Months(3, None),
fixing_method="rfr_payment_delay",
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
result = period.rate(curve)
expected = ((1 + 0.01 / 365) * (1 + 0.02 / 365) * (1 + 0.03 / 365) - 1) * 36500 / 3
assert abs(result - expected) < 1e-12
@pytest.mark.parametrize("curve_type", ["curve", "line_curve"])
def test_rfr_payment_delay_method_with_fixings(self, curve_type, rfr_curve, line_curve) -> None:
curve = rfr_curve if curve_type == "curve" else line_curve
name = str(hash(os.urandom(8)))
fixings.add(f"{name}_1B", Series(index=[dt(2022, 1, 1), dt(2022, 1, 2)], data=[10.0, 8.0]))
period = FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 1, 4),
payment=dt(2022, 1, 4),
frequency=Frequency.Months(3, None),
fixing_method="rfr_payment_delay",
rate_fixings=name,
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
result = period.rate(curve)
expected = ((1 + 0.10 / 365) * (1 + 0.08 / 365) * (1 + 0.03 / 365) - 1) * 36500 / 3
assert abs(result - expected) < 1e-12
fixings.pop(f"{name}_1B")
@pytest.mark.parametrize("curve_type", ["curve", "line_curve"])
def test_rfr_payment_delay_avg_method(self, curve_type, rfr_curve, line_curve) -> None:
curve = rfr_curve if curve_type == "curve" else line_curve
period = FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 1, 4),
payment=dt(2022, 1, 4),
frequency=Frequency.Months(3, None),
fixing_method="rfr_payment_delay_avg",
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
result = period.rate(curve)
expected = (1.0 + 2.0 + 3.0) / 3
assert abs(result - expected) < 1e-11
@pytest.mark.parametrize("curve_type", ["curve", "line_curve"])
def test_rfr_payment_delay_avg_method_with_fixings(
self,
curve_type,
rfr_curve,
line_curve,
) -> None:
curve = rfr_curve if curve_type == "curve" else line_curve
fixings.add("887762_1B", Series(index=[dt(2022, 1, 1), dt(2022, 1, 2)], data=[10.0, 8.0]))
period = FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 1, 4),
payment=dt(2022, 1, 4),
frequency=Frequency.Months(3, None),
fixing_method="rfr_payment_delay_avg",
rate_fixings="887762",
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
result = period.rate(curve)
expected = (10.0 + 8.0 + 3.0) / 3
assert abs(result - expected) < 1e-11
fixings.pop("887762_1B")
@pytest.mark.parametrize("curve_type", ["curve", "line_curve"])
def test_rfr_lockout_avg_method(self, curve_type, rfr_curve, line_curve) -> None:
curve = rfr_curve if curve_type == "curve" else line_curve
period = FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 1, 4),
payment=dt(2022, 1, 4),
frequency=Frequency.Months(3, None),
fixing_method="rfr_lockout_avg(2)",
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
# assert period.rate_params._is_inefficient is True # lockout requires all fixings.
result = period.rate(curve)
expected = 1.0
assert abs(result - expected) < 1e-11
period = FloatPeriod(
start=dt(2022, 1, 2),
end=dt(2022, 1, 5),
payment=dt(2022, 1, 5),
frequency=Frequency.Months(3, None),
fixing_method="rfr_lockout_avg(1)",
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
result = period.rate(rfr_curve)
expected = (2 + 3.0 + 3.0) / 3
assert abs(result - expected) < 1e-11
@pytest.mark.parametrize("curve_type", ["curve", "line_curve"])
def test_rfr_lockout_avg_method_with_fixings(self, curve_type, rfr_curve, line_curve) -> None:
name = str(hash(os.urandom(2)))
curve = rfr_curve if curve_type == "curve" else line_curve
fixings.add(f"{name}_1B", Series(index=[dt(2022, 1, 1), dt(2022, 1, 2)], data=[10.0, 8.0]))
period = FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 1, 4),
payment=dt(2022, 1, 4),
frequency=Frequency.Months(3, None),
fixing_method="rfr_lockout_avg(2)",
rate_fixings=name,
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
result = period.rate(curve)
expected = 10.0
assert abs(result - expected) < 1e-12
period = FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 1, 4),
payment=dt(2022, 1, 1),
frequency=Frequency.Months(3, None),
fixing_method="rfr_lockout_avg(1)",
rate_fixings=name,
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
result = period.rate(rfr_curve)
expected = (10.0 + 8.0 + 8.0) / 3
assert abs(result - expected) < 1e-12
fixings.pop(f"{name}_1B")
@pytest.mark.parametrize("curve_type", ["curve", "line_curve"])
def test_rfr_lockout_method(self, curve_type, rfr_curve, line_curve) -> None:
curve = rfr_curve if curve_type == "curve" else line_curve
period = FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 1, 4),
payment=dt(2022, 1, 4),
frequency=Frequency.Months(3, None),
fixing_method="rfr_lockout(2)",
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
# assert period.rate_params._is_inefficient is True # lockout requires all fixings.
result = period.rate(curve)
expected = ((1 + 0.01 / 365) * (1 + 0.01 / 365) * (1 + 0.01 / 365) - 1) * 36500 / 3
assert abs(result - expected) < 1e-12
period = FloatPeriod(
start=dt(2022, 1, 2),
end=dt(2022, 1, 5),
payment=dt(2022, 1, 5),
frequency=Frequency.Months(3, None),
fixing_method="rfr_lockout(1)",
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
result = period.rate(rfr_curve)
expected = ((1 + 0.02 / 365) * (1 + 0.03 / 365) * (1 + 0.03 / 365) - 1) * 36500 / 3
assert abs(result - expected) < 1e-12
@pytest.mark.parametrize("curve_type", ["curve", "line_curve"])
def test_rfr_lockout_method_with_fixings(self, curve_type, rfr_curve, line_curve) -> None:
curve = rfr_curve if curve_type == "curve" else line_curve
fixings.add("887762_1B", Series(index=[dt(2022, 1, 1), dt(2022, 1, 2)], data=[10.0, 8.0]))
period = FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 1, 4),
payment=dt(2022, 1, 4),
frequency=Frequency.Months(3, None),
fixing_method="rfr_lockout(2)",
rate_fixings="887762",
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
result = period.rate(curve)
expected = ((1 + 0.10 / 365) * (1 + 0.10 / 365) * (1 + 0.10 / 365) - 1) * 36500 / 3
assert abs(result - expected) < 1e-12
period = FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 1, 4),
payment=dt(2022, 1, 4),
frequency=Frequency.Months(3, None),
fixing_method="rfr_lockout(1)",
rate_fixings="887762",
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
result = period.rate(rfr_curve)
expected = ((1 + 0.10 / 365) * (1 + 0.08 / 365) * (1 + 0.08 / 365) - 1) * 36500 / 3
assert abs(result - expected) < 1e-12
fixings.pop("887762_1B")
@pytest.mark.parametrize("curve_type", ["curve", "line_curve"])
def test_rfr_observation_shift_method(self, curve_type, rfr_curve, line_curve) -> None:
curve = rfr_curve if curve_type == "curve" else line_curve
period = FloatPeriod(
start=dt(2022, 1, 2),
end=dt(2022, 1, 5),
payment=dt(2022, 1, 5),
frequency=Frequency.Months(3, None),
fixing_method="rfr_observation_shift(1)",
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
result = period.rate(curve)
expected = ((1 + 0.01 / 365) * (1 + 0.02 / 365) * (1 + 0.03 / 365) - 1) * 36500 / 3
assert abs(result - expected) < 1e-12
period = FloatPeriod(
start=dt(2022, 1, 3),
end=dt(2022, 1, 5),
payment=dt(2022, 1, 5),
frequency=Frequency.Months(3, None),
fixing_method="rfr_observation_shift(2)",
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
result = period.rate(curve)
expected = ((1 + 0.01 / 365) * (1 + 0.02 / 365) - 1) * 36500 / 2
assert abs(result - expected) < 1e-12
@pytest.mark.parametrize("curve_type", ["curve", "line_curve"])
def test_rfr_observation_shift_method_with_fixings(
self,
curve_type,
rfr_curve,
line_curve,
) -> None:
curve = rfr_curve if curve_type == "curve" else line_curve
name = str(hash(os.urandom(8)))
fixings.add(f"{name}_1B", Series(index=[dt(2022, 1, 1), dt(2022, 1, 2)], data=[10.0, 8.0]))
period = FloatPeriod(
start=dt(2022, 1, 2),
end=dt(2022, 1, 5),
payment=dt(2022, 1, 5),
frequency=Frequency.Months(3, None),
fixing_method="rfr_observation_shift(1)",
rate_fixings=name,
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
result = period.rate(curve)
expected = ((1 + 0.10 / 365) * (1 + 0.08 / 365) * (1 + 0.03 / 365) - 1) * 36500 / 3
assert abs(result - expected) < 1e-12
period = FloatPeriod(
start=dt(2022, 1, 3),
end=dt(2022, 1, 5),
payment=dt(2022, 1, 5),
frequency=Frequency.Months(3, None),
fixing_method="rfr_observation_shift(2)",
rate_fixings=name,
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
result = period.rate(curve)
expected = ((1 + 0.10 / 365) * (1 + 0.08 / 365) - 1) * 36500 / 2
assert abs(result - expected) < 1e-12
fixings.pop(f"{name}_1B")
@pytest.mark.parametrize("curve_type", ["curve", "line_curve"])
def test_rfr_observation_shift_method_with_fixings_and_float_spread(
self,
curve_type,
rfr_curve,
line_curve,
) -> None:
curve = rfr_curve if curve_type == "curve" else line_curve
name = str(hash(os.urandom(8)))
fixings.add(f"{name}_1B", Series(index=[dt(2022, 1, 1), dt(2022, 1, 2)], data=[10.0, 8.0]))
period = FloatPeriod(
start=dt(2022, 1, 3),
end=dt(2022, 1, 5),
payment=dt(2022, 1, 5),
frequency=Frequency.Months(3, None),
fixing_method="rfr_observation_shift(2)",
rate_fixings=name,
float_spread=1000.0,
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
period.rate(curve)
result = period.rate(curve) # double calc to test caching of fixing result
expected = ((1 + 0.10 / 365) * (1 + 0.08 / 365) - 1) * 36500 / 2 + 10.0
assert abs(result - expected) < 1e-12
fixings.pop(f"{name}_1B")
@pytest.mark.parametrize("curve_type", ["curve", "line_curve"])
def test_rfr_observation_shift_avg_method(self, curve_type, rfr_curve, line_curve) -> None:
curve = rfr_curve if curve_type == "curve" else line_curve
period = FloatPeriod(
start=dt(2022, 1, 2),
end=dt(2022, 1, 5),
payment=dt(2022, 1, 5),
frequency=Frequency.Months(3, None),
fixing_method="rfr_observation_shift_avg(1)",
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
result = period.rate(curve)
expected = (1.0 + 2 + 3) / 3
assert abs(result - expected) < 1e-11
period = FloatPeriod(
start=dt(2022, 1, 3),
end=dt(2022, 1, 5),
payment=dt(2022, 1, 5),
frequency=Frequency.Months(3, None),
fixing_method="rfr_observation_shift_avg(2)",
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
result = period.rate(curve)
expected = (1.0 + 2.0) / 2
assert abs(result - expected) < 1e-11
@pytest.mark.parametrize("curve_type", ["curve", "line_curve"])
def test_rfr_observation_shift_avg_method_with_fixings(
self,
curve_type,
rfr_curve,
line_curve,
) -> None:
curve = rfr_curve if curve_type == "curve" else line_curve
fixings.add("123454_1B", Series(index=[dt(2022, 1, 1), dt(2022, 1, 2)], data=[10.0, 8.0]))
period = FloatPeriod(
start=dt(2022, 1, 2),
end=dt(2022, 1, 5),
payment=dt(2022, 1, 5),
frequency=Frequency.Months(3, None),
fixing_method="rfr_observation_shift_avg(1)",
rate_fixings="123454",
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
result = period.rate(curve)
expected = (10.0 + 8.0 + 3.0) / 3
assert abs(result - expected) < 1e-11
period = FloatPeriod(
start=dt(2022, 1, 3),
end=dt(2022, 1, 5),
payment=dt(2022, 1, 5),
frequency=Frequency.Months(3, None),
fixing_method="rfr_observation_shift_avg(2)",
rate_fixings="123454",
fixing_series=FloatRateSeries(
calendar="all",
lag=0,
convention="act365f",
modifier="mf",
eom=True,
),
)
result = period.rate(curve)
expected = (10.0 + 8) / 2
assert abs(result - expected) < 1e-11
fixings.pop("123454_1B")
def test_dcf_obs_period_raises(self) -> None:
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}, calendar="ldn")
float_period = FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 12, 31),
payment=dt(2022, 12, 31),
frequency=Frequency.Months(12, None),
fixing_method="rfr_lookback(5)",
fixing_series=FloatRateSeries(
calendar="ldn",
lag=0,
convention="act360",
modifier="mf",
eom=True,
),
)
# this may only raise when lookback is used ?
with pytest.raises(
ValueError, match="`start` and `end` for a calendar `bus_date_range` must both be vali"
):
float_period.rate(curve)
@pytest.mark.skip(reason="NOTIONAL mapping not yet implemented.")
@pytest.mark.parametrize(
"curve_type",
["curve", "linecurve"],
)
@pytest.mark.parametrize(
("method", "expected", "expected_date"),
[
("rfr_payment_delay", [1000000, 1000082, 1000191, 1000561], dt(2022, 1, 6)),
("rfr_observation_shift(2)", [1499240, 1499281, 1499363, 1499486], dt(2022, 1, 4)),
("rfr_lockout(2)", [999931, 4999411, 0, 0], dt(2022, 1, 6)),
("rfr_lookback(2)", [999657, 999685, 2998726, 999821], dt(2022, 1, 4)),
],
)
def test_rfr_fixings_array(self, curve_type, method, expected, expected_date) -> None:
# tests the fixings array and the compounding for different types of curve
# at different rates in the period.
v1 = 1 / (1 + 0.01 / 365)
v2 = v1 / (1 + 0.02 / 365)
v3 = v2 / (1 + 0.03 / 365)
v4 = v3 / (1 + 0.04 / 365)
v5 = v4 / (1 + 0.045 * 3 / 365)
v6 = v5 / (1 + 0.05 / 365)
v7 = v6 / (1 + 0.055 / 365)
nodes = {
dt(2022, 1, 3): 1.00,
dt(2022, 1, 4): v1,
dt(2022, 1, 5): v2,
dt(2022, 1, 6): v3,
dt(2022, 1, 7): v4,
dt(2022, 1, 10): v5,
dt(2022, 1, 11): v6,
dt(2022, 1, 12): v7,
}
curve = Curve(
nodes=nodes,
interpolation="log_linear",
convention="act365f",
calendar="bus",
)
line_curve = LineCurve(
nodes={
dt(2022, 1, 2): -99,
dt(2022, 1, 3): 1.0,
dt(2022, 1, 4): 2.0,
dt(2022, 1, 5): 3.0,
dt(2022, 1, 6): 4.0,
dt(2022, 1, 7): 4.5,
dt(2022, 1, 10): 5.0,
dt(2022, 1, 11): 5.5,
},
interpolation="linear",
convention="act365f",
calendar="bus",
)
rfr_curve = curve if curve_type == "curve" else line_curve
period = FloatPeriod(
start=dt(2022, 1, 5),
end=dt(2022, 1, 11),
payment=dt(2022, 1, 11),
frequency=Frequency.Months(3, None),
fixing_method=method,
convention="act365f",
notional=-1000000,
fixing_series=FloatRateSeries(
calendar="bus",
lag=0,
convention="act365f",
modifier="f",
eom=True,
),
)
table = period.try_unindexed_reference_fixings_exposure(
rate_curve=rfr_curve, disc_curve=curve
).unwrap()
assert table.index.tolist()[1] == expected_date
assert np.all(np.isclose(np.array(expected), table[(rfr_curve.id, "notional")].to_numpy()))
@pytest.mark.parametrize(
"curve_type",
["curve", "linecurve"],
)
@pytest.mark.parametrize(
("method", "expected", "expected_date"),
[
("rfr_payment_delay", [0.27393, 0.27392, 0.82155, 0.27391], dt(2022, 1, 6)),
("rfr_observation_shift(2)", [0.41074, 0.41073, 0.41072, 0.41071], dt(2022, 1, 4)),
("rfr_lockout(2)", [0.27391, 1.36933, 0, 0], dt(2022, 1, 6)),
("rfr_lookback(2)", [0.27387, 0.27386, 0.82143, 0.27385], dt(2022, 1, 4)),
],
)
def test_rfr_fixings_array_substitute(
self, curve_type, method, expected, expected_date
) -> None:
# tests the fixings array and the compounding for different types of curve
# at different rates in the period.
v1 = 1 / (1 + 0.01 / 365)
v2 = v1 / (1 + 0.02 / 365)
v3 = v2 / (1 + 0.03 / 365)
v4 = v3 / (1 + 0.04 / 365)
v5 = v4 / (1 + 0.045 * 3 / 365)
v6 = v5 / (1 + 0.05 / 365)
v7 = v6 / (1 + 0.055 / 365)
nodes = {
dt(2022, 1, 3): 1.00,
dt(2022, 1, 4): v1,
dt(2022, 1, 5): v2,
dt(2022, 1, 6): v3,
dt(2022, 1, 7): v4,
dt(2022, 1, 10): v5,
dt(2022, 1, 11): v6,
dt(2022, 1, 12): v7,
}
curve = Curve(
nodes=nodes,
interpolation="log_linear",
convention="act365f",
calendar="bus",
)
line_curve = LineCurve(
nodes={
dt(2022, 1, 2): -99,
dt(2022, 1, 3): 1.0,
dt(2022, 1, 4): 2.0,
dt(2022, 1, 5): 3.0,
dt(2022, 1, 6): 4.0,
dt(2022, 1, 7): 4.5,
dt(2022, 1, 10): 5.0,
dt(2022, 1, 11): 5.5,
},
interpolation="linear",
convention="act365f",
calendar="bus",
)
rfr_curve = curve if curve_type == "curve" else line_curve
period = FloatPeriod(
start=dt(2022, 1, 5),
end=dt(2022, 1, 11),
payment=dt(2022, 1, 11),
frequency=Frequency.Months(3, None),
fixing_method=method,
convention="act365f",
notional=-1000000,
fixing_series=FloatRateSeries(
calendar="bus",
lag=0,
convention="act365f",
modifier="f",
eom=True,
),
)
table = period.local_analytic_rate_fixings(rate_curve=rfr_curve, disc_curve=curve)
assert table.index.tolist()[1] == expected_date
result = table[(rfr_curve.id, "usd", "usd", "1B")].to_numpy()
assert np.all(np.isclose(np.array(expected), result, atol=1e-4))
def test_rfr_fixings_array_raises2(self, line_curve, curve) -> None:
period = FloatPeriod(
start=dt(2022, 1, 5),
end=dt(2022, 1, 11),
payment=dt(2022, 1, 11),
frequency=Frequency.Months(3, None),
fixing_method="rfr_payment_delay",
convention="act365f",
notional=-1000000,
fixing_series=FloatRateSeries(
calendar="bus",
lag=0,
convention="act365f",
modifier="f",
eom=True,
),
)
with pytest.raises(ValueError, match="`disc_curve` cannot be inferred from a non-DF"):
period.local_analytic_rate_fixings(rate_curve=line_curve)
with pytest.raises(ValueError, match="A `rate_curve` supplied as dict to an RF"):
period.local_analytic_rate_fixings(
rate_curve={"1m": line_curve, "2m": line_curve}, disc_curve=curve
)
@pytest.mark.skip(reason="NOTIONAL mapping not implemented")
@pytest.mark.parametrize(
("method", "expected"),
[
("rfr_payment_delay", 1000000),
("rfr_observation_shift(1)", 333319),
("rfr_lookback(1)", 333319),
],
)
def test_rfr_fixings_array_single_period(self, method, expected) -> None:
rfr_curve = Curve(
nodes={dt(2022, 1, 3): 1.0, dt(2022, 1, 15): 0.9995},
interpolation="log_linear",
convention="act365f",
calendar="bus",
)
period = FloatPeriod(
start=dt(2022, 1, 10),
end=dt(2022, 1, 11),
payment=dt(2022, 1, 11),
frequency=Frequency.Months(3, None),
fixing_method=method,
notional=-1000000,
convention="act365f",
fixing_series=FloatRateSeries(
calendar="bus",
lag=0,
convention="act365f",
modifier="f",
eom=True,
),
)
result = period.try_unindexed_reference_fixings_exposure(rate_curve=rfr_curve).unwrap()
assert abs(result[(rfr_curve.id, "notional")].iloc[0] - expected) < 1
@pytest.mark.parametrize(
("method", "expected"),
[
("rfr_payment_delay", 0.27388),
("rfr_observation_shift(1)", 0.27388),
("rfr_lookback(1)", 0.27388),
],
)
def test_rfr_fixings_array_single_period_substitute(self, method, expected) -> None:
rfr_curve = Curve(
nodes={dt(2022, 1, 3): 1.0, dt(2022, 1, 15): 0.9995},
interpolation="log_linear",
convention="act365f",
calendar="bus",
)
period = FloatPeriod(
start=dt(2022, 1, 10),
end=dt(2022, 1, 11),
payment=dt(2022, 1, 11),
frequency=Frequency.Months(3, None),
fixing_method=method,
notional=-1000000,
convention="act365f",
fixing_series=FloatRateSeries(
calendar="bus",
lag=0,
convention="act365f",
modifier="f",
eom=True,
),
)
result = period.local_analytic_rate_fixings(rate_curve=rfr_curve)
assert abs(result[(rfr_curve.id, "usd", "usd", "1B")].iloc[0] - expected) < 1
@pytest.mark.parametrize(
("method", "expected", "index"),
[
(
"rfr_payment_delay",
3.20040557,
[dt(2022, 1, 28), dt(2022, 1, 31), dt(2022, 2, 1)],
),
("rfr_lockout(1)", 3.80063892, [dt(2022, 1, 28), dt(2022, 1, 31), dt(2022, 2, 1)]),
("rfr_lookback(1)", 3.20040557, [dt(2022, 1, 27), dt(2022, 1, 28), dt(2022, 1, 31)]),
(
"rfr_observation_shift(1)",
4.00045001,
[dt(2022, 1, 27), dt(2022, 1, 28), dt(2022, 1, 31)],
),
],
)
def test_rfr_period_all_types_with_defined_fixings(self, method, expected, index):
# This is probably a redundant test but it came later after some refactoring and
# was double checked with manual calculation in Excel. Easy to do.
curve = Curve({dt(2022, 1, 1): 1.0, dt(2022, 3, 1): 1.0}, calendar="nyc")
name = str(hash(os.urandom(2)))
fixings.add(f"{name}_1B", Series(data=[3.0, 5.0, 2.0], index=index))
period = FloatPeriod(
start=dt(2022, 1, 28),
end=dt(2022, 2, 2),
frequency=Frequency.Months(12, None),
payment=dt(2022, 1, 1),
fixing_method=method,
convention="act360",
calendar="nyc",
rate_fixings=name,
)
result = period.rate(curve)
assert abs(result - expected) < 1e-8
fixings.pop(f"{name}_1B")
@pytest.mark.parametrize(
("method", "expected"),
[
(
"none_simple",
((1 + 0.01 / 365) * (1 + 0.02 / 365) * (1 + 0.03 / 365) - 1) * 36500 / 3
+ 100 / 100,
),
(
"isda_compounding",
((1 + 0.02 / 365) * (1 + 0.03 / 365) * (1 + 0.04 / 365) - 1) * 36500 / 3,
),
("isda_flat_compounding", 3.000173518986841),
],
)
def test_rfr_compounding_float_spreads(self, method, expected, rfr_curve) -> None:
period = FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 1, 4),
payment=dt(2022, 1, 4),
frequency=Frequency.Months(1, None),
float_spread=100,
spread_compound_method=method,
convention="act365f",
)
result = period.rate(rfr_curve)
assert abs(result - expected) < 1e-8
def test_ibor_rate_line_curve(self, line_curve) -> None:
period = FloatPeriod(
start=dt(2022, 1, 5),
end=dt(2022, 4, 5),
payment=dt(2022, 4, 5),
frequency=Frequency.Months(3, None),
fixing_method="ibor(2)",
fixing_series=FloatRateSeries(
lag=2,
calendar="all",
convention="act365f",
modifier="mf",
eom=True,
),
)
# assert period.rate_params._is_inefficient is False
assert period.rate(line_curve) == 3.0
@pytest.mark.skip(reason="NOTIONAL mapping not implemented")
def test_ibor_fixing_table(self, line_curve, curve) -> None:
float_period = FloatPeriod(
start=dt(2022, 1, 4),
end=dt(2022, 4, 4),
payment=dt(2022, 4, 4),
frequency=Frequency.Months(3, None),
fixing_method="ibor(2)",
convention="act365f",
fixing_series=FloatRateSeries(
calendar="all",
lag=2,
convention="act365f",
modifier="f",
eom=True,
),
)
result = float_period.try_unindexed_reference_fixings_exposure(
rate_curve=line_curve, disc_curve=curve
).unwrap()
expected = DataFrame(
{
"obs_dates": [dt(2022, 1, 2)],
"notional": [-1e6],
"risk": [-24.402790080357686],
"dcf": [0.2465753424657534],
"rates": [2.0],
},
).set_index("obs_dates")
expected.columns = MultiIndex.from_tuples(
[
(line_curve.id, "notional"),
(line_curve.id, "risk"),
(line_curve.id, "dcf"),
(line_curve.id, "rates"),
]
)
assert_frame_equal(expected, result)
def test_ibor_fixing_table_substitute(self, line_curve, curve) -> None:
float_period = FloatPeriod(
start=dt(2022, 1, 4),
end=dt(2022, 4, 4),
payment=dt(2022, 4, 4),
frequency=Frequency.Months(3, None),
fixing_method="ibor(2)",
convention="act365f",
fixing_series=FloatRateSeries(
calendar="all",
lag=2,
convention="act365f",
modifier="f",
eom=True,
),
)
result = float_period.local_analytic_rate_fixings(rate_curve=line_curve, disc_curve=curve)
assert abs(result.iloc[0, 0] + 24.402790080357686) < 1e-10
@pytest.mark.skip(reason="`right` removed by v2.5")
def test_ibor_fixing_table_right(self, line_curve, curve) -> None:
float_period = FloatPeriod(
start=dt(2022, 1, 4),
end=dt(2022, 4, 4),
payment=dt(2022, 4, 4),
frequency=Frequency.Months(3, None),
fixing_method="ibor(2)",
convention="act365f",
fixing_series=FloatRateSeries(
calendar="all",
lag=2,
convention="act365f",
modifier="f",
eom=True,
),
)
result = float_period.try_unindexed_reference_fixings_exposure(
rate_curve=line_curve, disc_curve=curve, right=dt(2022, 1, 1)
).unwrap()
expected = DataFrame(
{
"notional": [],
"risk": [],
"dcf": [],
"rates": [],
},
)
expected.index = Index([], dtype="datetime64[ns]", name="obs_dates")
expected.columns = MultiIndex.from_tuples(
[
(line_curve.id, "notional"),
(line_curve.id, "risk"),
(line_curve.id, "dcf"),
(line_curve.id, "rates"),
]
)
assert_frame_equal(expected, result)
# @pytest.mark.skip(reason="PERMANENT REMOVAL due to approximate method removed in v2.2. This "
# "test becomes identical to one above"
# )
# def test_ibor_fixing_table_fast(self, line_curve, curve) -> None:
# float_period = FloatPeriod(
# start=dt(2022, 1, 4),
# end=dt(2022, 4, 4),
# payment=dt(2022, 4, 4),
# frequency=Frequency.Months(3, None),
# fixing_method="ibor",
# method_param=2,
# convention="act365f",
# )
# result = float_period.fixings_table(line_curve, disc_curve=curve, approximate=True)
# expected = DataFrame(
# {
# "obs_dates": [dt(2022, 1, 2)],
# "notional": [-1e6],
# "risk": [-24.402790080357686],
# "dcf": [0.2465753424657534],
# "rates": [2.0],
# },
# ).set_index("obs_dates")
# expected.columns = MultiIndex.from_tuples(
# [
# (line_curve.id, "notional"),
# (line_curve.id, "risk"),
# (line_curve.id, "dcf"),
# (line_curve.id, "rates"),
# ]
# )
# assert_frame_equal(expected, result)
def test_ibor_fixings(self) -> None:
curve = Curve({dt(2022, 1, 1): 1.0, dt(2025, 1, 1): 0.90}, calendar="bus")
fixings_ = Series(
[1.00, 2.801, 1.00, 1.00],
index=[dt(2023, 3, 1), dt(2023, 3, 2), dt(2023, 3, 3), dt(2023, 3, 6)],
)
fixings.add("TEST_VALUES_3M", fixings_)
float_period = FloatPeriod(
start=dt(2023, 3, 6),
end=dt(2023, 6, 6),
payment=dt(2023, 6, 6),
frequency=Frequency.Months(3, None),
fixing_method="ibor(2)",
rate_fixings="TEST_VALUES",
fixing_series=FloatRateSeries(
calendar="bus",
convention="act360",
lag=2,
modifier="mf",
eom=False,
),
)
result = float_period.rate(curve)
assert result == 2.801
fixings.pop("TEST_VALUES_3M")
@pytest.mark.skip(reason="NOTIONAL mapping not implemented")
def test_ibor_fixings_table_historical_before_curve(self) -> None:
# fixing table should return a DataFrame with an unknown rate and zero exposure
# the fixing has occurred in the past but is unspecified.
curve = Curve({dt(2022, 1, 1): 1.0, dt(2025, 1, 1): 0.90}, calendar="bus")
float_period = FloatPeriod(
start=dt(2000, 2, 2),
end=dt(2000, 5, 2),
payment=dt(2000, 5, 2),
frequency=Frequency.Months(3, None),
fixing_method="ibor(0)",
fixing_series=FloatRateSeries(
calendar="bus",
convention="act360",
lag=0,
modifier="mf",
eom=False,
),
)
result = float_period.try_unindexed_reference_fixings_exposure(rate_curve=curve).unwrap()
expected = DataFrame(
data=[[0.0, 0.0, 0.25, np.nan]],
index=Index([dt(2000, 2, 2)], name="obs_dates"),
columns=MultiIndex.from_tuples(
[
(curve.id, "notional"),
(curve.id, "risk"),
(curve.id, "dcf"),
(curve.id, "rates"),
],
),
)
assert_frame_equal(expected, result)
def test_ibor_fixings_table_historical_before_curve_substitute(self) -> None:
# fixing table should return a DataFrame with an unknown rate and zero exposure
# the fixing has occurred in the past but is unspecified.
curve = Curve({dt(2022, 1, 1): 1.0, dt(2025, 1, 1): 0.90}, calendar="bus")
float_period = FloatPeriod(
start=dt(2000, 2, 2),
end=dt(2000, 5, 2),
payment=dt(2000, 5, 2),
frequency=Frequency.Months(3, None),
fixing_method="ibor(0)",
fixing_series=FloatRateSeries(
calendar="bus",
convention="act360",
lag=0,
modifier="mf",
eom=False,
),
)
result = float_period.local_analytic_rate_fixings(rate_curve=curve)
expected = DataFrame(
data=[[0.0]],
index=Index([dt(2000, 2, 2)], name="obs_dates"),
columns=MultiIndex.from_tuples(
[(curve.id, "usd", "usd", "3M")],
names=["identifier", "local_ccy", "display_ccy", "frequency"],
),
)
assert_frame_equal(expected, result)
@pytest.mark.skip(reason="NOTIONAL mapping not implemented.")
def test_rfr_fixings_table_historical_before_curve(self) -> None:
# fixing table should return a DataFrame with an unknown rate and zero exposure
# the fixing has occurred in the past but is unspecified.
curve = Curve({dt(2022, 1, 4): 1.0, dt(2025, 1, 1): 0.90}, calendar="bus")
float_period = FloatPeriod(
start=dt(2022, 1, 3),
end=dt(2022, 1, 4),
payment=dt(2022, 1, 4),
frequency=Frequency.Months(3, None),
fixing_method="rfr_payment_delay",
fixing_series=FloatRateSeries(
calendar="bus",
convention="act360",
eom=False,
modifier="F",
lag=0,
),
)
with pytest.raises(ValueError, match="`effective` date for rate period is before the init"):
float_period.try_unindexed_reference_fixings_exposure(rate_curve=curve).unwrap()
name = str(hash(os.urandom(8)))
fixings.add(f"{name}_1B", Series(index=[dt(2022, 1, 3)], data=[4.0]))
float_period = FloatPeriod(
rate_fixings=name,
start=dt(2022, 1, 3),
end=dt(2022, 1, 4),
payment=dt(2022, 1, 4),
frequency=Frequency.Months(3, None),
fixing_method="rfr_payment_delay",
fixing_series=FloatRateSeries(
calendar="bus",
convention="act360",
eom=False,
modifier="F",
lag=0,
),
)
result = float_period.try_unindexed_reference_fixings_exposure(rate_curve=curve).unwrap()
assert isinstance(result, DataFrame)
assert result.iloc[0, 0] == 0.0
assert result[f"{curve.id}", "notional"][dt(2022, 1, 3)] == 0.0
def test_rfr_fixings_table_historical_before_curve_substitute(self) -> None:
# fixing table should return a DataFrame with an unknown rate and zero exposure
# the fixing has occurred in the past but is unspecified.
curve = Curve({dt(2022, 1, 4): 1.0, dt(2025, 1, 1): 0.90}, calendar="bus")
float_period = FloatPeriod(
start=dt(2022, 1, 3),
end=dt(2022, 1, 4),
payment=dt(2022, 1, 4),
frequency=Frequency.Months(3, None),
fixing_method="rfr_payment_delay",
fixing_series=FloatRateSeries(
calendar="bus",
convention="act360",
eom=False,
modifier="F",
lag=0,
),
)
with pytest.raises(ValueError, match="The Curve initial node date is after the required"):
float_period.local_analytic_rate_fixings(rate_curve=curve)
name = str(hash(os.urandom(8)))
fixings.add(f"{name}_1B", Series(index=[dt(2022, 1, 3)], data=[4.0]))
float_period = FloatPeriod(
rate_fixings=name,
start=dt(2022, 1, 3),
end=dt(2022, 1, 4),
payment=dt(2022, 1, 4),
frequency=Frequency.Months(3, None),
fixing_method="rfr_payment_delay",
fixing_series=FloatRateSeries(
calendar="bus",
convention="act360",
eom=False,
modifier="F",
lag=0,
),
)
result = float_period.local_analytic_rate_fixings(rate_curve=curve)
assert isinstance(result, DataFrame)
assert result.iloc[0, 0] == 0.0
assert result.index[0] == dt(2022, 1, 3)
def test_ibor_fixing_unavailable(self) -> None:
curve = Curve({dt(2022, 1, 1): 1.0, dt(2025, 1, 1): 0.90}, calendar="bus")
lcurve = LineCurve({dt(2022, 1, 1): 2.0, dt(2025, 1, 1): 4.0}, calendar="bus")
fixings_ = Series([2.801], index=[dt(2023, 3, 1)])
name = str(hash(os.urandom(8)))
fixings.add(f"{name}_3M", fixings_)
float_period = FloatPeriod(
start=dt(2023, 3, 20),
end=dt(2023, 6, 20),
payment=dt(2023, 6, 20),
frequency=Frequency.Months(3, None),
fixing_method="ibor(2)",
calendar="bus",
rate_fixings=name,
)
result = float_period.rate(curve) # fixing occurs 18th Mar, not in `fixings`
assert abs(result - 3.476095729528156) < 1e-5
result = float_period.rate(lcurve) # fixing occurs 18th Mar, not in `fixings`
assert abs(result - 2.801094890510949) < 1e-5
fixings.pop(f"{name}_3M")
def test_ibor_fixings_exposure_with_fixing(self) -> None:
curve = Curve({dt(2022, 1, 1): 1.0, dt(2025, 1, 1): 0.90}, calendar="bus")
float_period = FloatPeriod(
start=dt(2023, 3, 20),
end=dt(2023, 6, 20),
payment=dt(2023, 6, 20),
frequency=Frequency.Months(3, None),
fixing_method="ibor(2)",
calendar="bus",
rate_fixings=2.0,
)
result = float_period.local_analytic_rate_fixings(rate_curve=curve)
assert result.iloc[0, 0] == 0.0
@pytest.mark.parametrize("float_spread", [0, 100])
def test_ibor_rate_df_curve(self, float_spread, curve) -> None:
period = FloatPeriod(
start=dt(2022, 4, 1),
end=dt(2022, 7, 1),
payment=dt(2022, 7, 1),
frequency=Frequency.Months(3, None),
fixing_method="ibor(2)",
float_spread=float_spread,
)
expected = (0.99 / 0.98 - 1) * 36000 / 91 + float_spread / 100
result = period.rate(curve)
assert result == expected
@pytest.mark.parametrize("float_spread", [0, 100])
def test_ibor_rate_stub_df_curve(self, float_spread, curve) -> None:
period = FloatPeriod(
start=dt(2022, 4, 1),
end=dt(2022, 5, 1),
payment=dt(2022, 5, 1),
frequency=Frequency.Months(3, None),
fixing_method="ibor(2)",
stub=True,
float_spread=float_spread,
)
expected = (0.99 / curve[dt(2022, 5, 1)] - 1) * 36000 / 30 + float_spread / 100
assert period.rate(curve) == expected
def test_single_fixing_override(self, curve) -> None:
period = FloatPeriod(
start=dt(2022, 4, 1),
end=dt(2022, 5, 1),
payment=dt(2022, 5, 1),
frequency=Frequency.Months(3, None),
fixing_method="ibor(2)",
stub=True,
float_spread=100,
rate_fixings=7.5,
)
expected = 7.5 + 1
assert period.rate(curve) == expected
@pytest.mark.parametrize("curve_type", ["curve", "linecurve"])
def test_period_historic_fixings(self, curve_type, line_curve, rfr_curve) -> None:
curve = rfr_curve if curve_type == "curve" else line_curve
fixings.add("123_1B", Series(index=[dt(2021, 12, 30), dt(2021, 12, 31)], data=[1.50, 2.50]))
period = FloatPeriod(
start=dt(2021, 12, 30),
end=dt(2022, 1, 3),
payment=dt(2022, 1, 3),
frequency=Frequency.Months(3, None),
fixing_method="rfr_payment_delay",
float_spread=100,
rate_fixings="123",
convention="act365F",
)
expected = (
(1 + 0.015 / 365) * (1 + 0.025 / 365) * (1 + 0.01 / 365) * (1 + 0.02 / 365) - 1
) * 36500 / 4 + 1
assert period.rate(curve) == expected
fixings.pop("123_1B")
@pytest.mark.parametrize("curve_type", ["curve", "linecurve"])
def test_period_historic_fixings_series(self, curve_type, line_curve, rfr_curve) -> None:
curve = rfr_curve if curve_type == "curve" else line_curve
fixings_ = Series(
[99, 99, 1.5, 2.5],
index=[dt(1995, 1, 1), dt(2021, 12, 29), dt(2021, 12, 30), dt(2021, 12, 31)],
)
fixings.add("123_1B", fixings_)
period = FloatPeriod(
start=dt(2021, 12, 30),
end=dt(2022, 1, 3),
payment=dt(2022, 1, 3),
frequency=Frequency.Months(3, None),
fixing_method="rfr_payment_delay",
float_spread=100,
rate_fixings="123",
convention="act365F",
)
expected = (
(1 + 0.015 / 365) * (1 + 0.025 / 365) * (1 + 0.01 / 365) * (1 + 0.02 / 365) - 1
) * 36500 / 4 + 1
result = period.rate(curve)
assert result == expected
fixings.pop("123_1B")
@pytest.mark.parametrize("curve_type", ["linecurve", "curve"])
def test_period_historic_fixings_series_missing_warns(
self,
curve_type,
line_curve,
rfr_curve,
) -> None:
#
# This test modified by PR 357. The warning is still produced but the code also now
# later errors due to the missing fixing and no forecasting method.
#
# this test was modified for v2.2. Now a missing fixing raises an error directly
fixings_ = Series(
[4.0, 3.0, 2.5], index=[dt(1995, 12, 1), dt(2021, 12, 30), dt(2022, 1, 1)]
)
with pytest.raises(ValueError, match="The fixings series '199"):
FloatPeriod(
start=dt(2021, 12, 30),
end=dt(2022, 1, 3),
payment=dt(2022, 1, 3),
frequency=Frequency.Months(3, None),
fixing_method="rfr_payment_delay",
float_spread=100,
rate_fixings=fixings_,
convention="act365F",
)
def test_more_fixings_than_expected_by_calendar_raises(self):
# Create historical fixings spanning 5 days for a FloatPeriod.
# But set a Cal that does not expect all of these - one holdiay midweek.
# Observe the rate calculation.
fixings_ = Series(
data=[1.0, 2.0, 3.0, 4.0, 5.0],
index=[
dt(2023, 1, 23),
dt(2023, 1, 24),
dt(2023, 1, 25),
dt(2023, 1, 26),
dt(2023, 1, 27),
],
)
cal = Cal(holidays=[dt(2023, 1, 25)], week_mask=[5, 6])
fixings.add("x45_1B", fixings_)
period = FloatPeriod(
start=dt(2023, 1, 23),
end=dt(2023, 1, 30),
payment=dt(2023, 1, 30),
frequency=Frequency.Months(3, None),
fixing_method="rfr_payment_delay",
rate_fixings="x45",
convention="act360",
calendar=cal,
)
curve = Curve({dt(2023, 1, 26): 1.0, dt(2025, 1, 26): 1.0}, calendar=cal)
with pytest.warns(UserWarning, match=err.W02_0[:20]):
period.rate(curve)
fixings.pop("x45_1B")
def test_fewer_fixings_than_expected_raises(self):
# Create historical fixings spanning 4 days for a FloatPeriod, with mid-week holiday
# But set a Cal that expects 5 (the cal does not have the holiday)
# Observe the rate calculation.
# this tests performs a minimal version of test_period_historic_fixings_series_missing_warns
fixings_ = Series(
data=[1.0, 2.0, 4.0, 5.0],
index=[dt(2023, 1, 23), dt(2023, 1, 24), dt(2023, 1, 26), dt(2023, 1, 27)],
)
with pytest.raises(ValueError, match="The fixings series '2023"):
FloatPeriod(
start=dt(2023, 1, 23),
end=dt(2023, 1, 30),
payment=dt(2023, 1, 30),
frequency=Frequency.Months(3, None),
fixing_method="rfr_payment_delay",
rate_fixings=fixings_,
convention="act365F",
calendar="bus",
)
@pytest.mark.skip(reason="new fixings processes in v2.2 require cached fixing. See next test")
def test_fixing_with_float_spread_warning(self, curve) -> None:
float_period = FloatPeriod(
start=dt(2022, 1, 4),
end=dt(2022, 4, 4),
payment=dt(2022, 4, 4),
frequency=Frequency.Months(3, None),
fixing_method="rfr_payment_delay",
spread_compound_method="isda_compounding",
float_spread=100,
rate_fixings=1.0,
)
with pytest.warns(UserWarning):
result = float_period.rate(curve)
assert result == 2.0
def test_fixing_with_float_spread_complicated_compounding(self, curve) -> None:
# this test ensures float spread is calculated correctly and populate to the fixings
# value as a scalar and repeated calculations are avoided.
fixings.add(
"x45_1B", Series(index=[dt(2000, 1, 1), dt(2000, 1, 2), dt(2000, 1, 3)], data=1.0)
)
float_period = FloatPeriod(
start=dt(2000, 1, 1),
end=dt(2000, 1, 4),
payment=dt(2000, 1, 4),
frequency=Frequency.Months(12, None),
fixing_method="rfr_payment_delay",
spread_compound_method="isda_compounding",
float_spread=100,
rate_fixings="x45",
fixing_series=FloatRateSeries(
calendar="all",
convention="act360",
lag=0,
modifier="F",
eom=False,
),
)
result = float_period.rate(curve)
assert abs(result - 2.000111113166) < 1e-10
assert abs(float_period.rate_params.rate_fixing.value - 2.000111113166) < 1e-10
# @pytest.mark.skip(reason="PERMANENTLY REMOVED due to reformed allowed inputs.
# This is input error.")
# def test_float_period_fixings_list_raises_on_ibor(self, curve, line_curve) -> None:
# with pytest.raises(ValueError, match=err.VE_FIXINGS_BAD_TYPE[:25]):
# FloatPeriod(
# start=dt(2022, 1, 4),
# end=dt(2022, 4, 4),
# payment=dt(2022, 4, 4),
# frequency=Frequency.Months(3, None),
# fixing_method="ibor",
# method_param=2,
# rate_fixings=[1.00],
# )
@pytest.mark.skip(reason="NOTIONAL mapping not implemented.")
@pytest.mark.parametrize(
("meth", "exp"),
[
(
"rfr_payment_delay",
DataFrame(
{
"obs_dates": [
dt(2022, 12, 28),
dt(2022, 12, 29),
dt(2022, 12, 30),
dt(2022, 12, 31),
dt(2023, 1, 1),
],
"notional": [
0.0,
0.0,
0.0,
-999821.37380,
-999932.84380,
],
"risk": [0.0, 0.0, 0.0, -0.26664737262, -0.26664737262],
"dcf": [0.0027777777777777778] * 5,
"rates": [1.19, 1.19, -8.81, 4.01364, 4.01364],
},
).set_index("obs_dates"),
),
(
"rfr_payment_delay_avg",
DataFrame(
{
"obs_dates": [
dt(2022, 12, 28),
dt(2022, 12, 29),
dt(2022, 12, 30),
dt(2022, 12, 31),
dt(2023, 1, 1),
],
"notional": [
0.0,
0.0,
0.0,
-999888.52252,
-1000000.00000,
],
"risk": [0.0, 0.0, 0.0, -0.26666528084917104, -0.26666528084917104],
"dcf": [0.0027777777777777778] * 5,
"rates": [1.19, 1.19, -8.81, 4.01364, 4.01364],
},
).set_index("obs_dates"),
),
],
)
def test_rfr_fixings_table(self, curve, meth, exp) -> None:
exp.columns = MultiIndex.from_tuples(
[(curve.id, "notional"), (curve.id, "risk"), (curve.id, "dcf"), (curve.id, "rates")]
)
name = str(hash(os.urandom(8)))
fixings.add(
f"{name}_1B",
Series(
index=[dt(2022, 12, 28), dt(2022, 12, 29), dt(2022, 12, 30)],
data=[1.19, 1.19, -8.81],
),
)
float_period = FloatPeriod(
start=dt(2022, 12, 28),
end=dt(2023, 1, 2),
payment=dt(2023, 1, 2),
frequency=Frequency.Months(1, None),
rate_fixings=name,
fixing_method=meth,
)
result = float_period.try_unindexed_reference_fixings_exposure(rate_curve=curve).unwrap()
assert_frame_equal(result, exp, rtol=1e-4)
curve._set_ad_order(order=1)
# assert values are unchanged even if curve can calculate derivatives
result = float_period.try_unindexed_reference_fixings_exposure(rate_curve=curve).unwrap()
fixings.pop(f"{name}_1B")
assert_frame_equal(result, exp)
@pytest.mark.skip(reason="`right` removed by v2.5")
@pytest.mark.parametrize(
("right", "exp"),
[
(dt(2021, 1, 1), 0),
(dt(2022, 12, 31), 4),
],
)
def test_rfr_fixings_table_right(self, curve, right, exp) -> None:
name = str(hash(os.urandom(8)))
fixings.add(
f"{name}_1B",
Series(
index=[dt(2022, 12, 28), dt(2022, 12, 29), dt(2022, 12, 30)],
data=[1.19, 1.19, -8.81],
),
)
float_period = FloatPeriod(
start=dt(2022, 12, 28),
end=dt(2023, 1, 2),
payment=dt(2023, 1, 2),
frequency=Frequency.Months(1, None),
rate_fixings=name,
fixing_method="rfr_payment_delay",
)
result = float_period.try_unindexed_reference_fixings_exposure(curve, right=right).unwrap()
assert isinstance(result, DataFrame)
assert len(result.index) == exp
@pytest.mark.skip(reason="`right` removed by v2.5")
def test_rfr_fixings_table_right_non_bus_day(self) -> None:
curve = Curve({dt(2022, 1, 1): 1.0, dt(2022, 11, 19): 0.98}, calendar="tgt")
float_period = FloatPeriod(
start=dt(2022, 2, 1),
end=dt(2022, 2, 28),
payment=dt(2022, 2, 28),
frequency=Frequency.Months(1, None),
fixing_method="rfr_payment_delay",
fixing_series=FloatRateSeries(
calendar="tgt",
lag=0,
convention="act360",
modifier="F",
eom=False,
),
)
result = float_period.try_unindexed_reference_fixings_exposure(
rate_curve=curve, right=dt(2022, 2, 13)
).unwrap()
assert isinstance(result, DataFrame)
assert len(result.index) == 9
# @pytest.mark.skip(reason="PERMANENT REMOVAL due to approximate method removed in v2.2.")
# @pytest.mark.parametrize(
# ("method", "param"),
# [
# ("rfr_payment_delay", NoInput(0)),
# ("rfr_lookback", 4),
# ("rfr_lockout", 1),
# ("rfr_observation_shift", 2),
# ],
# )
# @pytest.mark.parametrize(
# ("scm", "spd"),
# [
# ("none_simple", 1000.0),
# ("isda_compounding", 1000.0),
# ("isda_flat_compounding", 1000.0),
# ],
# )
# @pytest.mark.parametrize(
# "crv",
# [
# Curve(
# {
# dt(2022, 1, 1): 1.00,
# dt(2022, 4, 1): 0.99,
# dt(2022, 7, 1): 0.98,
# dt(2022, 10, 1): 0.97,
# dt(2023, 6, 1): 0.96,
# },
# interpolation="log_linear",
# calendar="bus",
# ),
# ],
# )
# def test_rfr_fixings_table_fast(self, method, param, scm, spd, crv) -> None:
# float_period = FloatPeriod(
# start=dt(2022, 12, 28),
# end=dt(2023, 1, 3),
# payment=dt(2023, 1, 3),
# frequency=Frequency.Months(1, None),
# fixing_method=method,
# method_param=param,
# spread_compound_method=scm,
# float_spread=spd,
# )
# expected = float_period.fixings_table(crv)
# result = float_period.fixings_table(crv, approximate=True)
# assert_frame_equal(result, expected, rtol=1e-2)
#
# @pytest.mark.skip(reason="PERMANENT REMOVAL due to approximate method removed in v2.2.")
# @pytest.mark.parametrize(
# "right",
# [
# dt(2022, 12, 31),
# dt(2021, 1, 1),
# ],
# )
# def test_rfr_fixings_table_fast_right(self, curve, right) -> None:
# float_period = FloatPeriod(
# start=dt(2022, 12, 28),
# end=dt(2023, 1, 3),
# payment=dt(2023, 1, 3),
# frequency=Frequency.Months(1, None),
# fixing_method="rfr_payment_delay",
# )
# expected = float_period.fixings_table(curve, right=right)
# result = float_period.fixings_table(curve, approximate=True, right=right)
# assert_frame_equal(result, expected, rtol=1e-2, check_dtype=False)
#
# @pytest.mark.skip(reason="PERMANENT REMOVAL due to approximate method removed in v2.2.")
# @pytest.mark.parametrize(
# ("method", "param"),
# [
# ("rfr_payment_delay_avg", None),
# ("rfr_lookback_avg", 4),
# ("rfr_lockout_avg", 1),
# ("rfr_observation_shift_avg", 2),
# ],
# )
# @pytest.mark.parametrize(
# "crv",
# [
# Curve(
# {
# dt(2022, 1, 1): 1.00,
# dt(2022, 4, 1): 0.99,
# dt(2022, 7, 1): 0.98,
# dt(2022, 10, 1): 0.97,
# dt(2023, 6, 1): 0.96,
# },
# interpolation="log_linear",
# calendar="bus",
# ),
# ],
# )
# def test_rfr_fixings_table_fast_avg(self, method, param, crv) -> None:
# float_period = FloatPeriod(
# start=dt(2022, 12, 28),
# end=dt(2023, 1, 3),
# payment=dt(2023, 1, 3),
# frequency=Frequency.Months(1, None),
# fixing_method=method,
# method_param=param,
# spread_compound_method="none_simple",
# float_spread=100.0,
# )
# expected = float_period.fixings_table(crv)
# result = float_period.fixings_table(crv, approximate=True)
# assert_frame_equal(result, expected, rtol=1e-2)
# @pytest.mark.skip(reason="Series are not recommended inputs. Testing is removed.")
# def test_rfr_rate_fixings_series_monotonic_error(self) -> None:
# nodes = {
# dt(2022, 1, 1): 1.00,
# dt(2022, 4, 1): 0.99,
# dt(2022, 7, 1): 0.98,
# dt(2022, 10, 1): 0.97,
# }
# curve = Curve(nodes=nodes, interpolation="log_linear")
# fixings = Series(
# [99, 2.25, 2.375, 2.5],
# index=[dt(1995, 12, 1), dt(2021, 12, 30), dt(2022, 12, 31), dt(2020, 1, 1)],
# )
# period = FloatPeriod(
# start=dt(2021, 12, 30),
# end=dt(2022, 1, 3),
# payment=dt(2022, 1, 3),
# frequency=Frequency.Months(3, None),
# fixing_method="rfr_payment_delay",
# float_spread=100,
# rate_fixings=fixings,
# convention="act365F",
# fixing_series=FloatRateSeries(
# calendar="all",
# convention="act360",
# lag=0,
# modifier="F",
# eom=True,
# ),
# )
# # with pytest.raises(ValueError, match="`fixings` as a Series"):
# with pytest.raises(ValueError, match=err.VE02_5[:20]):
# period.rate(curve)
@pytest.mark.parametrize(
("scm", "exp"),
[
("none_simple", True),
("isda_compounding", False),
],
)
def test_float_spread_affects_fixing_exposure(self, scm, exp) -> None:
nodes = {
dt(2022, 1, 1): 1.00,
dt(2022, 4, 1): 0.99,
dt(2022, 7, 1): 0.98,
dt(2022, 10, 1): 0.97,
}
curve = Curve(nodes=nodes, interpolation="log_linear", convention="act360")
period = FloatPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 7, 1),
payment=dt(2022, 7, 1),
frequency=Frequency.Months(6, None),
fixing_method="rfr_payment_delay",
float_spread=0,
convention="act365F",
spread_compound_method=scm,
fixing_series=FloatRateSeries(
calendar="all", convention="act360", eom=True, lag=0, modifier="F"
),
)
table = period.local_analytic_rate_fixings(rate_curve=curve)
period.rate_params.float_spread = 200
table2 = period.local_analytic_rate_fixings(rate_curve=curve)
assert (table.iloc[0, 0] == table2.iloc[0, 0]) == exp
def test_custom_interp_rate_nan(self) -> None:
name = str(hash(os.urandom(8)))
fixings.add(
f"{name}_1B", Series(index=[dt(2022, 12, 28), dt(2022, 12, 29)], data=[1.19, 1.19])
)
float_period = FloatPeriod(
start=dt(2022, 12, 28),
end=dt(2023, 1, 2),
payment=dt(2023, 1, 2),
frequency=Frequency.Months(1, None),
rate_fixings=name,
)
def interp(date, nodes):
if date < dt(2023, 1, 1):
return None
return 2.0
line_curve = LineCurve({dt(2023, 1, 1): 3.0, dt(2023, 2, 1): 2.0}, interpolation=interp)
curve = Curve({dt(2023, 1, 1): 1.0, dt(2023, 2, 1): 0.999})
with pytest.raises(ValueError, match="The Curve initial node date is after the "):
float_period.local_analytic_rate_fixings(rate_curve=line_curve, disc_curve=curve)
def test_method_param_raises(self) -> None:
with pytest.raises(ValueError, match='`method_param` must be >0 for "RFRLockout'):
FloatPeriod(
start=dt(2022, 1, 4),
end=dt(2022, 4, 4),
payment=dt(2022, 4, 4),
frequency=Frequency.Months(3, None),
fixing_method="rfr_lockout(0)",
rate_fixings=[1.00],
)
# test obsolete with FloatFixingMethod enum
# with pytest.raises(ValueError, match="`method_param` should not be used"):
# FloatPeriod(
# start=dt(2022, 1, 4),
# end=dt(2022, 4, 4),
# payment=dt(2022, 4, 4),
# frequency=Frequency.Months(3, None),
# fixing_method="rfr_payment_delay",
# rate_fixings=[1.00],
# )
def test_analytic_delta_no_curve_raises(self) -> None:
name = str(hash(os.urandom(9)))
fixings.add(f"{name}_1B", Series(index=[dt(2022, 12, 28)], data=1.19))
float_period = FloatPeriod(
start=dt(2022, 12, 28),
end=dt(2023, 1, 2),
payment=dt(2023, 1, 2),
frequency=Frequency.Months(1, None),
rate_fixings=name,
spread_compound_method="isda_compounding",
float_spread=1.0,
)
with pytest.raises(ValueError, match="`disc_curve` is required but it has not been pr"):
float_period.analytic_delta()
def test_more_series_fixings_than_calendar_from_curve_raises(self) -> None:
fixings = Series(
[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0],
index=[
dt(2022, 1, 4),
dt(2022, 1, 5),
dt(2022, 1, 6),
dt(2022, 1, 7),
dt(2022, 1, 8),
dt(2022, 1, 9),
dt(2022, 1, 10),
],
)
with pytest.warns(UserWarning, match=err.W02_0[:20]):
FloatPeriod(
start=dt(2022, 1, 4),
end=dt(2022, 1, 11),
frequency=Frequency.Months(3, None),
fixing_method="rfr_payment_delay",
payment=dt(2022, 1, 9),
float_spread=10.0,
rate_fixings=fixings,
fixing_series=FloatRateSeries(
calendar="bus",
convention="act360",
lag=0,
eom=True,
modifier="F",
),
)
def test_series_fixings_not_applicable_to_period(self) -> None:
# if a series is historic and of no relevance all fixings are forecast from crv
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, calendar="bus")
fixings = Series([1.0, 2.0, 3.0], index=[dt(2021, 1, 4), dt(2021, 1, 5), dt(2021, 1, 6)])
period = FloatPeriod(
start=dt(2022, 1, 4),
end=dt(2022, 1, 11),
frequency=Frequency.Months(3, None),
fixing_method="rfr_payment_delay",
payment=dt(2022, 1, 9),
float_spread=10.0,
rate_fixings=fixings,
)
result = period.rate(curve)
expected = 1.09136153 # series fixings are completely ignored
assert abs(result - expected) < 1e-5
@pytest.mark.parametrize(
("meth", "exp"),
[
("rfr_payment_delay", 3.1183733605),
("rfr_observation_shift(2)", 3.085000395),
("rfr_lookback(2)", 3.05163645),
("rfr_lockout(7)", 3.00157855),
],
)
def test_norges_bank_nowa_calc_same(self, meth, exp) -> None:
# https://app.norges-bank.no/nowa/#/en/
curve = Curve({dt(2023, 8, 4): 1.0}, calendar="osl", convention="act365f")
fixings.add("nowa_1B", fixings["nowa"][1])
period = FloatPeriod(
start=dt(2023, 4, 27),
end=dt(2023, 5, 12),
payment=dt(2023, 5, 16),
frequency=Frequency.Months(12, None),
fixing_method=meth,
float_spread=0.0,
rate_fixings="nowa",
fixing_series=FloatRateSeries(
calendar="osl",
convention="act365f",
lag=0,
modifier="F",
eom=True,
),
)
result = period.rate(curve)
assert abs(result - exp) < 1e-7
fixings.pop("nowa_1B")
def test_interpolated_ibor_warns(self) -> None:
period = FloatPeriod(
start=dt(2023, 4, 27),
end=dt(2023, 6, 12),
payment=dt(2023, 6, 16),
frequency=Frequency.Months(12, None),
fixing_method="ibor(1)",
float_spread=0.0,
stub=True,
)
curve1 = LineCurve({dt(2022, 1, 1): 1.0, dt(2024, 2, 1): 1.0})
with pytest.warns(UserWarning):
period.rate({"1m": curve1})
with pytest.warns(UserWarning):
period.rate({"3m": curve1})
def test_interpolated_ibor_rate_line(self) -> None:
period = FloatPeriod(
start=dt(2023, 2, 1),
end=dt(2023, 4, 1),
payment=dt(2023, 4, 1),
frequency=Frequency.Months(12, None),
fixing_method="ibor(1)",
float_spread=0.0,
stub=True,
)
curve3 = LineCurve({dt(2022, 1, 1): 3.0, dt(2023, 2, 1): 3.0})
curve1 = LineCurve({dt(2022, 1, 1): 1.0, dt(2023, 2, 1): 1.0})
result = period.rate({"1M": curve1, "3m": curve3})
expected = 1.0 + (3.0 - 1.0) * (dt(2023, 4, 1) - dt(2023, 3, 1)) / (
dt(2023, 5, 1) - dt(2023, 3, 1)
)
assert abs(result - expected) < 1e-8
def test_interpolated_ibor_rate_df(self) -> None:
period = FloatPeriod(
start=dt(2023, 2, 1),
end=dt(2023, 4, 1),
payment=dt(2023, 4, 1),
frequency=Frequency.Months(12, None),
fixing_method="ibor(1)",
float_spread=0.0,
stub=True,
)
curve3 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 2, 1): 0.97})
curve1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 2, 1): 0.99})
result = period.rate({"1M": curve1, "3m": curve3})
a, b = 0.91399161, 2.778518365
expected = a + (b - a) * (dt(2023, 4, 1) - dt(2023, 3, 1)) / (
dt(2023, 5, 1) - dt(2023, 3, 1)
)
assert abs(result - expected) < 1e-8
def test_rfr_period_curve_dict_raises(self, curve) -> None:
period = FloatPeriod(
start=dt(2023, 2, 1),
end=dt(2023, 4, 1),
payment=dt(2023, 4, 1),
frequency=Frequency.Months(12, None),
fixing_method="rfr_payment_delay",
float_spread=0.0,
stub=True,
)
with pytest.raises(ValueError, match="A `rate_curve` supplied as dict to an RFR ba"):
period.rate({"bad_index": curve})
def test_rfr_period_curve_dict_allowed(self, curve) -> None:
period = FloatPeriod(
start=dt(2023, 2, 1),
end=dt(2023, 4, 1),
payment=dt(2023, 4, 1),
frequency=Frequency.Months(12, None),
fixing_method="rfr_payment_delay",
float_spread=0.0,
stub=True,
)
expected = 4.02664128485892
result = period.rate({"rfr": curve})
assert result == expected
@pytest.mark.skip(reason="NOTIONAL mapping for fixings exposure not implemented.")
def test_ibor_stub_book2(self):
curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2025, 1, 1): 0.94},
calendar="tgt",
convention="act360",
id="euribor3m",
)
curve2 = Curve(
{dt(2022, 1, 1): 1.0, dt(2025, 1, 1): 0.94},
calendar="tgt",
convention="act360",
id="euribor1m",
)
stub_fp = FloatPeriod(
start=dt(2022, 3, 14),
end=dt(2022, 5, 14),
payment=dt(2022, 5, 14),
frequency="Q",
calendar="tgt",
convention="act360",
fixing_method="ibor",
method_param=2,
notional=-1e6,
stub=True,
)
result = stub_fp.try_unindexed_reference_fixings_exposure(
rate_curve={"1m": curve2, "3m": curve}, disc_curve=curve
).unwrap()
assert abs(result.iloc[0, 0] - 998307) < 1
assert abs(result.iloc[0, 4] - 326658) < 1
assert abs(result.iloc[0, 1] - 8.5467) < 1e-4
assert abs(result.iloc[0, 5] - 8.2710) < 1e-4
def test_ibor_stub_book2_substitute(self):
curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2025, 1, 1): 0.94},
calendar="tgt",
convention="act360",
id="euribor3m",
)
curve2 = Curve(
{dt(2022, 1, 1): 1.0, dt(2025, 1, 1): 0.94},
calendar="tgt",
convention="act360",
id="euribor1m",
)
stub_fp = FloatPeriod(
start=dt(2022, 3, 14),
end=dt(2022, 5, 14),
payment=dt(2022, 5, 14),
frequency="Q",
calendar="tgt",
convention="act360",
fixing_method="ibor(2)",
notional=-1e6,
stub=True,
)
result = stub_fp.local_analytic_rate_fixings(
rate_curve={"1m": curve2, "3m": curve}, disc_curve=curve
)
assert abs(result.iloc[0, 0] - 8.5467) < 1e-4
assert abs(result.iloc[0, 1] - 8.2710) < 1e-4
@pytest.mark.skip(reason="NOTIONAL mapping for fixings exposure not implemented.")
def test_ibor_stub_fixings_table(self) -> None:
period = FloatPeriod(
start=dt(2023, 2, 1),
end=dt(2023, 4, 1),
payment=dt(2023, 4, 1),
frequency=Frequency.Months(12, None),
fixing_method="ibor(1)",
float_spread=0.0,
stub=True,
fixing_series=FloatRateSeries(
calendar="all", convention="act360", lag=1, eom=False, modifier="mf"
),
)
curve3 = LineCurve({dt(2022, 1, 1): 3.0, dt(2023, 2, 1): 3.0})
curve1 = LineCurve({dt(2022, 1, 1): 2.0, dt(2023, 2, 1): 2.0})
dc = Curve({dt(2022, 1, 1): 1.0, dt(2023, 2, 1): 1.0})
result = period.try_unindexed_reference_fixings_exposure(
rate_curve={"1M": curve1, "3m": curve3}, disc_curve=dc
).unwrap()
assert isinstance(result, DataFrame)
assert abs(result.iloc[0, 0] + 1036300) < 1
assert abs(result.iloc[0, 4] + 336894) < 1
assert abs(result.iloc[0, 1] + 8.0601) < 1e-4
assert abs(result.iloc[0, 5] + 8.32877) < 1e-4
def test_ibor_stub_fixings_table_substitute(self) -> None:
period = FloatPeriod(
start=dt(2023, 2, 1),
end=dt(2023, 4, 1),
payment=dt(2023, 4, 1),
frequency=Frequency.Months(12, None),
fixing_method="ibor(1)",
float_spread=0.0,
stub=True,
fixing_series=FloatRateSeries(
calendar="all", convention="act360", lag=1, eom=False, modifier="mf"
),
)
curve3 = LineCurve({dt(2022, 1, 1): 3.0, dt(2023, 2, 1): 3.0})
curve1 = LineCurve({dt(2022, 1, 1): 2.0, dt(2023, 2, 1): 2.0})
dc = Curve({dt(2022, 1, 1): 1.0, dt(2023, 2, 1): 1.0})
result = period.local_analytic_rate_fixings(
rate_curve={"1M": curve1, "3m": curve3}, disc_curve=dc
)
assert isinstance(result, DataFrame)
assert abs(result.iloc[0, 0] + 8.0601) < 1e-4
assert abs(result.iloc[0, 1] + 8.32877) < 1e-4
@pytest.mark.skip(reason="NOTIONAL mapping for fixings exposure not implemented.")
def test_ibor_stub_fixings_rfr_in_dict_ignored(self) -> None:
period = FloatPeriod(
start=dt(2023, 2, 1),
end=dt(2023, 4, 1),
payment=dt(2023, 4, 1),
frequency=Frequency.Months(12, None),
fixing_method="ibor",
method_param=1,
float_spread=0.0,
stub=True,
fixing_series=FloatRateSeries(
calendar="all", convention="act360", lag=1, eom=False, modifier="mf"
),
)
curve3 = LineCurve({dt(2022, 1, 1): 3.0, dt(2023, 2, 1): 3.0})
curve1 = LineCurve({dt(2022, 1, 1): 1.0, dt(2023, 2, 1): 1.0})
dc = Curve({dt(2022, 1, 1): 1.0, dt(2023, 2, 1): 1.0})
result = period.try_unindexed_reference_fixings_exposure(
rate_curve={"1M": curve1, "3m": curve3, "rfr": curve1}, disc_curve=dc
).unwrap()
assert isinstance(result, DataFrame)
assert abs(result.iloc[0, 0] + 1036300) < 1
assert abs(result.iloc[0, 4] + 336894) < 1
assert abs(result.iloc[0, 1] + 8.0601) < 1e-4
assert abs(result.iloc[0, 5] + 8.32877) < 1e-4
def test_ibor_stub_fixings_rfr_in_dict_ignored_substitute(self) -> None:
period = FloatPeriod(
start=dt(2023, 2, 1),
end=dt(2023, 4, 1),
payment=dt(2023, 4, 1),
frequency=Frequency.Months(12, None),
fixing_method="ibor(1)",
float_spread=0.0,
stub=True,
fixing_series=FloatRateSeries(
calendar="all", convention="act360", lag=1, eom=False, modifier="mf"
),
)
curve3 = LineCurve({dt(2022, 1, 1): 3.0, dt(2023, 2, 1): 3.0})
curve1 = LineCurve({dt(2022, 1, 1): 1.0, dt(2023, 2, 1): 1.0})
dc = Curve({dt(2022, 1, 1): 1.0, dt(2023, 2, 1): 1.0})
result = period.local_analytic_rate_fixings(
rate_curve={"1M": curve1, "3m": curve3, "rfr": curve1}, disc_curve=dc
)
assert isinstance(result, DataFrame)
assert abs(result.iloc[0, 0] + 8.0601) < 1e-4
assert abs(result.iloc[0, 1] + 8.32877) < 1e-4
@pytest.mark.skip(reason="`right` removed by v2.5")
def test_ibor_stub_fixings_table_right(self) -> None:
period = FloatPeriod(
start=dt(2023, 2, 1),
end=dt(2023, 4, 1),
payment=dt(2023, 4, 1),
frequency=Frequency.Months(12, None),
fixing_method="ibor",
method_param=1,
float_spread=0.0,
stub=True,
)
curve3 = LineCurve({dt(2022, 1, 1): 3.0, dt(2023, 2, 1): 3.0})
curve1 = LineCurve({dt(2022, 1, 1): 1.0, dt(2023, 2, 1): 1.0})
result = period.try_unindexed_reference_fixings_exposure(
rate_curve={"1M": curve1, "3m": curve3}, disc_curve=curve1, right=dt(2022, 1, 1)
).unwrap()
assert isinstance(result, DataFrame)
assert len(result.index) == 0
def test_ibor_non_stub_fixings_table(self) -> None:
period = FloatPeriod(
start=dt(2023, 2, 1),
end=dt(2023, 5, 1),
payment=dt(2023, 5, 1),
frequency=Frequency.Months(3, None),
fixing_method="ibor(1)",
float_spread=0.0,
)
curve3 = LineCurve({dt(2022, 1, 1): 3.0, dt(2023, 2, 1): 3.0})
curve1 = LineCurve({dt(2022, 1, 1): 1.0, dt(2023, 2, 1): 1.0})
curved = Curve({dt(2022, 1, 1): 1.0, dt(2023, 2, 1): 1.0})
result = period.local_analytic_rate_fixings(
rate_curve={"1M": curve1, "3M": curve3}, disc_curve=curved
)
expected = DataFrame(
data=[[-24.722222222222]],
index=Index([dt(2023, 1, 31)], name="obs_dates"),
columns=MultiIndex.from_tuples(
[(curve3.id, "usd", "usd", "3M")],
names=["identifier", "local_ccy", "display_ccy", "frequency"],
),
)
assert_frame_equal(result, expected)
def test_ibor_fixings_no_bad_curves_raises(self):
curve1 = LineCurve({dt(2022, 1, 1): 2.0, dt(2023, 2, 1): 2.0})
disc_curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 2, 1): 0.96})
float_period = FloatPeriod(
start=dt(2023, 3, 6),
end=dt(2023, 6, 6),
payment=dt(2023, 6, 6),
frequency=Frequency.Months(3, None),
fixing_method="ibor(2)",
fixing_series=FloatRateSeries(
calendar="bus",
convention="act360",
lag=2,
modifier="mf",
eom=False,
),
)
with pytest.raises(ValueError, match="A `rate_curve` must be provided to this method"):
float_period.local_analytic_rate_fixings(rate_curve=NoInput(0), disc_curve=disc_curve)
with pytest.raises(ValueError, match="`disc_curve` cannot be inferred from a non-DF base"):
float_period.local_analytic_rate_fixings(rate_curve=curve1, disc_curve=NoInput(0))
def test_local_historical_pay_date_issue(self, curve) -> None:
period = FloatPeriod(
start=dt(2021, 1, 1),
end=dt(2021, 4, 1),
payment=dt(2021, 4, 1),
frequency=Frequency.Months(3, None),
)
result = period.npv(rate_curve=curve, local=True)
assert result == {"usd": 0.0}
@pytest.mark.parametrize(
"curve", [NoInput(0), LineCurve({dt(2000, 1, 1): 2.0, dt(2001, 1, 1): 2.0})]
)
@pytest.mark.parametrize("fixing_method", ["ibor(2)", "rfr_payment_delay_avg"])
@pytest.mark.parametrize("fixings", [3.0, NoInput(0)])
def test_rate_optional_curve(self, fixings, fixing_method, curve) -> None:
# GH530. Allow forecasting rates without necessarily providing curve if unnecessary
period = FloatPeriod(
start=dt(2000, 1, 12),
end=dt(2000, 4, 12),
fixing_method=fixing_method,
frequency=Frequency.Months(3, None),
rate_fixings=fixings,
payment=dt(2000, 4, 12),
)
if (
isinstance(curve, NoInput)
and isinstance(fixings, NoInput)
and fixing_method != "ibor(2)"
):
# then no data to price
msg = "A `rate_curve` is required to forecast missing RFR"
with pytest.raises(FixingMissingForecasterError, match=msg):
period.rate(curve)
elif (
isinstance(curve, NoInput)
and isinstance(fixings, NoInput)
and fixing_method == "ibor(2)"
):
msg = "A `rate_curve` is required to forecast missing IBOR"
with pytest.raises(ValueError, match=msg):
period.rate(curve)
elif isinstance(fixings, NoInput):
result = period.rate(curve)
assert abs(result - 2.0) < 1e-8 # uses curve
else:
result = period.rate(curve)
assert abs(result - 3.0) < 1e-8 # uses fixing
@pytest.mark.parametrize(
"rate_fixings",
[
Series(
index=[dt(2000, 1, 1), dt(2000, 1, 2), dt(2000, 1, 3)], data=[2.0, 2.0, 2.0]
), # some unknown
Series(
index=Cal.from_name("all").bus_date_range(dt(2000, 1, 1), dt(2000, 1, 31)), data=2.0
), # exhaustive
Series(2.0, index=date_range(dt(2000, 1, 1), dt(2001, 1, 1))),
],
)
@pytest.mark.parametrize(
"curve", [NoInput(0), LineCurve({dt(2000, 1, 1): 2.0, dt(2001, 1, 1): 2.0})]
)
def test_rate_optional_curve_rfr(self, curve, rate_fixings) -> None:
# GH530. Test RFR periods what happens when supply/not supply a Curve and fixings
# are either exhaustive/ not exhaustive
name = str(hash(os.urandom(8)))
fixings.add(f"{name}_1B", rate_fixings)
period = FloatPeriod(
start=dt(2000, 1, 1),
end=dt(2000, 2, 1),
fixing_method="rfr_payment_delay_avg",
frequency=Frequency.Months(1, None),
calendar="all",
rate_fixings=name,
payment=dt(2000, 2, 1),
)
# When a curve is not supplied for RFR period currently it will still fail
# even if exhaustive fixings are available. There is currently no branching handling this.
if isinstance(curve, NoInput) and len(rate_fixings) == 3:
with pytest.raises(
FixingMissingForecasterError, match="A `rate_curve` is required to forecast mi"
):
period.rate(curve)
else:
# it will conclude without fail, the exhaustive case is captured.
period.rate(curve)
fixings.pop(f"{name}_1B")
def test_rfr_lockout_calculation_is_accurate(self):
# this is an additional test to ensure the validity of the lockout rate
# it combines multiple features such as weekends and changing rates.
# it ensures that the DCF is handled correctly for the locked out days
name = str(hash(os.urandom(8)))
fixings.add(
f"{name}_1B",
Series(
index=[
dt(2024, 6, 7), # 1
dt(2024, 6, 10),
dt(2024, 6, 11), # 3
dt(2024, 6, 12),
dt(2024, 6, 13), # 5
dt(2024, 6, 14), # 5
dt(2024, 6, 17),
dt(2024, 6, 18),
dt(2024, 6, 19),
],
data=[1.0, 2.0, 3.0, 4.0, 5.0, 4.0, 3.0, 2.0, 1.0],
),
)
p = FloatPeriod(
start=dt(2024, 6, 7),
end=dt(2024, 6, 20),
payment=dt(2024, 6, 21),
frequency="A",
fixing_method=FloatFixingMethod.RFRLockout(4),
fixing_series=FloatRateSeries(
calendar="bus", convention="act360", lag=0, eom=False, modifier="F"
),
spread_compound_method="NoneSimple",
float_spread=50.0,
rate_fixings=name,
)
result = p.rate(rate_curve=NoInput(0))
fixings.pop(f"{name}_1B")
d = 1.0 / 36000.0
expected = (
(1 + 1 * 3 * d)
* (1 + 2 * d)
* (1 + 3 * d)
* (1 + 4 * d)
* (1 + 3 * d * 5)
* (1 + d * 5) ** 4
)
expected = (expected - 1) * 1 / (13 * d) + 0.50
not_expected = (1 + 1 * 3 * d) * (1 + 2 * d) * (1 + 3 * d) * (1 + 4 * d) * (1 + 7 * d * 5)
not_expected = (not_expected - 1) * 1 / (13 * d) + 0.50
assert abs(result - not_expected) > 1e-14
assert abs(result - expected) < 1e-14
def test_analytic_delta_raises(self, curve):
p = FloatPeriod(
start=dt(2024, 6, 7),
end=dt(2024, 6, 20),
payment=dt(2024, 6, 21),
frequency="A",
fixing_method=FloatFixingMethod.RFRLockout(4),
fixing_series=FloatRateSeries(
calendar="bus", convention="act360", lag=0, eom=False, modifier="F"
),
spread_compound_method="ISDACompounding",
float_spread=50.0,
)
assert p.try_unindexed_reference_cashflow_analytic_delta(
rate_curve=NoInput(0), disc_curve=curve
).is_err
def test_ibor_param_mismatch(self):
with pytest.raises(
ValueError, match="A `fixing_series` has been provided with a publication `lag` that"
):
FloatPeriod(
start=dt(2000, 1, 1),
end=dt(2000, 4, 1),
payment=dt(2000, 4, 1),
fixing_method="ibor(1)",
fixing_series="eur_ibor",
frequency="Q",
)
class TestFixedPeriod:
def test_frequency_as_str(self):
p = FixedPeriod(
start=dt(2000, 1, 1),
end=dt(2000, 4, 1),
payment=dt(2000, 4, 1),
frequency="Q",
roll=1,
)
assert p.period_params.frequency == Frequency.Months(3, RollDay.Day(1))
def test_fixed_period_analytic_delta(self, curve, fxr) -> None:
fixed_period = FixedPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
currency="usd",
)
result = fixed_period.analytic_delta(rate_curve=curve)
assert abs(result - 24744.478172244584) < 1e-7
result = fixed_period.analytic_delta(rate_curve=curve, fx=fxr, base="nok")
assert abs(result - 247444.78172244584) < 1e-7
def test_fixed_period_analytic_delta_raises(self, curve, fxr) -> None:
fixed_period = FixedPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
currency="usd",
)
assert fixed_period.try_immediate_local_analytic_delta(rate_curve=dict()).is_err
def test_fixed_period_analytic_delta_fxr_base(self, curve, fxr) -> None:
fixed_period = FixedPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
currency="usd",
)
fxr = FXRates({"usdnok": 10.0}, base="NOK")
result = fixed_period.analytic_delta(rate_curve=curve, fx=fxr, base="NOK")
assert abs(result - 247444.78172244584) < 1e-7
@pytest.mark.parametrize(
("rate", "crv", "fx"),
[
(4.00, True, 2.0),
(NoInput(0), False, 2.0),
(4.00, True, 10.0),
(NoInput(0), False, 10.0),
],
)
def test_fixed_period_cashflows(self, curve, fxr, rate, crv, fx) -> None:
# also test the inputs to fx as float and as FXRates (10 is for
fixed_period = FixedPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
fixed_rate=rate,
)
cashflow = (
None if rate is NoInput.blank else rate * -1e9 * fixed_period.period_params.dcf / 100
)
expected = {
defaults.headers["base"]: "UNSPECIFIED",
defaults.headers["type"]: "FixedPeriod",
defaults.headers["stub_type"]: "Regular",
defaults.headers["a_acc_start"]: dt(2022, 1, 1),
defaults.headers["a_acc_end"]: dt(2022, 4, 1),
defaults.headers["payment"]: dt(2022, 4, 3),
defaults.headers["notional"]: 1e9,
defaults.headers["currency"]: "USD",
defaults.headers["convention"]: "Act360",
defaults.headers["dcf"]: fixed_period.period_params.dcf,
defaults.headers["df"]: 0.9897791268897856 if crv else None,
defaults.headers["rate"]: _drb(None, rate),
defaults.headers["spread"]: None,
defaults.headers["npv"]: -9897791.268897856 if crv else None,
defaults.headers["cashflow"]: cashflow,
defaults.headers["fx"]: fx,
defaults.headers["npv_fx"]: -9897791.268897855 * fx if crv else None,
defaults.headers["collateral"]: None,
}
if fx == 2.0:
with pytest.warns(UserWarning):
# supplying `fx` as numeric
result = fixed_period.cashflows(
rate_curve=curve if crv else NoInput(0),
fx=2.0,
base=NoInput(0),
)
else:
result = fixed_period.cashflows(
rate_curve=curve if crv else NoInput(0), fx=fxr, base="nok"
)
expected[defaults.headers["base"]] = "NOK"
assert result == expected
def test_fixed_period_npv(self, curve, fxr) -> None:
fixed_period = FixedPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
fixed_rate=4.00,
currency="usd",
)
result = fixed_period.npv(rate_curve=curve)
assert abs(result + 9897791.268897833) < 1e-7
result = fixed_period.npv(rate_curve=curve, disc_curve=curve, fx=fxr, base="nok")
assert abs(result + 98977912.68897833) < 1e-6
def test_fixed_period_npv_raises(self, curve) -> None:
fixed_period = FixedPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
fixed_rate=4.00,
currency="usd",
)
with pytest.raises(
TypeError,
match=re.escape("`curves` have not been supplied correctly"),
):
fixed_period.npv()
def test_npv_no_fixed_rate(self, curve):
period = FixedPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
currency="usd",
)
with pytest.raises(ValueError, match="A `fixed_rate` must be set for a cashflow to be de"):
period.npv(rate_curve=curve)
class TestCreditPremiumPeriod:
@pytest.mark.parametrize(
("accrued", "exp"), [(True, -9892843.47762896), (False, -9887893.477628957)]
)
def test_period_npv(self, hazard_curve, curve, fxr, accrued, exp) -> None:
premium_period = CreditPremiumPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
fixed_rate=4.0,
currency="usd",
premium_accrued=accrued,
)
result = premium_period.npv(rate_curve=hazard_curve, disc_curve=curve)
assert abs(result - exp) < 1e-7
result = premium_period.npv(rate_curve=hazard_curve, disc_curve=curve, fx=fxr, base="nok")
assert abs(result - exp * 10.0) < 1e-6
def test_period_npv_raises(self, curve, hazard_curve) -> None:
premium_period = CreditPremiumPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
fixed_rate=4.00,
currency="usd",
)
with pytest.raises(
TypeError,
match=re.escape("`curves` have not been supplied correctly."),
):
premium_period.npv(rate_curve=hazard_curve)
with pytest.raises(
TypeError,
match=re.escape("`curves` have not been supplied correctly."),
):
premium_period.npv(rate_curve=NoInput(0), disc_curve=curve)
def test_period_npv_no_spread_raises(self, curve, hazard_curve) -> None:
premium_period = CreditPremiumPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
currency="usd",
)
with pytest.raises(
ValueError,
match=re.escape("A `fixed_rate` must be set for a cashfl"),
):
premium_period.npv(rate_curve=hazard_curve, disc_curve=curve)
@pytest.mark.parametrize(
("accrued", "exp"), [(True, 24732.108694072398), (False, 24719.733694072398)]
)
def test_period_analytic_delta(self, hazard_curve, curve, fxr, accrued, exp) -> None:
premium_period = CreditPremiumPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
fixed_rate=4.00,
currency="usd",
premium_accrued=accrued,
)
result = premium_period.analytic_delta(rate_curve=hazard_curve, disc_curve=curve)
assert abs(result - exp) < 1e-7
result = premium_period.analytic_delta(
rate_curve=hazard_curve, disc_curve=curve, fx=fxr, base="nok"
)
assert abs(result - exp * 10.0) < 1e-7
def test_period_analytic_delta_fxr_base(self, hazard_curve, curve, fxr) -> None:
premium_period = CreditPremiumPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
fixed_rate=4.00,
currency="usd",
)
fxr = FXRates({"usdnok": 10.0}, base="NOK")
result = premium_period.analytic_delta(
rate_curve=hazard_curve,
disc_curve=curve,
fx=fxr,
base="nok",
)
assert abs(result - 247321.086941) < 1e-6
def test_period_cashflows(self, hazard_curve, curve, fxr) -> None:
# also test the inputs to fx as float and as FXRates (10 is for
premium_period = CreditPremiumPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
fixed_rate=4.00,
currency="usd",
)
cashflow = 400 * -1e9 * premium_period.period_params.dcf / 10000
expected = {
defaults.headers["type"]: "CreditPremiumPeriod",
defaults.headers["base"]: "NOK",
defaults.headers["stub_type"]: "Regular",
defaults.headers["a_acc_start"]: dt(2022, 1, 1),
defaults.headers["a_acc_end"]: dt(2022, 4, 1),
defaults.headers["payment"]: dt(2022, 4, 3),
defaults.headers["notional"]: 1e9,
defaults.headers["currency"]: "USD",
defaults.headers["convention"]: "Act360",
defaults.headers["dcf"]: premium_period.period_params.dcf,
defaults.headers["df"]: 0.9897791268897856,
defaults.headers["rate"]: 4.0,
defaults.headers["survival"]: 0.999,
defaults.headers["recovery"]: 0.40,
defaults.headers["spread"]: None,
defaults.headers["npv"]: -9892843.47762896,
defaults.headers["cashflow"]: cashflow,
defaults.headers["fx"]: 10.0,
defaults.headers["npv_fx"]: -9892843.47762896 * 10.0,
defaults.headers["collateral"]: None,
}
result = premium_period.cashflows(
rate_curve=hazard_curve, disc_curve=curve, fx=fxr, base="nok"
)
assert result == expected
def test_period_cashflows_no_curves(self, fxr) -> None:
# also test the inputs to fx as float and as FXRates (10 is for
premium_period = CreditPremiumPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
fixed_rate=4.00,
currency="usd",
)
cashflow = 400 * -1e9 * premium_period.period_params.dcf / 10000
expected = {
defaults.headers["type"]: "CreditPremiumPeriod",
defaults.headers["base"]: "NOK",
defaults.headers["stub_type"]: "Regular",
defaults.headers["a_acc_start"]: dt(2022, 1, 1),
defaults.headers["a_acc_end"]: dt(2022, 4, 1),
defaults.headers["payment"]: dt(2022, 4, 3),
defaults.headers["notional"]: 1e9,
defaults.headers["currency"]: "USD",
defaults.headers["convention"]: "Act360",
defaults.headers["dcf"]: premium_period.period_params.dcf,
defaults.headers["df"]: None,
defaults.headers["rate"]: 4.0,
defaults.headers["survival"]: None,
defaults.headers["recovery"]: None,
defaults.headers["spread"]: None,
defaults.headers["npv"]: None,
defaults.headers["cashflow"]: cashflow,
defaults.headers["fx"]: 10.0,
defaults.headers["npv_fx"]: None,
defaults.headers["collateral"]: None,
}
result = premium_period.cashflows(fx=fxr, base="nok")
assert result == expected
def test_mid_period_accrued(self, hazard_curve, curve):
p1 = CreditPremiumPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="ActActICMA",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
fixed_rate=4.00,
currency="usd",
adjuster="F",
)
p2 = CreditPremiumPeriod(
start=dt(2021, 10, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="ActActICMA",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(6, None),
fixed_rate=2.00,
currency="usd",
adjuster="F",
)
r1 = p1.npv(rate_curve=hazard_curve, disc_curve=curve)
r2 = p2.npv(rate_curve=hazard_curve, disc_curve=curve)
assert 2505 > r1 - r2 > 2500
def test_null_cashflow(self):
premium_period = CreditPremiumPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
currency="usd",
)
result = premium_period.try_cashflow()
assert result.is_err
def test_no_accrued(self):
premium_period = CreditPremiumPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
currency="usd",
)
assert premium_period.try_accrued(dt(2022, 2, 1)).is_err
def test_accrued_out_of_range(self):
premium_period = CreditPremiumPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
currency="usd",
fixed_rate=2.0,
)
assert premium_period.accrued(dt(2022, 9, 1)) == 0.0
assert premium_period.accrued(dt(2021, 9, 1)) == 0.0
def test_accrued(self):
premium_period = CreditPremiumPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="ActActICMA",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
currency="usd",
fixed_rate=2.0,
adjuster="F",
)
assert abs(premium_period.accrued(dt(2022, 2, 1)) - (-1e9 * 0.25 * 31 / 90 * 0.02)) < 1e-9
def test_analytic_delta_bad_curve(self):
premium_period = CreditPremiumPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="ActActICMA",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
currency="usd",
fixed_rate=2.0,
adjuster="F",
)
assert premium_period.try_local_analytic_delta(rate_curve=dict()).is_err
class TestCreditProtectionPeriod:
def test_period_npv(self, hazard_curve, curve, fxr) -> None:
period = CreditProtectionPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
# convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
currency="usd",
)
period.discretization = 1
result = period.npv(
rate_curve=hazard_curve,
disc_curve=curve,
) # discounted properly this is -596962.1422873045
assert abs(result - -596962.1422873045) < 34
period.discretization = 23
result = period.npv(rate_curve=hazard_curve, disc_curve=curve)
exp = -596995.7591843301
assert abs(result - exp) < 1e-7
result = period.npv(rate_curve=hazard_curve, disc_curve=curve, fx=fxr, base="nok")
assert abs(result - exp * 10.0) < 1e-6
def test_period_npv_raises(self, curve, hazard_curve) -> None:
period = CreditProtectionPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
# convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
currency="usd",
)
with pytest.raises(
TypeError,
match=re.escape("`curves` have not been supplied correctly."),
):
period.npv(rate_curve=hazard_curve)
with pytest.raises(
TypeError,
match=re.escape("`curves` have not been supplied correctly."),
):
period.npv(rate_curve=NoInput(0), disc_curve=curve)
def test_period_analytic_delta(self, hazard_curve, curve, fxr) -> None:
period = CreditProtectionPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
# convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
currency="usd",
)
result = period.analytic_delta(rate_curve=hazard_curve, disc_curve=curve)
assert abs(result - 0.0) < 1e-7
result = period.analytic_delta(
rate_curve=hazard_curve, disc_curve=curve, fx=fxr, base="nok"
)
assert abs(result - 0.0 * 10.0) < 1e-7
def test_period_analytic_delta_fxr_base(self, hazard_curve, curve, fxr) -> None:
period = CreditProtectionPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
# convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
currency="usd",
)
fxr = FXRates({"usdnok": 10.0}, base="NOK")
result = period.analytic_delta(rate_curve=hazard_curve, disc_curve=curve, fx=fxr)
assert abs(result - 0.0) < 1e-7
def test_period_cashflows(self, hazard_curve, curve, fxr) -> None:
# also test the inputs to fx as float and as FXRates (10 is for
period = CreditProtectionPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
# convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
currency="usd",
)
cashflow = -period.settlement_params.notional * (1 - hazard_curve.meta.credit_recovery_rate)
expected = {
defaults.headers["type"]: "CreditProtectionPeriod",
defaults.headers["stub_type"]: "Regular",
defaults.headers["a_acc_start"]: dt(2022, 1, 1),
defaults.headers["a_acc_end"]: dt(2022, 4, 1),
defaults.headers["payment"]: dt(2022, 4, 3),
defaults.headers["notional"]: 1e9,
defaults.headers["currency"]: "USD",
defaults.headers["convention"]: "One",
defaults.headers["dcf"]: period.period_params.dcf,
defaults.headers["df"]: 0.9897791268897856,
defaults.headers["recovery"]: 0.4,
defaults.headers["survival"]: 0.999,
defaults.headers["npv"]: -596995.7591843301,
defaults.headers["cashflow"]: cashflow,
defaults.headers["fx"]: 10.0,
defaults.headers["npv_fx"]: -596995.7591843301 * 10.0,
defaults.headers["collateral"]: None,
}
result = period.cashflows(rate_curve=hazard_curve, disc_curve=curve, fx=fxr, base="nok")
for key in expected:
assert key in result
assert result[key] == expected[key] or abs(result[key] - expected[key]) < 1e-6
def test_period_cashflows_no_curves(self, fxr) -> None:
# also test the inputs to fx as float and as FXRates (10 is for
period = CreditProtectionPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
# convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
currency="usd",
)
cashflow = None
expected = {
defaults.headers["type"]: "CreditProtectionPeriod",
defaults.headers["stub_type"]: "Regular",
defaults.headers["base"]: "NOK",
defaults.headers["a_acc_start"]: dt(2022, 1, 1),
defaults.headers["a_acc_end"]: dt(2022, 4, 1),
defaults.headers["payment"]: dt(2022, 4, 3),
defaults.headers["notional"]: 1e9,
defaults.headers["currency"]: "USD",
defaults.headers["convention"]: "One",
defaults.headers["dcf"]: period.period_params.dcf,
defaults.headers["df"]: None,
defaults.headers["recovery"]: None,
defaults.headers["survival"]: None,
defaults.headers["npv"]: None,
defaults.headers["cashflow"]: cashflow,
defaults.headers["fx"]: 10.0,
defaults.headers["npv_fx"]: None,
defaults.headers["collateral"]: None,
}
result = period.cashflows(fx=fxr, base="nok")
assert result == expected
def test_discretization_period(self, hazard_curve, curve):
p1 = CreditProtectionPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 1),
notional=1e9,
frequency=Frequency.Months(3, None),
)
h1 = hazard_curve.copy()
h2 = hazard_curve.copy()
h1._meta = replace(h1.meta, _credit_discretization=1)
h2._meta = replace(h2.meta, _credit_discretization=31)
r1 = p1.npv(rate_curve=h1, disc_curve=curve)
r2 = p1.npv(rate_curve=h2, disc_curve=curve)
assert 0.1 < abs(r1 - r2) < 1.0 # very similar result but not identical
def test_mid_period(self, hazard_curve, curve):
period = CreditProtectionPeriod(
start=dt(2021, 10, 4),
end=dt(2022, 1, 4),
payment=dt(2022, 1, 4),
notional=1e9,
frequency=Frequency.Months(3, None),
)
r1 = period.npv(rate_curve=hazard_curve, disc_curve=curve)
exp = -20006.321837529074
assert abs(r1 - exp) < 1e-7
def test_recovery_risk(self, hazard_curve, curve):
period = CreditProtectionPeriod(
start=dt(2021, 10, 4),
end=dt(2022, 1, 4),
payment=dt(2022, 1, 4),
notional=1e9,
frequency=Frequency.Months(3, None),
)
result = period.analytic_rec_risk(hazard_curve, curve)
p1 = period.npv(rate_curve=hazard_curve, disc_curve=curve)
hazard_curve.update_meta("credit_recovery_rate", 0.41)
p2 = period.npv(rate_curve=hazard_curve, disc_curve=curve)
expected = p2 - p1
assert abs(result - expected) < 1e-9
def test_recovery_risk_raises(self, hazard_curve, curve):
period = CreditProtectionPeriod(
start=dt(2021, 10, 4),
end=dt(2022, 1, 4),
payment=dt(2022, 1, 4),
notional=1e9,
frequency=Frequency.Months(3, None),
)
with pytest.raises(TypeError, match="`curves` have not been supplied cor"):
period.analytic_rec_risk(rate_curve=dict())
class TestCashflow:
def test_cashflow_analytic_delta(self, curve) -> None:
cashflow = Cashflow(notional=1e6, payment=dt(2022, 1, 1))
assert cashflow.analytic_delta(rate_curve=curve) == 0.0
@pytest.mark.parametrize(
("crv", "fx"),
[
(True, 2.0),
(False, 2.0),
(True, 10.0),
(False, 10.0),
],
)
def test_cashflow_cashflows(self, curve, fxr, crv, fx) -> None:
cashflow = Cashflow(notional=1e9, payment=dt(2022, 4, 3))
curve = curve if crv else NoInput(0)
expected = {
defaults.headers["base"]: "UNSPECIFIED" if fx == 2.0 else "NOK",
defaults.headers["type"]: "Cashflow",
# defaults.headers["a_acc_start"]: None,
# defaults.headers["a_acc_end"]: None,
defaults.headers["payment"]: dt(2022, 4, 3),
defaults.headers["currency"]: "USD",
defaults.headers["notional"]: 1e9,
# defaults.headers["convention"]: None,
# defaults.headers["dcf"]: None,
defaults.headers["df"]: 0.9897791268897856 if crv else None,
# defaults.headers["spread"]: None,
defaults.headers["npv"]: -989779126.8897856 if crv else None,
defaults.headers["cashflow"]: -1e9,
defaults.headers["fx"]: fx,
defaults.headers["npv_fx"]: -989779126.8897856 * fx if crv else None,
defaults.headers["collateral"]: None,
}
if fx == 2.0:
with pytest.warns(UserWarning):
# supplying `fx` as numeric
result = cashflow.cashflows(
rate_curve=curve if crv else NoInput(0),
fx=2.0,
base=NoInput(0),
)
else:
result = cashflow.cashflows(
rate_curve=curve if crv else NoInput(0),
fx=fxr,
base="nok",
)
assert result == expected
def test_cashflow_npv_raises(self, curve) -> None:
with pytest.raises(TypeError, match="`curves` have not been supplied correctly."):
Cashflow(notional=1e6, payment=dt(2022, 1, 1)).npv()
cashflow = Cashflow(notional=1e6, payment=dt(2022, 1, 1))
assert cashflow.analytic_delta(rate_curve=curve) == 0
def test_cashflow_npv_local(self, curve) -> None:
cashflow = Cashflow(notional=1e9, payment=dt(2022, 4, 3), currency="nok")
result = cashflow.npv(rate_curve=curve, local=True)
expected = {"nok": -989779126.8897856}
assert result == expected
class TestIndexFixedPeriod:
@pytest.mark.parametrize(
("method", "expected"),
[("daily", 201.00502512562812), ("monthly", 200.98317675333183)],
)
def test_period_rate(self, method, expected) -> None:
index_period = FixedPeriod(
start=dt(2022, 1, 3),
end=dt(2022, 4, 3),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 3),
frequency=Frequency.Months(3, None),
fixed_rate=4.00,
currency="usd",
index_base=100.0,
index_method=method,
)
index_curve = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2022, 4, 3): 0.995},
index_base=200.0,
interpolation="linear_index",
index_lag=3,
)
_, result, _ = index_period.index_params.index_ratio(index_curve)
assert abs(result - expected) < 1e-8
def test_period_cashflow(self) -> None:
index_period = FixedPeriod(
start=dt(2022, 1, 3),
end=dt(2022, 4, 3),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 3),
frequency=Frequency.Months(3, None),
fixed_rate=4.00,
currency="usd",
index_base=100.0,
index_lag=3,
)
index_curve = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2022, 4, 3): 0.995},
index_base=200.0,
interpolation="linear_index",
index_lag=3,
)
result = index_period.try_unindexed_reference_cashflow()
expected = -1e7 * ((dt(2022, 4, 1) - dt(2022, 1, 1)) / timedelta(days=360)) * 4
assert abs(result.unwrap() - expected) < 1e-8
result = index_period.try_cashflow(index_curve=index_curve)
expected = expected * index_curve.index_value(dt(2022, 4, 3), 3) / 100.0
assert abs(result.unwrap() - expected) < 1e-8
@pytest.mark.parametrize("method", ["daily", "curve"])
def test_period_curve_interp_method(self, method) -> None:
# both these methods of interpolation should give the same result with the way
# the curve and period are configured.
index_period = FixedPeriod(
start=dt(2022, 1, 3),
end=dt(2022, 4, 3),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 3),
frequency=Frequency.Months(3, None),
fixed_rate=4.00,
currency="usd",
index_base=100.0,
index_lag=0,
index_method=method,
)
index_curve = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2022, 4, 3): 0.995},
index_base=200.0,
interpolation="linear_index",
index_lag=0,
)
result = index_period.try_unindexed_reference_cashflow()
expected = -1e7 * ((dt(2022, 4, 1) - dt(2022, 1, 1)) / timedelta(days=360)) * 4
assert abs(result.unwrap() - expected) < 1e-8
result = index_period.try_cashflow(index_curve=index_curve)
assert abs(result.unwrap() + 20100502.512562) < 1e-6
expected = expected * index_curve.index_value(dt(2022, 4, 3), 0) / 100.0
assert abs(result.unwrap() - expected) < 1e-8
def test_period_analytic_delta(self, fxr, curve) -> None:
index_curve = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2022, 4, 3): 0.995},
index_base=200.0,
interpolation="linear_index",
)
fixed_period = FixedPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
currency="usd",
index_base=200.0,
index_fixings=300.0,
)
result = fixed_period.analytic_delta(index_curve=index_curve, rate_curve=curve)
assert abs(result - 24744.478172244584 * 300.0 / 200.0) < 1e-7
result = fixed_period.analytic_delta(
index_curve=index_curve, rate_curve=curve, fx=fxr, base="nok"
)
assert abs(result - 247444.78172244584 * 300.0 / 200.0) < 1e-7
@pytest.mark.parametrize(("fixings", "method"), [(300.0, "daily")])
def test_period_fixings_float(self, fixings, method, curve) -> None:
fixed_period = FixedPeriod(
start=dt(2022, 1, 3),
end=dt(2022, 4, 3),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 3),
frequency=Frequency.Months(3, None),
currency="usd",
index_base=200.0,
index_fixings=fixings,
index_method=method,
)
result = fixed_period.analytic_delta(index_curve=None, rate_curve=curve)
assert abs(result - 24744.478172244584 * 300.0 / 200.0) < 1e-7
@pytest.mark.skip(reason="`index_fixings` as Series removed for Period in 2.0")
@pytest.mark.parametrize(
("fixings", "method"),
[
(
Series([1.0, 300, 5], index=[dt(2022, 4, 2), dt(2022, 4, 3), dt(2022, 4, 4)]),
"daily",
),
(Series([100.0, 500], index=[dt(2022, 4, 2), dt(2022, 4, 4)]), "daily"),
(Series([300.0, 500], index=[dt(2022, 4, 1), dt(2022, 4, 5)]), "monthly"),
],
)
def test_period_fixings_series(self, fixings, method, curve) -> None:
fixed_period = FixedPeriod(
start=dt(2022, 1, 3),
end=dt(2022, 4, 3),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 3),
frequency=Frequency.Months(3, None),
currency="usd",
index_base=200.0,
index_fixings=fixings,
index_method=method,
)
result = fixed_period.analytic_delta(index_curve=None, rate_curve=curve)
assert abs(result - 24744.478172244584 * 300.0 / 200.0) < 1e-7
def test_period_raises(self) -> None:
with pytest.raises(ValueError, match="`index_method` as string: 'BAD' is not a val"):
FixedPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
currency="usd",
index_base=200.0,
index_method="BAD",
)
def test_period_npv(self, curve) -> None:
index_period = FixedPeriod(
start=dt(2022, 1, 3),
end=dt(2022, 4, 3),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 3),
frequency=Frequency.Months(3, None),
fixed_rate=4.00,
currency="usd",
index_base=100.0,
index_lag=3,
)
index_curve = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2022, 4, 3): 0.995},
index_base=200.0,
interpolation="linear_index",
index_lag=3,
)
result = index_period.npv(index_curve=index_curve, rate_curve=curve)
expected = -19895057.826930363
assert abs(result - expected) < 1e-8
result = index_period.npv(index_curve=index_curve, rate_curve=curve, local=True)
assert abs(result["usd"] - expected) < 1e-8
def test_period_npv_raises(self, curve) -> None:
index_period = FixedPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
fixed_rate=4.00,
currency="usd",
index_base=100.0,
)
with pytest.raises(
ValueError,
match=re.escape("`index_value` must be forecast from a `index_curve`"),
):
index_period.npv(disc_curve=curve)
@pytest.mark.parametrize("curve_", [True, False])
def test_period_cashflows(self, curve, curve_) -> None:
curve = curve if curve_ else NoInput(0)
index_period = FixedPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 4, 1),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 1),
frequency=Frequency.Months(3, None),
fixed_rate=4.00,
currency="usd",
index_base=100.0,
index_fixings=200.0,
)
result = index_period.cashflows(rate_curve=curve)
expected = {
"Type": "FixedPeriod",
"Period": "Regular",
"Ccy": "USD",
"Base Ccy": "USD",
"Acc Start": dt(2022, 1, 1),
"Acc End": dt(2022, 4, 1),
"Payment": dt(2022, 4, 3),
"Convention": "Act360",
"DCF": 0.25,
"DF": 0.9897791268897856 if curve_ else None,
"Notional": 1e9,
"Rate": 4.0,
"Spread": None,
"Cashflow": -20000000.0,
"Unindexed Cashflow": -10e6,
"Index Fix Date": dt(2022, 4, 1),
"Index Base": 100.0,
"Index Val": 200.0,
"Index Ratio": 2.0,
"NPV": -19795582.53779571 if curve_ else None,
"FX Rate": 1.0,
"NPV Ccy": -19795582.53779571 if curve_ else None,
defaults.headers["collateral"]: None,
}
assert result == expected
def test_cashflow_returns_err(self) -> None:
i_period = FixedPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 2, 1),
payment=dt(2022, 2, 1),
frequency=Frequency.Months(1, None),
index_base=100.0,
)
assert i_period.try_cashflow().is_err
assert i_period.try_unindexed_cashflow().is_err
def test_cashflow_no_index_rate(self) -> None:
i_period = FixedPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 2, 1),
payment=dt(2022, 2, 1),
frequency=Frequency.Months(1, None),
index_base=100.0,
)
result = i_period.cashflows()
assert result[defaults.headers["index_ratio"]] is None
def test_bad_curve(self) -> None:
i_period = FixedPeriod(
start=dt(2022, 1, 1),
end=dt(2022, 2, 1),
payment=dt(2022, 2, 1),
frequency=Frequency.Months(1, None),
index_base=100.0,
)
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99})
with pytest.raises(ValueError, match="Curve must be initialised with an `index_base`"):
i_period.index_params.index_ratio(curve)
def test_index_fixings_linear_interp(self) -> None:
i_fixings = Series([173.1, 174.2], index=[dt(2001, 6, 1), dt(2001, 7, 1)])
result = _try_index_value(
index_fixings=i_fixings,
index_curve=NoInput(0),
index_date=dt(2001, 7, 20),
index_lag=1,
index_method=IndexMethod.Daily,
)
expected = 173.1 + 19 / 31 * (174.2 - 173.1)
assert abs(result.unwrap() - expected) < 1e-6
def test_composite_curve(self) -> None:
index_period = FixedPeriod(
start=dt(2022, 1, 3),
end=dt(2022, 4, 3),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 3),
frequency=Frequency.Months(3, None),
fixed_rate=4.00,
currency="usd",
index_base=100.0,
)
index_curve = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2022, 4, 3): 0.995},
index_base=200.0,
interpolation="linear_index",
)
composite_curve = CompositeCurve([index_curve])
_, result, _ = index_period.index_params.index_ratio(composite_curve)
def test_composite_curve_raises(self) -> None:
index_period = FixedPeriod(
start=dt(2022, 1, 3),
end=dt(2022, 4, 3),
payment=dt(2022, 4, 3),
notional=1e9,
convention="Act360",
termination=dt(2022, 4, 3),
frequency=Frequency.Months(3, None),
fixed_rate=4.00,
currency="usd",
index_base=100.0,
)
curve = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2022, 4, 3): 0.995},
)
composite_curve = CompositeCurve([curve])
with pytest.raises(ValueError, match="Curve must be initialised with an `index_base`"):
_, result, _ = index_period.index_params.index_ratio(composite_curve)
@pytest.mark.parametrize(
("method", "expected"),
[("daily", 201.00573790940518), ("monthly", 200.9836416123169)],
)
def test_index_lag_on_period_zero_curve(self, method, expected):
# test if a period can calculate the correct value by referencing a curve with
# zero index lag.
index_period = FixedPeriod(
start=dt(2022, 1, 3),
end=dt(2022, 4, 3),
payment=dt(2022, 4, 3),
notional=1e6,
convention="30360",
termination=dt(2022, 4, 3),
frequency=Frequency.Months(3, None),
fixed_rate=4.00,
currency="usd",
index_base=100.0,
index_method=method,
index_lag=3,
)
index_curve = Curve(
nodes={dt(2021, 10, 1): 1.0, dt(2022, 1, 3): 0.995},
index_base=200.0,
interpolation="linear_index",
index_lag=0,
)
discount_curve = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2022, 4, 3): 0.99},
)
_, result, _ = index_period.index_params.index_ratio(index_curve)
npv = index_period.npv(index_curve=index_curve, rate_curve=discount_curve)
assert abs(result - expected) < 1e-8
expected_npv = -1e6 * 0.04 * 0.25 * result * 0.99 / 100.0
assert abs(npv - expected_npv) < 1e-5
def test_cashflows_available_with_series_fixings(self):
RPI = DataFrame(
[
[dt(2024, 2, 1), 381.0],
[dt(2024, 3, 1), 383.0],
[dt(2024, 4, 1), 385.0],
[dt(2024, 5, 1), 386.4],
[dt(2024, 6, 1), 387.3],
[dt(2024, 7, 1), 387.5],
[dt(2024, 8, 1), 389.9],
[dt(2024, 9, 1), 388.6],
[dt(2024, 10, 1), 390.7],
[dt(2024, 11, 1), 390.9],
[dt(2024, 12, 1), 392.1],
[dt(2025, 1, 1), 391.7],
[dt(2025, 2, 1), 394.0],
[dt(2025, 3, 1), 395.3],
],
columns=["month", "rate"],
).set_index("month")["rate"]
fixings.add("CPI_INDEX", RPI)
period = FixedPeriod(
start=dt(2024, 11, 27),
end=dt(2025, 5, 27),
fixed_rate=2.0,
index_lag=3,
index_fixings="CPI_INDEX",
index_base_date=dt(2024, 11, 27),
frequency=Frequency.Months(6, None),
payment=dt(2025, 5, 27),
)
result = period.cashflows()
fixings.pop("CPI_INDEX")
assert result["Index Base"] == 389.9 + (388.6 - 389.9) * (27 - 1) / 30
assert result["Index Val"] == 394 + (395.3 - 394) * (27 - 1) / 31
class TestIndexCashflow:
def test_cashflow_analytic_delta(self, curve) -> None:
cashflow = Cashflow(notional=1e6, payment=dt(2022, 1, 1), index_base=100, index_fixings=105)
assert cashflow.analytic_delta(disc_curve=curve) == 0
def test_index_cashflow(self) -> None:
cf = Cashflow(notional=1e6, payment=dt(2022, 1, 1), index_base=100, index_fixings=200)
assert cf.try_unindexed_reference_cashflow().unwrap() == -1e6
assert cf.try_cashflow().unwrap() == -2e6
def test_index_cashflow_npv(self, curve) -> None:
cf = Cashflow(notional=1e6, payment=dt(2022, 1, 1), index_base=100.0, index_fixings=200)
assert abs(cf.npv(rate_curve=curve) + 2e6) < 1e-6
def test_cashflow_no_index_rate(self) -> None:
i_period = Cashflow(
notional=200.0,
payment=dt(2022, 2, 1),
index_base=100.0,
)
result = i_period.cashflows()
assert result[defaults.headers["index_ratio"]] is None
def test_index_only(self, curve) -> None:
cf = Cashflow(
notional=1e6,
payment=dt(2022, 1, 1),
index_base=100,
index_fixings=200,
index_only=True,
)
assert abs(cf.npv(rate_curve=curve) + 1e6) < 1e-6
def test_index_cashflow_floats(self, curve) -> None:
icurve = Curve(
nodes={
dt(2022, 1, 1): 1.00,
dt(2022, 4, 1): 0.99,
dt(2022, 7, 1): 0.98,
dt(2022, 10, 1): 0.97,
},
index_base=100.0,
interpolation="linear_index",
)
icurve._set_ad_order(1)
curve._set_ad_order(1)
cf = Cashflow(notional=1e6, payment=dt(2022, 7, 1), index_base=100)
result = cf.cashflows(index_curve=icurve, disc_curve=curve)
assert isinstance(result["Cashflow"], float)
class TestMtmCashflow:
def test_cashflow(self):
p = MtmCashflow(
currency="usd",
notional=2e6,
payment=dt(2000, 1, 10),
pair="eurusd",
fx_fixings_start=2.0,
fx_fixings_end=2.2,
start=dt(2000, 1, 1),
end=dt(2000, 1, 10),
)
result = p.try_unindexed_reference_cashflow().unwrap()
expected = -0.2 * 2e6
assert abs(result - expected) < 1e-9
def test_cashflow_reversed(self):
p = MtmCashflow(
currency="usd",
notional=2e6,
payment=dt(2000, 1, 10),
pair="usdeur",
fx_fixings_start=0.5,
fx_fixings_end=1.0 / 2.2,
start=dt(2000, 1, 1),
end=dt(2000, 1, 10),
)
result = p.try_unindexed_reference_cashflow()
expected = -0.2 * 2e6
assert abs(result.unwrap() - expected) < 1e-9
class TestNonDeliverableCashflow:
@pytest.fixture(scope="class")
def fxf_ndf(self):
fxr = FXRates({"brlusd": 0.200}, settlement=dt(2025, 1, 23))
fxf = FXForwards(
fx_rates=fxr,
fx_curves={
"brlbrl": Curve({dt(2025, 1, 21): 1.0, dt(2026, 1, 23): 0.98}),
"usdusd": Curve({dt(2025, 1, 21): 1.0, dt(2026, 1, 23): 0.96}),
"brlusd": Curve({dt(2025, 1, 21): 1.0, dt(2026, 1, 23): 0.978}),
},
)
return fxf
def test_npv(self, fxf_ndf):
ndf = Cashflow(
notional=1e6,
currency="usd",
pair=FXIndex("brlusd", "all", 0),
payment=dt(2025, 6, 1),
)
result = ndf.npv(disc_curve=fxf_ndf.curve("usd", "usd"), fx=fxf_ndf)
expected = -1e6 * 0.20131018767289705 * 0.9855343095437953
assert abs(result - expected) < 1e-8
def test_npv_reversed(self, fxf_ndf):
ndf = Cashflow(
notional=1e6,
currency="usd",
pair=FXIndex("usdbrl", "all", 0),
payment=dt(2025, 6, 1),
)
result = ndf.npv(disc_curve=fxf_ndf.curve("usd", "usd"), fx=fxf_ndf)
expected = -1e6 * 0.20131018767289705 * 0.9855343095437953
assert abs(result - expected) < 1e-8
def test_npv_fixing(self, fxf_ndf):
ndf = Cashflow(
notional=1e6,
currency="usd",
pair=FXIndex("brlusd", "all", 0),
payment=dt(2025, 6, 1),
fx_fixings=0.25,
)
result = ndf.npv(disc_curve=fxf_ndf.curve("usd", "usd"), fx=fxf_ndf)
expected = -1e6 * 0.25 * 0.9855343095437953
assert abs(result - expected) < 1e-8
def test_rate_as_fixing(self, fxf_ndf):
ndf = Cashflow(
notional=1e6,
currency="usd",
pair=FXIndex("brlusd", "all", 0),
payment=dt(2025, 6, 1),
fx_fixings=0.25,
)
result = ndf.non_deliverable_params.fx_fixing.value
expected = 0.25
assert abs(result - expected) < 1e-8
def test_forecast_as_fixing(self, fxf_ndf):
ndf = Cashflow(
notional=1e6,
currency="usd",
pair=FXIndex("brlusd", "all", 0),
payment=dt(2025, 6, 1),
fx_fixings=0.25,
)
result = ndf.non_deliverable_params.fx_fixing.try_value_or_forecast(fx=fxf_ndf).unwrap()
expected = 0.25
assert abs(result - expected) < 1e-8
def test_rate(self, fxf_ndf):
ndf = Cashflow(
notional=1e6,
currency="usd",
pair=FXIndex("brlusd", "all", 0),
payment=dt(2025, 6, 1),
)
result = ndf.non_deliverable_params.fx_fixing.try_value_or_forecast(fx=fxf_ndf).unwrap()
expected = fxf_ndf.rate(ndf.non_deliverable_params.pair, dt(2025, 6, 1))
assert abs(result - expected) < 1e-8
def test_forecast_rate(self, fxf_ndf):
ndf = Cashflow(
notional=1e6,
currency="usd",
pair=FXIndex("brlusd", "all", 0),
payment=dt(2025, 6, 1),
)
result = ndf.non_deliverable_params.fx_fixing.try_value_or_forecast(fx=fxf_ndf).unwrap()
expected = fxf_ndf.rate(ndf.non_deliverable_params.pair, dt(2025, 6, 1))
assert abs(result - expected) < 1e-8
def test_cashflows_priced(self, fxf_ndf):
ndf = Cashflow(
notional=1e6,
currency="usd",
pair=FXIndex("brlusd", "all", 0),
payment=dt(2025, 6, 1),
fx_fixings=0.25,
)
result = ndf.cashflows(disc_curve=fxf_ndf.curve("usd", "usd"), fx=fxf_ndf)
expected = {
"Base Ccy": "USD",
"Cashflow": -250000.0,
"Ccy": "USD",
"Collateral": "usd",
"DF": 0.9855343095437953,
"FX Rate": 1.0,
"NPV": -246383.57738594883,
"NPV Ccy": -246383.57738594883,
"Notional": 1000000.0,
"Payment": dt(2025, 6, 1, 0, 0),
"FX Fix Date": dt(2025, 6, 1),
"FX Fixing": 0.25,
"Reference Ccy": "BRL",
"Type": "Cashflow",
}
assert result == expected
def test_cashflows_no_args(self):
ndf = Cashflow(
notional=1e6,
currency="usd",
pair=FXIndex("brlusd", "all", 0),
payment=dt(2025, 6, 1),
)
result = ndf.cashflows()
expected = {
"Base Ccy": "USD",
"Cashflow": None,
"Ccy": "USD",
"Collateral": None,
"DF": None,
"FX Rate": 1.0,
"FX Fixing": None,
"FX Fix Date": dt(2025, 6, 1),
"NPV": None,
"NPV Ccy": None,
"Notional": 1000000.0,
"Reference Ccy": "BRL",
"Payment": dt(2025, 6, 1),
"Type": "Cashflow",
}
assert result == expected
def test_analytic_delta(self, curve):
ndf = Cashflow(
notional=1e6,
currency="usd",
pair=FXIndex("brlusd", "all", 0),
payment=dt(2025, 6, 1),
fx_fixings=0.25,
)
assert ndf.analytic_delta(disc_curve=curve) == 0.0
class TestNonDeliverableFixedPeriod:
@pytest.fixture(scope="class")
def fxf_ndf(self):
fxr = FXRates({"brlusd": 0.200}, settlement=dt(2025, 1, 23))
fxf = FXForwards(
fx_rates=fxr,
fx_curves={
"brlbrl": Curve({dt(2025, 1, 21): 1.0, dt(2026, 1, 23): 0.98}),
"usdusd": Curve({dt(2025, 1, 21): 1.0, dt(2026, 1, 23): 0.96}),
"brlusd": Curve({dt(2025, 1, 21): 1.0, dt(2026, 1, 23): 0.978}),
},
)
return fxf
@pytest.mark.parametrize("fx_fixing", [NoInput(0), 5.00])
def test_cashflow_reversed(self, fx_fixing, fxf_ndf):
ndfp = FixedPeriod(
start=dt(2025, 2, 1),
end=dt(2025, 5, 1),
payment=dt(2025, 5, 1),
convention="30e360",
currency="usd",
pair=FXIndex("usdbrl", "all", 0),
notional=1e6,
fx_fixings=fx_fixing,
frequency=Frequency.Months(3, None),
fixed_rate=3.0,
)
cf = ndfp.try_cashflow(fx=fxf_ndf).unwrap()
fx_fixing = ndfp.non_deliverable_params.fx_fixing.try_value_or_forecast(fx=fxf_ndf).unwrap()
expected = -1e6 * 0.25 * 0.03 / fx_fixing # in USD
assert abs(cf - expected) < 1e-8
@pytest.mark.parametrize("fx_fixing", [NoInput(0), 0.2])
def test_cashflow(self, fx_fixing, fxf_ndf):
ndfp = FixedPeriod(
start=dt(2025, 2, 1),
end=dt(2025, 5, 1),
payment=dt(2025, 5, 1),
convention="30e360",
currency="usd",
pair=FXIndex("brlusd", "all", 0),
notional=0.2e6,
fx_fixings=fx_fixing,
frequency=Frequency.Months(3, None),
fixed_rate=3.0,
)
cf = ndfp.try_cashflow(fx=fxf_ndf).unwrap()
fx_fixing = ndfp.non_deliverable_params.fx_fixing.try_value_or_forecast(fx=fxf_ndf).unwrap()
expected = -0.2e6 * 0.25 * 0.03 * fx_fixing # in USD
assert abs(cf - expected) < 1e-8
@pytest.mark.parametrize("fx_fixing", [NoInput(0), 0.20])
def test_cashflow_err(self, fx_fixing, fxf_ndf):
ndfp = FixedPeriod(
start=dt(2025, 2, 1),
end=dt(2025, 5, 1),
payment=dt(2025, 5, 1),
convention="30e360",
currency="usd",
pair=FXIndex("brlusd", "all", 0),
notional=1e6,
fx_fixings=fx_fixing,
frequency=Frequency.Months(3, None),
)
assert ndfp.try_cashflow(fx=fxf_ndf).is_err
@pytest.mark.parametrize("fx_fixing", [NoInput(0), 5.0])
def test_analytic_delta(self, fx_fixing, fxf_ndf):
ndfp = FixedPeriod(
start=dt(2025, 2, 1),
end=dt(2025, 5, 1),
payment=dt(2025, 5, 1),
convention="30e360",
currency="usd",
pair=FXIndex("usdbrl", "all", 0),
notional=1e9,
fx_fixings=fx_fixing,
frequency=Frequency.Months(3, None),
fixed_rate=3.0,
)
curve = fxf_ndf.curve("usd", "usd")
result = ndfp.analytic_delta(rate_curve=curve, fx=fxf_ndf)
fx_fixing = ndfp.non_deliverable_params.fx_fixing.try_value_or_forecast(fx=fxf_ndf).unwrap()
expected = 1e9 * 0.25 * 0.0001 * curve[dt(2025, 5, 1)] / fx_fixing # in USD
assert abs(result - expected) < 1e-8
@pytest.mark.parametrize("fx_conv", [FXRates({"usdeur": 105.0}), 105.0])
def test_analytic_delta_base(self, fx_conv, fxf_ndf):
ndfp = FixedPeriod(
start=dt(2025, 2, 1),
end=dt(2025, 5, 1),
payment=dt(2025, 5, 1),
convention="30e360",
currency="usd",
pair=FXIndex("usdbrl", "all", 0),
notional=1e9,
fx_fixings=5.0,
frequency=Frequency.Months(3, None),
fixed_rate=3.0,
)
curve = fxf_ndf.curve("usd", "usd")
result = ndfp.analytic_delta(rate_curve=curve, fx=fx_conv, base="eur")
fx_fixing = 5.0
expected = 105 * 1e9 * 0.25 * 0.0001 * curve[dt(2025, 5, 1)] / fx_fixing # in USD
assert abs(result - expected) < 1e-8
@pytest.mark.parametrize("fx_fixing", [NoInput(0), 5.0])
def test_npv_reversed(self, fx_fixing, fxf_ndf):
ndfp = FixedPeriod(
start=dt(2025, 2, 1),
end=dt(2025, 5, 1),
payment=dt(2025, 5, 1),
convention="30e360",
currency="usd",
pair=FXIndex("usdbrl", "all", 0),
notional=1e9,
fx_fixings=fx_fixing,
frequency=Frequency.Months(3, None),
fixed_rate=3.0,
)
curve = fxf_ndf.curve("usd", "usd")
result = ndfp.npv(rate_curve=curve, fx=fxf_ndf)
fx_fixing = ndfp.non_deliverable_params.fx_fixing.try_value_or_forecast(fx=fxf_ndf).unwrap()
expected = -1e9 * 0.25 * 0.03 * curve[dt(2025, 5, 1)] / fx_fixing # in USD
assert abs(result - expected) < 1e-8
@pytest.mark.parametrize("fx_fixing", [NoInput(0), 0.20])
def test_npv(self, fx_fixing, fxf_ndf):
ndfp = FixedPeriod(
start=dt(2025, 2, 1),
end=dt(2025, 5, 1),
payment=dt(2025, 5, 1),
convention="30e360",
currency="usd",
pair=FXIndex("brlusd", "all", 0),
notional=1e9,
fx_fixings=fx_fixing,
frequency=Frequency.Months(3, None),
fixed_rate=3.0,
)
curve = fxf_ndf.curve("usd", "usd")
result = ndfp.npv(rate_curve=curve, fx=fxf_ndf)
fx_fixing = ndfp.non_deliverable_params.fx_fixing.try_value_or_forecast(fx=fxf_ndf).unwrap()
expected = -1e9 * 0.25 * 0.03 * curve[dt(2025, 5, 1)] * fx_fixing # in USD
assert abs(result - expected) < 1e-8
@pytest.mark.parametrize("curve", [True, False])
@pytest.mark.parametrize("fixed_rate", [3.0])
def test_cashflows(self, curve, fixed_rate, fxf_ndf):
curve_ = fxf_ndf.curve("usd", "usd") if curve else NoInput(0)
ndfp = FixedPeriod(
start=dt(2025, 2, 1),
end=dt(2025, 5, 1),
payment=dt(2025, 5, 1),
convention="30e360",
currency="usd",
pair=FXIndex("brlusd", "all", 0),
notional=1e9,
fx_fixings=NoInput(0),
frequency=Frequency.Months(3, None),
fixed_rate=fixed_rate,
)
result = ndfp.cashflows(rate_curve=curve_, fx=fxf_ndf)
expected = {
"Acc End": dt(2025, 5, 1, 0, 0),
"Acc Start": dt(2025, 2, 1, 0, 0),
"Cashflow": -1507459.1627133065,
"Base Ccy": "USD",
"Ccy": "USD",
"Collateral": "usd" if curve else None,
"Convention": "30e360",
"DCF": 0.25,
"DF": 0.9889384743344495 if curve else None,
"FX Rate": 1.0,
"FX Fixing": 0.20099455502844088,
"FX Fix Date": dt(2025, 5, 1),
"NPV": -1490784.364495184 if curve else None,
"NPV Ccy": -1490784.364495184 if curve else None,
"Notional": 1000000000.0,
"Reference Ccy": "BRL",
"Payment": dt(2025, 5, 1, 0, 0),
"Period": "Regular",
"Rate": 3.0,
"Spread": None,
"Type": "FixedPeriod",
}
assert result == expected
class TestZeroFixedPeriod:
def test_cashflows(self):
zp = ZeroFixedPeriod(
schedule=Schedule(
effective=dt(2000, 1, 1),
termination=dt(2003, 6, 1),
frequency="A",
),
convention="1",
fixed_rate=1.0,
)
cf = zp.cashflows()
assert cf[defaults.headers["dcf"]] == 4.0
assert cf[defaults.headers["cashflow"]] == ((1 + 0.01) ** 4 - 1) * -1e6
def test_base_period_dates_raise() -> None:
with pytest.raises(ValueError):
_ = FixedPeriod(
start=dt(2023, 1, 1),
end=dt(2022, 1, 1),
payment=dt(2024, 1, 1),
frequency=Frequency.Months(3, None),
)
@pytest.fixture
def fxfo():
# FXForwards for FX Options tests
eureur = Curve(
{dt(2023, 3, 16): 1.0, dt(2023, 9, 16): 0.9851909811629752},
calendar="tgt",
id="eureur",
)
usdusd = Curve(
{dt(2023, 3, 16): 1.0, dt(2023, 9, 16): 0.976009366603271},
calendar="nyc",
id="usdusd",
)
eurusd = Curve({dt(2023, 3, 16): 1.0, dt(2023, 9, 16): 0.987092591908283}, id="eurusd")
fxr = FXRates({"eurusd": 1.0615}, settlement=dt(2023, 3, 20))
fxf = FXForwards(fx_curves={"eureur": eureur, "eurusd": eurusd, "usdusd": usdusd}, fx_rates=fxr)
# fxf.swap("eurusd", [dt(2023, 3, 20), dt(2023, 6, 20)]) = 60.10
return fxf
@pytest.fixture
def fxvs():
vol_ = FXDeltaVolSmile(
nodes={
0.25: 8.9,
0.5: 8.7,
0.75: 10.15,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
)
return vol_
class TestFXOption:
# replicate https://quant.stackexchange.com/a/77802/29443
@pytest.mark.parametrize(
("pay", "k", "exp_pts", "exp_prem", "dlty", "exp_dl"),
[
(dt(2023, 3, 20), 1.101, 69.378, 138756.54, "spot", 0.250124),
(dt(2023, 3, 20), 1.101, 69.378, 138756.54, "forward", 0.251754),
(dt(2023, 6, 20), 1.101, 70.226, 140451.53, "spot", 0.250124),
(dt(2023, 6, 20), 1.101, 70.226, 140451.53, "forward", 0.251754),
(dt(2023, 6, 20), 1.10101922, 70.180, 140360.17, "spot", 0.250000),
],
)
@pytest.mark.parametrize("smile", [False, True])
def test_premium_big_usd_pips(
self,
fxfo,
fxvs,
pay,
k,
exp_pts,
exp_prem,
dlty,
exp_dl,
smile,
) -> None:
vol_ = (
8.9
if not smile
else FXDeltaVolSmile(
nodes={0.5: 8.9},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type=dlty,
)
)
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=pay,
strike=k,
notional=20e6,
delta_type=dlty,
)
result = fxo.try_rate(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol_,
forward=pay,
).unwrap()
expected = exp_pts
assert abs(result - expected) < 1e-3
result = 20e6 * result / 10000
expected = exp_prem
assert abs(result - expected) < 1e-2
result = fxo.analytic_greeks(
fxfo.curve("eur", "usd"),
fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol_,
)["delta"]
expected = exp_dl
assert abs(result - expected) < 1e-6
@pytest.mark.parametrize(
("pay", "k", "exp_pts", "exp_prem", "dlty", "exp_dl"),
[
(dt(2023, 3, 20), 1.101, 0.6536, 130717.44, "spot_pa", 0.243588),
(dt(2023, 3, 20), 1.101, 0.6536, 130717.44, "forward_pa", 0.245175),
(dt(2023, 6, 20), 1.101, 0.6578, 131569.29, "spot_pa", 0.243548),
(dt(2023, 6, 20), 1.101, 0.6578, 131569.29, "forward_pa", 0.245178),
],
)
@pytest.mark.parametrize("smile", [False, True])
def test_premium_big_eur_pc(self, fxfo, pay, k, exp_pts, exp_prem, dlty, exp_dl, smile) -> None:
vol_ = (
8.9
if not smile
else FXDeltaVolSmile(
nodes={0.5: 8.9},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type=dlty,
)
)
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=pay,
strike=k,
notional=20e6,
delta_type=dlty,
metric="percent",
)
result = fxo.try_rate(
fxfo.curve("eur", "usd"),
fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol_,
forward=pay,
).unwrap()
expected = exp_pts
assert abs(result - expected) < 1e-3
result = 20e6 * result / 100
expected = exp_prem
assert abs(result - expected) < 1e-1
result = fxo.analytic_greeks(
fxfo.curve("eur", "usd"),
fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol_,
premium=exp_prem,
premium_payment=pay,
)["delta"]
expected = exp_dl
assert abs(result - expected) < 5e-5
@pytest.mark.parametrize("smile", [False, True])
def test_npv(self, fxfo, smile) -> None:
vol_ = (
8.9
if not smile
else FXDeltaVolSmile(
nodes={0.5: 8.9},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
)
)
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
strike=1.101,
notional=20e6,
)
result = fxo.npv(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol_,
)
result /= fxfo.curve("usd", "usd")[dt(2023, 6, 20)]
expected = 140451.5273 # 140500 USD premium according to Tullets calcs (may be rounded)
assert abs(result - expected) < 1e-3
@pytest.mark.parametrize("smile", [False, True])
def test_npv_in_past(self, fxfo, smile) -> None:
vol_ = (
8.9
if not smile
else FXDeltaVolSmile(
nodes={0.5: 8.9},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
)
)
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2022, 6, 16),
delivery=dt(2022, 6, 20),
# payment=dt(2022, 6, 20),
strike=1.101,
notional=20e6,
)
result = fxo.npv(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol_,
)
assert result == 0.0
def test_npv_option_fixing(self, fxfo) -> None:
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 3, 15),
delivery=dt(2023, 3, 17),
# payment=dt(2023, 3, 17),
strike=1.101,
notional=20e6,
option_fixings=1.102,
)
result = fxo.npv(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=8.9,
)
expected = (1.102 - 1.101) * 20e6 * fxfo.curve("usd", "usd")[dt(2023, 3, 17)]
assert abs(result - expected) < 1e-9
# valuable put
fxo = FXPutPeriod(
pair="eurusd",
expiry=dt(2023, 3, 15),
delivery=dt(2023, 3, 17),
# payment=dt(2023, 3, 17),
strike=1.101,
notional=20e6,
option_fixings=1.100,
)
result = fxo.npv(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=8.9,
)
expected = (1.101 - 1.100) * 20e6 * fxfo.curve("usd", "usd")[dt(2023, 3, 17)]
assert abs(result - expected) < 1e-9
# worthless option
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 3, 15),
delivery=dt(2023, 3, 17),
# payment=dt(2023, 3, 17),
strike=1.101,
notional=20e6,
option_fixings=1.100,
)
result = fxo.npv(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=8.9,
)
expected = 0.0
assert abs(result - expected) < 1e-9
def test_rate_metric_raises(self, fxfo) -> None:
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
strike=1.101,
notional=20e6,
)
with pytest.raises(ValueError, match="FXOption `metric` as string: 'bad' i"):
fxo.rate(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=8.9,
metric="bad",
)
@pytest.mark.parametrize("smile", [False, True])
def test_premium_points(self, fxfo, smile) -> None:
vol_ = (
8.9
if not smile
else FXDeltaVolSmile(
nodes={0.5: 8.9},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
)
)
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
strike=1.101,
notional=20e6,
)
result = fxo.rate(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol_,
)
expected = 70.225764 # 70.25 premium according to Tullets calcs (may be rounded)
assert abs(result - expected) < 1e-6
def test_implied_vol(self, fxfo) -> None:
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
strike=1.101,
notional=20e6,
)
result = fxo.implied_vol(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
premium=70.25,
)
expected = 8.90141775 # Tullets have trade confo at 8.9%
assert abs(expected - result) < 1e-8
premium_pc = 0.007025 / fxfo.rate("eurusd", fxo.fx_option_params.delivery) * 100.0
result = fxo.implied_vol(
fxfo.curve("eur", "usd"),
fxfo.curve("usd", "usd"),
fx=fxfo,
premium=premium_pc,
metric="percent",
)
assert abs(expected - result) < 1e-8
@pytest.mark.parametrize("smile", [False, True])
def test_premium_put(self, fxfo, smile) -> None:
vol_ = (
10.15
if not smile
else FXDeltaVolSmile(
nodes={0.5: 10.15},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
)
)
fxo = FXPutPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
strike=1.033,
notional=20e6,
)
result = fxo.rate(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol_,
)
expected = 83.836959 # Tullets trade confo has 83.75
assert abs(result - expected) < 1e-6
@pytest.mark.parametrize("smile", [False, True])
def test_npv_put(self, fxfo, smile) -> None:
vol_ = (
10.15
if not smile
else FXDeltaVolSmile(
nodes={0.5: 10.15},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
)
)
fxo = FXPutPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
strike=1.033,
notional=20e6,
)
result = (
fxo.npv(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol_,
)
/ fxfo.curve("usd", "usd")[dt(2023, 6, 20)]
)
expected = 167673.917818 # Tullets trade confo has 167 500
assert abs(result - expected) < 1e-6
@pytest.mark.parametrize(
("dlty", "delta", "exp_k"),
[
(FXDeltaMethod.Forward, 0.25, 1.101271021340),
(FXDeltaMethod.ForwardPremiumAdjusted, 0.25, 1.10023348001),
(FXDeltaMethod.Forward, 0.251754, 1.100999951),
(FXDeltaMethod.ForwardPremiumAdjusted, 0.8929, 0.9748614298),
# close to peak of premium adjusted delta graph.
(FXDeltaMethod.Spot, 0.25, 1.10101920113408),
(FXDeltaMethod.SpotPremiumAdjusted, 0.25, 1.099976469786),
(FXDeltaMethod.Spot, 0.251754, 1.10074736155),
(FXDeltaMethod.SpotPremiumAdjusted, 0.8870, 0.97543175409),
# close to peak of premium adjusted delta graph.
],
)
@pytest.mark.parametrize("smile", [False, True])
def test_strike_from_delta(self, fxfo, dlty, delta, exp_k, smile) -> None:
# https://quant.stackexchange.com/a/77802/29443
vol_ = (
8.9
if not smile
else FXDeltaVolSmile(
nodes={0.5: 8.9},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type=dlty,
)
)
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
strike=1.101,
notional=20e6,
delta_type=dlty,
)
result = fxo._index_vol_and_strike_from_delta(
delta,
dlty,
vol_,
fxfo.curve("eur", "usd")[fxo.fx_option_params.delivery],
fxfo.curve("eur", "usd")[dt(2023, 3, 20)],
fxfo.rate("eurusd", dt(2023, 6, 20)),
fxo.fx_option_params.time_to_expiry(fxfo.curve("usd", "usd").nodes.initial),
)[2]
expected = exp_k
assert abs(result - expected) < 1e-8
## Round trip test
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
strike=float(result),
notional=20e6,
delta_type=dlty,
)
result2 = fxo.analytic_greeks(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol_,
)["delta"]
assert abs(result2 - delta) < 1e-8
def test_payoff_at_expiry(self, fxfo) -> None:
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
strike=1.101,
notional=20e6,
)
result = fxo._payoff_at_expiry(rng=[1.07, 1.13])
assert result[0][0] == 1.07
assert result[0][-1] == 1.13
assert result[1][0] == 0.0
assert result[1][-1] == (1.13 - 1.101) * 20e6
def test_payoff_at_expiry_put(self, fxfo) -> None:
fxo = FXPutPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
strike=1.101,
notional=20e6,
)
result = fxo._payoff_at_expiry(rng=[1.07, 1.13])
assert result[0][0] == 1.07
assert result[0][-1] == 1.13
assert result[1][0] == (1.101 - 1.07) * 20e6
assert result[1][-1] == 0.0
@pytest.mark.parametrize(
"delta_type",
[
FXDeltaMethod.Spot,
FXDeltaMethod.SpotPremiumAdjusted,
FXDeltaMethod.Forward,
FXDeltaMethod.ForwardPremiumAdjusted,
],
)
@pytest.mark.parametrize(
"smile_type",
[
FXDeltaMethod.Spot,
FXDeltaMethod.SpotPremiumAdjusted,
FXDeltaMethod.Forward,
FXDeltaMethod.ForwardPremiumAdjusted,
],
)
@pytest.mark.parametrize("delta", [-0.1, -0.25, -0.75, -0.9, -1.5])
@pytest.mark.parametrize("vol_smile", [True, False])
def test_strike_and_delta_idx_multisolve_from_delta_put(
self,
fxfo,
delta_type,
smile_type,
delta,
vol_smile,
) -> None:
if delta < -1.0 and delta_type not in [
FXDeltaMethod.SpotPremiumAdjusted,
FXDeltaMethod.ForwardPremiumAdjusted,
]:
pytest.skip("Put delta cannot be below -1.0 in unadjusted cases.")
fxo = FXPutPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
strike=1.033,
notional=20e6,
delta_type=delta_type,
)
if vol_smile:
vol_ = FXDeltaVolSmile(
nodes={
0.25: 8.9,
0.5: 8.7,
0.75: 10.15,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type=smile_type,
)
else:
vol_ = 9.00
result = fxo._index_vol_and_strike_from_delta(
delta,
delta_type,
vol_,
fxfo.curve("eur", "usd")[dt(2023, 6, 20)],
fxfo.curve("eur", "usd")[dt(2023, 3, 20)],
fxfo.rate("eurusd", dt(2023, 6, 20)),
fxo.fx_option_params.time_to_expiry(fxfo.curve("eur", "usd").nodes.initial),
)
fxo.fx_option_params.strike = result[2]
if vol_smile:
vol_ = result[1]
expected = fxo.analytic_greeks(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol_,
)["delta"]
assert abs(delta - expected) < 1e-8
@pytest.mark.parametrize(
"delta_type",
[
FXDeltaMethod.Spot,
FXDeltaMethod.SpotPremiumAdjusted,
FXDeltaMethod.Forward,
FXDeltaMethod.ForwardPremiumAdjusted,
],
)
@pytest.mark.parametrize(
"smile_type",
[
FXDeltaMethod.Spot,
FXDeltaMethod.SpotPremiumAdjusted,
FXDeltaMethod.Forward,
FXDeltaMethod.ForwardPremiumAdjusted,
],
)
@pytest.mark.parametrize("delta", [0.1, 0.25, 0.65, 0.9])
@pytest.mark.parametrize("vol_smile", [True, False])
def test_strike_and_delta_idx_multisolve_from_delta_call(
self,
fxfo,
delta_type,
smile_type,
delta,
vol_smile,
) -> None:
if delta > 0.65 and delta_type in [
FXDeltaMethod.SpotPremiumAdjusted,
FXDeltaMethod.ForwardPremiumAdjusted,
]:
pytest.skip("Premium adjusted call delta cannot be above the peak ~0.7?.")
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
strike=1.033,
notional=20e6,
delta_type=delta_type,
)
if vol_smile:
vol_ = FXDeltaVolSmile(
nodes={
0.25: 8.9,
0.5: 8.7,
0.75: 10.15,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type=smile_type,
)
else:
vol_ = 9.00
result = fxo._index_vol_and_strike_from_delta(
delta,
delta_type,
vol_,
fxfo.curve("eur", "usd")[dt(2023, 6, 20)],
fxfo.curve("eur", "usd")[dt(2023, 3, 20)],
fxfo.rate("eurusd", dt(2023, 6, 20)),
fxo.fx_option_params.time_to_expiry(fxfo.curve("eur", "usd").nodes.initial),
)
fxo.fx_option_params.strike = result[2]
if vol_smile:
vol_ = result[1]
expected = fxo.analytic_greeks(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol_,
)["delta"]
assert abs(delta - expected) < 1e-8
@pytest.mark.parametrize("delta_type", ["spot_pa", "forward_pa"])
@pytest.mark.parametrize("smile_type", ["spot", "spot_pa", "forward", "forward_pa"])
@pytest.mark.parametrize("delta", [0.9])
@pytest.mark.parametrize("vol_smile", [True, False])
def test_strike_and_delta_idx_multisolve_from_delta_call_out_of_bounds(
self,
fxfo,
delta_type,
smile_type,
delta,
vol_smile,
) -> None:
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
strike=1.033,
notional=20e6,
delta_type=delta_type,
)
if vol_smile:
vol_ = FXDeltaVolSmile(
nodes={
0.25: 8.9,
0.5: 8.7,
0.75: 10.15,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type=smile_type,
)
else:
vol_ = 9.00
with pytest.raises(ValueError, match="Newton root solver failed"):
fxo._index_vol_and_strike_from_delta(
delta,
delta_type,
vol_,
fxfo.curve("eur", "usd")[dt(2023, 6, 20)],
fxfo.curve("eur", "usd")[dt(2023, 3, 20)],
fxfo.rate("eurusd", dt(2023, 6, 20)),
fxo.fx_option_params.time_to_expiry(fxfo.curve("eur", "usd").nodes.initial),
)
@pytest.mark.parametrize("delta_type", ["forward", "spot"])
def test_analytic_gamma_fwd_diff(self, delta_type, fxfo) -> None:
# test not suitable for pa because of the assumption of a fixed premium amount
fxc = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 3, 16),
notional=20e6,
strike=1.101,
delta_type=delta_type,
)
base = fxc.analytic_greeks(
fxfo.curve("eur", "usd"),
fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=8.9,
)
f_d = fxfo.rate("eurusd", dt(2023, 6, 20))
f_t = fxfo.rate("eurusd", dt(2023, 3, 20))
fxfo.fx_rates.update({"eurusd": 1.0615001})
fxfo.update()
f_d2 = fxfo.rate("eurusd", dt(2023, 6, 20))
f_t2 = fxfo.rate("eurusd", dt(2023, 3, 20))
base_1 = fxc.analytic_greeks(
fxfo.curve("eur", "usd"),
fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=8.9,
)
denomn = (f_d2 - f_d) if "forward" in delta_type else (f_t2 - f_t)
fwd_diff = -(base["delta"] - base_1["delta"]) / denomn
assert abs(base["gamma"] - fwd_diff) < 1e-5
def test_analytic_vega(self, fxfo) -> None:
fxc = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 3, 16),
notional=20e6,
strike=1.101,
delta_type="forward",
)
result = fxc.analytic_greeks(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=8.9,
)["vega"]
assert abs(result * 20e6 / 100 - 33757.945) < 1e-2
p0 = fxc.npv(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=8.9,
)
p1 = fxc.npv(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=8.91,
)
fwd_diff = (p1 - p0) / 20e6 * 10000.0
assert abs(result - fwd_diff) < 1e-4
def test_analytic_vomma(self, fxfo) -> None:
fxc = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 3, 16),
notional=1,
strike=1.101,
delta_type="forward",
)
result = fxc.analytic_greeks(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=8.9,
)["vomma"]
p0 = fxc.npv(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=8.9,
)
p1 = fxc.npv(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=8.91,
)
p_1 = fxc.npv(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=8.89,
)
fwd_diff = (p1 - p0 - p0 + p_1) * 1e4 * 1e4
assert abs(result - fwd_diff) < 1e-6
@pytest.mark.parametrize("payment", [dt(2023, 3, 16), dt(2023, 6, 20)])
def test_vega_and_vomma_example(self, fxfo, payment) -> None:
fxc = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=payment,
notional=10e6,
strike=1.10,
delta_type="forward",
)
npv = fxc.npv(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=10.0,
)
npv2 = fxc.npv(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=10.1,
)
greeks = fxc.analytic_greeks(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=Dual(10.0, ["vol"], [100.0]),
)
taylor_vega = 10e6 * greeks["vega"] * 0.1 / 100.0
taylor_vomma = 10e6 * 0.5 * greeks["vomma"] * 0.1**2 / 10000.0
expected = npv2 - npv
assert abs(taylor_vega + taylor_vomma - expected) < 0.2
@pytest.mark.parametrize("payment", [dt(2023, 3, 16), dt(2023, 6, 20)])
@pytest.mark.parametrize("delta_type", ["spot", "forward"])
def test_delta_and_gamma_example(self, fxfo, payment, delta_type) -> None:
fxc = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=payment,
notional=10e6,
strike=1.10,
delta_type=delta_type,
)
npv = fxc.npv(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=10.0,
)
greeks = fxc.analytic_greeks(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=10.0,
)
f_d = fxfo.rate("eurusd", dt(2023, 6, 20))
fxfo.fx_rates.update({"eurusd": 1.0625})
fxfo.update()
f_d2 = fxfo.rate("eurusd", dt(2023, 6, 20))
npv2 = fxc.npv(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=10.0,
)
if delta_type == "forward":
fwd_diff = f_d2 - f_d
discount_date = fxc.fx_option_params.delivery
else:
fwd_diff = 0.001
discount_date = dt(2023, 3, 20)
taylor_delta = 10e6 * greeks["delta"] * fwd_diff
taylor_gamma = 10e6 * 0.5 * greeks["gamma"] * fwd_diff**2
expected = npv2 - npv
taylor = (taylor_delta + taylor_gamma) * fxfo.curve("usd", "usd")[discount_date]
assert abs(taylor - expected) < 0.5
@pytest.mark.parametrize("payment", [dt(2023, 6, 20), dt(2023, 3, 16)])
@pytest.mark.parametrize("delta_type", ["spot", "forward"])
def test_all_5_greeks_example(self, fxfo, payment, delta_type) -> None:
fxc = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=payment,
notional=10e6,
strike=1.10,
delta_type=delta_type,
)
npv = fxc.npv(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=10.0,
)
greeks = fxc.analytic_greeks(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=Dual(10.0, ["vol"], [100.0]),
)
f_d = fxfo.rate("eurusd", dt(2023, 6, 20))
fxfo.fx_rates.update({"eurusd": 1.0625})
fxfo.update()
f_d2 = fxfo.rate("eurusd", dt(2023, 6, 20))
if delta_type == "forward":
fwd_diff = f_d2 - f_d
discount_date = fxc.fx_option_params.delivery
else:
fwd_diff = 0.001
discount_date = dt(2023, 3, 20)
npv2 = fxc.npv(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=10.1,
)
fxc.analytic_greeks(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=Dual(10.1, ["vol"], [100.0]),
)
expected = npv2 - npv
taylor_delta = fwd_diff * greeks["delta"] * 10e6
taylor_gamma = 0.5 * fwd_diff**2 * greeks["gamma"] * 10e6
taylor_vega = 0.1 / 100.0 * greeks["vega"] * 10e6
taylor_vomma = 0.5 * 0.1**2 / 10000.0 * greeks["vomma"] * 10e6
taylor_vanna = 0.1 / 100.0 * fwd_diff * greeks["vanna"] * 10e6
taylor = (
fxfo.curve("usd", "usd")[discount_date] * (taylor_delta + taylor_gamma + taylor_vanna)
+ taylor_vomma
+ taylor_vega
)
assert abs(taylor - expected) < 5e-1
def test_kega(self, fxfo) -> None:
fxc = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
notional=10e6,
strike=1.10,
delta_type="spot_pa",
)
d_eta = _OptionModelBlack76._d_plus_min_u(1.10 / 1.065, 0.10 * 0.5, -0.5)
result = fxc._analytic_kega(1.10 / 1.065, 0.99, -0.5, 0.10, 0.50, 1.065, 1.0, 1.10, d_eta)
expected = 0.355964619118249
assert abs(result - expected) < 1e-12
def test_bad_expiries_raises(self, fxfo) -> None:
fxc = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
notional=10e6,
strike=1.10,
delta_type="forward",
)
vol_ = FXDeltaVolSmile(
nodes={
0.25: 8.9,
0.5: 8.7,
0.75: 10.15,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 18),
delta_type="forward",
)
with pytest.raises(ValueError, match="`expiry` of VolSmile and OptionPeriod do not match"):
fxc.npv(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol_,
)
@pytest.mark.parametrize("smile", [True, False])
def test_call_cashflows(self, fxfo, smile) -> None:
vol_ = (
8.9
if not smile
else FXDeltaVolSmile(
nodes={0.5: 8.9},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="forward",
)
)
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
strike=1.101,
notional=20e6,
)
result = fxo.cashflows(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol_,
base="eur",
)
assert isinstance(result, dict)
expected = 140451.5273
assert (result[defaults.headers["cashflow"]] - expected) < 1e-3
assert result[defaults.headers["currency"]] == "USD"
assert result[defaults.headers["type"]] == "FXCallPeriod"
@pytest.mark.parametrize("delta_type", ["spot", "forward"])
def test_sticky_delta_delta_vol_smile_against_ad(self, fxfo, delta_type) -> None:
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
strike=1.101,
notional=20e6,
delta_type=delta_type,
)
vol_ = FXDeltaVolSmile(
nodes={
0.25: 8.9,
0.5: 8.7,
0.75: 10.15,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="spot",
)
gks = fxo.analytic_greeks(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol_,
)
v_deli = fxfo.curve("usd", "usd")[fxo.fx_option_params.delivery]
v_spot = fxfo.curve("usd", "usd")[dt(2023, 3, 20)]
# this is the actual derivative of vol with respect to either spot or forward via AD
if "spot" in delta_type:
z_v_0 = v_deli / v_spot
expected = gradient(gks["__vol"], ["fx_eurusd"])[0]
else:
z_v_0 = 1.0
w_deli = fxfo.curve("eur", "usd")[fxo.fx_option_params.delivery]
w_spot = fxfo.curve("eur", "usd")[dt(2023, 3, 20)]
expected = (
gradient(gks["__vol"], ["fx_eurusd"])[0] * v_deli * w_spot / (v_spot * w_deli)
)
# this is the reverse engineered part of the sticky delta formula to get dsigma_dfspot
result = (gks["delta_sticky"] - gks["delta"]) * v_deli / (z_v_0 * gks["vega"])
# delta is
assert abs(result - expected) < 1e-3
@pytest.mark.parametrize(
("smile", "expected"),
[
(
FXSabrSmile(
nodes={"alpha": 0.05, "beta": 1.0, "rho": 0.01, "nu": 0.03},
eval_date=dt(2024, 5, 7),
expiry=dt(2024, 5, 28),
id="smile",
pair="eurusd",
),
0.700594,
),
(
FXSabrSurface(
expiries=[dt(2024, 5, 23), dt(2024, 6, 4)],
node_values=[[0.05, 1.0, 0.01, 0.03], [0.052, 1.0, 0.03, 0.05]],
eval_date=dt(2024, 5, 7),
id="smile",
pair="eurusd",
),
0.701191,
),
(
FXDeltaVolSmile(
nodes={0.25: 10, 0.5: 9, 0.75: 11},
eval_date=dt(2024, 5, 7),
expiry=dt(2024, 5, 28),
delta_type="forward",
id="smile",
),
0.704091,
),
],
)
def test_sticky_delta_calculation(self, smile, expected) -> None:
from rateslib import IRS, FXBrokerFly, FXCall, FXRiskReversal, FXStraddle, FXSwap, Solver
usd = Curve({dt(2024, 5, 7): 1.0, dt(2024, 5, 30): 1.0}, calendar="nyc", id="usd")
eur = Curve({dt(2024, 5, 7): 1.0, dt(2024, 5, 30): 1.0}, calendar="tgt", id="eur")
eurusd = Curve({dt(2024, 5, 7): 1.0, dt(2024, 5, 30): 1.0}, id="eurusd")
# Create an FX Forward market with spot FX rate data
spot = dt(2024, 5, 9)
fxr = FXRates({"eurusd": 1.0760}, settlement=spot)
fxf = FXForwards(
fx_rates=fxr,
fx_curves={"eureur": eur, "usdusd": usd, "eurusd": eurusd},
)
# Solve the Curves to market
pre_solver = Solver(
curves=[eur, eurusd, usd],
instruments=[
IRS(spot, "3W", spec="eur_irs", curves="eur"),
IRS(spot, "3W", spec="usd_irs", curves="usd"),
FXSwap(spot, "3W", pair="eurusd", curves=[None, "eurusd", None, "usd"]),
],
s=[3.90, 5.32, 8.85],
fx=fxf,
id="fxf",
)
option_args = dict(
pair="eurusd",
expiry=dt(2024, 5, 28),
calendar="tgt|fed",
delta_type="spot",
curves=["eurusd", "usd"],
vol="smile",
)
# Calibrate the Smile to market option data
solver = Solver(
pre_solvers=[pre_solver],
curves=[smile],
instruments=[
FXStraddle(strike="atm_delta", **option_args),
FXRiskReversal(strike=("-25d", "25d"), **option_args),
FXRiskReversal(strike=("-10d", "10d"), **option_args),
FXBrokerFly(strike=(("-25d", "25d"), "atm_delta"), **option_args),
FXBrokerFly(strike=(("-10d", "10d"), "atm_delta"), **option_args),
],
s=[5.493, -0.157, -0.289, 0.071, 0.238],
fx=fxf,
id="smile",
)
fxc = FXCall(**option_args, notional=100e6, strike=1.07, premium=982144.59)
result = fxc.analytic_greeks(solver=solver)["delta_sticky"]
assert abs(result - expected) < 1e-6
def test_sticky_delta_direct_from_ad(self, fxfo) -> None:
# this test will use AD to directly measure dP_dfs and compare that with the
# analytical derivation of sticky delta.
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
strike=1.101,
notional=20e6,
delta_type="spot",
)
vol_ = FXDeltaVolSmile(
nodes={
0.25: 8.9,
0.5: 8.7,
0.75: 10.15,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="spot",
)
gks = fxo.analytic_greeks(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol_,
)
P = 20e6 * gks["__bs76"]
dP_dfs = gradient(P, ["fx_eurusd"])[0]
v_spot = fxfo.curve("usd", "usd")[dt(2023, 3, 20)]
result = dP_dfs / (20e6 * v_spot)
expected = gks["delta_sticky"]
assert abs(result - expected) < 1e-8
def test_no_strike_raises(self):
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
strike=NoInput(0),
notional=20e6,
delta_type="spot",
)
with pytest.raises(ValueError, match=err.VE_NEEDS_STRIKE):
fxo.try_unindexed_reference_cashflow().unwrap()
def test_try_rate_with_metric(self, fxfo):
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
strike=1.1,
notional=20e6,
delta_type="spot",
)
vol_ = FXDeltaVolSmile(
nodes={
0.25: 8.9,
0.5: 8.7,
0.75: 10.15,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="spot",
)
result1 = fxo.try_rate(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol_,
metric="Pips",
)
result2 = fxo.try_rate(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx_vol=vol_,
fx=fxfo,
metric="Percent",
)
result3 = fxo.try_rate(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx_vol=vol_,
fx=fxfo,
)
assert result1.unwrap() != result2.unwrap()
assert result1.unwrap() == result3.unwrap() # default is Pips
def test_try_rate_errs(self, fxfo):
fxo = FXCallPeriod(
pair="eurusd",
expiry=dt(2023, 6, 16),
delivery=dt(2023, 6, 20),
# payment=dt(2023, 6, 20),
strike=NoInput(0),
notional=20e6,
delta_type="spot",
)
vol_ = FXDeltaVolSmile(
nodes={
0.25: 8.9,
0.5: 8.7,
0.75: 10.15,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
delta_type="spot",
)
assert fxo.try_rate(
rate_curve=fxfo.curve("eur", "usd"),
disc_curve=fxfo.curve("usd", "usd"),
fx=fxfo,
fx_vol=vol_,
metric="Pips",
).is_err
@pytest.mark.skip(reason="non-deliverability of FXOption period not implemented in v2.5")
def test_non_deliverable_fx_option_third_currency_raises(self, fxfo):
# this is an NOKSEK FX option with notional in NOK, normal value in SEK but non-deliverable
# requiring conversion to USD
with pytest.raises(ValueError, match=err.VE_MISMATCHED_FX_PAIR_ND_PAIR[:15]):
FXCallPeriod(
delivery=dt(2000, 3, 1),
pair="NOKSEK",
nd_pair="SEKUSD",
strike=1.0,
expiry=dt(2000, 2, 28),
)
# assert fxo.settlement_params.notional_currency == "nok"
# assert fxo.settlement_params.currency == "usd"
# assert fxo.non_deliverable_params.reference_currency == "sek"
#
# fxo = FXCallPeriod(
# delivery=dt(2000, 3, 1),
# pair="NOKSEK",
# strike=1.0,
# expiry=dt(2000, 2, 28),
# )
# assert fxo.settlement_params.notional_currency == "nok"
# assert fxo.settlement_params.currency == "sek"
# assert fxo.non_deliverable_params is None
@pytest.mark.skip(reason="non-deliverability of FXOption period not implemented in v2.5")
@pytest.mark.parametrize("ndpair", ["usdbrl", "brlusd"])
def test_non_deliverable_fx_option_npv_vol_given(self, ndpair):
# this is an USDBRL FX option period non-deliverable into USD.
fxf = FXForwards(
fx_rates=FXRates({"usdbrl": 5.0}, settlement=dt(2000, 1, 1)),
fx_curves={
"usdusd": Curve({dt(2000, 1, 1): 1.0, dt(2000, 6, 1): 0.98}),
"brlusd": Curve({dt(2000, 1, 1): 1.0, dt(2000, 6, 1): 0.983}),
"brlbrl": Curve({dt(2000, 1, 1): 1.0, dt(2000, 6, 1): 0.984}),
},
)
fxo = FXCallPeriod(
delivery=dt(2000, 3, 1),
pair="USDBRL",
strike=1.0,
expiry=dt(2000, 2, 28),
)
fxond = FXCallPeriod(
delivery=dt(2000, 3, 1),
pair="USDBRL",
nd_pair=ndpair,
strike=1.0,
expiry=dt(2000, 2, 28),
)
npv = fxo.local_npv(fx=fxf, fx_vol=10.0, disc_curve=fxf.curve("brl", "usd"))
npv_nd = fxond.local_npv(fx=fxf, fx_vol=10.0, disc_curve=fxf.curve("usd", "usd"))
# local NPV should be expressed in USD for ND type
result = npv / 5.0 - npv_nd
assert abs(result) < 1e-9
@pytest.mark.skip(reason="non-deliverability of FXOption period not implemented in v2.5")
@pytest.mark.parametrize(("ndpair", "fxfix"), [("usdbrl", 5.25), ("brlusd", 1 / 5.25)])
def test_non_deliverable_fx_option_npv_vol_given_fx_fixing(self, ndpair, fxfix):
# this is an USDBRL FX option period non-deliverable into USD.
fxf = FXForwards(
fx_rates=FXRates({"usdbrl": 5.0}, settlement=dt(2000, 1, 1)),
fx_curves={
"usdusd": Curve({dt(2000, 1, 1): 1.0, dt(2000, 6, 1): 0.98}),
"brlusd": Curve({dt(2000, 1, 1): 1.0, dt(2000, 6, 1): 0.983}),
"brlbrl": Curve({dt(2000, 1, 1): 1.0, dt(2000, 6, 1): 0.984}),
},
)
fxv = FXDeltaVolSmile(
nodes={0.4: 10.0, 0.6: 11.0},
eval_date=dt(2000, 1, 1),
expiry=dt(2000, 2, 28),
delta_type="spot",
)
fxo = FXCallPeriod(
delivery=dt(2000, 3, 1),
pair="USDBRL",
strike=1.0,
expiry=dt(2000, 2, 28),
)
fxond = FXCallPeriod(
delivery=dt(2000, 3, 1),
pair="USDBRL",
nd_pair=ndpair,
fx_fixings=fxfix,
strike=1.0,
expiry=dt(2000, 2, 28),
)
npv = fxo.local_npv(
fx=fxf,
fx_vol=fxv,
rate_curve=fxf.curve("usd", "usd"),
disc_curve=fxf.curve("brl", "usd"),
)
npv_nd = fxond.local_npv(
fx=fxf,
fx_vol=fxv,
rate_curve=fxf.curve("usd", "usd"),
disc_curve=fxf.curve("usd", "usd"),
)
# local NPV should be expressed in USD for ND type
result = (
npv_nd
* 5.25
/ fxf.curve("usd", "usd")[dt(2000, 3, 1)]
* fxf.curve("brl", "usd")[dt(2000, 3, 1)]
- npv
)
# these should be different beucase of the fix: compare with test above
assert abs(result) < 1e-8
def test_cashflow_no_pricing_objects(self):
# this is an NOKSEK FX option with notional in NOK, normal value in SEK but non-deliverable
# requiring conversion to USD
fxo = FXCallPeriod(
delivery=dt(2000, 3, 1),
pair="NOKSEK",
strike=1.0,
expiry=dt(2000, 2, 28),
)
cf = fxo.cashflows()
assert isinstance(cf, dict)
class TestIROption:
@pytest.mark.parametrize("today", [dt(2026, 1, 3), dt(2026, 4, 15)])
@pytest.mark.parametrize(
("strike", "fixing", "klass"), [(2.0, 2.5, IRSCallPeriod), (2.0, 1.5, IRSPutPeriod)]
)
def test_cashflow_known_exercise(self, today, strike, fixing, klass):
# if we know that the exercise will occur (from the fixing_value) value the cashflow
curve = Curve({today: 1.0, dt(2028, 4, 15): 0.95}, calendar="nyc")
ir_period = klass(
expiry=dt(2027, 2, 3),
irs_series="usd_irs",
tenor="6m",
strike=strike,
notional=100e6,
option_fixings=fixing,
)
immediate_npv = ir_period.ir_option_params.option_fixing.irs.npv(curves=curve)
forward_npv = immediate_npv / curve[dt(2027, 2, 5)] * 100.0
result = ir_period.unindexed_reference_cashflow(rate_curve=curve, index_curve=curve)
assert abs(abs(result) - abs(forward_npv)) < 1e-8
def test_cashflow_option_value(self):
# if we know that the exercise will occur (from the fixing_value) value the cashflow
curve = Curve(
nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.941024343401225}, calendar="tgt"
)
ir_period = IRSCallPeriod(
expiry=dt(2027, 2, 16),
irs_series="usd_irs",
tenor="6m",
strike=3.020383,
notional=100e6,
)
cashflow = ir_period.unindexed_reference_cashflow(
rate_curve=curve, ir_vol=25.16, index_curve=curve
)
expected = cashflow * curve[dt(2027, 2, 18)]
result = ir_period.npv(rate_curve=curve, ir_vol=25.16, index_curve=curve)
assert abs(result - expected) < 1e-8
assert abs(result - 145000) < 500.0
def test_option_npv_different_csa(self):
# test that a forward NPV alignbed with cashflow does not change, but an NPV does.
curve = Curve(
nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.941024343401225}, calendar="tgt"
)
alt_disc_curve = Curve(nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.91}, calendar="tgt")
ir_period = IRSCallPeriod(
expiry=dt(2027, 2, 16),
irs_series="usd_irs",
tenor="6m",
strike=3.000,
notional=100e6,
)
fwd_result = ir_period.npv(
rate_curve=curve,
ir_vol=25.16,
index_curve=curve,
disc_curve=alt_disc_curve,
forward=dt(2027, 2, 18),
)
imm_exp = fwd_result * alt_disc_curve[dt(2027, 2, 18)]
imm_res = ir_period.npv(
rate_curve=curve, ir_vol=25.16, index_curve=curve, disc_curve=alt_disc_curve
)
assert abs(imm_exp - imm_res) < 1e-6
fwd_result2 = ir_period.npv(
rate_curve=curve, ir_vol=25.16, index_curve=curve, forward=dt(2027, 2, 18)
)
imm_exp2 = fwd_result * curve[dt(2027, 2, 18)]
imm_res2 = ir_period.npv(rate_curve=curve, ir_vol=25.16, index_curve=curve)
assert abs(imm_exp2 - imm_res2) < 1e-6
assert abs(fwd_result - fwd_result2) < 1e-6
assert abs(imm_res - imm_res2) > 2000.0
@pytest.mark.parametrize(
("metric", "expected"),
[
("NormalVol", 75.792872),
("Premium", 149725.796514),
("PercentNotional", 0.149725),
("black_vol_shift_0", 25.16),
("Black_vol_shift_100", 18.880156),
("Black_vol_shift_200", 15.111396),
("Black_vol_shift_300", 12.597702),
("Black_vol_shift_117", 18.112063),
],
)
def test_option_rate(self, metric, expected):
# if we know that the exercise will occur (from the fixing_value) value the cashflow
curve = Curve(
nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.941024343401225}, calendar="nyc"
)
ir_period = IRSCallPeriod(
expiry=dt(2027, 2, 16),
irs_series="usd_irs",
tenor="6m",
strike=3.020383,
notional=100e6,
)
result = ir_period.rate(
rate_curve=curve,
disc_curve=curve,
index_curve=curve,
ir_vol=25.16,
metric=metric,
)
assert abs(result - expected) < 1e-5
@pytest.mark.parametrize(
("smile", "expected"),
[
(
IRSabrSmile(
eval_date=dt(2026, 2, 16),
expiry=dt(2027, 2, 16),
tenor="6m",
beta=0.5,
nodes={"alpha": 0.4, "rho": -0.05, "nu": 0.4},
irs_series="usd_irs",
),
70.27947168577464,
),
(
IRSabrSmile(
eval_date=dt(2026, 2, 16),
expiry=dt(2027, 2, 16),
tenor="6m",
beta=0.5,
nodes={"alpha": 0.4, "rho": -0.05, "nu": 0.4},
irs_series="usd_irs",
shift=200.0,
),
90.68148269529259,
),
(
IRSabrSmile(
eval_date=dt(2026, 2, 16),
expiry=dt(2027, 2, 16),
tenor="6m",
beta=0.5,
nodes={"alpha": 0.3, "rho": -0.05, "nu": 0.4},
irs_series="usd_irs",
shift=50.0,
),
56.96593721292377,
),
],
)
def test_ir_option_rate_from_sabr(self, smile, expected):
curve = Curve(
nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.941024343401225}, calendar="nyc"
)
ir_period = IRSCallPeriod(
expiry=dt(2027, 2, 16),
irs_series="usd_irs",
tenor="6m",
strike=3.020383,
notional=100e6,
)
result = ir_period.rate(
rate_curve=curve,
disc_curve=curve,
index_curve=curve,
ir_vol=smile,
metric="normal_vol",
)
assert abs(result - expected) < 1e-5
def test_cashflows(self):
curve = Curve(
nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.941024343401225}, calendar="nyc"
)
ir_period = IRSCallPeriod(
expiry=dt(2027, 2, 16),
irs_series="usd_irs",
tenor="6m",
strike=3.020383,
notional=100e6,
)
result = ir_period.cashflows(
rate_curve=curve,
disc_curve=curve,
index_curve=curve,
ir_vol=25.16,
)
expected = {
"Base Ccy": "USD",
"Cashflow": 149725.7965143448,
"Ccy": "USD",
"Collateral": None,
"DF": 0.969902553602701,
"FX Rate": 1.0,
"NPV": 145219.43237946142,
"NPV Ccy": 145219.43237946142,
"Notional": 100000000.0,
"Payment": dt(2027, 2, 18, 0, 0),
"Type": "IRSCallPeriod",
}
assert result == expected
def test_analytic_greeks(self):
from rateslib.instruments import IRS
from rateslib.solver import Solver
curve = Curve(
nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.941024343401225}, calendar="nyc"
)
curve_solver = Solver(
curves=[curve],
instruments=[IRS(dt(2026, 2, 16), "1y", spec="usd_irs", curves=curve)],
s=[3.0],
)
ir_period = IRSCallPeriod(
expiry=dt(2027, 2, 16),
irs_series="usd_irs",
tenor="6m",
strike=3.020383,
notional=100e6,
)
result = ir_period.analytic_greeks(
rate_curve=curve,
disc_curve=curve,
index_curve=curve,
ir_vol=25.16,
)
expected = {
"__bs76": 0.2792463326582493,
"__forward": 2.9774664970728626,
"__sqrt_t": 1.0,
"__strike": 3.020383,
"__vol": 0.2516,
"__notional": 100e6,
"delta": 0.5274735620216011,
"gamma": 0.5312770889914765,
"vanna": 0.28897329599293436,
"vega": 1.185019484592725,
"delta_usd": 2534.939100519541,
"gamma_usd": 25.532181384278392,
"vega_usd": 5694.981592743021,
"vanna_usd": 13.88751512418925,
"delta_sticky": 0.5274735620216011,
}
# forward rate is increased by 1bp. Check the gamma and vanna values.
curve_solver.s = [3.01]
curve_solver.iterate()
result2 = ir_period.analytic_greeks(
rate_curve=curve,
disc_curve=curve,
index_curve=curve,
ir_vol=25.16,
)
assert abs(result2["delta_usd"] - result["delta_usd"] - result["gamma_usd"]) < 1.0
assert abs(result2["vega_usd"] - result["vega_usd"] - result["vanna_usd"]) < 2.0
assert all(abs(v - result[k]) < 1e-5 for k, v in expected.items())
def test_analytic_greeks_bachelier(self):
# this test compares the analytic_greeks results with a Solver framework (i,e, independent
# calculations) configured about a normal_vol metric.
from rateslib.instruments import IRS, IRVolValue
from rateslib.solver import Solver
from rateslib.volatility import IRSplineCube
curve = Curve(
nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.941024343401225},
calendar="nyc",
id="r",
)
curve_solver = Solver(
curves=[curve],
instruments=[IRS(dt(2026, 2, 16), "1y", spec="usd_irs", curves=curve)],
s=[3.0],
)
ir_period = IRSCallPeriod(
expiry=dt(2027, 2, 16),
irs_series="usd_irs",
tenor="6m",
strike=3.020383,
notional=100e6,
)
cube = IRSplineCube(
eval_date=dt(2026, 2, 16),
expiries=[dt(2027, 2, 16)],
tenors=["6m"],
strikes=[0.0],
parameters=75.0,
irs_series="usd_irs",
id="v",
)
ir_vol_solver = Solver(
pre_solvers=[curve_solver],
surfaces=[cube],
instruments=[
IRVolValue(
expiry=dt(2027, 2, 16),
tenor="6m",
strike=3.0,
irs_series="usd_irs",
metric="normal_vol",
curves=curve,
vol=cube,
)
],
s=[75.0],
instrument_labels=["vol"],
)
result = ir_period.analytic_greeks(
rate_curve=curve,
disc_curve=curve,
index_curve=curve,
ir_vol=cube,
)
expected = {
"__bachelier": 0.27823818012037993,
"__forward": 2.9774664970728626,
"__sqrt_t": 1.0,
"__strike": 3.020383,
"__vol": 0.75,
"__notional": 100e6,
"delta": 0.47718417514818345,
"gamma": 0.5310528998576186,
"vanna": 0.030387911108272263,
"vega": 0.398289674893214,
"vomma": 0.001738857168037003,
"delta_usd": 2293.2577304846204,
"gamma_usd": 25.521407274593162,
"vega_usd": 1914.1055455944868,
"vanna_usd": 1.4603860666729847,
"vomma_usd": 0.08356621720682876,
"delta_sticky": 0.47718417514818345,
}
# first test that the calculations are generally static, i.e. quantities are obtainable
assert all(abs(v - result[k]) < 1e-5 for k, v in expected.items())
p = ir_period.npv(
rate_curve=curve,
index_curve=curve,
disc_curve=curve,
ir_vol=cube,
local=True,
)
_ = ir_vol_solver.delta(p)
exp_delta = _.iloc[0, 0]
exp_vega = _.iloc[1, 0]
for res, exp in zip(["delta_usd", "vega_usd"], [exp_delta, exp_vega]):
percent_diff = abs(result[res] - exp) / abs(exp)
assert percent_diff < 0.025
ir_vol_solver._set_ad_order(2)
p2 = ir_period.npv(
rate_curve=curve,
index_curve=curve,
disc_curve=curve,
ir_vol=cube,
local=True,
)
_ = ir_vol_solver.gamma(p2)
ir_vol_solver._set_ad_order(1)
exp_gamma = _.iloc[0, 0]
exp_vomma = _.iloc[1, 1]
exp_vanna = _.iloc[1, 0]
for res, exp in zip(
["gamma_usd", "vanna_usd", "vomma_usd"], [exp_gamma, exp_vanna, exp_vomma]
):
percent_diff2 = abs(result[res] - exp) / abs(exp)
assert percent_diff2 < 0.07 or abs(result[res] - exp) < 0.5
# test finite difference
# forward rate is increased by 1bp. Check the gamma and vanna values.
curve_solver.s = [3.01]
curve_solver.iterate()
ir_vol_solver.iterate()
result2 = ir_period.analytic_greeks(
rate_curve=curve,
disc_curve=curve,
index_curve=curve,
ir_vol=cube,
)
assert abs(result2["delta_usd"] - result["delta_usd"] - result["gamma_usd"]) < 1e-0
assert abs(result2["vega_usd"] - result["vega_usd"] - result["vanna_usd"]) < 5e-1
curve_solver.s = [3.00]
ir_vol_solver.s = [76.0]
curve_solver.iterate()
ir_vol_solver.iterate()
result3 = ir_period.analytic_greeks(
rate_curve=curve,
disc_curve=curve,
index_curve=curve,
ir_vol=cube,
)
assert abs(result3["delta_usd"] - result["delta_usd"] - result["vanna_usd"]) < 2e-2
assert abs(result3["vega_usd"] - result["vega_usd"] - result["vomma_usd"]) < 2e-3
@pytest.mark.parametrize(
("ir_vol", "expected"),
[
(
IRSplineCube(
eval_date=dt(2000, 1, 1),
expiries=["1y"],
tenors=["1y"],
strikes=[-25, 0.0, 25.0],
irs_series="usd_irs",
parameters=[[[33.5, 32.5, 34.1]]],
k=2,
pricing_model="bachelier",
),
0.5657673654151706,
),
(
IRSplineCube(
eval_date=dt(2000, 1, 1),
expiries=["1y"],
tenors=["1y"],
strikes=[-25, 0.0, 25.0],
irs_series="usd_irs",
parameters=[[[33.5, 32.5, 34.1]]],
k=2,
pricing_model="black76",
),
0.6338221418100394,
),
(
IRSabrCube(
eval_date=dt(2000, 1, 1),
expiries=["1y"],
tenors=["1y"],
beta=0.5,
irs_series="usd_irs",
alpha=0.5,
rho=-0.05,
nu=0.65,
),
0.5538666266910927,
),
],
)
def test_analytic_sticky_delta(self, ir_vol, expected):
ir_period = IRSCallPeriod(
expiry=dt(2001, 1, 1),
irs_series="usd_irs",
tenor="6m",
strike=3.45,
notional=100e6,
)
curve = Curve({dt(2000, 1, 1): 1.0, dt(2003, 1, 1): 0.9})
result = ir_period.analytic_greeks(
disc_curve=curve, rate_curve=curve, index_curve=curve, ir_vol=ir_vol
)
assert abs(result["delta"] - result["delta_sticky"]) > 0.01
assert abs(result["delta_sticky"] - expected) < 1e-5
def test_repr(self):
ir_period = IRSCallPeriod(
expiry=dt(2027, 2, 16),
irs_series="usd_irs",
tenor="6m",
strike=3.020383,
notional=100e6,
)
assert ir_period.__repr__() == f""
def test_raise_on_no_strike(self):
ir_period = IRSCallPeriod(
expiry=dt(2027, 2, 16),
irs_series="usd_irs",
tenor="6m",
notional=100e6,
)
with pytest.raises(
ValueError,
match="An FXOptionPeriod cashflow cannot be determined without setting a `strike`.",
):
ir_period.unindexed_cashflow()
def test_cash_collateralized_settlement_with_fixing(self):
ir_period = IRSCallPeriod(
expiry=dt(2027, 2, 16),
irs_series="usd_irs",
tenor="6m",
notional=100e6,
strike=3.0,
option_fixings=3.05,
settlement_method="CashCollateralized",
)
curve = Curve({dt(2027, 2, 16): 1.0, dt(2028, 2, 16): 0.98})
result = ir_period.unindexed_cashflow(index_curve=curve)
expected = 24885.54 # approx 5 * 0.993 * 5000
assert abs(result - expected) < 1e-2
def test_try_rate(self):
# if we know that the exercise will occur (from the fixing_value) value the cashflow
curve = Curve(
nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.941024343401225}, calendar="nyc"
)
ir_period = IRSCallPeriod(
expiry=dt(2027, 2, 16),
irs_series="usd_irs",
tenor="6m",
strike=3.020383,
notional=100e6,
)
result = ir_period.try_rate(
rate_curve=curve,
disc_curve=curve,
index_curve=curve,
ir_vol=25.16,
metric="normal_vol",
)
assert isinstance(result, Ok)
result = ir_period.try_rate(
rate_curve=NoInput(0),
disc_curve=NoInput(0),
index_curve=NoInput(0),
metric="normal_vol",
)
assert isinstance(result, Err)
def test_rate_bachelier_metric(self):
curve = Curve(
nodes={dt(2026, 2, 16): 1.0, dt(2028, 2, 16): 0.941024343401225}, calendar="nyc"
)
ir_period = IRSCallPeriod(
expiry=dt(2027, 2, 16),
irs_series="usd_irs",
tenor="6m",
strike=3.020383,
notional=100e6,
metric="normal_vol",
)
smile = IRSplineSmile(
nodes={0: 50.0},
eval_date=dt(2026, 2, 16),
expiry=dt(2027, 2, 16),
tenor="6m",
irs_series="usd_irs",
)
smile2 = IRSabrSmile(
nodes={"alpha": 0.5, "rho": 0.01, "nu": 0.03},
beta=0.5,
eval_date=dt(2026, 2, 16),
expiry=dt(2027, 2, 16),
tenor="6m",
irs_series="usd_irs",
)
result = ir_period.try_rate(
rate_curve=curve,
disc_curve=curve,
index_curve=curve,
ir_vol=smile,
)
expected = 50.0
assert abs(result.unwrap() - expected) < 1e-2
result2 = ir_period.try_rate(
rate_curve=curve,
disc_curve=curve,
index_curve=curve,
ir_vol=smile2,
)
expected = 86.6790263475833
assert abs(result2.unwrap() - expected) < 1e-5
result3 = ir_period.try_rate(
rate_curve=curve,
disc_curve=curve,
index_curve=curve,
ir_vol=smile,
metric="black_vol_shift_50",
)
expected = 14.2149591308255
assert abs(result3.unwrap() - expected) < 1e-5
================================================
FILE: python/tests/periods/test_static_npv.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
import pytest
from rateslib.curves import Curve
from rateslib.periods import Cashflow
class TestStaticNPV:
@pytest.mark.parametrize(
("settlement", "forward", "expected"),
[
(dt(2000, 1, 1), dt(2000, 1, 1), 80.0),
(dt(2000, 1, 1), dt(2000, 1, 6), 100.0 * 0.8 / 0.75),
(dt(2000, 1, 2), dt(2000, 1, 5), 100.0),
(dt(2000, 1, 4), dt(2000, 1, 5), 0.0),
],
)
def test_settlement_forward(self, settlement, forward, expected):
# test the example in the book
curve = Curve(
nodes={
dt(2000, 1, 1): 1.0,
dt(2000, 1, 2): 0.95,
dt(2000, 1, 3): 0.90,
dt(2000, 1, 4): 0.85,
dt(2000, 1, 5): 0.80,
dt(2000, 1, 6): 0.75,
}
)
cf = Cashflow(
currency="usd",
notional=-100.0,
payment=dt(2000, 1, 5),
ex_dividend=dt(2000, 1, 3),
)
result = cf.npv(disc_curve=curve, settlement=settlement, forward=forward)
assert abs(result - expected) < 1e-7
================================================
FILE: python/tests/scheduling/test_calendars.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
import pytest
from rateslib import calendars, defaults, fixings
from rateslib.curves import Curve
from rateslib.default import NoInput
from rateslib.instruments import IRS
from rateslib.scheduling import (
Adjuster,
Cal,
Convention,
Frequency,
RollDay,
UnionCal,
add_tenor,
dcf,
get_calendar,
get_imm,
next_imm,
)
from rateslib.scheduling.calendars import (
_adjust_date,
_get_years_and_months,
_is_day_type_tenor,
)
from rateslib.scheduling.frequency import _get_frequency, _get_fx_expiry_and_delivery_and_payment
@pytest.fixture
def cal_():
return Cal([dt(_, 1, 3) for _ in range(1970, 2200)], [5, 6])
@pytest.mark.parametrize(
("date", "expected"),
[
(dt(2022, 1, 1), True), # sat
(dt(2022, 1, 2), True), # sun
(dt(2022, 1, 3), True), # mon new year hol
(dt(2022, 1, 4), False), # tues
(dt(2022, 1, 5), False), # wed
],
)
def test_is_non_bus_day(date, expected, cal_) -> None:
result = cal_.is_non_bus_day(date)
assert result == expected
def test_is_non_bus_day_raises() -> None:
obj = "not a cal object"
with pytest.raises(AttributeError):
obj._is_non_bus_day(dt(2022, 1, 1))
@pytest.mark.parametrize(
"date",
[
dt(2021, 12, 29),
dt(2021, 12, 30),
dt(2021, 12, 31),
dt(2021, 1, 1),
dt(2021, 1, 2),
dt(2021, 1, 3),
dt(2021, 1, 4),
dt(2021, 1, 5),
],
)
def test_cal_no_hols(date) -> None:
cal_no_hols = Cal([], [])
assert not cal_no_hols.is_non_bus_day(date)
def test_named_cal() -> None:
ldn_cal = get_calendar("ldn")
assert ldn_cal.is_non_bus_day(dt(2022, 1, 1))
assert ldn_cal.is_bus_day(dt(2022, 1, 5))
def test_multiple_named_cal() -> None:
ldn_cal = get_calendar("ldn")
stk_cal = get_calendar("stk")
assert ldn_cal.is_non_bus_day(dt(2023, 1, 2))
assert stk_cal.is_bus_day(dt(2023, 1, 2))
assert ldn_cal.is_bus_day(dt(2023, 1, 6))
assert stk_cal.is_non_bus_day(dt(2023, 1, 6))
merged_cal = get_calendar("LDN,stk")
assert merged_cal.is_non_bus_day(dt(2023, 1, 2))
assert merged_cal.is_non_bus_day(dt(2023, 1, 6))
def test_add_tenor_raises() -> None:
# this raise is superfluous by the design principles of private methods
with pytest.raises(ValueError):
add_tenor(dt(2022, 1, 1), "1X", "mf", None)
@pytest.mark.parametrize(
("tenor", "expected"),
[
("1M", dt(2022, 1, 31)),
("2m", dt(2022, 2, 28)),
("6M", dt(2022, 6, 30)),
("1d", dt(2022, 1, 1)),
("32d", dt(2022, 2, 1)),
("1y", dt(2022, 12, 31)),
("0.5y", dt(2022, 6, 30)),
],
)
def test_add_tenor(tenor, expected) -> None:
result = add_tenor(dt(2021, 12, 31), tenor, "NONE", NoInput(0))
assert result == expected
@pytest.mark.parametrize(
("tenor", "expected", "roll"),
[
("-1M", dt(2022, 1, 31), "eom"),
("-1M", dt(2022, 1, 28), NoInput(0)),
("-2m", dt(2021, 12, 31), 31),
("-2m", dt(2021, 12, 28), NoInput(0)),
("-1Y", dt(2021, 2, 28), NoInput(0)),
("-1d", dt(2022, 2, 27), NoInput(0)),
("-2y", dt(2020, 2, 29), "eom"),
("-2y", dt(2020, 2, 28), NoInput(0)),
],
)
def test_add_negative_tenor(tenor, expected, roll) -> None:
result = add_tenor(dt(2022, 2, 28), tenor, "NONE", NoInput(0), roll)
assert result == expected
@pytest.mark.parametrize(
("date", "tenor", "mod", "roll", "cal", "expected"),
[
(dt(1990, 9, 28), "-6m", "NONE", 31, NoInput(0), dt(1990, 3, 31)),
(dt(1990, 9, 28), "-6m", "NONE", 29, NoInput(0), dt(1990, 3, 29)),
(dt(1990, 5, 29), "3m", "NONE", NoInput(0), NoInput(0), dt(1990, 8, 29)),
(dt(1990, 5, 29), "3m", "NONE", 31, NoInput(0), dt(1990, 8, 31)),
(dt(1990, 3, 31), "6m", "MF", 31, "nyc", dt(1990, 9, 28)),
(dt(2023, 4, 21), "-3m", "P", 23, "bus", dt(2023, 1, 23)),
(dt(2023, 6, 23), "-3m", "P", 25, "bus", dt(2023, 3, 24)),
],
)
def test_add_tenor_special_cases(date, tenor, mod, roll, cal, expected) -> None:
end = add_tenor(date, tenor, mod, cal, roll)
assert end == expected
@pytest.mark.parametrize(
("date", "modifier", "expected"),
[
(dt(2022, 1, 3), "NONE", dt(2022, 1, 3)),
(dt(2022, 1, 3), "F", dt(2022, 1, 4)),
(dt(2022, 1, 3), "MF", dt(2022, 1, 4)),
(dt(2022, 1, 3), "P", dt(2021, 12, 31)),
(dt(2022, 1, 3), "MP", dt(2022, 1, 4)),
(dt(2022, 7, 30), "NONE", dt(2022, 7, 30)),
(dt(2022, 7, 30), "f", dt(2022, 8, 1)),
(dt(2022, 7, 30), "mf", dt(2022, 7, 29)),
(dt(2022, 7, 30), "p", dt(2022, 7, 29)),
(dt(2022, 7, 30), "mp", dt(2022, 7, 29)),
],
)
def test_adjust_date(date, modifier, cal_, expected) -> None:
result = _adjust_date(date, modifier, cal_)
assert result == expected
def test_adjust_date_cal() -> None:
result = _adjust_date(dt(2022, 10, 1), "F", NoInput(0))
assert result == dt(2022, 10, 1)
def test_adjust_date_raises() -> None:
with pytest.raises(KeyError):
_adjust_date(dt(2000, 1, 1), "BAD_STRING", NoInput(0))
@pytest.mark.parametrize(
("modifier", "expected"),
[
("None", dt(2022, 1, 3)),
("F", dt(2022, 1, 4)),
("MF", dt(2022, 1, 4)),
("P", dt(2021, 12, 31)),
("MP", dt(2022, 1, 4)),
],
)
def test_modifiers_som(cal_, modifier, expected) -> None:
result = add_tenor(dt(2021, 12, 3), "1M", modifier, cal_)
assert result == expected
@pytest.mark.parametrize(
("modifier", "expected"),
[
("None", dt(2021, 2, 28)),
("F", dt(2021, 3, 1)),
("MF", dt(2021, 2, 26)),
("P", dt(2021, 2, 26)),
("MP", dt(2021, 2, 26)),
],
)
def test_modifiers_eom(cal_, modifier, expected) -> None:
result = add_tenor(dt(2020, 12, 31), "2M", modifier, cal_)
assert result == expected
@pytest.mark.parametrize(
("start", "end", "conv", "expected"),
[
(dt(2022, 1, 1), dt(2022, 4, 1), "ACT365F", 0.2465753424657534),
(dt(2021, 1, 1), dt(2022, 4, 1), "ACT365F+", 1.2465753424657535),
(dt(2022, 1, 1), dt(2022, 4, 1), "ACT365F+", 0.2465753424657534),
(dt(2020, 6, 1), dt(2022, 4, 1), "ACT365F+", 1.832876712328767),
(dt(2020, 1, 1), dt(2052, 1, 2), "ACT365F", 32.02465753424657),
(dt(2020, 1, 1), dt(2052, 1, 2), "ACT365F+", 32.0027397260274),
(dt(2022, 1, 1), dt(2022, 4, 1), "1", 1.0),
(dt(2022, 1, 1), dt(2022, 4, 1), "ACT360", 0.2465753424657534 * 365 / 360),
(dt(2022, 1, 1), dt(2022, 4, 1), "30360", 0.250),
(dt(2022, 1, 1), dt(2022, 4, 1), "30E360", 0.250),
(dt(2022, 1, 1), dt(2022, 4, 1), "ACTACTISDA", 0.2465753424657534),
(dt(2022, 1, 1), dt(2022, 1, 1), "ACTACTISDA", 0.0),
(dt(2022, 1, 1), dt(2023, 1, 31), "1+", 1.0),
(dt(2022, 1, 1), dt(2024, 2, 28), "1+", 2 + 1 / 12),
(dt(2022, 1, 1), dt(2022, 4, 1), "BUS252", 0.35714285714285715),
(dt(2022, 1, 1), dt(2022, 4, 1), "30U360", 0.25),
(dt(2022, 1, 1), dt(2022, 4, 1), "ACT365_25", 0.2464065708418891),
(dt(2022, 1, 1), dt(2022, 4, 1), "ACT364", 0.24725274725274726),
],
)
def test_dcf(start, end, conv, expected) -> None:
result = dcf(start, end, conv, calendar="all", frequency="Q")
assert abs(result - expected) < 1e-14
@pytest.mark.parametrize(
("start", "end", "conv", "expected", "freq", "term", "stub"),
[
(dt(2022, 6, 30), dt(2022, 7, 31), "30360", 1 / 12, NoInput(0), None, None),
(dt(2022, 6, 30), dt(2022, 7, 31), "30E360", 1 / 12, NoInput(0), None, None),
(dt(2022, 6, 30), dt(2022, 7, 31), "30E360ISDA", 1 / 12, "A", dt(2022, 7, 31), None),
(dt(2022, 6, 29), dt(2022, 7, 31), "30360", 1 / 12 + 2 / 360, NoInput(0), None, None),
(dt(2022, 6, 29), dt(2022, 7, 31), "30E360", 1 / 12 + 1 / 360, NoInput(0), None, None),
(dt(2022, 2, 28), dt(2022, 3, 31), "30E360", 1 / 12 + 2 / 360, NoInput(0), None, None),
(dt(2022, 2, 28), dt(2022, 3, 31), "30E360ISDA", 1 / 12, "A", dt(2022, 3, 3), None),
(
dt(1999, 2, 1),
dt(1999, 7, 1),
"ACTACTICMA",
150 / 365,
"A",
dt(2000, 7, 1),
True,
), # short first
(
dt(2002, 8, 15),
dt(2003, 7, 15),
"ACTACTICMA",
0.5 + 153 / 368,
"S",
dt(2004, 1, 15),
True,
), # long first
(
dt(2000, 1, 30),
dt(2000, 6, 30),
"ACTACTICMA",
152 / 364,
"S",
dt(2000, 6, 30),
True,
), # short back
(
dt(1999, 11, 30),
dt(2000, 4, 30),
"ACTACTICMA",
0.25 + 61 / 368,
Frequency.Months(3, RollDay.Day(31)),
dt(2000, 4, 30),
True,
),
(
dt(1999, 11, 30),
dt(2000, 4, 30),
"ACTACTICMA",
0.25 + 61 / 364,
Frequency.Months(3, RollDay.Day(30)),
dt(2000, 4, 30),
True,
),
# long back : with and without month end roll here
(
dt(1999, 11, 15),
dt(2000, 4, 15),
"ACTACTICMA",
0.25 + 60 / 360,
"Q",
dt(2000, 4, 15),
True,
), # long back
(dt(2002, 8, 31), dt(2002, 11, 30), "ACTACTICMA", 0.25, "Q", dt(2004, 11, 30), False),
(
dt(1999, 2, 1),
dt(1999, 7, 1),
"ACTACTICMA_STUB365F",
150 / 365,
"A",
dt(2000, 7, 1),
True,
), # short first
(
dt(2002, 8, 15),
dt(2003, 7, 15),
"ACTACTICMA_STUB365F",
0.5 + 153 / 365,
"S",
dt(2004, 1, 15),
True,
), # long first
(
dt(2000, 1, 30),
dt(2000, 6, 30),
"ACTACTICMA_STUB365F",
152 / 365,
"S",
dt(2000, 6, 30),
True,
), # short back
(
dt(1999, 11, 15),
dt(2000, 4, 15),
"ACTACTICMA_STUB365F",
0.25 + 60 / 365,
"Q",
dt(2000, 4, 15),
True,
), # long back
(
dt(2002, 8, 31),
dt(2002, 11, 30),
"ACTACTICMA_STUB365F",
0.25,
"Q",
dt(2004, 11, 30),
False,
),
],
)
def test_dcf_special(start, end, conv, expected, freq, term, stub) -> None:
# The 4 ActICMA tests match short/long first/final stubs in 1998-ISDA-memo-EMU pdf
result = dcf(start, end, conv, term, freq, stub)
assert abs(result - expected) < 1e-12
@pytest.mark.parametrize(
("conv", "freq", "term", "stub"),
[
("ACTACTICMA", NoInput(0), NoInput(0), NoInput(0)),
("ACTACTICMA", "Q", NoInput(0), NoInput(0)),
("BadConv", NoInput(0), NoInput(0), NoInput(0)),
],
)
def test_dcf_raises(conv, freq, term, stub) -> None:
with pytest.raises(ValueError):
_ = dcf(
dt(2022, 1, 1),
dt(2022, 4, 1),
conv,
term,
freq,
stub=stub,
)
def test_dcf_30e360_isda_raises():
# needs a termination if end februrary
with pytest.raises(ValueError, match="`termination` must be provided for '30e360ISDA' conv"):
_ = dcf(
dt(2022, 2, 28),
dt(2023, 2, 28),
"30e360isda",
NoInput(0),
)
def test_dcf_30u360_raises():
# needs a termination if end februrary
with pytest.raises(ValueError, match="`frequency` must be provided or has no `roll`. A roll-d"):
_ = dcf(
dt(2022, 2, 28),
dt(2023, 2, 28),
"30u360",
)
def test_dcf_actacticma_raises():
with pytest.raises(ValueError, match="Stub periods under ActActICMA require `termination`, `a"):
_ = dcf(
dt(2022, 2, 28),
dt(2023, 2, 28),
"actacticma",
NoInput(0),
"Q",
True,
Cal.from_name("tgt"),
NoInput(0),
)
def test_dcf_actact_raises():
with pytest.raises(ValueError, match=r"`ActAct` must be directly specified as `ActActICMA` "):
_ = dcf(
dt(2022, 2, 28),
dt(2023, 2, 28),
"actact",
)
@pytest.mark.parametrize(
("start", "end", "expected"),
[
(dt(2000, 1, 1), dt(2000, 1, 4), 1.0 / 252.0),
(dt(2000, 1, 2), dt(2000, 1, 4), 1.0 / 252.0),
(dt(2000, 1, 2), dt(2000, 1, 5), 2.0 / 252.0),
(dt(2000, 1, 1), dt(2000, 1, 5), 2.0 / 252.0),
(dt(2000, 1, 3), dt(2000, 1, 5), 1.0 / 252.0),
(dt(2000, 1, 3), dt(2000, 1, 4), 0.0 / 252.0),
(dt(2000, 1, 4), dt(2000, 1, 5), 1.0 / 252.0),
(dt(2000, 1, 5), dt(2000, 1, 6), 0.0 / 252.0),
(dt(2000, 1, 5), dt(2000, 1, 5), 0.0 / 252.0),
],
)
def test_bus252(start, end, expected) -> None:
cal = Cal(
[
dt(2000, 1, 1),
dt(2000, 1, 3),
dt(2000, 1, 5),
dt(2000, 1, 6),
],
[],
)
assert dcf(start, end, "BUS252", calendar=cal) == expected
@pytest.mark.parametrize(
("start", "end", "roll", "expected"),
[
(dt(2024, 2, 29), dt(2025, 2, 28), "eom", 1.00),
(dt(2024, 2, 29), dt(2025, 2, 28), 29, 0.99722222222222),
(dt(2024, 2, 28), dt(2025, 2, 28), "eom", 1.0),
(dt(2024, 2, 28), dt(2025, 2, 28), 28, 1.0),
(dt(2024, 2, 29), dt(2025, 2, 27), "eom", 0.99166666666666),
(dt(2024, 2, 29), dt(2025, 2, 27), 27, 0.99444444444444),
(dt(2024, 2, 28), dt(2025, 2, 27), "eom", 0.99722222222222),
(dt(2024, 2, 28), dt(2025, 2, 27), 27, 0.99722222222222),
(dt(2024, 9, 30), dt(2024, 12, 31), None, 0.25),
(dt(2024, 3, 31), dt(2024, 6, 30), None, 0.25),
(dt(2024, 3, 31), dt(2024, 12, 31), None, 0.75),
(dt(2024, 12, 1), dt(2024, 12, 31), None, 30 / 360),
(dt(2024, 11, 30), dt(2024, 12, 31), None, 30 / 360),
(dt(2024, 2, 29), dt(2024, 3, 31), 29, 32 / 360),
(dt(2024, 2, 29), dt(2024, 3, 31), "eom", 30 / 360),
(dt(2024, 2, 28), dt(2024, 3, 31), "eom", 33 / 360),
(dt(2025, 2, 28), dt(2025, 3, 31), "eom", 30 / 360),
],
)
def test_30u360(start, end, roll, expected):
freq = _get_frequency("M", roll, "all")
result = dcf(start, end, "30U360", frequency=freq)
assert abs(result - expected) < 1e-10
@pytest.mark.parametrize(
("d1", "d2", "exp"),
[
(dt(2009, 3, 1), dt(2012, 1, 15), (2, 10)),
(dt(2008, 12, 1), dt(2013, 10, 31), (4, 10)),
(dt(2008, 12, 1), dt(2018, 11, 15), (9, 11)),
(dt(2008, 12, 1), dt(2038, 5, 15), (29, 5)),
],
)
def test_get_years_and_months(d1, d2, exp) -> None:
result = _get_years_and_months(d1, d2)
assert result == exp
@pytest.mark.parametrize(
("s", "e", "t", "exp"),
[
(dt(2024, 2, 29), dt(2024, 5, 29), dt(2024, 5, 29), 0.24657534),
(dt(2021, 2, 28), dt(2024, 5, 29), dt(2024, 5, 29), 3.24863387),
(dt(2021, 2, 28), dt(2024, 5, 29), dt(2026, 5, 28), 3.24657534),
],
)
def test_act_act_icma_z_freq(s, e, t, exp) -> None:
with pytest.warns(UserWarning, match="`frequency` cannot be 'Zero' variant in combination wit"):
result = dcf(
start=s,
end=e,
convention="ActActICMA",
termination=t,
frequency=Frequency.Zero(), # Z Frequency
stub=True,
calendar=Cal([], []),
adjuster=Adjuster.Actual(),
)
assert abs(result - exp) < 1e-6
def test_calendar_aligns_with_fixings_tyo() -> None:
# using this test in a regular way, and with "-W error" for error on warn ensures that:
# - Curve cal is a business day and fixings cal has no fixing: is a warn
# - Curve cal is not a business day and fixings cal has a fixing: errors
curve = Curve(
{dt(2015, 6, 10): 1.0, dt(2024, 6, 3): 1.0},
calendar="tyo",
)
fixings_ = fixings["jpy_rfr"][1]
irs = IRS(dt(2015, 6, 10), dt(2024, 6, 3), "A", leg2_rate_fixings=fixings_, calendar="tyo")
irs.rate(curves=curve)
def test_calendar_aligns_with_fixings_syd() -> None:
# using this test in a regular way, and with "-W error" for error on warn ensures that:
# - Curve cal is a business day and fixings cal has no fixing: is a warn
# - Curve cal is not a business day and fixings cal has a fixing: errors
curve = Curve(
{dt(2015, 6, 10): 1.0, dt(2024, 6, 3): 1.0},
calendar="syd",
)
fixings_ = fixings["aud_rfr"][1]
irs = IRS(dt(2015, 6, 10), dt(2024, 6, 3), "A", leg2_rate_fixings=fixings_, calendar="syd")
irs.rate(curves=curve)
def test_book_example() -> None:
res = add_tenor(dt(2001, 9, 28), "-6M", modifier="MF", calendar="ldn")
assert res == dt(2001, 3, 28, 0, 0)
res = add_tenor(dt(2001, 9, 28), "-6M", modifier="MF", calendar="ldn", roll=31)
assert res == dt(2001, 3, 30, 0, 0)
res = add_tenor(dt(2001, 9, 28), "-6M", modifier="MF", calendar="ldn", roll=29)
assert res == dt(2001, 3, 29, 0, 0)
def test_book_example2() -> None:
cal = get_calendar("tgt|nyc")
cal2 = get_calendar("tgt,nyc")
# 11th Nov 09 is a US holiday: test that the holiday is ignored in the settlement cal
result = cal.add_bus_days(dt(2009, 11, 10), 2, True)
result2 = cal2.add_bus_days(dt(2009, 11, 10), 2, True)
assert result == dt(2009, 11, 12)
assert result2 == dt(2009, 11, 13)
# test that the US settlement is honoured
result = cal.add_bus_days(dt(2009, 11, 9), 2, True)
result2 = cal.add_bus_days(dt(2009, 11, 9), 2, False)
assert result == dt(2009, 11, 12)
assert result2 == dt(2009, 11, 11)
def test_pipe_vectors() -> None:
get_calendar("tgt,stk|nyc,osl")
def test_pipe_raises() -> None:
with pytest.raises(
ValueError, match="The calendar cannot be parsed. Is there more than one pipe character?"
):
get_calendar("tgt|nyc|stk")
def test_add_and_get_custom_calendar() -> None:
cal = Cal([dt(2023, 1, 2)], [5, 6])
calendars.add("custom", cal)
result = get_calendar("custom")
assert result == cal
calendars.pop("custom")
def test_add_and_get_custom_calendar_combination() -> None:
cal = Cal([dt(2023, 1, 2)], [5, 6])
cal2 = Cal([dt(2023, 1, 3)], [1, 2, 5, 6])
calendars.add("custom", cal)
calendars.add("custom2", cal2)
result = get_calendar("custom,custom2")
assert result == UnionCal([cal, cal2], [])
calendars.pop("custom")
calendars.pop("custom2")
@pytest.mark.parametrize("name", ["abc,def", "abc|def"])
def test_add_fails_on_comma_or_pipe(name):
with pytest.raises(
ValueError, match=r"`name` cannot contain the comma \(','\) or pipe \('|'\) cha"
):
calendars.add(name, Cal([], []))
@pytest.mark.parametrize("name", ["tgt", "nyc"])
def test_add_fails_on_existing(name):
with pytest.raises(KeyError, match=r"'`name` already exists in calendars.\\nCannot overwri"):
calendars.add(name, Cal([], []))
def test_calendar_pop_all_combinations() -> None:
cal = Cal([dt(2023, 1, 2)], [5, 6])
cal2 = Cal([dt(2023, 1, 3)], [1, 2, 5, 6])
cal3 = Cal([dt(2023, 1, 3)], [1, 2, 4, 6])
calendars.add("custom1", cal)
calendars.add("custom2", cal2)
calendars.add("custom3", cal3)
_ = get_calendar("custom1,custom2")
_ = get_calendar("custom1,custom3")
_ = get_calendar("custom2,custom3")
calendars.pop("custom1")
assert "custom1,custom2" not in calendars
assert "custom1,custom3" not in calendars
assert "custom2,custom3" in calendars
calendars.pop("custom2")
calendars.pop("custom3")
def test_doc_union_cal() -> None:
calendars.add("mondays-off", Cal([], [0, 5, 6]))
calendars.add("fridays-off", Cal([], [4, 5, 6]))
result = get_calendar("mondays-off, fridays-off").print(2026, 1)
expected = """ January 2026
Su Mo Tu We Th Fr Sa
1 * .
. * 6 7 8 * .
. * 13 14 15 * .
. * 20 21 22 * .
. * 27 28 29 * .
""" # noqa: W293
assert result == expected
calendars.pop("mondays-off")
calendars.pop("fridays-off")
@pytest.mark.parametrize(
("evald", "delivery", "expiry", "expected_expiry"),
[
(dt(2024, 5, 2), 2, "2m", dt(2024, 7, 4)),
(dt(2024, 4, 30), 2, "2m", dt(2024, 7, 1)),
(dt(2024, 5, 31), 2, "1m", dt(2024, 7, 3)),
(dt(2024, 5, 31), 2, "2w", dt(2024, 6, 14)),
],
)
def test_expiries_delivery(evald, delivery, expiry, expected_expiry) -> None:
result_expiry, _, _ = _get_fx_expiry_and_delivery_and_payment(
evald, expiry, delivery, "tgt|fed", "mf", False, 0
)
assert result_expiry == expected_expiry
def test_expiries_delivery_raises() -> None:
with pytest.raises(ValueError, match="Cannot determine FXOption expiry and delivery"):
_get_fx_expiry_and_delivery_and_payment(
dt(2000, 1, 1),
"3m",
dt(2000, 3, 2),
"tgt|fed",
"mf",
False,
0,
)
@pytest.mark.parametrize(
("val", "exp"),
[
("Z24", dt(2024, 12, 18)),
("X89", dt(2089, 11, 16)),
],
)
def test_get_imm_api(val, exp):
result = get_imm(month=1, year=1, code=val)
assert result == exp
def test_get_imm_api_no_code():
result = get_imm(month=11, year=2089)
assert result == dt(2089, 11, 16)
@pytest.mark.parametrize("tenor", ["1B", "1b", "3D", "3d", "2W", "2w"])
def test_is_day_type_tenor(tenor):
assert _is_day_type_tenor(tenor)
@pytest.mark.parametrize("tenor", ["1M", "1m", "4Y", "4y"])
def test_is_not_day_type_tenor(tenor):
assert not _is_day_type_tenor(tenor)
@pytest.mark.parametrize(
("start", "method", "expected"),
[
(dt(2025, 1, 1), "wed3_hmuz", dt(2025, 3, 19)),
(dt(2025, 1, 1), "wed3", dt(2025, 1, 15)),
(dt(2025, 1, 1), "day20_hmuz", dt(2025, 3, 20)),
(dt(2025, 1, 1), "day20_HU", dt(2025, 3, 20)),
(dt(2025, 1, 1), "day20_MZ", dt(2025, 6, 20)),
(dt(2025, 1, 15), "wed3", dt(2025, 2, 19)),
(dt(2025, 3, 19), "wed3_hmuz", dt(2025, 6, 18)),
(dt(2025, 3, 20), "day20_hmuz", dt(2025, 6, 20)),
(dt(2025, 3, 20), "day20_HU", dt(2025, 9, 20)),
(dt(2025, 3, 20), "day20_MZ", dt(2025, 6, 20)),
(dt(2025, 9, 20), "day20_HU", dt(2026, 3, 20)),
(dt(2025, 12, 1), "wed3_hmuz", dt(2025, 12, 17)),
(dt(2025, 12, 1), "wed3", dt(2025, 12, 17)),
(dt(2025, 12, 1), "day20_hmuz", dt(2025, 12, 20)),
(dt(2025, 12, 1), "day20_HU", dt(2026, 3, 20)),
(dt(2025, 12, 1), "day20_MZ", dt(2025, 12, 20)),
(dt(2025, 12, 17), "wed3_hmuz", dt(2026, 3, 18)),
(dt(2025, 12, 17), "wed3", dt(2026, 1, 21)),
(dt(2025, 12, 20), "day20_hmuz", dt(2026, 3, 20)),
(dt(2025, 12, 20), "day20_HU", dt(2026, 3, 20)),
(dt(2025, 12, 20), "day20_MZ", dt(2026, 6, 20)),
],
)
def test_next_imm(start, method, expected):
result = next_imm(start, method)
assert result == expected
def test_next_imm_depr():
with pytest.warns(DeprecationWarning):
next_imm(dt(2000, 1, 1), "imm")
def test_get_imm_depr():
with pytest.warns(DeprecationWarning):
get_imm(3, 2000, definition="imm")
def test_fed_nyc_good_friday():
assert not get_calendar("nyc").is_bus_day(dt(2024, 3, 29))
assert get_calendar("fed").is_bus_day(dt(2024, 3, 29))
def test_fed_sunday_to_monday():
fed = get_calendar("fed")
assert fed.is_bus_day(dt(2021, 12, 24))
assert not fed.is_bus_day(dt(2022, 12, 26))
def test_syd_nsw_holidays():
cal = get_calendar("nsw")
assert not cal.is_bus_day(dt(1970, 8, 3))
assert not cal.is_bus_day(dt(1970, 10, 5))
def test_wlg_changes():
cal = get_calendar("wlg")
assert not cal.is_bus_day(dt(2022, 9, 26))
assert not cal.is_bus_day(dt(2025, 1, 20))
assert not cal.is_bus_day(dt(2025, 1, 27))
def test_busdayslag_reverse():
# test that reverse operates over settleable days also
a = Adjuster.BusDaysLagSettle(2)
cal = Cal([dt(2026, 1, 1)], [5, 6])
union = UnionCal([Cal([], [])], [cal])
assert a.adjust(dt(2025, 12, 30), union) == dt(2026, 1, 2)
assert a.adjust(dt(2025, 12, 31), union) == dt(2026, 1, 2)
assert a.reverse(dt(2026, 1, 2), union) == [dt(2025, 12, 31), dt(2025, 12, 30)]
def test_mex_loads():
cal = get_calendar("mex")
assert not cal.is_bus_day(dt(2026, 3, 16))
assert cal.is_bus_day(dt(2026, 3, 17))
def test_bjs_loads():
cal = get_calendar("bjs")
assert not cal.is_bus_day(dt(2026, 9, 19))
assert cal.is_bus_day(dt(2026, 9, 20))
def test_replace_whitespace():
cal1 = get_calendar("nyc, tgt")
cal2 = get_calendar("nyc,tgt")
assert cal1 == cal2
def test_print_month():
cal = get_calendar("nyc,tgt")
output = cal.print(2026, 1)
assert (
output
== r""" January 2026
Su Mo Tu We Th Fr Sa
* 2 .
. 5 6 7 8 9 .
. 12 13 14 15 16 .
. * 20 21 22 23 .
. 26 27 28 29 30 .
""" # noqa: W291, W293
)
def test_print_calendar():
cal = get_calendar("bjs")
output = cal.print(2026)
expected = """
January 2026 April 2026 July 2026 October 2026
Su Mo Tu We Th Fr Sa Su Mo Tu We Th Fr Sa Su Mo Tu We Th Fr Sa Su Mo Tu We Th Fr Sa
* * . 1 2 3 . 1 2 3 . * * .
4 5 6 7 8 9 . . * 7 8 9 10 . . 6 7 8 9 10 . . * * * 8 9 10
. 12 13 14 15 16 . . 13 14 15 16 17 . . 13 14 15 16 17 . . 12 13 14 15 16 .
. 19 20 21 22 23 . . 20 21 22 23 24 . . 20 21 22 23 24 . . 19 20 21 22 23 .
. 26 27 28 29 30 . . 27 28 29 30 . 27 28 29 30 31 . 26 27 28 29 30 .
February 2026 May 2026 August 2026 November 2026
Su Mo Tu We Th Fr Sa Su Mo Tu We Th Fr Sa Su Mo Tu We Th Fr Sa Su Mo Tu We Th Fr Sa
. 2 3 4 5 6 . * . . . 2 3 4 5 6 .
. 9 10 11 12 13 14 . * * 6 7 8 9 . 3 4 5 6 7 . . 9 10 11 12 13 .
. * * * * * . . 11 12 13 14 15 . . 10 11 12 13 14 . . 16 17 18 19 20 .
. * 24 25 26 27 28 . 18 19 20 21 22 . . 17 18 19 20 21 . . 23 24 25 26 27 .
. 25 26 27 28 29 . . 24 25 26 27 28 . . 30
. . 31
March 2026 June 2026 September 2026 December 2026
Su Mo Tu We Th Fr Sa Su Mo Tu We Th Fr Sa Su Mo Tu We Th Fr Sa Su Mo Tu We Th Fr Sa
. 2 3 4 5 6 . 1 2 3 4 5 . 1 2 3 4 . 1 2 3 4 .
. 9 10 11 12 13 . . 8 9 10 11 12 . . 7 8 9 10 11 . . 7 8 9 10 11 .
. 16 17 18 19 20 . . 15 16 17 18 * . . 14 15 16 17 18 . . 14 15 16 17 18 .
. 23 24 25 26 27 . . 22 23 24 25 26 . 20 21 22 23 24 * . . 21 22 23 24 25 .
. 30 31 . 29 30 . 28 29 30 . 28 29 30 31
Legend:
'1-31': Settleable business day 'X': Non-settleable business day
'.': Non-business weekend '*': Non-business day
""" # noqa: W291, W293
assert output == expected
def test_print_compare_calendar():
cal = get_calendar("nyc")
cal2 = get_calendar("fed")
output = cal.print_compare(cal2, 2026)
expected = """
January 2026 April 2026 July 2026 October 2026
Su Mo Tu We Th Fr Sa Su Mo Tu We Th Fr Sa Su Mo Tu We Th Fr Sa Su Mo Tu We Th Fr Sa
_ _ _ _ _ [] _ _ _ [] _ _ _ _
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
February 2026 May 2026 August 2026 November 2026
Su Mo Tu We Th Fr Sa Su Mo Tu We Th Fr Sa Su Mo Tu We Th Fr Sa Su Mo Tu We Th Fr Sa
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _
March 2026 June 2026 September 2026 December 2026
Su Mo Tu We Th Fr Sa Su Mo Tu We Th Fr Sa Su Mo Tu We Th Fr Sa Su Mo Tu We Th Fr Sa
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _
""" # noqa: W291, W293
assert output == expected
def test_union_cal_try_from_name():
uc = UnionCal.from_name("ldn,tgt|fed")
assert isinstance(uc, UnionCal)
@pytest.mark.parametrize("number", [-3, -2, -1, 0, 1, 2, 3])
@pytest.mark.parametrize(
"start", [dt(2026, 2, 13), dt(2026, 2, 14), dt(2026, 2, 15), dt(2026, 2, 16)]
)
def test_add_bus_days_BusDaysLagSettle_equivalence(number, start):
cal = Cal([], [5, 6])
adj = Adjuster.BusDaysLagSettle(number)
result = cal.adjust(start, adj)
expected = cal.lag_bus_days(start, number, True)
assert result == expected
================================================
FILE: python/tests/scheduling/test_calendarsrs.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
import pytest
from pandas import Index
from rateslib import fixings
from rateslib.rs import Adjuster, Cal, CalendarManager, Modifier, NamedCal, RollDay, UnionCal
from rateslib.scheduling import get_calendar
from rateslib.serialization import from_json
class TestRollDay:
@pytest.mark.parametrize(
("left", "right", "expected"),
[
(RollDay.IMM(), RollDay.IMM(), True),
(RollDay.Day(20), RollDay.Day(20), True),
(RollDay.Day(20), RollDay.Day(30), False),
(RollDay.Day(31), RollDay.IMM(), False),
],
)
def test_equality(self, left, right, expected):
result = left == right
assert result is expected
@pytest.mark.parametrize(
"modifier",
[
Modifier.Act,
Modifier.F,
Modifier.ModF,
Modifier.P,
Modifier.ModP,
],
)
def test_modifier_pickle(modifier) -> None:
import pickle
assert modifier == pickle.loads(pickle.dumps(modifier))
@pytest.fixture
def simple_cal():
return Cal([dt(2015, 9, 5), dt(2015, 9, 7)], [5, 6]) # Saturday and Monday
@pytest.fixture
def simple_union(simple_cal):
return UnionCal([simple_cal], None)
@pytest.fixture
def multi_union(simple_cal):
add_cal = Cal([dt(2015, 9, 3), dt(2015, 9, 8)], [5, 6])
return UnionCal([simple_cal, add_cal], None)
class TestCal:
def test_cal_construct(self) -> None:
cal = Cal([dt(2015, 9, 5), dt(2015, 9, 7)], [5, 6])
UnionCal([cal], None)
def test_cal_from_name(self):
cal1 = Cal.from_name("ldn")
cal2 = NamedCal("ldn")
assert cal1 == cal2
assert type(cal1) is not type(cal2)
def test_is_business_day(self, simple_cal, simple_union) -> None:
assert not simple_cal.is_bus_day(dt(2015, 9, 7)) # Monday Holiday
assert simple_cal.is_bus_day(dt(2015, 9, 8)) # Tuesday
assert not simple_cal.is_bus_day(dt(2015, 9, 12)) # Saturday
assert not simple_union.is_bus_day(dt(2015, 9, 7))
assert simple_union.is_bus_day(dt(2015, 9, 8))
@pytest.mark.parametrize("cal", ["basic", "union"])
def test_add_cal_days(self, simple_cal, simple_union, cal) -> None:
cal = simple_cal if cal == "basic" else simple_union
expected = dt(2015, 9, 8)
result = cal.add_cal_days(dt(2015, 9, 4), 2, Adjuster.FollowingSettle())
assert result == expected
expected = dt(2015, 9, 6)
result = cal.add_cal_days(dt(2015, 9, 5), 1, Adjuster.Actual())
assert result == expected
@pytest.mark.parametrize("cal", ["basic", "union"])
@pytest.mark.parametrize(
("start", "days", "expected"),
[
(dt(2015, 9, 4), 0, dt(2015, 9, 4)),
(dt(2015, 9, 4), 1, dt(2015, 9, 8)),
(dt(2015, 9, 8), -1, dt(2015, 9, 4)),
(dt(2015, 9, 4), -1, dt(2015, 9, 3)),
(dt(2015, 9, 8), 1, dt(2015, 9, 9)),
],
)
def test_add_bus_days(self, simple_cal, simple_union, cal, start, days, expected) -> None:
cal = simple_cal if cal == "basic" else simple_union
result = cal.add_bus_days(start, days, True)
assert result == expected
def test_add_bus_days_raises(self, simple_cal, simple_union) -> None:
with pytest.raises(ValueError, match="Cannot add business days"):
simple_cal.add_bus_days(dt(2015, 9, 5), 1, True)
@pytest.mark.parametrize("cal", ["basic", "union"])
@pytest.mark.parametrize(
("start", "months", "expected"),
[
(dt(2015, 9, 4), 2, dt(2015, 11, 4)),
(dt(2015, 9, 4), 36, dt(2018, 9, 4)),
],
)
def test_add_months(self, cal, simple_cal, simple_union, start, months, expected) -> None:
cal = simple_cal if cal == "basic" else simple_union
result = cal.add_months(start, months, Adjuster.FollowingSettle(), None)
assert result == expected
def test_pickle_cal(self, simple_cal) -> None:
import pickle
pickled_cal = pickle.dumps(simple_cal)
pickle.loads(pickled_cal)
def test_pickle_union(self, simple_union) -> None:
import pickle
pickled_cal = pickle.dumps(simple_union)
pickle.loads(pickled_cal)
@pytest.mark.parametrize(
("cal", "exp"),
[
("basic", [dt(2015, 9, 5), dt(2015, 9, 7)]),
("union", [dt(2015, 9, 3), dt(2015, 9, 5), dt(2015, 9, 7), dt(2015, 9, 8)]),
],
)
def test_holidays(self, cal, exp, simple_cal, multi_union) -> None:
cal = simple_cal if cal == "basic" else multi_union
assert cal.holidays == exp
# def test_rules(self):
# rules = get_calendar("tyo").rules
# assert rules[:10] == "Jan 1 (New"
def test_tyo_cal(self) -> None:
tokyo = get_calendar("tyo")
assert tokyo.holidays[0] == dt(1970, 1, 1)
def test_fed_cal(self) -> None:
cal = get_calendar("fed")
assert cal.holidays[0] == dt(1970, 1, 1)
def test_wlg_cal(self):
cal = get_calendar("wlg")
assert cal.holidays[0] == dt(1970, 1, 1)
def test_mum_cal(self):
cal = get_calendar("mum")
assert cal.holidays[0] == dt(1970, 1, 26)
def test_json_round_trip(self, simple_cal) -> None:
json = simple_cal.to_json()
from_cal = from_json(json)
assert simple_cal == from_cal
def test_json_round_trip_union(self, multi_union) -> None:
json = multi_union.to_json()
from_cal = from_json(json)
assert multi_union == from_cal
def test_json_raises(self) -> None:
with pytest.raises(ValueError, match="Could not create Class or Struct from given JSON"):
from_json('{"Cal":{"holidays":[]}}')
with pytest.raises(ValueError, match="Could not create Class or Struct from given JSON"):
from_json('{"UnionCal":{"settlement_calendars":[]}}')
@pytest.mark.parametrize(
("left", "right", "expected"),
[
(Cal([], [5, 6]), Cal([], [5, 6]), True),
(Cal([dt(2006, 1, 2)], [5, 6]), Cal([dt(2006, 1, 2)], [5, 6]), True),
(Cal([dt(2006, 1, 2)], [5, 6]), Cal([dt(2007, 1, 2)], [5, 6]), False),
(Cal([], [4, 6]), Cal([], [5, 6]), False),
(UnionCal([Cal([], [5, 6])]), Cal([], [5, 6]), True),
(UnionCal([Cal([dt(2006, 1, 2)], [5, 6])]), Cal([], [5, 6]), False),
(
UnionCal([Cal([dt(2006, 1, 2)], [5, 6])]),
Cal([dt(2006, 1, 2)], [5, 6]),
True,
),
(
UnionCal([Cal([dt(2006, 1, 2)], [5, 6]), Cal([dt(2006, 1, 3)], [5, 6])]),
Cal([dt(2006, 1, 2), dt(2006, 1, 3)], [5, 6]),
True,
),
(
UnionCal([Cal([dt(2006, 1, 2)], [5, 6]), Cal([dt(2006, 1, 3)], [5, 6])]),
UnionCal([Cal([dt(2006, 1, 2), dt(2006, 1, 3)], [5, 6])]),
True,
),
],
)
def test_equality(self, left, right, expected) -> None:
assert (left == right) is expected
assert (right == left) is expected
def test_attributes(self) -> None:
ncal = get_calendar("tgt,LDN|Fed")
assert ncal.name == "ldn,tgt|fed"
assert isinstance(ncal.inner, UnionCal)
assert len(ncal.inner.calendars) == 2
assert len(ncal.inner.settlement_calendars) == 1
ncal = get_calendar("tgt")
assert isinstance(ncal.inner, Cal)
def test_adjusts(self, simple_cal):
dates = [dt(2015, 9, 4), dt(2015, 9, 5), dt(2015, 9, 6), dt(2015, 9, 7)]
result = simple_cal.adjusts(dates, Adjuster.Following())
expected = [dt(2015, 9, 4), dt(2015, 9, 8), dt(2015, 9, 8), dt(2015, 9, 8)]
assert result == expected
def test_roll(self, simple_cal):
result = simple_cal.roll(dt(2015, 9, 5), "F", False)
assert result == dt(2015, 9, 8)
class TestUnionCal:
def test_week_mask(self, multi_union) -> None:
result = multi_union.week_mask
assert result == {5, 6}
def test_adjusts(self, simple_union):
dates = [dt(2015, 9, 4), dt(2015, 9, 5), dt(2015, 9, 6), dt(2015, 9, 7)]
result = simple_union.adjusts(dates, Adjuster.Following())
expected = [dt(2015, 9, 4), dt(2015, 9, 8), dt(2015, 9, 8), dt(2015, 9, 8)]
assert result == expected
def test_roll(self, simple_union):
result = simple_union.roll(dt(2015, 9, 5), "F", False)
assert result == dt(2015, 9, 8)
class TestNamedCal:
def test_equality_named_cal(self) -> None:
cal = Cal.from_name("fed")
ncal = NamedCal("fed")
assert cal == ncal
assert ncal == cal
ucal = UnionCal.from_name("ldn,tgt|fed")
ncal = NamedCal("ldn,tgt|fed")
assert ucal == ncal
assert ncal == ucal
def test_adjusts(self):
ncal = NamedCal("fed")
dates = [dt(2015, 9, 4), dt(2015, 9, 5), dt(2015, 9, 6), dt(2015, 9, 7)]
result = ncal.adjusts(dates, Adjuster.Following())
expected = [dt(2015, 9, 4), dt(2015, 9, 8), dt(2015, 9, 8), dt(2015, 9, 8)]
assert result == expected
def test_roll(self):
ncal = NamedCal("fed")
result = ncal.roll(dt(2015, 9, 5), "F", False)
assert result == dt(2015, 9, 8)
@pytest.mark.parametrize(
("datafile", "calendar", "known_exceptions"),
[
("usd_rfr", "nyc", []),
("gbp_rfr", "ldn", []),
("cad_rfr", "tro", []),
("eur_rfr", "tgt", []),
("jpy_rfr", "tyo", []),
("sek_rfr", "stk", []),
("nok_rfr", "osl", []),
("aud_rfr", "syd", []),
("inr_rfr", "mum", []),
],
)
def test_calendar_against_historical_fixings(datafile, calendar, known_exceptions):
fixings_ = fixings[datafile][1]
calendar_ = get_calendar(calendar)
bus_days = Index(calendar_.bus_date_range(fixings_.index[0], fixings_.index[-1]))
diff = fixings_.index.symmetric_difference(bus_days)
errors = 0
if len(diff) != 0:
print(f"{calendar} for {datafile}")
for i, date in enumerate(diff):
if date in known_exceptions:
continue
elif date in fixings_.index:
print(f"{date} exists in fixings: does calendar wrongly classify as a holiday?")
else:
# print(f'Holiday("adhoc{i}", year={date.year}, month={date.month}, day={date.day}),') # noqa: E501
print(f"{date} exists in calendar: should this date be classified as a holiday?")
errors += 1
assert errors == 0
class TestAdjuster:
def test_adjusts(self, simple_cal):
dates = [dt(2015, 9, 4), dt(2015, 9, 5), dt(2015, 9, 6), dt(2015, 9, 7)]
result = Adjuster.Following().adjusts(dates, simple_cal)
expected = [dt(2015, 9, 4), dt(2015, 9, 8), dt(2015, 9, 8), dt(2015, 9, 8)]
assert result == expected
class TestCalendarManager:
def test_add_and_pop(self):
c = CalendarManager()
c.add("mycalendar", Cal([], [2]))
nc = c.get("mycalendar")
assert isinstance(nc, NamedCal)
assert nc == Cal([], [2])
pop = c.pop("mycalendar")
assert pop == Cal([], [2])
assert isinstance(pop, Cal)
with pytest.raises(KeyError):
c.get("mycalendar")
def test_add_union_cal_raises(self):
c = CalendarManager()
with pytest.raises(TypeError, match="argument 'calendar': 'UnionCal' object is not"):
c.add("mycalendar", UnionCal([Cal([], [])], None))
def test_add_and_get_composition(self):
c = CalendarManager()
x = c.get("ldn,tgt")
y = c.get("tgt,ldn")
assert x == y
assert x.inner_ptr_eq(y)
def test_get_raises(self):
c = CalendarManager()
with pytest.raises(KeyError, match="`name` does not exist in calendars."):
c.get("bad_calendar")
with pytest.raises(KeyError, match="`name` does not exist in calendars."):
c.get("ldn,bad_calendar")
================================================
FILE: python/tests/scheduling/test_frequency.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
import pytest
from rateslib.rs import Adjuster, Cal, Frequency, RollDay
@pytest.mark.parametrize(
("method", "args", "exp"),
[
("unext", (dt(2000, 1, 1),), dt(2000, 1, 11)),
("uprevious", (dt(2000, 1, 11),), dt(2000, 1, 1)),
(
"uregular",
(dt(2000, 1, 1), dt(2000, 1, 21)),
[dt(2000, 1, 1), dt(2000, 1, 11), dt(2000, 1, 21)],
),
("infer_ustub", (dt(2000, 1, 1), dt(2000, 1, 17), True, True), dt(2000, 1, 7)),
("infer_ustub", (dt(2000, 1, 1), dt(2000, 1, 27), False, True), dt(2000, 1, 17)),
("infer_ustub", (dt(2000, 1, 1), dt(2000, 1, 17), True, False), dt(2000, 1, 11)),
("infer_ustub", (dt(2000, 1, 1), dt(2000, 1, 27), False, False), dt(2000, 1, 11)),
],
)
def test_frequency_cal_days(method, args, exp):
f = Frequency.CalDays(10)
result = getattr(f, method)(*args)
assert result == exp
@pytest.mark.parametrize(
("method", "args", "exp"),
[
("unext", (dt(2025, 1, 1),), dt(2025, 1, 8)),
("uprevious", (dt(2025, 1, 8),), dt(2025, 1, 1)),
(
"uregular",
(dt(2025, 1, 1), dt(2025, 1, 15)),
[dt(2025, 1, 1), dt(2025, 1, 8), dt(2025, 1, 15)],
),
("infer_ustub", (dt(2025, 1, 1), dt(2025, 1, 23), True, True), dt(2025, 1, 2)),
("infer_ustub", (dt(2025, 1, 1), dt(2025, 1, 23), False, True), dt(2025, 1, 9)),
("infer_ustub", (dt(2025, 1, 1), dt(2025, 1, 23), True, False), dt(2025, 1, 22)),
("infer_ustub", (dt(2025, 1, 1), dt(2025, 1, 23), False, False), dt(2025, 1, 15)),
],
)
def test_frequency_bus_days(method, args, exp):
cal = Cal([], [5, 6])
f = Frequency.BusDays(5, cal)
result = getattr(f, method)(*args)
assert result == exp
@pytest.mark.parametrize(
("method", "args", "exp"),
[
("unext", (dt(2025, 1, 15),), dt(2025, 2, 15)),
("uprevious", (dt(2025, 2, 15),), dt(2025, 1, 15)),
(
"uregular",
(dt(2025, 1, 15), dt(2025, 3, 15)),
[dt(2025, 1, 15), dt(2025, 2, 15), dt(2025, 3, 15)],
),
("infer_ustub", (dt(2025, 1, 1), dt(2025, 4, 15), True, True), dt(2025, 1, 15)),
("infer_ustub", (dt(2025, 1, 1), dt(2025, 4, 15), False, True), dt(2025, 2, 15)),
("infer_ustub", (dt(2025, 1, 15), dt(2025, 4, 1), True, False), dt(2025, 3, 15)),
("infer_ustub", (dt(2025, 1, 15), dt(2025, 4, 1), False, False), dt(2025, 2, 15)),
],
)
def test_frequency_months(method, args, exp):
f = Frequency.Months(1, RollDay.Day(15))
result = getattr(f, method)(*args)
assert result == exp
@pytest.mark.parametrize(
("method", "args", "exp"),
[
("unext", (dt(2025, 1, 1),), dt(2025, 2, 1)),
("uprevious", (dt(2025, 2, 1),), dt(2025, 1, 1)),
(
"uregular",
(dt(2025, 1, 1), dt(2025, 3, 1)),
[dt(2025, 1, 1), dt(2025, 2, 1), dt(2025, 3, 1)],
),
("infer_ustub", (dt(2025, 1, 1), dt(2025, 4, 15), True, True), dt(2025, 1, 15)),
("infer_ustub", (dt(2025, 1, 1), dt(2025, 4, 15), False, True), dt(2025, 2, 15)),
("infer_ustub", (dt(2025, 1, 1), dt(2025, 4, 15), True, False), dt(2025, 4, 1)),
("infer_ustub", (dt(2025, 1, 1), dt(2025, 4, 15), False, False), dt(2025, 3, 1)),
],
)
def test_frequency_months_undefined(method, args, exp):
with pytest.raises(ValueError, match="`udate` cannot be validated since RollDay is None."):
getattr(Frequency.Months(1, None), method)(*args)
@pytest.mark.parametrize(
("method", "args", "exp"),
[
("unext", (dt(2025, 1, 1),), dt(9999, 1, 1)),
("uprevious", (dt(2025, 1, 8),), dt(1500, 1, 1)),
("uregular", (dt(2025, 1, 1), dt(2025, 1, 15)), [dt(2025, 1, 1), dt(2025, 1, 15)]),
],
)
def test_frequency_zero(method, args, exp):
f = Frequency.Zero()
result = getattr(f, method)(*args)
assert result == exp
@pytest.mark.parametrize("front", [True, False])
def test_frequency_zero_raise(front):
f = Frequency.Zero()
result = f.infer_ustub(dt(2000, 1, 1), dt(2001, 1, 1), True, front)
assert result is None
def test_equality():
f = Frequency.Zero()
assert f == Frequency.Zero()
f = Frequency.CalDays(10)
assert isinstance(f, Frequency.CalDays)
assert not isinstance(f, Frequency.BusDays)
def test_rollday_equality():
assert RollDay.Day(15) == RollDay.Day(15)
assert RollDay.Day(15) != RollDay.Day(16)
assert RollDay.Day(15) != RollDay.IMM()
assert RollDay.IMM() == RollDay.IMM()
def test_string():
assert Frequency.Zero().string() == "Z"
assert Frequency.CalDays(10).string() == "10D"
assert Frequency.Months(3, None).string() == "Q"
def test_adjuster_reverse():
cal = Cal([dt(2010, 1, 1)], [])
result = Adjuster.Following().reverse(dt(2010, 1, 2), cal)
assert result == [dt(2010, 1, 2), dt(2010, 1, 1)]
================================================
FILE: python/tests/scheduling/test_imm.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
import pytest
from rateslib.rs import Imm
from rateslib.scheduling import get_imm
@pytest.mark.parametrize(
("date", "expected"),
[
(dt(2022, 3, 16), True),
(dt(2022, 6, 15), True),
(dt(2022, 9, 25), False),
(dt(2022, 8, 17), False),
],
)
def test_is_imm(date, expected) -> None:
result = Imm.Wed3_HMUZ.validate(date)
assert result is expected
def test_is_imm_serial() -> None:
result = Imm.Wed3.validate(dt(2022, 8, 17)) # imm in Aug
assert result
@pytest.mark.parametrize(
("month", "year", "expected"),
[
(3, 2022, dt(2022, 3, 16)),
(6, 2022, dt(2022, 6, 15)),
(9, 2022, dt(2022, 9, 21)),
(12, 2022, dt(2022, 12, 21)),
],
)
def test_get_imm(month, year, expected) -> None:
result = Imm.Wed3.get(year, month)
assert result == expected
def test_get_imm_namespace():
from rateslib import get_imm as f
f(code="h24")
@pytest.mark.parametrize(
("month", "year", "expected"),
[
(2, 2022, dt(2022, 2, 28)),
(2, 2024, dt(2024, 2, 29)),
(8, 2022, dt(2022, 8, 31)),
],
)
def test_get_eom(month, year, expected) -> None:
result = Imm.Eom.get(year, month)
assert result == expected
def test_get_som() -> None:
assert Imm.Som.get(2000, 3) == dt(2000, 3, 1)
assert get_imm(code="H25", definition="som") == dt(2025, 3, 1)
================================================
FILE: python/tests/scheduling/test_schedule.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
import numpy as np
import pytest
from pandas import DataFrame, DatetimeIndex, date_range
from pandas.testing import assert_index_equal
from pandas.tseries.holiday import Holiday
from rateslib import defaults
from rateslib.default import NoInput
from rateslib.rs import Adjuster, Frequency, RollDay
from rateslib.scheduling import Cal
from rateslib.scheduling.schedule import Schedule
@pytest.fixture
def cal_():
return Cal([dt(_, 1, 3) for _ in range(1970, 2200)], [5, 6])
@pytest.mark.parametrize(
("dt1", "dt2", "fm", "expected"),
[
(dt(2022, 3, 16), dt(2022, 6, 30), 3, False),
(dt(2022, 3, 16), dt(2024, 9, 16), 3, True),
(dt(2022, 3, 16), dt(2028, 9, 16), 6, True),
(dt(2022, 3, 16), dt(2029, 3, 16), 12, True),
(dt(2022, 3, 16), dt(2022, 10, 16), 3, False),
(dt(2022, 3, 31), dt(2024, 4, 1), 12, False),
],
)
def test_is_divisible_months(dt1, dt2, fm, expected) -> None:
f = Frequency.Months(fm, RollDay.Day(16))
try:
f.uregular(dt1, dt2)
except ValueError:
assert not expected
else:
assert expected
@pytest.mark.parametrize(
("effective", "termination", "expected", "expected2"),
[
(dt(2022, 2, 22), dt(2024, 2, 22), 22, 22),
(dt(2022, 2, 22), dt(2024, 2, 15), 15, 15),
(dt(2022, 2, 28), dt(2024, 2, 29), 29, 31),
(dt(2022, 6, 30), dt(2024, 9, 30), 30, 31),
(dt(2022, 6, 30), dt(2024, 12, 30), 30, 30),
(dt(2022, 2, 28), dt(2024, 9, 30), 30, 31),
(dt(2024, 3, 31), dt(2024, 9, 30), 31, 31),
],
)
def test_get_unspecified_roll(effective, termination, expected, expected2) -> None:
result = Schedule(
effective,
termination,
Frequency.Months(1, None),
eom=False,
)
assert result.frequency_obj.roll == RollDay.Day(expected)
result = Schedule(
effective,
termination,
Frequency.Months(1, None),
eom=True,
)
assert result.frequency_obj.roll == RollDay.Day(expected2)
@pytest.mark.parametrize(
("e", "t", "stub", "exp_roll", "exp_stub"),
[
(dt(2022, 2, 26), dt(2024, 4, 22), "SHORTFRONT", 22, dt(2022, 4, 22)),
(dt(2022, 2, 26), dt(2024, 4, 22), "LONGFRONT", 22, dt(2022, 7, 22)),
(dt(2022, 2, 26), dt(2024, 4, 22), "SHORTBACK", 26, dt(2024, 2, 26)),
(dt(2022, 2, 26), dt(2024, 4, 22), "LONGBACK", 26, dt(2023, 11, 26)),
],
)
def test_infer_stub_date(e, t, stub, exp_roll, exp_stub, cal_) -> None:
result = Schedule(
e,
t,
"Q",
eom=False,
stub=stub,
calendar=cal_,
)
if "FRONT" in stub:
assert result.ufront_stub == exp_stub
assert result.roll == exp_roll
else:
assert result.uback_stub == exp_stub
assert result.roll == exp_roll
@pytest.mark.parametrize(
("e", "t", "stub", "exp_roll", "exp_stub"),
[
(dt(2022, 2, 26), dt(2024, 2, 26), "SHORTFRONT", 26, NoInput(0)),
(dt(2022, 2, 26), dt(2024, 2, 26), "LONGFRONT", 26, NoInput(0)),
(dt(2022, 2, 26), dt(2024, 2, 26), "SHORTBACK", 26, NoInput(0)),
(dt(2022, 2, 26), dt(2024, 2, 26), "LONGBACK", 26, NoInput(0)),
],
)
def test_infer_stub_date_no_inference_on_regular(e, t, stub, exp_roll, exp_stub, cal_) -> None:
result = Schedule(
e,
t,
"Q",
stub=stub,
eom=False,
calendar=cal_,
)
assert result.is_regular()
def test_infer_stub_date_no_inference_on_regular_dual(cal_) -> None:
result = Schedule(
dt(2022, 2, 26),
dt(2024, 4, 26),
"Q",
stub="SHORTFRONT",
front_stub=NoInput(0),
back_stub=dt(2024, 2, 26),
calendar=cal_,
)
assert result.ufront_stub is None
assert result.roll == 26
result = Schedule(
dt(2022, 2, 26),
dt(2024, 4, 26),
"Q",
stub="SHORTBACK",
front_stub=dt(2022, 4, 26),
back_stub=NoInput(0),
calendar=cal_,
)
assert result.uback_stub is None
assert result.roll == 26
@pytest.mark.parametrize(
("e", "t", "stub"),
[
(dt(2022, 2, 26), dt(2024, 4, 22), "SHORTFRONT"),
(dt(2022, 2, 26), dt(2024, 4, 22), "LONGFRONT"),
(dt(2022, 2, 26), dt(2024, 4, 22), "SHORTBACK"),
(dt(2022, 2, 26), dt(2024, 4, 22), "LONGBACK"),
],
)
def test_infer_stub_date_invalid_roll(e, t, stub, cal_) -> None:
with pytest.raises(ValueError, match="A Schedule could not be generated from"):
Schedule(e, t, "Q", stub=stub, roll=14, calendar=cal_)
@pytest.mark.parametrize(
("e", "fs", "t", "stub", "exp_roll", "exp_stub"),
[
(dt(2022, 1, 1), dt(2022, 2, 26), dt(2024, 4, 26), "FRONTSHORTBACK", 26, dt(2024, 2, 26)),
(dt(2022, 1, 1), dt(2022, 2, 26), dt(2024, 4, 26), "FRONTLONGBACK", 26, dt(2023, 11, 26)),
],
)
def test_infer_stub_date_dual_sided(e, fs, t, stub, exp_roll, exp_stub, cal_) -> None:
result = Schedule(e, t, "Q", stub=stub, front_stub=fs, calendar=cal_)
assert result.ueffective == e
assert result.uback_stub == exp_stub
assert result.utermination == t
assert result.roll == exp_roll
@pytest.mark.parametrize(
("e", "bs", "t", "stub", "exp_roll", "exp_stub"),
[
(dt(2022, 1, 1), dt(2024, 2, 26), dt(2024, 4, 26), "SHORTFRONT", 26, dt(2022, 2, 26)),
(dt(2022, 1, 1), dt(2024, 2, 26), dt(2024, 4, 26), "LONGFRONT", 26, dt(2022, 5, 26)),
],
)
def test_infer_stub_date_dual_sided2(e, bs, t, stub, exp_roll, exp_stub, cal_) -> None:
result = Schedule(e, t, "Q", stub=stub, back_stub=bs, calendar=cal_)
assert result.ueffective == e
assert result.ufront_stub == exp_stub
assert result.uback_stub == bs
assert result.utermination == t
assert result.roll == exp_roll
def test_infer_stub_date_dual_sided_invalid(cal_) -> None:
with pytest.raises(ValueError, match="A Schedule could not be generated from"):
Schedule(
dt(2022, 1, 1),
dt(2022, 12, 31),
"Q",
stub="FRONTSHORT",
front_stub=dt(2022, 2, 13),
calendar=cal_,
)
def test_infer_stub_date_eom(cal_) -> None:
result = Schedule(
dt(2022, 1, 1),
dt(2023, 2, 28),
"Q",
stub="LONGFRONT",
eom=True, # <- the EOM parameter forces the stub to be 31 May and not 28 May
calendar=cal_,
)
assert result.ufront_stub == dt(2022, 5, 31)
def test_repr():
schedule = Schedule(
dt(2022, 1, 1),
"2M",
"M",
)
expected = f""
assert expected == schedule.__repr__()
def test_schedule_str(cal_) -> None:
schedule = Schedule(dt(2022, 1, 1), "2M", "M", eom=False, calendar=cal_, roll=1, payment_lag=1)
expected = "freq: 1M (roll: 1), accrual adjuster: MF, payment adjuster: 1B,\n"
df = DataFrame(
{
defaults.headers["stub_type"]: ["Regular", "Regular"],
defaults.headers["u_acc_start"]: [dt(2022, 1, 1), dt(2022, 2, 1)],
defaults.headers["u_acc_end"]: [dt(2022, 2, 1), dt(2022, 3, 1)],
defaults.headers["a_acc_start"]: [dt(2022, 1, 4), dt(2022, 2, 1)],
defaults.headers["a_acc_end"]: [dt(2022, 2, 1), dt(2022, 3, 1)],
defaults.headers["payment"]: [dt(2022, 2, 2), dt(2022, 3, 2)],
},
)
result = schedule.__str__()
assert result == expected + df.__repr__()
def test_schedule_raises(cal_) -> None:
with pytest.raises(ValueError, match="Frequency can not be determined from `frequency` input."):
_ = Schedule(dt(2022, 1, 1), dt(2022, 12, 31), "Unknown")
with pytest.raises(ValueError, match="`termination` must be after"):
_ = Schedule(dt(2022, 1, 1), dt(2021, 12, 31), "Q")
with pytest.raises(ValueError):
_ = Schedule(
dt(2022, 1, 1),
dt(2022, 12, 31),
"Q",
stub="SHORTFRONT",
front_stub=None,
back_stub=dt(2022, 11, 15),
eom=False,
modifier="MF",
calendar=cal_,
roll=1,
)
with pytest.raises(ValueError):
_ = Schedule(
dt(2022, 1, 1),
dt(2022, 12, 31),
"Q",
stub="SHORTBACK",
front_stub=dt(2022, 3, 15),
eom=False,
calendar=cal_,
roll=1,
)
with pytest.raises(ValueError):
_ = Schedule(
dt(2022, 1, 1),
dt(2022, 12, 31),
"Q",
stub="SBLB",
front_stub=dt(2022, 3, 15),
eom=False,
calendar=cal_,
roll=1,
)
@pytest.mark.parametrize(
("eff", "term", "f", "roll"),
[
(
dt(2022, 3, 16),
dt(2024, 9, 10),
"Q",
"imm",
), # cannot build because termination does not align with IMM and no back stub specified.
(
dt(2022, 3, 1),
dt(2023, 3, 2),
"A",
"som",
), # fails because roll is explicit and a short stub to 1st march 2022 this does not align.
(
dt(2022, 2, 20),
dt(2025, 8, 21),
"S",
20,
), # fails because a short stub cannot be generated aligned with specified roll.
(
dt(2022, 2, 28),
dt(2024, 2, 28),
"S",
30,
), # is leap year 2024 and front stub is specified so 28th Feb '24 does not align with roll
],
)
def test_unadjusted_regular_swap_dead_stubs(eff, term, f, roll) -> None:
# this test isn't really about dead stubs more about misalignment with rolls.
with pytest.raises(ValueError, match="A Schedule could not be generated from the parameter c"):
# the default `stub` is SHORTFRONT
Schedule(eff, term, f, eom=False, roll=roll)
@pytest.mark.parametrize(
("eff", "term", "f", "roll", "stub"),
[
(
dt(2022, 3, 31),
dt(2023, 3, 30),
"A",
"eom",
"shortfront",
), # this builds because it is a single period short stub.
(
dt(2022, 3, 1),
dt(2023, 3, 2),
"A",
"som",
"shortback",
), # corrects the above test issue by specifying a back stub.
(
dt(2022, 2, 20),
dt(2025, 8, 21),
"S",
20,
"longback",
), # corrects to provide a single period stub
(
dt(2022, 2, 20),
dt(2025, 8, 21),
"S",
20,
"shortback",
), # corrects to provide a shoprt back stub with regular rolling on 20th
(
dt(2022, 2, 28),
dt(2024, 2, 28),
"S",
30,
"shortback",
), # corrects the above to specify a back stub
(
dt(2022, 2, 28),
dt(2024, 2, 28),
"S",
30,
"longback",
), # or alternatively with a long back stub
],
)
def test_unadjusted_regular_swap_dead_stubs_corrections(eff, term, f, roll, stub) -> None:
Schedule(eff, term, f, eom=False, roll=roll, stub=stub)
@pytest.mark.parametrize(
("eff", "term", "f", "roll", "exp"),
[
(dt(2022, 3, 16), dt(2022, 6, 30), "S", NoInput(0), False), # frequency
(dt(2022, 3, 15), dt(2022, 9, 21), "Q", "imm", False), # non-imm eff
(dt(2022, 3, 30), dt(2029, 3, 31), "A", "eom", False), # non-eom eff
(dt(2022, 3, 2), dt(2029, 3, 1), "A", "som", False), # non-som eff
(dt(2022, 3, 30), dt(2023, 9, 30), "S", 31, False), # non-eom
(dt(2024, 2, 28), dt(2025, 8, 30), "S", 30, False), # is leap
(dt(2024, 2, 29), dt(2025, 8, 30), "S", 30, True), # is leap
(dt(2022, 2, 28), dt(2025, 8, 29), "S", 29, True), # is end feb
(dt(2022, 2, 20), dt(2025, 8, 20), "S", 20, True), # OK
(dt(2022, 2, 21), dt(2025, 8, 20), "S", 20, False), # roll
(dt(2022, 2, 22), dt(2024, 2, 15), "S", NoInput(0), False), # no valid roll
(dt(2022, 2, 28), dt(2024, 2, 29), "S", NoInput(0), True), # 29 or eom
(dt(2022, 6, 30), dt(2024, 12, 30), "S", NoInput(0), True), # 30
],
)
def test_unadjusted_regular_swap(eff, term, f, roll, exp) -> None:
result = Schedule(eff, term, f, eom=False, roll=roll)
assert result.is_regular() is exp
# 12th and 13th of Feb and March are Saturday and Sunday
@pytest.mark.parametrize(
("eff", "term", "roll", "e_ueff", "e_uterm", "e_roll"),
[
(dt(2022, 2, 11), dt(2022, 3, 11), 11, dt(2022, 2, 11), dt(2022, 3, 11), 11),
(dt(2022, 2, 14), dt(2022, 3, 14), 14, dt(2022, 2, 14), dt(2022, 3, 14), 14),
(dt(2022, 2, 14), dt(2022, 3, 14), NoInput(0), dt(2022, 2, 14), dt(2022, 3, 14), 14),
(dt(2022, 2, 13), dt(2022, 3, 14), NoInput(0), dt(2022, 2, 13), dt(2022, 3, 13), 13),
(dt(2022, 2, 12), dt(2022, 3, 14), NoInput(0), dt(2022, 2, 12), dt(2022, 3, 12), 12),
(dt(2022, 2, 14), dt(2022, 3, 12), NoInput(0), dt(2022, 2, 12), dt(2022, 3, 12), 12),
(dt(2022, 2, 14), dt(2022, 3, 14), 12, dt(2022, 2, 12), dt(2022, 3, 12), 12),
(dt(2022, 2, 28), dt(2022, 3, 31), NoInput(0), dt(2022, 2, 28), dt(2022, 3, 31), 31),
(dt(2022, 2, 28), dt(2022, 3, 31), "eom", dt(2022, 2, 28), dt(2022, 3, 31), 31),
(
dt(2022, 2, 12),
dt(2022, 3, 13),
NoInput(0),
dt(2022, 2, 12),
dt(2022, 3, 13),
13,
), # dead stub converts to long stub
],
)
def test_check_regular_swap_mf(eff, term, roll, e_ueff, e_uterm, e_roll, cal_) -> None:
result = Schedule(eff, term, "M", modifier="MF", eom=False, roll=roll, calendar=cal_)
assert result.ueffective == e_ueff
assert result.utermination == e_uterm
assert result.roll == e_roll
# 12th and 13th of Feb and March are Saturday and Sunday
@pytest.mark.parametrize(
("eff", "term", "roll"),
[
(dt(2022, 2, 14), dt(2022, 3, 14), 11), # fails due to roll misalignment
(dt(2022, 2, 28), dt(2022, 3, 31), 28), # fails due to wrong stub side
],
)
def test_check_regular_swap_mf_failures(eff, term, roll, cal_) -> None:
with pytest.raises(ValueError):
Schedule(eff, term, "M", modifier="MF", eom=False, roll=roll, calendar=cal_)
@pytest.mark.parametrize(
("effective", "termination", "uf", "ub", "roll", "expected"),
[
(
dt(2023, 2, 4),
dt(2023, 9, 4),
dt(2023, 3, 4),
NoInput(0),
4,
[dt(2023, 2, 4), dt(2023, 3, 4), dt(2023, 6, 4), dt(2023, 9, 4)],
),
(
dt(2023, 2, 4),
dt(2023, 9, 4),
NoInput(0),
dt(2023, 8, 4),
4,
[dt(2023, 2, 4), dt(2023, 5, 4), dt(2023, 8, 4), dt(2023, 9, 4)],
),
(
dt(2023, 3, 4),
dt(2023, 9, 4),
NoInput(0),
NoInput(0),
4,
[dt(2023, 3, 4), dt(2023, 6, 4), dt(2023, 9, 4)],
),
(
dt(2023, 2, 4),
dt(2023, 10, 4),
dt(2023, 3, 4),
dt(2023, 9, 4),
4,
[dt(2023, 2, 4), dt(2023, 3, 4), dt(2023, 6, 4), dt(2023, 9, 4), dt(2023, 10, 4)],
),
],
)
def test_generate_irregular_uschedule(effective, termination, uf, ub, roll, expected) -> None:
result = Schedule(effective, termination, "Q", roll=roll, front_stub=uf, back_stub=ub)
assert result.uschedule == expected
@pytest.mark.parametrize(
("effective", "termination", "roll", "expected"),
[
(dt(2023, 3, 4), dt(2023, 9, 4), 4, [dt(2023, 3, 4), dt(2023, 6, 4), dt(2023, 9, 4)]),
(dt(2023, 3, 6), dt(2023, 9, 6), 6, [dt(2023, 3, 6), dt(2023, 6, 6), dt(2023, 9, 6)]),
(
dt(2023, 4, 30),
dt(2023, 10, 31),
31,
[dt(2023, 4, 30), dt(2023, 7, 31), dt(2023, 10, 31)],
),
(
dt(2022, 2, 28),
dt(2022, 8, 31),
"eom",
[dt(2022, 2, 28), dt(2022, 5, 31), dt(2022, 8, 31)],
),
(
dt(2021, 11, 30),
dt(2022, 5, 31),
31,
[dt(2021, 11, 30), dt(2022, 2, 28), dt(2022, 5, 31)],
),
(
dt(2023, 4, 30),
dt(2023, 10, 30),
30,
[dt(2023, 4, 30), dt(2023, 7, 30), dt(2023, 10, 30)],
),
(
dt(2022, 3, 16),
dt(2022, 9, 21),
"imm",
[dt(2022, 3, 16), dt(2022, 6, 15), dt(2022, 9, 21)],
),
(dt(2022, 12, 1), dt(2023, 6, 1), "som", [dt(2022, 12, 1), dt(2023, 3, 1), dt(2023, 6, 1)]),
],
)
def test_generate_regular_uschedule(effective, termination, roll, expected) -> None:
result = Schedule(effective, termination, "Q", roll=roll)
assert result.uschedule == expected
@pytest.mark.parametrize(
("effective", "termination", "frequency", "expected"),
[
(dt(2022, 2, 15), dt(2022, 8, 15), "M", 6),
(dt(2022, 2, 15), dt(2022, 8, 15), "Q", 2),
(dt(2022, 2, 15), dt(2032, 2, 15), "Q", 40),
(dt(2022, 2, 15), dt(2032, 2, 15), "Z", 1),
],
)
def test_regular_n_periods(effective, termination, frequency, expected) -> None:
result = Schedule(effective, termination, frequency)
assert result.n_periods == expected
@pytest.mark.parametrize(
("eff", "term", "freq", "ss", "eom", "roll", "expected"),
[
(dt(2022, 1, 1), dt(2023, 2, 15), "M", "SHORTFRONT", False, NoInput(0), dt(2022, 1, 15)),
(dt(2022, 1, 1), dt(2023, 2, 15), "Q", "SHORTFRONT", False, NoInput(0), dt(2022, 2, 15)),
(dt(2022, 1, 1), dt(2023, 2, 15), "S", "SHORTFRONT", False, NoInput(0), dt(2022, 2, 15)),
(dt(2022, 2, 15), dt(2023, 2, 1), "S", "SHORTFRONT", False, NoInput(0), dt(2022, 8, 1)),
(dt(2022, 1, 1), dt(2023, 2, 15), "M", "SHORTBACK", False, NoInput(0), dt(2023, 2, 1)),
(dt(2022, 1, 1), dt(2023, 2, 15), "Q", "SHORTBACK", False, NoInput(0), dt(2023, 1, 1)),
(dt(2022, 1, 1), dt(2023, 2, 15), "S", "SHORTBACK", False, NoInput(0), dt(2023, 1, 1)),
(dt(2022, 2, 15), dt(2023, 2, 1), "S", "SHORTBACK", False, NoInput(0), dt(2022, 8, 15)),
(dt(2022, 1, 1), dt(2023, 2, 28), "M", "SHORTFRONT", True, NoInput(0), dt(2022, 1, 31)),
(dt(2022, 3, 1), dt(2023, 2, 28), "Q", "SHORTFRONT", True, NoInput(0), dt(2022, 5, 31)),
(dt(2022, 3, 1), dt(2023, 2, 17), "Q", "SHORTFRONT", False, 17, dt(2022, 5, 17)),
],
)
def test_get_unadjusted_short_stub_date(eff, term, freq, ss, eom, roll, expected) -> None:
result = Schedule(eff, term, freq, stub=ss, eom=eom, roll=roll)
if ss == "SHORTFRONT":
assert result.ufront_stub == expected
else:
assert result.uback_stub == expected
@pytest.mark.parametrize(
("eff", "term", "freq", "stub", "eom", "roll", "expected"),
[
(dt(2022, 1, 1), dt(2023, 2, 15), "M", "LONGFRONT", False, NoInput(0), dt(2022, 2, 15)),
(dt(2022, 1, 1), dt(2023, 2, 15), "Q", "LONGFRONT", False, NoInput(0), dt(2022, 5, 15)),
(dt(2022, 1, 1), dt(2023, 2, 15), "S", "LONGFRONT", False, NoInput(0), dt(2022, 8, 15)),
(dt(2022, 2, 15), dt(2024, 2, 1), "S", "LONGFRONT", False, NoInput(0), dt(2023, 2, 1)),
(dt(2022, 1, 1), dt(2023, 2, 15), "M", "LONGBACK", False, NoInput(0), dt(2023, 1, 1)),
(dt(2022, 1, 1), dt(2023, 2, 15), "Q", "LONGBACK", False, NoInput(0), dt(2022, 10, 1)),
(dt(2022, 1, 1), dt(2023, 2, 15), "S", "LONGBACK", False, NoInput(0), dt(2022, 7, 1)),
(dt(2022, 2, 15), dt(2024, 2, 1), "S", "LONGBACK", False, NoInput(0), dt(2023, 2, 15)),
(dt(2022, 1, 1), dt(2023, 2, 28), "M", "LONGFRONT", True, NoInput(0), dt(2022, 2, 28)),
(dt(2022, 3, 1), dt(2023, 2, 28), "Q", "LONGFRONT", True, NoInput(0), dt(2022, 8, 31)),
(dt(2022, 3, 1), dt(2023, 2, 17), "Q", "LONGFRONT", False, 17, dt(2022, 8, 17)),
(dt(2022, 4, 30), dt(2023, 2, 18), "Q", "LONGBACK", True, NoInput(0), dt(2022, 10, 31)),
],
)
def test_get_unadjusted_stub_date_long(eff, term, freq, stub, eom, roll, expected) -> None:
result = Schedule(eff, term, freq, stub=stub, eom=eom, roll=roll)
if stub == "LONGFRONT":
assert result.ufront_stub == expected
else:
assert result.uback_stub == expected
@pytest.mark.parametrize(
("e", "t", "r", "exp_roll", "exp_ue", "exp_ut"),
[
(
dt(2020, 8, 31),
dt(2021, 2, 26),
NoInput(0),
31,
dt(2020, 8, 31),
dt(2021, 2, 28),
),
(
dt(2021, 2, 26),
dt(2021, 8, 31),
NoInput(0),
31,
dt(2021, 2, 28),
dt(2021, 8, 31),
),
(dt(2021, 2, 26), dt(2021, 8, 30), 29, 29, dt(2021, 2, 28), dt(2021, 8, 29)),
],
)
def test_schedule_eom(e, t, r, exp_roll, exp_ue, exp_ut, cal_) -> None:
sched = Schedule(e, t, "S", roll=r, modifier="MF", calendar=cal_)
assert sched.ueffective == exp_ue
assert sched.utermination == exp_ut
assert sched.roll == exp_roll
def test_payment_lag_is_business_days() -> None:
sched = Schedule(dt(2022, 11, 16), "1M", "M", modifier="MF", calendar="ldn")
assert sched.pschedule[1] == dt(2022, 12, 20)
# not 19th Dec which is adjusted(16th Dec + 2 days)
def test_schedule_bad_stub_combinations_raise() -> None:
with pytest.raises(ValueError, match="Must supply at least one stub date"):
_ = Schedule(
effective=dt(2022, 1, 1),
termination=dt(2023, 1, 1),
frequency="S",
stub="SHORTFRONTSHORTBACK",
)
@pytest.mark.skip(reason="StubInference enum behaves differently to versions <= 2.0")
def test_schedule_bad_stub_combinations_raise2() -> None:
with pytest.raises(ValueError, match="`stub` is only front sided but `back_stub` given"):
_ = Schedule(
effective=dt(2022, 1, 1),
termination=dt(2023, 1, 1),
frequency="S",
stub="FRONT",
front_stub=dt(2022, 2, 1),
back_stub=dt(2022, 12, 1),
)
@pytest.mark.parametrize(
("st", "fs", "bs"),
[
("SHORTFRONTSHORTBACK", NoInput(0), dt(2023, 1, 1)),
("SHORTFRONTLONGBACK", dt(2022, 2, 1), NoInput(0)),
("SHORTFRONTSHORTBACK", dt(2022, 4, 15), dt(2022, 10, 15)),
("SHORTFRONT", NoInput(0), NoInput(0)),
("SHORTFRONT", dt(2022, 2, 1), NoInput(0)),
("SHORTBACK", NoInput(0), dt(2023, 1, 1)),
("SHORTBACK", NoInput(0), NoInput(0)),
],
)
def test_schedule_combinations_valid(st, fs, bs) -> None:
Schedule(
effective=dt(2022, 1, 1),
termination=dt(2023, 2, 1),
frequency="S",
stub=st,
back_stub=bs,
front_stub=fs,
)
@pytest.mark.parametrize(
("st", "fs", "bs", "roll"),
[
("FRONTBACK", NoInput(0), dt(2023, 1, 15), 20),
("FRONTBACK", dt(2022, 2, 1), NoInput(0), 20),
("FRONTBACK", dt(2022, 4, 15), dt(2023, 11, 25), NoInput(0)),
("FRONT", NoInput(0), NoInput(0), 20),
("FRONT", dt(2022, 3, 12), NoInput(0), 20),
("BACK", NoInput(0), dt(2022, 12, 5), 20),
("BACK", NoInput(0), NoInput(0), 20),
],
)
def test_schedule_combinations_invalid(st, fs, bs, roll) -> None:
with pytest.raises(ValueError, match="A Schedule could not be generated from the parameter co"):
Schedule(
effective=dt(2022, 1, 1),
termination=dt(2023, 2, 1),
frequency="S",
stub=st,
back_stub=bs,
front_stub=fs,
roll=roll,
)
def test_schedule_n_periods() -> None:
result = Schedule(
effective=dt(2022, 1, 1),
termination=dt(2023, 2, 1),
frequency="S",
stub="SHORTFRONT",
)
assert result.n_periods == 3
@pytest.mark.parametrize(
("ue", "ut", "exp"),
[
(dt(2023, 3, 17), dt(2023, 12, 20), dt(2023, 9, 20)),
(dt(2022, 12, 19), dt(2023, 12, 20), dt(2023, 3, 15)),
], # PR #9
)
def test_get_unadjusted_long_stub_imm(ue, ut, exp) -> None:
result = Schedule(ue, ut, "Q", stub="LONGFRONT", eom=False, roll="imm")
assert result.ufront_stub == exp
@pytest.mark.parametrize(
("ue", "ut"),
[
(dt(2023, 3, 15), dt(2023, 12, 20)),
],
)
def test_get_unadjusted_short_stub_imm(ue, ut) -> None:
result = Schedule(ue, ut, "Q", stub="SHORTFRONT", eom=False)
assert result.is_regular()
assert result.roll == "IMM"
def test_dead_stubs() -> None:
# this was a bug detected in performance testing which generated a 1d invalid stub.
# this failed originally because a 1D stub between Sun 2nd May 27 and Mon 3rd May 27
# was invalid since the adjusted accrual schedule modified the sunday to be
# equal to the Monday giving a 0 day period.
s = Schedule(
dt(2027, 5, 2),
dt(2046, 5, 3),
"A",
stub="LONGFRONT",
calendar="bus",
)
assert s.uschedule[0:2] == [dt(2027, 5, 2), dt(2028, 5, 3)]
assert s.aschedule[0:2] == [dt(2027, 5, 3), dt(2028, 5, 3)]
# manipulate this test to cover the case for dual sided stubs
s = Schedule(
dt(2027, 5, 2),
dt(2046, 6, 3),
"A",
stub="LONGFRONTSHORTBACK",
back_stub=dt(2046, 5, 3), # back stub means front stub is inferred
calendar="bus",
)
assert s.uschedule[0:2] == [dt(2027, 5, 2), dt(2028, 5, 3)]
assert s.aschedule[0:2] == [dt(2027, 5, 3), dt(2028, 5, 3)]
# this was a bug detected in performance testing which generated a 1d invalid stub.
# this failed originally because the ueffective date of Sat 20-dec-25 and the
# inferred front stub of Sun 21-dec-25 both adjusted forwards to 22-dec-25
# giving a 0 day period.
s = Schedule(
dt(2025, 12, 20),
dt(2069, 12, 21),
"A",
stub="LONGFRONT",
calendar="bus",
)
assert s.uschedule[0:2] == [dt(2025, 12, 20), dt(2026, 12, 21)]
assert s.aschedule[0:2] == [dt(2025, 12, 22), dt(2026, 12, 21)]
# this was a bug detected in performance testing which generated a 1d invalid stub.
# this failed originally because the utermination date of Sat 20-dec-25 and the
# inferred front stub of Sun 21-dec-25 both adjusted forwards to 22-dec-25
# giving a 0 day period.
s = Schedule(
dt(2027, 10, 19),
dt(2047, 10, 20),
"A",
stub="LONGBACK",
calendar="bus",
)
assert s.uschedule[-2:] == [dt(2046, 10, 19), dt(2047, 10, 20)]
assert s.aschedule[-2:] == [dt(2046, 10, 19), dt(2047, 10, 21)]
# manipulate this test for dual sided stubs
s = Schedule(
dt(2027, 8, 19),
dt(2047, 10, 20),
"A",
stub="SHORTFRONTLONGBACK",
front_stub=dt(2027, 10, 19),
calendar="bus",
)
assert s.uschedule[-2:] == [dt(2046, 10, 19), dt(2047, 10, 20)]
assert s.aschedule[-2:] == [dt(2046, 10, 19), dt(2047, 10, 21)]
@pytest.mark.parametrize(
("mode", "end", "roll"),
[
(NoInput(0), dt(2025, 8, 17), 17),
("swaps_align", dt(2025, 8, 17), 17),
("swaptions_align", dt(2025, 8, 19), 19),
],
)
def test_eval_mode(mode, end, roll) -> None:
sch = Schedule(
effective="1Y",
termination="1Y",
frequency="S",
calendar="tgt",
eval_date=dt(2023, 8, 17),
eval_mode=mode,
)
assert sch.roll == roll
assert sch.utermination == end
def test_eval_date_raises() -> None:
with pytest.raises(ValueError, match="For `effective` given as string tenor, must"):
Schedule(
effective="1Y",
termination="1Y",
frequency="S",
)
def test_single_period_imm_roll():
s = Schedule(
effective=dt(2024, 12, 18),
termination=dt(2025, 3, 19),
roll="imm",
frequency="a",
calendar="stk",
)
assert len(s.aschedule) == 2
def test_deviate_from_effective_in_inference() -> None:
# 28th and 30th are both valid rolls for this schedule
# test that 30th is inferred since it deviates the least from effective input.
s = Schedule(
effective=dt(2024, 12, 30),
termination=dt(2025, 11, 28),
frequency="m",
eom=False,
calendar="bus",
)
assert s.ueffective == dt(2024, 12, 30)
assert s.utermination == dt(2025, 11, 30)
assert s.roll == 30
@pytest.mark.parametrize(
("f", "expected"),
[
(Frequency.CalDays(10), NoInput(0)),
(Frequency.Months(1, None), 16),
(Frequency.Months(1, RollDay.Day(16)), 16),
],
)
def test_roll_property(f, expected) -> None:
s = Schedule(dt(2000, 1, 16), dt(2001, 1, 16), f)
result = s.roll
assert result == expected
def test_day_type_tenor() -> None:
# should convert MF to Following only
s = Schedule(
dt(2024, 12, 30),
"1d",
"A",
modifier="mf",
calendar="stk",
)
assert s.utermination == dt(2025, 1, 2)
def test_cds_standard_example() -> None:
# https://www.cdsmodel.com/documentation.html?# standard example
# use Adjuster.FollowingExLast to avoid adjusting the final accrual date.
s = Schedule(
dt(2008, 12, 20),
dt(2010, 3, 20),
"Q",
modifier="fex",
calendar="bus",
payment_lag=0,
)
expected = [
dt(2008, 12, 22),
dt(2009, 3, 20),
dt(2009, 6, 22),
dt(2009, 9, 21),
dt(2009, 12, 21),
dt(2010, 3, 20),
]
assert s.aschedule == expected
expected = [
dt(2008, 12, 22),
dt(2009, 3, 20),
dt(2009, 6, 22),
dt(2009, 9, 21),
dt(2009, 12, 21),
dt(2010, 3, 22),
]
assert s.pschedule == expected
@pytest.mark.parametrize(
"frequency",
[
"M", # monthly,
"Q", # quarterly,
"S", # semi-annually,
"A", # annually,
"10D", # 10-cal-days
"10B", # 10-bus-days
"2W", # 14-cal-days
"8M", # 8-months
"1Y", # 1-year
],
)
def test_all_frequency_as_str(frequency):
s = Schedule(
dt(2000, 1, 1),
dt(2010, 1, 1),
frequency=frequency,
stub="ShortFront",
calendar="bus",
)
s.__str__()
def test_inference_busdays():
# the effective is given adjusted whilst termination is unadjusted
s = Schedule(
effective=dt(2000, 1, 6),
termination=dt(2000, 3, 1),
frequency=Frequency.Months(1, None),
modifier=Adjuster.BusDaysLagSettle(5),
)
assert s.uschedule == [dt(2000, 1, 1), dt(2000, 2, 1), dt(2000, 3, 1)]
assert s.aschedule == [dt(2000, 1, 6), dt(2000, 2, 6), dt(2000, 3, 6)]
def test_payment_adjuster_2_and_3():
s = Schedule(
dt(2000, 1, 1),
dt(2000, 3, 1),
"M",
calendar="all",
modifier="none",
payment_lag=1,
payment_lag_exchange=2,
extra_lag=-2,
)
assert s.pschedule == [dt(2000, 1, 2), dt(2000, 2, 2), dt(2000, 3, 2)]
assert s.pschedule2 == [dt(2000, 1, 3), dt(2000, 2, 3), dt(2000, 3, 3)]
assert s.pschedule3 == [dt(1999, 12, 30), dt(2000, 1, 30), dt(2000, 2, 28)]
@pytest.mark.parametrize(
("eff", "front", "back", "term"),
[
# All unadjusted
(dt(2025, 1, 15), NoInput(0), NoInput(0), dt(2025, 4, 15)),
(dt(2025, 1, 15), dt(2025, 2, 15), NoInput(0), dt(2025, 4, 15)),
(dt(2025, 1, 15), NoInput(0), dt(2025, 3, 15), dt(2025, 4, 15)),
(dt(2025, 1, 15), dt(2025, 2, 15), dt(2025, 3, 15), dt(2025, 4, 15)),
# Stub given as adjusted
(dt(2025, 1, 15), dt(2025, 2, 17), NoInput(0), dt(2025, 4, 15)),
(dt(2025, 1, 15), NoInput(0), dt(2025, 3, 17), dt(2025, 4, 15)),
(dt(2025, 1, 15), dt(2025, 2, 17), dt(2025, 3, 17), dt(2025, 4, 15)),
# Stub given as mixed
(dt(2025, 1, 15), dt(2025, 2, 17), dt(2025, 3, 15), dt(2025, 4, 15)),
],
)
def test_schedule_when_stub_input_is_regular(eff, front, back, term):
# GH-dev 142
s_base = Schedule(
effective=dt(2025, 1, 15),
termination=dt(2025, 3, 17),
calendar="bus",
frequency="M",
modifier="mf",
)
assert s_base.uschedule == [dt(2025, 1, 15), dt(2025, 2, 15), dt(2025, 3, 15)]
assert s_base.aschedule == [dt(2025, 1, 15), dt(2025, 2, 17), dt(2025, 3, 17)]
s = Schedule(
effective=eff,
termination=term,
front_stub=front,
back_stub=back,
calendar="bus",
frequency="M",
modifier="mf",
)
assert s._stubs == [False, False, False]
@pytest.mark.skip(reason="multiple stubs, where one may be a genuine stub is not implemented.")
@pytest.mark.parametrize("fs", [dt(2025, 2, 15), dt(2025, 2, 17)])
def test_schedule_when_one_front_stub_of_two_is_regular(fs):
# GH-dev 142
# this tests that one stub might be genuine whilst the other is a regular period and
# the schedule still generates correctly.
# this requires additional branching in the Rust scheduling code in the pre-check which has
# not been developed. The most common use case for this pre-check is when only a front stub,
# i.e. the first coupon date of a bond is provided.
s = Schedule(
effective=dt(2025, 1, 15),
termination=dt(2025, 4, 25),
front_stub=fs,
back_stub=dt(2025, 4, 15),
calendar="bus",
frequency="M",
modifier="mf",
)
assert s._stubs == [False, False, False, True]
def test_schedule_in_advance_payment():
# used by FRA constructor
from rateslib.scheduling import Adjuster
s = Schedule(
effective=dt(2024, 3, 20),
termination=dt(2024, 12, 18),
calendar="bus",
frequency="Q",
modifier="mf",
payment_lag=Adjuster.BusDaysLagSettleInAdvance(1),
)
assert s.aschedule == [dt(2024, 3, 20), dt(2024, 6, 19), dt(2024, 9, 18), dt(2024, 12, 18)]
assert s.pschedule == [dt(2024, 3, 21), dt(2024, 3, 21), dt(2024, 6, 20), dt(2024, 9, 19)]
assert s.pschedule3 == s.pschedule
@pytest.mark.parametrize("tenor", ["3b", "3d", "7d", "14d", "2w", "1m", "6m", "12m", "18m", "2y"])
def test_single_period_from_str_matching_frequency(tenor):
# test was introduced for a Bill that derives a termination from a string tenor.
# When the frequency matches the tenor it should generate only a single period.
s = Schedule(effective=dt(2025, 1, 15), termination=tenor, frequency=tenor)
assert s.n_periods == 1
@pytest.mark.parametrize("stub", ["shortfront", "shortback"])
def test_dead_stub_failures(stub) -> None:
# this test attempts to build a schedule from unadjusted saturday to unadjusted sunday
# using a 7d frequency. This converts the dead short stub to a long stub and thereby
# defines only one period.
s = Schedule(
effective=dt(2026, 1, 3), # saturday
termination=dt(2026, 1, 11), # sunday
frequency="7d",
calendar="bus",
modifier="f",
stub=stub,
)
assert s.uschedule == [dt(2026, 1, 3), dt(2026, 1, 11)]
================================================
FILE: python/tests/scheduling/test_schedulers.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
import pytest
from rateslib.enums.generics import NoInput
from rateslib.rs import Adjuster, Cal, Frequency, RollDay, Schedule, StubInference
@pytest.mark.parametrize(
("ueff", "uterm", "si", "exp"),
[
(
dt(2000, 1, 1),
dt(2000, 7, 1),
StubInference.NeitherSide,
[dt(2000, 1, 1), dt(2000, 4, 1), dt(2000, 7, 1)],
),
(
dt(2000, 1, 1),
dt(2000, 8, 1),
StubInference.ShortFront,
[dt(2000, 1, 1), dt(2000, 2, 1), dt(2000, 5, 1), dt(2000, 8, 1)],
),
(
dt(2000, 1, 1),
dt(2000, 8, 1),
StubInference.LongFront,
[dt(2000, 1, 1), dt(2000, 5, 1), dt(2000, 8, 1)],
),
(
dt(2000, 1, 1),
dt(2000, 8, 1),
StubInference.ShortBack,
[dt(2000, 1, 1), dt(2000, 4, 1), dt(2000, 7, 1), dt(2000, 8, 1)],
),
(
dt(2000, 1, 1),
dt(2000, 8, 1),
StubInference.LongBack,
[dt(2000, 1, 1), dt(2000, 4, 1), dt(2000, 8, 1)],
),
],
)
def test_schedule(ueff, uterm, si, exp):
s = Schedule(
effective=ueff,
termination=uterm,
frequency=Frequency.Months(3, RollDay.Day(1)),
calendar=Cal([], [5, 6]),
accrual_adjuster=Adjuster.ModifiedFollowing(),
payment_adjuster=Adjuster.BusDaysLagSettle(2),
payment_adjuster2=Adjuster.Actual(),
eom=True,
front_stub=None,
back_stub=None,
stub_inference=si,
)
assert s.uschedule == exp
def test_imm_schedule():
# test that IMM rolls are automatically determined.
s = Schedule(
effective=dt(2025, 3, 19),
termination=dt(2025, 9, 17),
frequency=Frequency.Months(3, None),
calendar=Cal([], [5, 6]),
accrual_adjuster=Adjuster.ModifiedFollowing(),
payment_adjuster=Adjuster.BusDaysLagSettle(2),
payment_adjuster2=Adjuster.Actual(),
eom=True,
front_stub=None,
back_stub=None,
stub_inference=StubInference.NeitherSide,
)
assert s.frequency == Frequency.Months(3, RollDay.IMM())
def test_single_period_schedule():
s = Schedule(
effective=dt(2025, 3, 19),
termination=dt(2025, 9, 19),
frequency=Frequency.Months(12, RollDay.Day(19)),
calendar=Cal([], [5, 6]),
accrual_adjuster=Adjuster.ModifiedFollowing(),
payment_adjuster=Adjuster.BusDaysLagSettle(2),
payment_adjuster2=Adjuster.Actual(),
eom=True,
front_stub=None,
back_stub=None,
stub_inference=StubInference.NeitherSide,
)
assert s.uschedule == [dt(2025, 3, 19), dt(2025, 9, 19)]
def test_single_period_schedule2():
from rateslib import IRS
IRS(dt(2022, 7, 1), "3M", "A", curves="eureur", notional=1e6)
@pytest.mark.parametrize(
("a", "b", "expected"),
[
(Adjuster.ModifiedFollowing(), Adjuster.ModifiedFollowing(), True),
(Adjuster.Following(), Adjuster.ModifiedFollowing(), False),
(Adjuster.BusDaysLagSettleInAdvance(3), Adjuster.BusDaysLagSettleInAdvance(3), True),
(Adjuster.BusDaysLagSettleInAdvance(3), Adjuster.Following(), False),
(Adjuster.BusDaysLagSettle(2), Adjuster.BusDaysLagSettle(1), False),
],
)
def test_adjuster_equality(a, b, expected):
result = a == b
assert result is expected
================================================
FILE: python/tests/serialization/test_json.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
import pytest
from rateslib import Curve, Dual, Dual2, FXForwards, FXRates, dt, from_json
from rateslib.enums import FloatFixingMethod, IROptionMetric, LegIndexBase
from rateslib.rs import Schedule as ScheduleRs
from rateslib.scheduling import (
Adjuster,
Convention,
Frequency,
Imm,
NamedCal,
RollDay,
Schedule,
StubInference,
)
from rateslib.splines import PPSplineDual, PPSplineDual2, PPSplineF64
@pytest.mark.parametrize(
"obj",
[
Dual(2, vars=["v0", "v2"], dual=[0, 3]),
Dual2(2.5, ["a", "bb"], [1.2, 3.4], []),
FXRates({"usdnok": 8.0, "eurusd": 1.05}),
Imm.Wed1_Post9_HMUZ,
StubInference.LongFront,
RollDay.Day(31),
RollDay.IMM(),
Frequency.Zero(),
Frequency.CalDays(3),
Frequency.BusDays(3, NamedCal("tgt")),
Frequency.Months(4, None),
Frequency.Months(3, RollDay.IMM()),
Adjuster.ModifiedFollowing(),
Adjuster.BusDaysLagSettle(2),
Convention.ActActICMA,
Convention.ActActISDA,
ScheduleRs(
effective=dt(2000, 1, 1),
termination=dt(2001, 1, 1),
frequency=Frequency.Months(6, None),
calendar=NamedCal("tgt"),
accrual_adjuster=Adjuster.Actual(),
payment_adjuster=Adjuster.BusDaysLagSettle(2),
payment_adjuster2=Adjuster.Actual(),
front_stub=None,
back_stub=None,
eom=False,
stub_inference=StubInference.NeitherSide,
),
Schedule(
effective=dt(2000, 1, 1),
termination=dt(2001, 1, 1),
frequency="S",
calendar="tgt",
),
PPSplineF64(3, [0, 0, 0, 1, 1, 1], [0.1, 0.2, 0.3]),
PPSplineDual(
3, [0, 0, 0, 1, 1, 1], [Dual(0.1, [], []), Dual(0.2, [], []), Dual(0.3, [], [])]
),
PPSplineDual2(
3,
[0, 0, 0, 1, 1, 1],
[Dual2(0.1, [], [], []), Dual2(0.2, [], [], []), Dual2(0.3, [], [], [])],
),
FloatFixingMethod.RFRPaymentDelay(),
FloatFixingMethod.RFRPaymentDelayAverage(),
FloatFixingMethod.RFRObservationShift(2),
FloatFixingMethod.RFRObservationShiftAverage(2),
FloatFixingMethod.RFRLookback(3),
FloatFixingMethod.RFRLookbackAverage(3),
FloatFixingMethod.RFRLockout(4),
FloatFixingMethod.RFRLockoutAverage(4),
FloatFixingMethod.IBOR(2),
IROptionMetric.Premium(),
IROptionMetric.PercentNotional(),
IROptionMetric.NormalVol(),
IROptionMetric.BlackVolShift(25),
LegIndexBase.Initial,
LegIndexBase.PeriodOnPeriod,
],
)
def test_json_round_trip(obj) -> None:
jstring = obj.to_json()
reconstituted = from_json(jstring)
assert obj == reconstituted
================================================
FILE: python/tests/serialization/test_pickle.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
import pickle
import pytest
from rateslib import (
ADOrder,
Dual,
Dual2,
FXForwards,
FXRates,
Imm,
NamedCal,
Variable,
dt,
)
from rateslib.curves import (
CompositeCurve,
CreditImpliedCurve,
Curve,
LineCurve,
MultiCsaCurve,
ProxyCurve,
)
from rateslib.enums import FloatFixingMethod, IROptionMetric, LegIndexBase
from rateslib.rs import Schedule as ScheduleRs
from rateslib.scheduling import (
Adjuster,
Cal,
Convention,
Frequency,
RollDay,
Schedule,
StubInference,
UnionCal,
)
from rateslib.splines import PPSplineDual, PPSplineDual2, PPSplineF64
@pytest.mark.parametrize(
"obj",
[
# core
dt(2000, 1, 1),
# ad
Dual(1.2, ["x"], [2.3]),
Dual2(1.3, ["y"], [1.0], [2.0]),
Variable(2.0, ["r"]),
# calendars
Cal.from_name("bus"),
UnionCal([Cal.from_name("bus")], []),
NamedCal("bus"),
# scheduling
# fx
FXRates({"eurusd": 1.0}, dt(2000, 1, 1)),
# curves
Curve(
{dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98, dt(2002, 1, 1): 0.96},
interpolation="spline",
),
LineCurve({dt(2000, 1, 1): 2.0, dt(2000, 1, 2): 3.0}),
CompositeCurve(
[
Curve({dt(2000, 1, 1): 1.0, dt(2000, 1, 2): 0.98}),
Curve({dt(2000, 1, 1): 1.0, dt(2000, 1, 2): 0.98}),
]
),
MultiCsaCurve(
[
Curve({dt(2000, 1, 1): 1.0, dt(2000, 1, 2): 0.98}),
Curve({dt(2000, 1, 1): 1.0, dt(2000, 1, 2): 0.98}),
],
),
CreditImpliedCurve(
Curve({dt(2000, 1, 1): 1.0, dt(2000, 1, 2): 0.98}),
Curve({dt(2000, 1, 1): 1.0, dt(2000, 1, 2): 0.98}),
),
ProxyCurve(
"usd",
"eur",
FXForwards(
fx_rates=FXRates({"eurusd": 1.0}, dt(2000, 1, 1)),
fx_curves={
"eureur": Curve({dt(2000, 1, 1): 1.0, dt(2000, 1, 2): 0.98}),
"eurusd": Curve({dt(2000, 1, 1): 1.0, dt(2000, 1, 2): 0.98}),
"usdusd": Curve({dt(2000, 1, 1): 1.0, dt(2000, 1, 2): 0.98}),
},
),
),
Curve({dt(2000, 1, 1): 1.0, dt(2000, 7, 1): 0.98}).shift(10),
Curve({dt(2000, 1, 1): 1.0, dt(2000, 7, 1): 0.98}).roll("1m"),
Curve({dt(2000, 1, 1): 1.0, dt(2000, 7, 1): 0.98}).translate(dt(2000, 1, 15)),
ScheduleRs(
effective=dt(2000, 1, 1),
termination=dt(2001, 1, 10),
frequency=Frequency.Months(6, RollDay.Day(1)),
calendar=NamedCal("tgt"),
accrual_adjuster=Adjuster.ModifiedFollowing(),
payment_adjuster=Adjuster.BusDaysLagSettle(2),
payment_adjuster2=Adjuster.Actual(),
front_stub=None,
back_stub=None,
eom=False,
stub_inference=StubInference.ShortBack,
),
Schedule(
effective=dt(2000, 1, 1),
termination=dt(2001, 1, 10),
frequency=Frequency.Months(6, RollDay.Day(1)),
calendar=NamedCal("tgt"),
modifier=Adjuster.ModifiedFollowing(),
payment_lag=Adjuster.BusDaysLagSettle(2),
stub=StubInference.ShortBack,
),
Schedule(
effective=dt(2000, 1, 1),
termination=dt(2001, 1, 1),
frequency=Frequency.Months(6, RollDay.Day(1)),
calendar=NamedCal("tgt"),
modifier=Adjuster.ModifiedFollowing(),
payment_lag=Adjuster.BusDaysLagSettle(2),
stub=StubInference.NeitherSide,
),
PPSplineF64(3, [0, 0, 0, 1, 1, 1], [0.1, 0.2, 0.3]),
PPSplineDual(
3, [0, 0, 0, 1, 1, 1], [Dual(0.1, [], []), Dual(0.2, [], []), Dual(0.3, [], [])]
),
PPSplineDual2(
3,
[0, 0, 0, 1, 1, 1],
[Dual2(0.1, [], [], []), Dual2(0.2, [], [], []), Dual2(0.3, [], [], [])],
),
],
)
def test_pickle_round_trip_obj_via_equality(obj):
pickled = pickle.dumps(obj)
loaded = pickle.loads(pickled)
assert obj == loaded
@pytest.mark.parametrize(
("a1", "a2", "b1"),
[
(Imm.Eom, Imm.Eom, Imm.Leap),
(StubInference.LongBack, StubInference.LongBack, StubInference.ShortFront),
(ADOrder.Zero, ADOrder.Zero, ADOrder.One),
(RollDay.Day(21), RollDay.Day(21), RollDay.Day(16)),
(RollDay.Day(21), RollDay.Day(21), RollDay.IMM),
(Adjuster.Actual(), Adjuster.Actual(), Adjuster.BusDaysLagSettle(5)),
(
Frequency.Months(4, RollDay.Day(2)),
Frequency.Months(4, RollDay.Day(2)),
Frequency.CalDays(3),
),
(
Frequency.Months(4, RollDay.Day(2)),
Frequency.Months(4, RollDay.Day(2)),
Frequency.Months(4, None),
),
(Convention.ActActICMA, Convention.ActActICMA, Convention.ActActISDA),
(FloatFixingMethod.IBOR(2), FloatFixingMethod.IBOR(2), FloatFixingMethod.RFRLookback(2)),
(FloatFixingMethod.IBOR(2), FloatFixingMethod.IBOR(2), FloatFixingMethod.IBOR(5)),
(IROptionMetric.Premium(), IROptionMetric.Premium(), IROptionMetric.BlackVolShift(200)),
(
IROptionMetric.BlackVolShift(200),
IROptionMetric.BlackVolShift(200),
IROptionMetric.BlackVolShift(100),
),
(LegIndexBase.Initial, LegIndexBase.Initial, LegIndexBase.PeriodOnPeriod),
],
)
def test_enum_equality(a1, a2, b1):
assert a1 == a2
assert a2 != b1
@pytest.mark.parametrize(
("enum", "klass"),
[
(FloatFixingMethod.IBOR(2), FloatFixingMethod.IBOR),
(IROptionMetric.BlackVolShift(2), IROptionMetric.BlackVolShift),
],
)
def test_complex_enum_isinstance(enum, klass):
assert isinstance(enum, klass)
type_enum = type(enum)
assert type_enum is klass
assert type_enum in [klass]
assert not isinstance(enum, FloatFixingMethod.RFRLookback)
assert type(enum) is not FloatFixingMethod.RFRLookback
assert enum != FloatFixingMethod.RFRLookback(2)
@pytest.mark.parametrize(
("enum", "method_filter"),
[
(Imm, ["next", "get", "validate", "to_json"]),
(StubInference, ["to_json"]),
(ADOrder, []),
(Convention, ["dcf", "to_json"]),
(LegIndexBase, ["to_json"]),
],
)
def test_simple_enum_pickle(enum, method_filter):
variants = [v for v in enum.__dict__ if "__" not in v and v not in method_filter]
for v in variants:
obj = enum.__dict__[v]
pickled = pickle.dumps(obj)
unpickled = pickle.loads(pickled)
assert unpickled == enum.__dict__[v]
@pytest.mark.parametrize(
("enum"),
[
RollDay.Day(31),
RollDay.IMM(),
Adjuster.Actual(),
Adjuster.Following(),
Adjuster.ModifiedFollowing(),
Adjuster.Previous(),
Adjuster.ModifiedPrevious(),
Adjuster.FollowingSettle(),
Adjuster.ModifiedFollowingSettle(),
Adjuster.PreviousSettle(),
Adjuster.ModifiedPreviousSettle(),
Adjuster.BusDaysLagSettle(4),
Adjuster.CalDaysLagSettle(2),
Adjuster.FollowingExLast(),
Adjuster.FollowingExLastSettle(),
Adjuster.BusDaysLagSettleInAdvance(2),
Frequency.Months(4, RollDay.Day(2)),
Frequency.Months(4, None),
Frequency.BusDays(2, NamedCal("tgt")),
Frequency.Zero(),
Frequency.CalDays(3),
FloatFixingMethod.RFRPaymentDelay(),
FloatFixingMethod.RFRPaymentDelayAverage(),
FloatFixingMethod.RFRObservationShift(2),
FloatFixingMethod.RFRObservationShiftAverage(2),
FloatFixingMethod.RFRLookback(3),
FloatFixingMethod.RFRLookbackAverage(3),
FloatFixingMethod.RFRLockout(4),
FloatFixingMethod.RFRLockoutAverage(4),
FloatFixingMethod.IBOR(2),
],
)
def test_complex_enum_pickle(enum):
pickled = pickle.dumps(enum)
unpickled = pickle.loads(pickled)
assert unpickled == enum
================================================
FILE: python/tests/serialization/test_repr.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
import pytest
from rateslib import dt
from rateslib.dual import Dual, Dual2
from rateslib.enums import FloatFixingMethod, LegIndexBase
from rateslib.scheduling import (
Adjuster,
Cal,
Frequency,
Imm,
NamedCal,
RollDay,
Schedule,
StubInference,
UnionCal,
)
from rateslib.splines import PPSplineDual, PPSplineDual2, PPSplineF64
@pytest.mark.parametrize(
("obj", "expected"),
[
(Imm.Wed1_Post9_HMUZ, "Imm.Wed1_Post9_HMUZ"),
(StubInference.ShortFront, "StubInference.ShortFront"),
(RollDay.Day(31), "RollDay.Day(31)"),
(RollDay.IMM(), "RollDay.IMM"),
(Frequency.Zero(), "Frequency.Zero"),
(Frequency.CalDays(2), "Frequency.CalDays(2)"),
(Frequency.BusDays(3, NamedCal("tgt")), "Frequency.BusDays(3, ...)"),
(Frequency.Months(2, RollDay.Day(31)), "Frequency.Months(2, Day(31))"),
(Frequency.Months(4, None), "Frequency.Months(4, None)"),
(Adjuster.ModifiedFollowing(), "Adjuster.ModifiedFollowing"),
(Adjuster.BusDaysLagSettle(4), "Adjuster.BusDaysLagSettle(4)"),
(Schedule(dt(2000, 1, 1), dt(2001, 2, 1), "M"), "Schedule"),
(PPSplineF64(3, [0, 0, 0, 1, 1, 1], [0.1, 0.2, 0.3]), "PPSplineF64"),
(
PPSplineDual(
3, [0, 0, 0, 1, 1, 1], [Dual(0.1, [], []), Dual(0.2, [], []), Dual(0.3, [], [])]
),
"PPSplineDual",
),
(
PPSplineDual2(
3,
[0, 0, 0, 1, 1, 1],
[Dual2(0.1, [], [], []), Dual2(0.2, [], [], []), Dual2(0.3, [], [], [])],
),
"PPSplineDual2",
),
(Cal([], []), "Cal"),
(UnionCal([Cal([], []), Cal([], [])], []), "UnionCal"),
(NamedCal("tgt,ldn|fed"), "NamedCal:'tgt,ldn|fed'"),
(FloatFixingMethod.IBOR(2), "FloatFixingMethod.IBOR(2)"),
(FloatFixingMethod.RFRPaymentDelay(), "FloatFixingMethod.RFRPaymentDelay"),
(LegIndexBase.Initial, "LegIndexBase.Initial"),
],
)
def test_repr_strings(obj, expected) -> None:
repr_ = obj.__repr__()
assert f" None:
assert __version__ == "2.7.1"
def test_context_raises() -> None:
with pytest.raises(ValueError, match="Need to invoke as "):
default_context("only 1 arg")
def test_reset_defaults() -> None:
defaults.modifier = "MP"
defaults.base_currency = "gbp"
assert defaults.modifier == "MP"
assert defaults.base_currency == "gbp"
defaults.reset_defaults()
assert defaults.modifier == "MF"
assert defaults.base_currency == "usd"
def test_defaults_singleton() -> None:
from rateslib.default import Defaults
other = Defaults()
assert id(other) == id(defaults)
def test_fixings_singleton() -> None:
from rateslib.data.loader import Fixings
other = Fixings()
assert id(other) == id(fixings)
def test_fx_index_change() -> None:
# test that default fx indexes can be overwritten and are loaded by constructed objects
from rateslib.data.fixings import FXFixing, FXIndex
from rateslib.scheduling import Adjuster
eurusd = FXFixing("eurusd", dt(2000, 1, 1))
assert eurusd.fx_index.calendar == NamedCal("tgt|fed")
assert eurusd.fx_index.settle == Adjuster.BusDaysLagSettle(2)
defaults.fx_index["eurusd"] = {"pair": "eurusd", "calendar": "stk", "settle": 3}
eurusd = FXFixing("eurusd", dt(2000, 1, 1))
assert eurusd.fx_index.calendar == NamedCal("stk")
assert eurusd.fx_index.settle == Adjuster.BusDaysLagSettle(3)
defaults.reset_defaults()
assert defaults.fx_index["eurusd"]["calendar"] == NamedCal("tgt|fed")
def test_float_series_change():
from rateslib import IRS
with pytest.raises(ValueError, match="The FloatRateSeries: 'monkey' was not found "):
IRS(dt(2000, 1, 1), "1y", "A", leg2_fixing_series="monkey")
defaults.float_series["monkey"] = dict(
lag=0, calendar="nyc", modifier="f", eom=False, convention="act360"
)
IRS(dt(2000, 1, 1), "1y", "A", leg2_fixing_series="monkey")
defaults.reset_defaults()
assert "monkey" not in defaults.float_series
def collect_and_remove_licence() -> tuple[str | None, str | None]:
env_licence = os.getenv("RATESLIB_LICENCE")
if env_licence is not None:
del os.environ["RATESLIB_LICENCE"]
try:
file_licence = licence.print_licence()
licence.remove_licence()
except ValueError:
file_licence = None
return env_licence, file_licence
def replace_collected_licence(env_licence, file_licence) -> None:
if env_licence is not None:
os.environ["RATESLIB_LICENCE"] = env_licence
if file_licence is not None:
licence.add_licence(file_licence)
class TestLicence:
def test_valid_licence(self):
# test that this system has a valid licence
assert licence.status == _LicenceStatus.VALID
@pytest.mark.skipif(
os.getenv("RATESLIB_LICENCE") is not None, reason="env licence already tested."
)
def test_env_licence(self):
# this test relies on `test_valid_licence`
assert licence.status == _LicenceStatus.VALID # licence is loaded from file.
os.environ["RATESLIB_LICENCE"] = licence.print_licence()
licence.remove_licence() # remove the file licence
x = Licence()
assert x.status == _LicenceStatus.VALID
licence.add_licence(os.environ["RATESLIB_LICENCE"])
del os.environ["RATESLIB_LICENCE"]
def test_licence_no_licence_warning(self):
# test just the
env_licence, file_licence = collect_and_remove_licence()
with pytest.warns(LicenceNotice, match="No commercial licence is registered"):
Licence()
replace_collected_licence(env_licence, file_licence)
def test_licence_warning_for_expired_as_file(self):
env_licence, file_licence = collect_and_remove_licence()
licence.add_licence(
'{"expiry": "1900-01-01", "id": "Rateslib Tests", "xkey": "0x2cec1be74d8b2d2bdfa41aec384a4a8ede06c8c7873d6130035c19fcf244b5b92e29c7087a5e51c453a1fe7da345a689ef3d0953b8841ab1b3895a69a209aa529ff3e4d6b8217ce16b37c5572d737ece0a7f381696a3f3901bced9f843b48504930b25d204d910955f52c76eccd208a975a3a0e4433d70dd090ef5adb8de83cb", "name": "System"}' # noqa: E501
)
with pytest.warns(LicenceNotice, match="expired on 1900-01-01"):
Licence()
licence.remove_licence()
replace_collected_licence(env_licence, file_licence)
def test_licence_warning_for_expired_as_env_var(self):
env_licence, file_licence = collect_and_remove_licence()
os.environ["RATESLIB_LICENCE"] = (
'{"expiry": "1900-01-01", "id": "Rateslib Tests", "xkey": "0x2cec1be74d8b2d2bdfa41aec384a4a8ede06c8c7873d6130035c19fcf244b5b92e29c7087a5e51c453a1fe7da345a689ef3d0953b8841ab1b3895a69a209aa529ff3e4d6b8217ce16b37c5572d737ece0a7f381696a3f3901bced9f843b48504930b25d204d910955f52c76eccd208a975a3a0e4433d70dd090ef5adb8de83cb", "name": "System"}' # noqa: E501
)
with pytest.warns(LicenceNotice, match="expired on 1900-01-01"):
Licence()
del os.environ["RATESLIB_LICENCE"]
replace_collected_licence(env_licence, file_licence)
def test_invalid_signature(self):
env_licence, file_licence = collect_and_remove_licence()
os.environ["RATESLIB_LICENCE"] = (
'{"expiry": "2100-01-01", "id": "Rateslib Tests", "xkey": "0x2cec1be74d8b2d2bdfa41aec384a4a8ede06c8c7873d6130035c19fcf244b5b92e29c7087a5e51c453a1fe7da345a689ef3d0953b8841ab1b3895a69a209aa529ff3e4d6b8217ce16b37c5572d737ece0a7f381696a3f3901bced9f843b48504930b25d204d910955f52c76eccd208a975a3a0e4433d70dd090ef5adb8de83cb", "name": "System"}' # noqa: E501
)
with pytest.warns(LicenceNotice, match="An invalid licence file is detected"):
Licence()
del os.environ["RATESLIB_LICENCE"]
replace_collected_licence(env_licence, file_licence)
@pytest.mark.parametrize(
"licence_text",
[
"garbage",
'{"expiry": "1900-01-01", "id": "Rateslib Tests", "xkey": "0x2cec1", "name": "System"}',
],
)
def test_add_invalid_licence(self, licence_text):
with pytest.raises(ValueError):
licence.add_licence(licence_text)
@pytest.mark.parametrize(
"licence_text",
[
'{"name": "RL Expiry Test", "xkey": "0x68178a21511a36f8270bb4f73451bf3a6575e23e11bc9d0ebead841fa77bfef16cbae1341ad2e6d80f0b717923a48fbd3580eb6cc216a31c0d23618a32e8b2773cc52998e6bcb0315a8f46d003ce04f7ddeb8c19e66a16c73d2e925218dff044ba5f43f7d05503626e89fadbf85751807737f73c55b2048f96fd331b202abe45"}', # noqa: E501
'{"name": "RL xkey missing"}',
],
)
def test_licence_missing_keys(self, licence_text):
from rateslib.verify import _verify_licence
assert _verify_licence(licence_text) is None
================================================
FILE: python/tests/test_dual.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
import math
from statistics import NormalDist
import numpy as np
import pytest
from packaging import version
from rateslib import IRS, Curve, FXRates, Solver, default_context, dt
from rateslib.dual import (
Dual,
Dual2,
Variable,
dual_exp,
dual_inv_norm_cdf,
dual_log,
dual_norm_cdf,
dual_norm_pdf,
dual_solve,
gradient,
set_order,
)
from rateslib.dual.utils import _abs_float, _set_ad_order_objects
DUAL_CORE_PY = False
@pytest.fixture
def x_1():
return Dual(1, vars=["v0", "v1"], dual=[1, 2])
@pytest.fixture
def x_2():
return Dual(2, vars=["v0", "v2"], dual=[0, 3])
@pytest.fixture
def y_1():
return Dual2(1, vars=["v0", "v1"], dual=[1, 2], dual2=[])
@pytest.fixture
def y_2():
return Dual2(1, vars=["v0", "v1"], dual=[1, 2], dual2=[1.0, 1.0, 1.0, 1.0])
@pytest.fixture
def y_3():
return Dual2(2, vars=["v0", "v2"], dual=[0, 3], dual2=[1.0, 1.0, 1.0, 1.0])
@pytest.fixture
def A():
return np.random.randn(25).reshape(5, 5)
@pytest.fixture
def A_sparse():
return np.array(
[
[24, -36, 12, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0.25, 0.583333333333, 0.1666666666, 0, 0, 0, 0, 0],
[0, 0, 0.1666666666, 0.6666666666, 0.1666666666, 0, 0, 0, 0],
[0, 0, 0, 0.1666666666, 0.6666666666, 0.1666666666, 0, 0, 0],
[0, 0, 0, 0, 0.1666666666, 0.6666666666, 0.1666666666, 0, 0],
[0, 0, 0, 0, 0, 0.1666666666, 0.583333333333, 0.25, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 0, 0, 12, -36, 24],
],
)
@pytest.fixture
def b():
return np.random.randn(5).reshape(5, 1)
def test_zero_init() -> None:
x = Dual(1, ["x"], [])
assert np.all(x.dual == np.ones(1))
y = Dual2(1, ["x"], [], [])
assert np.all(y.dual == np.ones(1))
assert np.all(y.dual2 == np.zeros((1, 1)))
@pytest.mark.parametrize(
"op",
[
"__add__",
"__sub__",
"__mul__",
"__truediv__",
"__eq__",
],
)
def test_no_type_crossing_on_ops(x_1, y_1, op) -> None:
# getattr(x_1, op)(y_1)
with pytest.raises(TypeError):
getattr(x_1, op)(y_1)
with pytest.raises(TypeError):
getattr(y_1, op)(x_1)
def test_functions_of_two_duals_analytic_formula():
# test the analytic formula for determining the resultant dual number of a function of
# 2 dual numbers
a = Dual2(2.0, ["a"], [], [])
b = Dual2(3.0, ["b"], [], [])
# z and p contain 2nd order manifolds
z = a**2 * b # = 12
p = b**2 * a # = 18
p = Dual2.vars_from(z, p.real, p.vars, p.dual, np.ravel(p.dual2))
# f is the actual expected result, calculated using dual number arithmetic
expected = z**2 * p**3
# result is pieced together using the analytic formula
f_0 = 12**2 * 18**3
f_z = 2 * 12 * 18**3
f_p = 3 * 12**2 * 18**2
f_zz = 2 * 18**3
f_zp = 6 * 12 * 18**2
f_pp = 6 * 12**2 * 18
real = f_0
dual = z.dual * f_z + p.dual * f_p
dual2 = f_z * z.dual2 + f_p * p.dual2
dual2 += 0.5 * f_zz * np.outer(z.dual, z.dual)
dual2 += 0.5 * f_pp * np.outer(p.dual, p.dual)
dual2 += 0.5 * f_zp * (np.outer(z.dual, p.dual) + np.outer(p.dual, z.dual))
result = Dual2.vars_from(z, real, z.vars, dual, np.ravel(dual2))
assert result == expected
def test_dual_repr(x_1, y_2) -> None:
result = x_1.__repr__()
assert result == ""
result = y_2.__repr__()
assert result == ""
@pytest.mark.skipif(not DUAL_CORE_PY, reason="Rust Dual does not format string in this way.")
def test_dual_str(x_1, y_2) -> None:
result = x_1.__str__()
assert result == " val = 1.00000000\n dv0 = 1.000000\n dv1 = 2.000000\n"
result = y_2.__str__()
assert (
result == " val = 1.00000000\n"
" dv0 = 1.000000\n"
" dv1 = 2.000000\n"
"dv0dv0 = 2.000000\n"
"dv0dv1 = 2.000000\n"
"dv1dv1 = 2.000000\n"
)
@pytest.mark.parametrize(
("vars_", "expected"),
[
(["v0"], 1.00),
(["v1", "v0"], np.array([2.0, 1.0])),
],
)
def test_gradient_method(vars_, expected, x_1, y_2) -> None:
result = gradient(x_1, vars_)
assert np.all(result == expected)
result = gradient(y_2, vars_)
assert np.all(result == expected)
def test_gradient_on_float():
result = gradient(1.0, ["v0", "s"])
assert np.all(result == np.array([0.0, 0.0]))
result = gradient(1.0, ["s"], order=2)
assert np.all(result == np.array([[0.0, 0.0], [0.0, 0.0]]))
@pytest.mark.parametrize(
("vars_", "expected"),
[
(["v0"], 2.00),
(["v1", "v0"], np.array([[2.0, 2.0], [2.0, 2.0]])),
],
)
def test_gradient_method2(vars_, expected, y_2) -> None:
result = gradient(y_2, vars_, 2)
assert np.all(result == expected)
def test_rdiv_raises(x_1, y_1) -> None:
with pytest.raises(TypeError):
_ = "string" / x_1
with pytest.raises(TypeError):
_ = "string" / y_1
def test_neg(x_1, y_2) -> None:
assert -x_1 == Dual(-1, ["v0", "v1"], [-1.0, -2.0])
assert -y_2 == Dual2(-1, ["v0", "v1"], [-1.0, -2.0], [-1.0, -1.0, -1.0, -1.0])
def test_eq_ne(x_1, y_1, y_2) -> None:
# non-matching types
assert Dual(0, ["single_var"], []) != 0
assert Dual2(0, ["single_var"], [], []) != 0
# ints
assert Dual(2, [], []) == 2
assert Dual2(2, [], [], []) == 2
# floats
assert Dual(3.3, [], []) == 3.3
assert Dual2(3.3, [], [], []) == 3.3
# no type crossing
with pytest.raises(TypeError):
assert x_1 != y_1
# equality
assert x_1 == Dual(1, ["v0", "v1"], [1, 2])
assert y_1 == Dual2(1, ["v0", "v1"], [1, 2], [])
assert y_2 == Dual2(1, ["v0", "v1"], [1, 2], [1.0, 1.0, 1.0, 1.0])
# non-matching elements
assert x_1 != Dual(2, ["v0", "v1"], [1, 2])
assert x_1 != Dual(1, ["v0", "v1"], [2, 2])
assert x_1 != Dual(1, ["v2", "v1"], [1, 2])
# non-matching elements
assert y_1 != Dual2(2, ["v0", "v1"], [1, 2], [])
assert y_1 != Dual2(1, ["v0", "v1"], [2, 2], [])
assert y_1 != Dual2(1, ["v2", "v1"], [1, 2], [])
# non-matching dual2
assert y_2 != Dual2(1, ["v0", "v1"], [1, 2], [2.0, 2.0, 2.0, 2.0])
def test_lt() -> None:
assert Dual(1, ["x"], []) < Dual(2, ["y"], [])
assert Dual2(1, ["z"], [], []) < Dual2(2, ["x"], [], [])
assert Dual(1, ["x"], []) < 10
assert not Dual(1, ["x"], []) < 0
def test_lt_raises() -> None:
with pytest.raises(TypeError, match="Cannot compare"):
assert Dual(1, ["x"], []) < Dual2(2, ["y"], [], [])
def test_gt() -> None:
assert Dual(2, ["x"], []) > Dual(1, ["y"], [])
assert Dual2(2, ["z"], [], []) > Dual2(1, ["x"], [], [])
assert Dual(1, ["x"], []) > 0
assert not Dual(1, ["x"], []) > 10
def test_gt_raises() -> None:
with pytest.raises(TypeError, match="Cannot compare"):
assert Dual(2, ["x"], []) > Dual2(1, ["y"], [], [])
def test_dual2_abs_float(x_1, y_1, y_2) -> None:
assert _abs_float(x_1) == 1
assert _abs_float(y_1) == 1
assert _abs_float(y_2) == 1
assert float(x_1) == float(1)
assert float(y_1) == float(1)
assert float(y_2) == float(1)
assert abs(-x_1) == x_1
assert abs(-y_1) == y_1
assert abs(-y_2) == y_2
@pytest.mark.parametrize("op", ["__add__", "__sub__", "__mul__", "__truediv__"])
def test_dual2_immutable(y_1, y_2, op) -> None:
_ = getattr(y_1, op)(y_2)
assert y_1 == Dual2(1, vars=["v0", "v1"], dual=np.array([1, 2]), dual2=[])
assert y_2 == Dual2(1, vars=["v0", "v1"], dual=np.array([1, 2]), dual2=[1.0, 1.0, 1.0, 1.0])
@pytest.mark.parametrize("op", ["__add__", "__sub__", "__mul__", "__truediv__"])
def test_dual_immutable(x_1, op) -> None:
_ = getattr(x_1, op)(Dual(2, vars=["new"], dual=np.array([4])))
assert x_1 == Dual(1, vars=["v0", "v1"], dual=np.array([1, 2]))
def test_dual_raises(x_1) -> None:
with pytest.raises(ValueError, match="`Dual` variable cannot possess `dual2`"):
x_1.dual2
def test_dual_is_not_iterable(x_1, y_1):
# do not want isinstance checks for Dual to identify them as a Sequence kind
assert getattr(x_1, "__iter__", None) is None
assert getattr(y_1, "__iter__", None) is None
def test_dual_has_no_len(x_1, y_1):
# do not want isinstance checks for Dual to identify them as a Sequence kind
assert getattr(x_1, "__len__", None) is None
assert getattr(y_1, "__len__", None) is None
@pytest.mark.parametrize(
("op", "expected"),
[
("__add__", Dual(3, vars=["v0", "v1", "v2"], dual=np.array([1, 2, 3]))),
("__sub__", Dual(-1, vars=["v0", "v1", "v2"], dual=np.array([1, 2, -3]))),
("__mul__", Dual(2, vars=["v0", "v1", "v2"], dual=np.array([2, 4, 3]))),
("__truediv__", Dual(0.5, vars=["v0", "v1", "v2"], dual=np.array([0.5, 1, -0.75]))),
],
)
def test_ops(x_1, x_2, op, expected) -> None:
result = getattr(x_1, op)(x_2)
assert result == expected
def test_op_inversions(x_1, x_2) -> None:
assert (x_1 + x_2) - (x_2 + x_1) == 0
assert (x_1 / x_2) * (x_2 / x_1) == 1
@pytest.mark.parametrize(
("op", "expected"),
[
("__add__", Dual2(3, ["v0", "v1", "v2"], [1, 2, 3], [2, 1, 1, 1, 1, 0, 1, 0, 1])),
("__sub__", Dual2(-1, ["v0", "v1", "v2"], [1, 2, -3], [0, 1, -1, 1, 1, 0, -1, 0, -1])),
("__mul__", Dual2(2, ["v0", "v1", "v2"], [2, 4, 3], [3, 2, 2.5, 2, 2, 3, 2.5, 3, 1])),
(
"__truediv__",
Dual2(
0.5,
["v0", "v1", "v2"],
[0.5, 1.0, -0.75],
[0.25, 0.5, -0.625, 0.5, 0.5, -0.75, -0.625, -0.75, 0.875],
),
),
],
)
def test_ops2(y_2, y_3, op, expected) -> None:
result = getattr(y_2, op)(y_3)
assert result == expected
def test_op_inversions2(y_2, y_3) -> None:
assert (y_2 + y_3) - (y_3 + y_2) == 0
assert (y_2 / y_3) * (y_3 / y_2) == 1
def test_inverse(x_1, y_2) -> None:
assert x_1 * x_1**-1 == 1
assert y_2 * y_2**-1 == 1
def test_power_identity(x_1, y_2) -> None:
result = x_1**1
assert result == x_1
result = y_2**1
assert result == y_2
@pytest.mark.parametrize(
("op", "expected"),
[
("__add__", Dual(1 + 2.5, vars=["v0", "v1"], dual=np.array([1, 2]))),
("__sub__", Dual(1 - 2.5, vars=["v0", "v1"], dual=np.array([1, 2]))),
("__mul__", Dual(1 * 2.5, vars=["v0", "v1"], dual=np.array([1, 2]) * 2.5)),
("__truediv__", Dual(1 / 2.5, vars=["v0", "v1"], dual=np.array([1, 2]) / 2.5)),
],
)
def test_left_op_with_float(x_1, op, expected) -> None:
result = getattr(x_1, op)(2.5)
assert result == expected
@pytest.mark.parametrize(
("op", "expected"),
[
("__add__", Dual2(1 + 2.5, ["v0", "v1"], [1, 2], [1.0, 1.0, 1.0, 1.0])),
(
"__sub__",
Dual2(1 - 2.5, ["v0", "v1"], [1, 2], [1.0, 1.0, 1.0, 1.0]),
),
("__mul__", Dual2(1 * 2.5, ["v0", "v1"], [2.5, 5.0], [2.5, 2.5, 2.5, 2.5])),
(
"__truediv__",
Dual2(1 / 2.5, ["v0", "v1"], [1 / 2.5, 2 / 2.5], [1 / 2.5, 1 / 2.5, 1 / 2.5, 1 / 2.5]),
),
],
)
def test_left_op_with_float2(y_2, op, expected) -> None:
result = getattr(y_2, op)(2.5)
assert result == expected
def test_right_op_with_float(x_1) -> None:
assert 2.5 + x_1 == Dual(1 + 2.5, vars=["v0", "v1"], dual=np.array([1, 2]))
assert 2.5 - x_1 == Dual(2.5 - 1, vars=["v0", "v1"], dual=-np.array([1, 2]))
assert 2.5 * x_1 == x_1 * 2.5
assert 2.5 / x_1 == (x_1 / 2.5) ** -1
def test_right_op_with_float2(y_2) -> None:
assert 2.5 + y_2 == Dual2(
1 + 2.5,
vars=["v0", "v1"],
dual=[1.0, 2.0],
dual2=[1.0, 1.0, 1.0, 1.0],
)
assert 2.5 - y_2 == Dual2(
2.5 - 1,
vars=["v0", "v1"],
dual=[-1.0, -2.0],
dual2=[-1.0, -1.0, -1.0, -1.0],
)
assert 2.5 * y_2 == y_2 * 2.5
assert 2.5 / y_2 == (y_2 / 2.5) ** -1
def test_dual2_second_derivatives() -> None:
"test power, multiplication, addition"
def f(x, y, z):
"""
f_x = 4x^3 y^2, f_y = 2y x^4 + z, f_z = 3z^2 +y
f_xx = 12x^2 y^2, f_xy = 8 x^3 y, f_xz = 0,
f_yx = 8x^3 y, f_yy = 2 x^4, f_yz = 1,
f_zx = 0, f_zy = 1, f_zz = 6z
"""
return x**4 * y**2 + z**3 + y * z
x_, y_, z_ = 3, 2, 1
x = Dual2(x_, vars=["x"], dual=[1], dual2=[])
y = Dual2(y_, vars=["y"], dual=[1], dual2=[])
z = Dual2(z_, vars=["z"], dual=[1], dual2=[])
result = f(x, y, z)
assert result.dual[0] == 4 * x_**3 * y_**2 # 432
assert result.dual[1] == 2 * y_ * x_**4 + z_ # 325
assert result.dual[2] == 3 * z_**2 + y_ # 5
assert result.dual2[0, 0] * 2 == 12 * x_**2 * y_**2
assert result.dual2[0, 1] * 2 == 8 * x_**3 * y_
assert result.dual2[0, 2] * 2 == 0
assert result.dual2[1, 0] * 2 == 8 * x_**3 * y_
assert result.dual2[1, 1] * 2 == 2 * x_**4
assert result.dual2[1, 2] * 2 == 1
assert result.dual2[2, 0] * 2 == 0
assert result.dual2[2, 1] * 2 == 1
assert result.dual2[2, 2] * 2 == 6 * z_
def test_dual2_second_derivatives2() -> None:
"test dual_exp, multiplication, division, dual_log"
def f(x, y, z):
return (x / z).__exp__() + (x * y).__log__()
x_, y_, z_ = 3, 2, 1
x = Dual2(x_, vars=["x"], dual=[1], dual2=[])
y = Dual2(y_, vars=["y"], dual=[1], dual2=[])
z = Dual2(z_, vars=["z"], dual=[1], dual2=[])
result = f(x, y, z)
xi = result.vars.index("x")
yi = result.vars.index("y")
zi = result.vars.index("z")
assert result.dual[xi] == math.exp(x_ / z_) / z_ + 1 / x_
assert result.dual[yi] == 1 / y_
assert result.dual[zi] == -x_ * math.exp(x_ / z_) / z_**2
assert result.dual2[xi, xi] * 2 == math.exp(x_ / z_) / z_**2 - 1 / x_**2
assert result.dual2[xi, yi] * 2 == 0
assert result.dual2[xi, zi] * 2 == math.exp(x_ / z_) * (-1 / z_**2 - x_ / z_**3)
assert result.dual2[yi, xi] * 2 == 0
assert result.dual2[yi, yi] * 2 == -1 / y_**2
assert result.dual2[yi, zi] * 2 == 0
assert result.dual2[zi, xi] * 2 == math.exp(x_ / z_) * (-1 / z_**2 - x_ / z_**3)
assert result.dual2[zi, yi] * 2 == 0
assert result.dual2[zi, zi] * 2 == math.exp(x_ / z_) * (x_**2 / z_**4 + 2 * x_ / z_**3)
def test_dual2_second_derivatives3() -> None:
"""
h, f = dual_log(f), x^3y+y
f_x = 1/f 3x^2y, f_y = 1/f (x^3+1),
f_xx = -1/f^2 (3x^2y)^2 + 1/f 6xy, f_xy = -1/f^2 (3x^2y)(x^3+1),
f_yy = -1/f^2 (x^3+1)^2 +1/f (0)
"""
x_, y_ = 2, 1
x = Dual2(x_, vars=["x"], dual=[1], dual2=[])
y = Dual2(y_, vars=["y"], dual=[1], dual2=[])
f = y * x**3 + y
f_, fx_, fy_ = f.real, 3 * y_ * x_**2, x_**3 + 1
fxx_, fxy_, fyy_ = 6 * x_ * y_, 3 * x_**2, 0
xi = f.vars.index("x")
yi = f.vars.index("y")
assert f.dual[xi] == fx_
assert f.dual[yi] == fy_
assert f.dual2[xi, xi] * 2 == fxx_
assert f.dual2[xi, yi] * 2 == fxy_
assert f.dual2[yi, yi] * 2 == 0
h = f.__log__()
assert h.real == math.log(y_ * x_**3 + y_)
assert h.dual[xi] == 1 / f_ * fx_
assert h.dual[yi] == 1 / f_ * fy_
assert h.dual2[xi, xi] * 2 == -1 / f_**2 * fx_**2 + 1 / f_ * fxx_
assert h.dual2[xi, yi] * 2 == -1 / f_**2 * fx_ * fy_ + 1 / f_ * fxy_
assert h.dual2[yi, xi] * 2 == -1 / f_**2 * fx_ * fy_ + 1 / f_ * fxy_
assert h.dual2[yi, yi] * 2 == -1 / f_**2 * fy_**2 + 1 / f_ * fyy_
@pytest.mark.parametrize(
("power", "expected"),
[
(1, (2, 1, 0)),
(2, (4, 4, 2)),
(3, (8, 12, 12)),
(4, (16, 32, 48)),
(5, (32, 80, 160)),
(6, (64, 192, 480)),
],
)
def test_dual_power_1d(power, expected) -> None:
x = Dual(2, vars=["x"], dual=[1])
y = Dual2(2, vars=["x"], dual=[1], dual2=[])
f, g = x**power, y**power
assert f.real == expected[0]
assert f.dual[0] == expected[1]
assert g.real == expected[0]
assert g.dual[0] == expected[1]
assert g.dual2[0, 0] * 2 == expected[2]
def test_dual2_power2_1d() -> None:
x = Dual2(2, vars=["x"], dual=[1], dual2=[])
assert (x**2) * (x ** (-2)) == 1
assert (x**5) * (x ** (-5)) == 1
z = (x**7.35) * (x ** (-7.35))
assert abs(z - 1.0) < 1e-12
def test_dual2_power_2d() -> None:
x = Dual2(2, vars=["x"], dual=[1], dual2=[])
y = Dual2(3, vars=["y"], dual=[1], dual2=[])
f = (x**4 * y**3) ** 2
assert f.dual2[0, 1] * 2 == 1492992
assert f.dual2[1, 0] * 2 == 1492992
def test_dual2_inv_specific() -> None:
z = Dual2(2, vars=["x", "y"], dual=[2, 3], dual2=[])
result = z**-1
expected = Dual2(
0.5,
vars=["x", "y"],
dual=[-0.5, -0.75],
dual2=[0.5, 0.75, 0.75, 9 / 8],
)
assert result == expected
def test_dual_truediv(x_1) -> None:
expected = Dual(1, [], [])
result = x_1 / x_1
assert result == expected
def test_dual2_exp_1d() -> None:
x = Dual2(2, vars=["x"], dual=[1], dual2=[])
f = x.__exp__()
assert f.real == math.exp(2)
assert f.dual[0] == math.exp(2)
assert f.dual2[0, 0] * 2 == math.exp(2)
def test_dual2_log_1d() -> None:
x = Dual2(2, vars=["x"], dual=[1], dual2=[])
f = x.__log__()
assert f.real == math.log(2)
assert f.dual[0] == 0.5
assert f.dual2[0] * 2 == -0.25
def test_dual2_log_exp() -> None:
x = Dual2(2, vars=["x"], dual=[1], dual2=[])
y = x.__log__()
z = y.__exp__()
assert x == z
def test_combined_vars_sorted(y_3) -> None:
x = Dual2(2, vars=["a", "v0", "z"], dual=[1, 1, 1], dual2=[])
result = x * y_3
assert set(result.vars) == {"a", "v0", "v2", "z"}
@pytest.mark.parametrize(
"x",
[
2,
Dual(2, [], []),
Dual2(2, [], [], []),
],
)
def test_log(x) -> None:
result = dual_log(x)
expected = math.log(2)
assert result == expected
def test_dual_log_base() -> None:
result = dual_log(16, 2)
assert result == 4
result = dual_log(Dual(16, [], []), 2)
assert result == Dual(4, [], [])
@pytest.mark.parametrize(
"x",
[
2,
Dual(2, [], []),
Dual2(2, [], [], []),
],
)
def test_exp(x) -> None:
result = dual_exp(x)
expected = math.exp(2)
assert result == expected
@pytest.mark.parametrize(
"x",
[
Dual(1.25, ["x"], []),
Dual2(1.25, ["x"], [], []),
],
)
def test_norm_cdf(x) -> None:
result = dual_norm_cdf(x)
expected = NormalDist().cdf(1.250)
assert abs(result - expected) < 1e-10
approx_grad = (NormalDist().cdf(1.25001) - NormalDist().cdf(1.25)) * 100000
assert abs(gradient(result, ["x"])[0] - approx_grad) < 1e-5
if isinstance(x, Dual2):
approx_grad2 = (NormalDist().cdf(1.25) - NormalDist().cdf(1.24999)) * 100000
approx_grad2 = (approx_grad - approx_grad2) * 100000
assert abs(gradient(result, ["x"], order=2)[0] - approx_grad2) < 1e-5
@pytest.mark.parametrize(
"x",
[
Dual(0.75, ["x"], []),
Dual2(0.75, ["x"], [], []),
],
)
def test_inv_norm_cdf(x) -> None:
result = dual_inv_norm_cdf(x)
expected = NormalDist().inv_cdf(0.75)
assert abs(result - expected) < 1e-10
approx_grad = (NormalDist().inv_cdf(0.75001) - NormalDist().inv_cdf(0.75)) * 100000
assert abs(gradient(result, ["x"])[0] - approx_grad) < 1e-4
if isinstance(x, Dual2):
approx_grad2 = (NormalDist().inv_cdf(0.75) - NormalDist().inv_cdf(0.74999)) * 100000
approx_grad2 = (approx_grad - approx_grad2) * 100000
assert abs(gradient(result, ["x"], order=2)[0] - approx_grad2) < 1e-4
def test_norm_cdf_value() -> None:
result = dual_norm_cdf(1.0)
expected = 0.8413
assert abs(result - expected) < 1e-4
def test_inv_norm_cdf_value() -> None:
result = dual_inv_norm_cdf(0.50)
expected = 0.0
assert abs(result - expected) < 1e-4
@pytest.mark.skip(reason="downcast vars is not used within the library, kept only for compat.")
def test_downcast_vars() -> None:
w = Dual(2, ["x", "y", "z"], [0, 1, 1])
assert w.__downcast_vars__().vars == ("y", "z")
x = Dual2(2, ["x", "y", "z"], [0, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, 1])
assert x.__downcast_vars__().vars == ("y", "z")
y = Dual2(2, ["x", "y", "z"], [0, 0, 1], [0, 0, 0, 0, 0, 0, 0, 0, 1])
assert y.__downcast_vars__().vars == ("z",)
z = Dual2(2, ["x", "y", "z"], [0, 0, 1], [0, 0, 0, 0, 0, 1, 0, 1, 1])
assert z.__downcast_vars__().vars == ("y", "z")
def test_gradient_of_non_present_vars(x_1) -> None:
result = gradient(x_1)
assert np.all(np.isclose(result, np.array([1, 2])))
@pytest.mark.parametrize(("base", "exponent"), [(0, 1), (1, 0)])
def test_powers_bad_type(base, exponent, x_1, y_1) -> None:
base = x_1 if base else y_1
exponent = x_1 if exponent else y_1
with pytest.raises(TypeError):
base**exponent
def test_keep_manifold_gradient() -> None:
du2 = Dual2(
10,
["x", "y", "z"],
dual=[1, 2, 3],
dual2=[2, 3, 4, 3, 4, 5, 4, 5, 6],
)
result = gradient(du2, ["x", "z"], 1, keep_manifold=True)
expected = np.array([Dual2(1, ["x", "z"], [4, 8], []), Dual2(3, ["x", "z"], [8, 12], [])])
assertions = result == expected
assert all(assertions)
def test_dual_set_order(x_1, y_1) -> None:
assert set_order(x_1, 1) == x_1
assert set_order(y_1, 2) == y_1
assert set_order(1.0, 2) == 1.0
assert set_order(x_1, 2) == y_1
assert set_order(y_1, 1) == x_1
assert set_order(x_1, 0) == 1.0
def test_variable_set_order() -> None:
x = Variable(2.0, ["x"])
x_dual = set_order(x, order=1)
assert isinstance(x_dual, Dual)
x_dual2 = set_order(x, order=2)
assert isinstance(x_dual2, Dual2)
def test_perturbation_confusion() -> None:
# https://www.bcl.hamilton.ie/~barak/papers/ifl2005.pdf
# Utilised tagged variables
x = Dual(1.0, ["x"], [])
y = Dual(1.0, ["y"], [])
z = gradient(x + y, ["y"])[0]
result = gradient(x * z, ["x"])
assert result == 1.0
# Replicates untagged variables
x = Dual(1.0, ["x"], [])
y = Dual(1.0, ["x"], [])
z = gradient(x + y, ["x"])[0]
result = gradient(x * z, ["x"])
assert result == 2.0
# Linalg dual_solve tests
def test_solve(A, b) -> None:
x = dual_solve(A, b)
x_np = np.linalg.solve(A, b)
diff = x - x_np
assertions = [abs(diff[i, 0]) < 1e-10 for i in range(A.shape[0])]
assert all(assertions)
def test_solve_lsqrs() -> None:
A = np.array([[0, 1], [1, 1], [2, 1], [3, 1]])
b = np.array([[-1, 0.2, 0.9, 2.1]]).T
result = dual_solve(A, b, allow_lsq=True, types=(float, float))
assert abs(result[0, 0] - 1.0) < 1e-9
assert abs(result[1, 0] + 0.95) < 1e-9
def test_solve_dual() -> None:
A = np.array([[1, 0], [0, 1]], dtype="object")
b = np.array([Dual(2, ["x"], np.array([1])), Dual(5, ["x", "y"], np.array([1, 1]))])[
:,
np.newaxis,
]
x = dual_solve(A, b, types=(float, Dual))
assertions = abs(b - x) < 1e-10
assert all(assertions)
def test_solve_dual2() -> None:
A = np.array(
[
[Dual2(1, [], [], []), Dual2(0, [], [], [])],
[Dual2(0, [], [], []), Dual2(1, [], [], [])],
],
dtype="object",
)
b = np.array([Dual2(2, ["x"], [1], []), Dual2(5, ["x", "y"], [1, 1], [])])[:, np.newaxis]
x = dual_solve(A, b, types=(Dual2, Dual2))
assertions = abs(b - x) < 1e-10
assert all(assertions)
def test_sparse_solve(A_sparse) -> None:
b = np.array(
[0, 0.90929743, 0.14112001, -0.7568025, -0.95892427, -0.2794155, 0.6569866, 0.98935825, 0],
)
b = b[:, np.newaxis]
x = dual_solve(A_sparse, b)
x_np = np.linalg.solve(A_sparse, b)
diff = x - x_np
assertions = [abs(diff[i, 0]) < 1e-10 for i in range(A_sparse.shape[0])]
assert all(assertions)
@pytest.mark.skipif(not DUAL_CORE_PY, reason="Rust Dual has not implemented Multi-Dim Solve")
def test_multi_dim_solve() -> None:
A = np.array([[Dual(0.5, [], []), Dual(2, ["y"], [])], [Dual(2.5, ["y"], []), Dual(4, [], [])]])
b = np.array(
[[Dual(6.5, [], []), Dual(9, ["z"], [])], [Dual(14.5, ["y"], []), Dual(21, ["z"], [])]],
)
x = dual_solve(A, b)
result = np.matmul(A, x).flatten()
expected = b.flatten()
for i in range(4):
assert abs(result[i] - expected[i]) < 1e-13
assert all(np.isclose(gradient(result[i], ["y", "z"]), gradient(expected[i], ["y", "z"])))
# Test numpy compat
def test_numpy_isclose(y_2) -> None:
# np.isclose not supported for non-numeric dtypes
a = np.array([y_2, y_2])
b = np.array([y_2, y_2])
with pytest.raises(TypeError):
assert np.isclose(a, b)
def test_numpy_equality(y_2) -> None:
# instead of isclose use == (which uses math.isclose elementwise) and then np.all
a = np.array([y_2, y_2])
b = np.array([y_2, y_2])
result = a == b
assert np.all(result)
@pytest.mark.parametrize(
"z",
[
Dual(2.0, ["y"], []),
Dual2(3.0, ["x"], [1], [2]),
],
)
@pytest.mark.parametrize(
"arg",
[
2.2,
Dual(3, ["x"], []),
Dual2(3, ["x"], [2], [3]),
],
)
@pytest.mark.parametrize(
"op_str",
[
"add",
"sub",
"mul",
"truediv",
],
)
def test_numpy_broadcast_ops_types(z, arg, op_str) -> None:
op = "__" + op_str + "__"
if type(z) in [Dual, Dual2] and type(arg) in [Dual, Dual2] and type(arg) is not type(z):
pytest.skip("Cannot operate Dual and Dual2 together.")
result = getattr(np.array([z, z]), op)(arg)
expected = np.array([getattr(z, op)(arg), getattr(z, op)(arg)])
assert np.all(result == expected)
result = getattr(arg, op)(np.array([z, z]))
if result is NotImplemented:
opr = "__r" + op_str + "__"
result = getattr(np.array([z, z]), opr)(arg)
expected = np.array([getattr(z, opr)(arg), getattr(z, opr)(arg)])
else:
expected = np.array([getattr(arg, op)(z), getattr(arg, op)(z)])
assert np.all(result == expected)
@pytest.mark.parametrize(
"z",
[
Dual(2.0, ["y"], []),
Dual2(3.0, ["x"], [1], [2]),
],
)
def test_numpy_broadcast_pow_types(z) -> None:
result = np.array([z, z]) ** 3
expected = np.array([z**3, z**3])
assert np.all(result == expected)
result = z ** np.array([3, 4])
expected = np.array([z**3, z**4])
assert np.all(result == expected)
def test_numpy_matmul(y_2, y_1) -> None:
a = np.array([y_2, y_1])
result = np.matmul(a[:, np.newaxis], a[np.newaxis, :])
expected = np.array([[y_2 * y_2, y_2 * y_1], [y_2 * y_1, y_1 * y_1]])
assert np.all(result == expected)
@pytest.mark.skipif(
version.parse(np.__version__) >= version.parse("1.25.0"),
reason="Object dtypes accepted by NumPy in 1.25.0+",
)
def test_numpy_einsum(y_2, y_1) -> None:
# einsum does not work with object dtypes
a = np.array([y_2, y_1])
with pytest.raises(TypeError):
_ = np.einsum("i,j", a, a, optimize=True)
@pytest.mark.skipif(
version.parse(np.__version__) < version.parse("1.25.0"),
reason="Object dtypes not accepted by NumPy in <1.25.0",
)
def test_numpy_einsum_works(y_2, y_1) -> None:
a = np.array([y_2, y_1])
result = np.einsum("i,j", a, a, optimize=True)
expected = np.array([[y_2 * y_2, y_2 * y_1], [y_2 * y_1, y_1 * y_1]])
assert np.all(result == expected)
@pytest.mark.parametrize(
"z",
[
Dual(2.0, ["y"], []),
Dual2(3.0, ["x"], [1], [2]),
],
)
@pytest.mark.parametrize(
"dtype",
[
np.int8,
np.int16,
np.int32,
np.int64,
np.float16,
np.float32,
np.float64,
np.longdouble,
],
)
def test_numpy_dtypes(z, dtype) -> None:
np.array([1, 2], dtype=dtype) + z
z + np.array([1, 2], dtype=dtype)
z + dtype(2)
dtype(2) + z
class TestVariable:
@pytest.mark.parametrize(
("op", "exp"),
[
("__add__", Variable(4.0, ["x"])),
("__radd__", Variable(4.0, ["x"])),
("__sub__", Variable(1.0, ["x"])),
("__rsub__", -Variable(1.0, ["x"])),
("__mul__", Variable(3.75, ["x"], [1.5])),
("__rmul__", Variable(3.75, ["x"], [1.5])),
("__truediv__", Variable(2.5 / 1.5, ["x"], [1.0 / 1.5])),
("__rtruediv__", Dual(1.5, [], []) / Dual(2.5, ["x"], [])),
],
)
def test_variable_f64(self, op, exp):
with default_context("_global_ad_order", 1):
f = 1.5
v = Variable(2.5, ("x",))
result = getattr(v, op)(f)
assert result == exp
def test_variable_f64_reverse(self):
v = Variable(2.5, ("x",))
assert (1.5 + v) == Variable(4.0, ["x"], [])
assert (1.5 - v) == Variable(-1.0, ["x"], [-1.0])
assert (1.5 * v) == Variable(1.5 * 2.5, ["x"], [1.5])
assert (1.5 / v) == Dual(1.5, [], []) / Dual(2.5, ["x"], [])
def test_rtruediv_global_ad(self):
exp = Dual2(1.5, [], [], []) / Dual2(2.5, ["x"], [], [])
with default_context("_global_ad_order", 2):
f = 1.5
v = Variable(2.5, ("x",))
result = f / v
assert result == exp
@pytest.mark.parametrize(
("op", "exp"),
[
("__add__", Dual(4.0, ["x"], [2])),
("__radd__", Dual(4.0, ["x"], [2])),
("__sub__", Dual(1.0, ["x"], [0])),
("__rsub__", Dual(-1.0, ["x"], [0])),
("__mul__", Dual(3.75, ["x"], [4.0])),
("__rmul__", Dual(3.75, ["x"], [4.0])),
("__truediv__", Dual(2.5, ["x"], []) / Dual(1.5, ["x"], [])),
("__rtruediv__", Dual(1.5, ["x"], []) / Dual(2.5, ["x"], [])),
],
)
def test_variable_dual(self, op, exp):
f = Dual(1.5, ["x"], [])
v = Variable(2.5, ("x",))
result = getattr(v, op)(f)
assert result == exp
def test_variable_dual_reverse(self):
f = Dual(1.5, ["x"], [])
v = Variable(2.5, ("x",))
assert f + v == Dual(4.0, ["x"], [2.0])
assert f - v == Dual(-1.0, ["x"], [0.0])
assert f * v == Dual(1.5 * 2.5, ["x"], [4.0])
assert f / v == Dual(1.5, ["x"], [1.0]) / Dual(2.5, ["x"], [1.0])
@pytest.mark.parametrize(
("op", "exp"),
[
("__add__", Dual2(4.0, ["x"], [2], [])),
("__radd__", Dual2(4.0, ["x"], [2], [])),
("__sub__", Dual2(1.0, ["x"], [0], [])),
("__rsub__", Dual2(-1.0, ["x"], [0], [])),
("__mul__", Dual2(1.5, ["x"], [1.0], []) * Dual2(2.5, ["x"], [1.0], [])),
("__rmul__", Dual2(1.5, ["x"], [1.0], []) * Dual2(2.5, ["x"], [1.0], [])),
("__truediv__", Dual2(2.5, ["x"], [], []) / Dual2(1.5, ["x"], [], [])),
("__rtruediv__", Dual2(1.5, ["x"], [], []) / Dual2(2.5, ["x"], [], [])),
],
)
def test_variable_dual2(self, op, exp):
f = Dual2(1.5, ["x"], [], [])
v = Variable(2.5, ("x",))
result = getattr(v, op)(f)
assert result == exp
def test_variable_dual2_reverse(self):
f = Dual2(1.5, ["x"], [], [])
v = Variable(2.5, ("x",))
assert f + v == Dual2(4.0, ["x"], [2.0], [])
assert f - v == Dual2(-1.0, ["x"], [0.0], [])
assert f * v == Dual2(1.5, ["x"], [], []) * Dual2(2.5, ["x"], [], [])
assert f / v == Dual2(1.5, ["x"], [], []) / Dual2(2.5, ["x"], [], [])
@pytest.mark.parametrize(
("op", "exp"),
[
("__add__", Dual(4.0, ["x"], [2])),
("__radd__", Dual(4.0, ["x"], [2])),
("__sub__", Dual(1.0, ["x"], [0])),
("__rsub__", Dual(-1.0, ["x"], [0])),
("__mul__", Dual(1.5, ["x"], [1.0]) * Dual(2.5, ["x"], [1.0])),
("__rmul__", Dual(1.5, ["x"], [1.0]) * Dual(2.5, ["x"], [1.0])),
("__truediv__", Dual(2.5, ["x"], []) / Dual(1.5, ["x"], [])),
],
)
def test_variable_variable_ad1(self, op, exp):
f = Variable(1.5, ("x",))
v = Variable(2.5, ("x",))
with default_context("_global_ad_order", 1):
result = getattr(v, op)(f)
assert result == exp
@pytest.mark.parametrize(
("op", "exp"),
[
("__add__", Dual2(4.0, ["x"], [2], [])),
("__radd__", Dual2(4.0, ["x"], [2], [])),
("__sub__", Dual2(1.0, ["x"], [0], [])),
("__rsub__", Dual2(-1.0, ["x"], [0], [])),
("__mul__", Dual2(1.5, ["x"], [1.0], []) * Dual2(2.5, ["x"], [1.0], [])),
("__rmul__", Dual2(1.5, ["x"], [1.0], []) * Dual2(2.5, ["x"], [1.0], [])),
("__truediv__", Dual2(2.5, ["x"], [], []) / Dual2(1.5, ["x"], [], [])),
],
)
def test_variable_variable_ad2(self, op, exp):
f = Variable(1.5, ("x",))
v = Variable(2.5, ("x",))
with default_context("_global_ad_order", 2):
result = getattr(v, op)(f)
assert result == exp
@pytest.mark.parametrize(
("op", "ad", "exp"),
[
("__exp__", 1, Dual(0.5, ["x"], []).__exp__()),
("__exp__", 2, Dual2(0.5, ["x"], [], []).__exp__()),
("__log__", 1, Dual(0.5, ["x"], []).__log__()),
("__log__", 2, Dual2(0.5, ["x"], [], []).__log__()),
("__norm_cdf__", 1, Dual(0.5, ["x"], []).__norm_cdf__()),
("__norm_cdf__", 2, Dual2(0.5, ["x"], [], []).__norm_cdf__()),
("__norm_inv_cdf__", 1, Dual(0.5, ["x"], []).__norm_inv_cdf__()),
("__norm_inv_cdf__", 2, Dual2(0.5, ["x"], [], []).__norm_inv_cdf__()),
],
)
def test_variable_funcs(self, op, ad, exp):
with default_context("_global_ad_order", ad):
var = Variable(0.5, ["x"])
result = getattr(var, op)()
assert result == exp
@pytest.mark.parametrize(
("op", "ad", "exp"),
[
("__pow__", 1, Dual(2.5, ["x"], []).__pow__(2)),
("__pow__", 2, Dual2(2.5, ["x"], [], []).__pow__(2)),
],
)
def test_variable_pow(self, op, ad, exp):
with default_context("_global_ad_order", ad):
var = Variable(2.5, ["x"])
result = getattr(var, op)(2)
assert result == exp
@pytest.mark.parametrize(("order", "exp"), [(1, 2.0), (2, 0.0)])
def test_gradient(self, order, exp):
var = Variable(2.0, ["x"], [2.0])
result = gradient(var, ["x"], order=order)[0]
assert result == exp
def test_eq(self):
v1 = Variable(1.0, ["x", "y"])
v2 = Variable(1.0, ["x", "y"])
assert v1 == v2
@pytest.mark.parametrize(
("func", "exp"),
[
(dual_exp, Dual(0.5, ["x"], []).__exp__()),
(dual_log, Dual(0.5, ["x"], []).__log__()),
(dual_norm_cdf, Dual(0.5, ["x"], []).__norm_cdf__()),
(dual_inv_norm_cdf, Dual(0.5, ["x"], []).__norm_inv_cdf__()),
(dual_norm_pdf, dual_norm_pdf(Dual(0.5, ["x"], []))),
],
)
def test_standalone_funcs(self, func, exp):
var = Variable(0.5, ["x"])
result = func(var)
assert result == exp
def test_z_exogenous_example(self):
curve = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}, id="curve")
solver = Solver(
curves=[curve], instruments=[IRS(dt(2000, 1, 1), "6m", "S", curves=curve)], s=[2.50]
)
irs = IRS(
effective=dt(2000, 1, 1),
termination="6m",
frequency="S",
leg2_frequency="M",
fixed_rate=Variable(3.0, ["R"]),
notional=Variable(5e6, ["N"]),
leg2_float_spread=Variable(0.0, ["z"]),
curves="curve",
)
result = irs.exo_delta(vars=["N", "R", "z"], vars_scalar=[1.0, 0.01, 1.0], solver=solver)
exp0 = irs.npv(solver=solver) / 5e6
exp1 = irs.analytic_delta(curves=curve)
exp2 = irs.analytic_delta(curves=curve, leg=2)
assert abs(exp0 - result.iloc[0, 0]) < 1e-8
assert abs(exp1 + result.iloc[1, 0]) < 1e-8
assert abs(exp2 + result.iloc[2, 0]) < 1e-8
def test_set_multiple_objects_order():
a = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}, id="a")
b = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}, id="b")
c = a
result = _set_ad_order_objects([2, 2, 0], [a, b, c])
assert a._ad == 2
assert b._ad == 2
assert c._ad == 2 # c is a!
expected = {
id(a): 0,
id(b): 0,
}
assert result == expected
_set_ad_order_objects(result, [a, b, c])
assert a._ad == 0
assert b._ad == 0
assert c._ad == 0 # c is a!
def test_set_multiple_objects_order_raises():
a = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}, id="a")
with pytest.raises(ValueError):
_set_ad_order_objects([0], [a, a])
================================================
FILE: python/tests/test_dualpy.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
import math
from statistics import NormalDist
import numpy as np
import pytest
from packaging import version
from rateslib.dual import (
Dual,
Dual2,
Variable,
dual_exp,
dual_inv_norm_cdf,
dual_log,
dual_norm_cdf,
dual_solve,
gradient,
set_order,
)
from rateslib.dual.utils import _abs_float, _dual_round
@pytest.fixture
def x_1():
return Dual(1, vars=["v0", "v1"], dual=[1, 2])
@pytest.fixture
def x_2():
return Dual(2, vars=["v0", "v2"], dual=[0, 3])
@pytest.fixture
def y_1():
return Dual2(1, vars=["v0", "v1"], dual=[1, 2], dual2=[])
@pytest.fixture
def y_2():
return Dual2(1, vars=["v0", "v1"], dual=[1, 2], dual2=[1.0, 1.0, 1.0, 1.0])
@pytest.fixture
def y_3():
return Dual2(2, vars=["v0", "v2"], dual=[0, 3], dual2=[1.0, 1.0, 1.0, 1.0])
@pytest.fixture
def A():
return np.random.randn(25).reshape(5, 5)
@pytest.fixture
def A_sparse():
return np.array(
[
[24, -36, 12, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0.25, 0.583333333333, 0.1666666666, 0, 0, 0, 0, 0],
[0, 0, 0.1666666666, 0.6666666666, 0.1666666666, 0, 0, 0, 0],
[0, 0, 0, 0.1666666666, 0.6666666666, 0.1666666666, 0, 0, 0],
[0, 0, 0, 0, 0.1666666666, 0.6666666666, 0.1666666666, 0, 0],
[0, 0, 0, 0, 0, 0.1666666666, 0.583333333333, 0.25, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 0, 0, 12, -36, 24],
],
)
@pytest.fixture
def b():
return np.random.randn(5).reshape(5, 1)
def test_zero_init() -> None:
x = Dual(1, ["x"], [])
assert np.all(x.dual == np.ones(1))
y = Dual2(1, ["x"], [], [])
assert np.all(y.dual == np.ones(1))
assert np.all(y.dual2 == np.zeros((1, 1)))
@pytest.mark.parametrize(
"op",
[
"__add__",
"__sub__",
"__mul__",
"__truediv__",
"__eq__",
],
)
def test_no_type_crossing_on_ops(x_1, y_1, op) -> None:
# getattr(x_1, op)(y_1)
with pytest.raises(TypeError):
getattr(x_1, op)(y_1)
with pytest.raises(TypeError):
getattr(y_1, op)(x_1)
def test_dual_repr(x_1, y_2) -> None:
result = x_1.__repr__()
assert result == ""
result = y_2.__repr__()
assert result == ""
def test_dual_str(x_1, y_2) -> None:
result = x_1.__str__()
assert result == ""
result = y_2.__str__()
assert result == ""
def test_rdiv_raises(x_1, y_1) -> None:
with pytest.raises(TypeError):
_ = "string" / x_1
with pytest.raises(TypeError):
_ = "string" / y_1
@pytest.mark.parametrize(
("input_", "expected"),
[
(Variable(2.354, ["x"], [1.1011]), Variable(2.35, ["x"], [1.1011])),
(Dual(2.354, ["x"], [1.1011]), Dual(2.35, ["x"], [1.1011])),
(Dual2(2.354, ["x"], [1.1011], [2.1111]), Dual2(2.35, ["x"], [1.1011], [2.1111])),
],
)
def test_dual_round(input_, expected):
result = _dual_round(input_, 2)
assert result == expected
def test_neg(x_1, y_2) -> None:
assert -x_1 == Dual(-1, ["v0", "v1"], [-1.0, -2.0])
assert -y_2 == Dual2(-1, ["v0", "v1"], [-1.0, -2.0], [-1.0, -1.0, -1.0, -1.0])
def test_eq_ne(x_1, y_1, y_2) -> None:
# non-matching types
assert Dual(0, ["single_var"], []) != 0
assert Dual2(0, ["single_var"], [], []) != 0
# ints
assert Dual(2, [], []) == 2
assert Dual2(2, [], [], []) == 2
# floats
assert Dual(3.3, [], []) == 3.3
assert Dual2(3.3, [], [], []) == 3.3
# no type crossing
with pytest.raises(TypeError):
assert x_1 != y_1
# equality
assert x_1 == Dual(1, ["v0", "v1"], [1, 2])
assert y_1 == Dual2(1, ["v0", "v1"], [1, 2], [])
assert y_2 == Dual2(1, ["v0", "v1"], [1, 2], [1.0, 1.0, 1.0, 1.0])
# non-matching elements
assert x_1 != Dual(2, ["v0", "v1"], [1, 2])
assert x_1 != Dual(1, ["v0", "v1"], [2, 2])
assert x_1 != Dual(1, ["v2", "v1"], [1, 2])
# non-matching elements
assert y_1 != Dual2(2, ["v0", "v1"], [1, 2], [])
assert y_1 != Dual2(1, ["v0", "v1"], [2, 2], [])
assert y_1 != Dual2(1, ["v2", "v1"], [1, 2], [])
# non-matching dual2
assert y_2 != Dual2(1, ["v0", "v1"], [1, 2], [2.0, 2.0, 2.0, 2.0])
def test_lt() -> None:
assert Dual(1, ["x"], []) < Dual(2, ["y"], [])
assert Dual2(1, ["z"], [], []) < Dual2(2, ["x"], [], [])
assert Dual(1, ["x"], []) < 10
assert not Dual(1, ["x"], []) < 0
def test_lt_raises() -> None:
with pytest.raises(TypeError, match="Cannot compare"):
assert Dual(1, ["x"], []) < Dual2(2, ["y"], [], [])
def test_gt() -> None:
assert Dual(2, ["x"], []) > Dual(1, ["y"], [])
assert Dual2(2, ["z"], [], []) > Dual2(1, ["x"], [], [])
assert Dual(1, ["x"], []) > 0
assert not Dual(1, ["x"], []) > 10
def test_gt_raises() -> None:
with pytest.raises(TypeError, match="Cannot compare"):
assert Dual(2, ["x"], []) > Dual2(1, ["y"], [], [])
def test_dual2_abs_float(x_1, y_1, y_2) -> None:
assert _abs_float(x_1) == 1
assert _abs_float(y_1) == 1
assert _abs_float(y_2) == 1
assert float(x_1) == float(1)
assert float(y_1) == float(1)
assert float(y_2) == float(1)
@pytest.mark.parametrize("op", ["__add__", "__sub__", "__mul__", "__truediv__"])
def test_dual2_immutable(y_1, y_2, op) -> None:
_ = getattr(y_1, op)(y_2)
assert y_1 == Dual2(1, vars=["v0", "v1"], dual=np.array([1, 2]), dual2=[])
assert y_2 == Dual2(1, vars=["v0", "v1"], dual=np.array([1, 2]), dual2=[1.0, 1.0, 1.0, 1.0])
@pytest.mark.parametrize("op", ["__add__", "__sub__", "__mul__", "__truediv__"])
def test_dual_immutable(x_1, op) -> None:
_ = getattr(x_1, op)(Dual(2, vars=["new"], dual=np.array([4])))
assert x_1 == Dual(1, vars=["v0", "v1"], dual=np.array([1, 2]))
def test_dual_raises(x_1) -> None:
with pytest.raises(ValueError, match="`Dual` variable cannot possess `dual2`"):
x_1.dual2
@pytest.mark.parametrize(
("op", "expected"),
[
("__add__", Dual(3, vars=["v0", "v1", "v2"], dual=np.array([1, 2, 3]))),
("__sub__", Dual(-1, vars=["v0", "v1", "v2"], dual=np.array([1, 2, -3]))),
("__mul__", Dual(2, vars=["v0", "v1", "v2"], dual=np.array([2, 4, 3]))),
("__truediv__", Dual(0.5, vars=["v0", "v1", "v2"], dual=np.array([0.5, 1, -0.75]))),
],
)
def test_ops(x_1, x_2, op, expected) -> None:
result = getattr(x_1, op)(x_2)
assert result == expected
def test_op_inversions(x_1, x_2) -> None:
assert (x_1 + x_2) - (x_2 + x_1) == 0
assert (x_1 / x_2) * (x_2 / x_1) == 1
@pytest.mark.parametrize(
("op", "expected"),
[
("__add__", Dual2(3, ["v0", "v1", "v2"], [1, 2, 3], [2, 1, 1, 1, 1, 0, 1, 0, 1])),
("__sub__", Dual2(-1, ["v0", "v1", "v2"], [1, 2, -3], [0, 1, -1, 1, 1, 0, -1, 0, -1])),
("__mul__", Dual2(2, ["v0", "v1", "v2"], [2, 4, 3], [3, 2, 2.5, 2, 2, 3, 2.5, 3, 1])),
(
"__truediv__",
Dual2(
0.5,
["v0", "v1", "v2"],
[0.5, 1.0, -0.75],
[0.25, 0.5, -0.625, 0.5, 0.5, -0.75, -0.625, -0.75, 0.875],
),
),
],
)
def test_ops2(y_2, y_3, op, expected) -> None:
result = getattr(y_2, op)(y_3)
assert result == expected
def test_op_inversions2(y_2, y_3) -> None:
assert (y_2 + y_3) - (y_3 + y_2) == 0
assert (y_2 / y_3) * (y_3 / y_2) == 1
def test_inverse(x_1, y_2) -> None:
assert x_1 * x_1**-1 == 1
assert y_2 * y_2**-1 == 1
def test_power_identity(x_1, y_2) -> None:
result = x_1**1
assert result == x_1
result = y_2**1
assert result == y_2
@pytest.mark.parametrize(
("op", "expected"),
[
("__add__", Dual(1 + 2.5, vars=["v0", "v1"], dual=np.array([1, 2]))),
("__sub__", Dual(1 - 2.5, vars=["v0", "v1"], dual=np.array([1, 2]))),
("__mul__", Dual(1 * 2.5, vars=["v0", "v1"], dual=np.array([1, 2]) * 2.5)),
("__truediv__", Dual(1 / 2.5, vars=["v0", "v1"], dual=np.array([1, 2]) / 2.5)),
],
)
def test_left_op_with_float(x_1, op, expected) -> None:
result = getattr(x_1, op)(2.5)
assert result == expected
@pytest.mark.parametrize(
("op", "expected"),
[
("__add__", Dual2(1 + 2.5, ["v0", "v1"], [1, 2], [1.0, 1.0, 1.0, 1.0])),
(
"__sub__",
Dual2(1 - 2.5, ["v0", "v1"], [1, 2], [1.0, 1.0, 1.0, 1.0]),
),
("__mul__", Dual2(1 * 2.5, ["v0", "v1"], [2.5, 5.0], [2.5, 2.5, 2.5, 2.5])),
(
"__truediv__",
Dual2(1 / 2.5, ["v0", "v1"], [1 / 2.5, 2 / 2.5], [1 / 2.5, 1 / 2.5, 1 / 2.5, 1 / 2.5]),
),
],
)
def test_left_op_with_float2(y_2, op, expected) -> None:
result = getattr(y_2, op)(2.5)
assert result == expected
def test_right_op_with_float(x_1) -> None:
assert 2.5 + x_1 == Dual(1 + 2.5, vars=["v0", "v1"], dual=np.array([1, 2]))
assert 2.5 - x_1 == Dual(2.5 - 1, vars=["v0", "v1"], dual=-np.array([1, 2]))
assert 2.5 * x_1 == x_1 * 2.5
assert 2.5 / x_1 == (x_1 / 2.5) ** -1
def test_right_op_with_float2(y_2) -> None:
assert 2.5 + y_2 == Dual2(
1 + 2.5,
vars=["v0", "v1"],
dual=[1.0, 2.0],
dual2=[1.0, 1.0, 1.0, 1.0],
)
assert 2.5 - y_2 == Dual2(
2.5 - 1,
vars=["v0", "v1"],
dual=[-1.0, -2.0],
dual2=[-1.0, -1.0, -1.0, -1.0],
)
assert 2.5 * y_2 == y_2 * 2.5
assert 2.5 / y_2 == (y_2 / 2.5) ** -1
def test_dual2_second_derivatives() -> None:
"test power, multiplication, addition"
def f(x, y, z):
"""
f_x = 4x^3 y^2, f_y = 2y x^4 + z, f_z = 3z^2 +y
f_xx = 12x^2 y^2, f_xy = 8 x^3 y, f_xz = 0,
f_yx = 8x^3 y, f_yy = 2 x^4, f_yz = 1,
f_zx = 0, f_zy = 1, f_zz = 6z
"""
return x**4 * y**2 + z**3 + y * z
x_, y_, z_ = 3, 2, 1
x = Dual2(x_, vars=["x"], dual=[1], dual2=[])
y = Dual2(y_, vars=["y"], dual=[1], dual2=[])
z = Dual2(z_, vars=["z"], dual=[1], dual2=[])
result = f(x, y, z)
assert result.dual[0] == 4 * x_**3 * y_**2 # 432
assert result.dual[1] == 2 * y_ * x_**4 + z_ # 325
assert result.dual[2] == 3 * z_**2 + y_ # 5
assert result.dual2[0, 0] * 2 == 12 * x_**2 * y_**2
assert result.dual2[0, 1] * 2 == 8 * x_**3 * y_
assert result.dual2[0, 2] * 2 == 0
assert result.dual2[1, 0] * 2 == 8 * x_**3 * y_
assert result.dual2[1, 1] * 2 == 2 * x_**4
assert result.dual2[1, 2] * 2 == 1
assert result.dual2[2, 0] * 2 == 0
assert result.dual2[2, 1] * 2 == 1
assert result.dual2[2, 2] * 2 == 6 * z_
def test_dual2_second_derivatives2() -> None:
"test dual_exp, multiplication, division, dual_log"
def f(x, y, z):
return (x / z).__exp__() + (x * y).__log__()
x_, y_, z_ = 3, 2, 1
x = Dual2(x_, vars=["x"], dual=[1], dual2=[])
y = Dual2(y_, vars=["y"], dual=[1], dual2=[])
z = Dual2(z_, vars=["z"], dual=[1], dual2=[])
result = f(x, y, z)
xi = result.vars.index("x")
yi = result.vars.index("y")
zi = result.vars.index("z")
assert result.dual[xi] == math.exp(x_ / z_) / z_ + 1 / x_
assert result.dual[yi] == 1 / y_
assert result.dual[zi] == -x_ * math.exp(x_ / z_) / z_**2
assert result.dual2[xi, xi] * 2 == math.exp(x_ / z_) / z_**2 - 1 / x_**2
assert result.dual2[xi, yi] * 2 == 0
assert result.dual2[xi, zi] * 2 == math.exp(x_ / z_) * (-1 / z_**2 - x_ / z_**3)
assert result.dual2[yi, xi] * 2 == 0
assert result.dual2[yi, yi] * 2 == -1 / y_**2
assert result.dual2[yi, zi] * 2 == 0
assert result.dual2[zi, xi] * 2 == math.exp(x_ / z_) * (-1 / z_**2 - x_ / z_**3)
assert result.dual2[zi, yi] * 2 == 0
assert result.dual2[zi, zi] * 2 == math.exp(x_ / z_) * (x_**2 / z_**4 + 2 * x_ / z_**3)
def test_dual2_second_derivatives3() -> None:
"""
h, f = dual_log(f), x^3y+y
f_x = 1/f 3x^2y, f_y = 1/f (x^3+1),
f_xx = -1/f^2 (3x^2y)^2 + 1/f 6xy, f_xy = -1/f^2 (3x^2y)(x^3+1),
f_yy = -1/f^2 (x^3+1)^2 +1/f (0)
"""
x_, y_ = 2, 1
x = Dual2(x_, vars=["x"], dual=[1], dual2=[])
y = Dual2(y_, vars=["y"], dual=[1], dual2=[])
f = y * x**3 + y
f_, fx_, fy_ = f.real, 3 * y_ * x_**2, x_**3 + 1
fxx_, fxy_, fyy_ = 6 * x_ * y_, 3 * x_**2, 0
xi = f.vars.index("x")
yi = f.vars.index("y")
assert f.dual[xi] == fx_
assert f.dual[yi] == fy_
assert f.dual2[xi, xi] * 2 == fxx_
assert f.dual2[xi, yi] * 2 == fxy_
assert f.dual2[yi, yi] * 2 == 0
h = f.__log__()
assert h.real == math.log(y_ * x_**3 + y_)
assert h.dual[xi] == 1 / f_ * fx_
assert h.dual[yi] == 1 / f_ * fy_
assert h.dual2[xi, xi] * 2 == -1 / f_**2 * fx_**2 + 1 / f_ * fxx_
assert h.dual2[xi, yi] * 2 == -1 / f_**2 * fx_ * fy_ + 1 / f_ * fxy_
assert h.dual2[yi, xi] * 2 == -1 / f_**2 * fx_ * fy_ + 1 / f_ * fxy_
assert h.dual2[yi, yi] * 2 == -1 / f_**2 * fy_**2 + 1 / f_ * fyy_
@pytest.mark.parametrize(
("power", "expected"),
[
(1, (2, 1, 0)),
(2, (4, 4, 2)),
(3, (8, 12, 12)),
(4, (16, 32, 48)),
(5, (32, 80, 160)),
(6, (64, 192, 480)),
],
)
def test_dual_power_1d(power, expected) -> None:
x = Dual(2, vars=["x"], dual=[1])
y = Dual2(2, vars=["x"], dual=[1], dual2=[])
f, g = x**power, y**power
assert f.real == expected[0]
assert f.dual[0] == expected[1]
assert g.real == expected[0]
assert g.dual[0] == expected[1]
assert g.dual2[0, 0] * 2 == expected[2]
def test_dual2_power2_1d() -> None:
x = Dual2(2, vars=["x"], dual=[1], dual2=[])
assert (x**2) * (x ** (-2)) == 1
assert (x**5) * (x ** (-5)) == 1
z = (x**7.35) * (x ** (-7.35))
assert abs(z - 1.0) < 1e-12
def test_dual2_power_2d() -> None:
x = Dual2(2, vars=["x"], dual=[1], dual2=[])
y = Dual2(3, vars=["y"], dual=[1], dual2=[])
f = (x**4 * y**3) ** 2
assert f.dual2[0, 1] * 2 == 1492992
assert f.dual2[1, 0] * 2 == 1492992
def test_dual2_inv_specific() -> None:
z = Dual2(2, vars=["x", "y"], dual=[2, 3], dual2=[])
result = z**-1
expected = Dual2(
0.5,
vars=["x", "y"],
dual=[-0.5, -0.75],
dual2=[0.5, 0.75, 0.75, 9 / 8],
)
assert result == expected
def test_dual_truediv(x_1) -> None:
expected = Dual(1, [], [])
result = x_1 / x_1
assert result == expected
def test_dual2_exp_1d() -> None:
x = Dual2(2, vars=["x"], dual=[1], dual2=[])
f = x.__exp__()
assert f.real == math.exp(2)
assert f.dual[0] == math.exp(2)
assert f.dual2[0, 0] * 2 == math.exp(2)
def test_dual2_log_1d() -> None:
x = Dual2(2, vars=["x"], dual=[1], dual2=[])
f = x.__log__()
assert f.real == math.log(2)
assert f.dual[0] == 0.5
assert f.dual2[0] * 2 == -0.25
def test_dual2_log_exp() -> None:
x = Dual2(2, vars=["x"], dual=[1], dual2=[])
y = x.__log__()
z = y.__exp__()
assert x == z
def test_combined_vars_sorted(y_3) -> None:
x = Dual2(2, vars=["a", "v0", "z"], dual=[1, 1, 1], dual2=[])
result = x * y_3
assert set(result.vars) == {"a", "v0", "v2", "z"}
@pytest.mark.parametrize(
"x",
[
2,
Dual(2, [], []),
Dual2(2, [], [], []),
],
)
def test_log(x) -> None:
result = dual_log(x)
expected = math.log(2)
assert result == expected
def test_dual_log_base() -> None:
result = dual_log(16, 2)
assert result == 4
result = dual_log(Dual(16, [], []), 2)
assert result == Dual(4, [], [])
@pytest.mark.parametrize(
"x",
[
2,
Dual(2, [], []),
Dual2(2, [], [], []),
],
)
def test_exp(x) -> None:
result = dual_exp(x)
expected = math.exp(2)
assert result == expected
@pytest.mark.parametrize(
"x",
[
Dual(1.25, ["x"], []),
Dual2(1.25, ["x"], [], []),
],
)
def test_norm_cdf(x) -> None:
result = x.__norm_cdf__()
expected = NormalDist().cdf(1.250)
assert abs(result - expected) < 1e-10
@pytest.mark.parametrize(
"x",
[
Dual(0.75, ["x"], []),
Dual2(0.75, ["x"], [], []),
],
)
def test_inv_norm_cdf(x) -> None:
result = x.__norm_inv_cdf__()
expected = NormalDist().inv_cdf(0.75)
assert abs(result - expected) < 1e-10
def test_norm_cdf_value() -> None:
result = dual_norm_cdf(1.0)
expected = 0.8413
assert abs(result - expected) < 1e-4
def test_inv_norm_cdf_value() -> None:
result = dual_inv_norm_cdf(0.50)
expected = 0.0
assert abs(result - expected) < 1e-4
@pytest.mark.skip(reason="downcast vars is not used within the library, kept only for compat.")
def test_downcast_vars() -> None:
w = Dual(2, ["x", "y", "z"], [0, 1, 1])
assert w.__downcast_vars__().vars == ("y", "z")
x = Dual2(2, ["x", "y", "z"], [0, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, 1])
assert x.__downcast_vars__().vars == ("y", "z")
y = Dual2(2, ["x", "y", "z"], [0, 0, 1], [0, 0, 0, 0, 0, 0, 0, 0, 1])
assert y.__downcast_vars__().vars == ("z",)
z = Dual2(2, ["x", "y", "z"], [0, 0, 1], [0, 0, 0, 0, 0, 1, 0, 1, 1])
assert z.__downcast_vars__().vars == ("y", "z")
# Linalg dual_solve tests
def test_solve(A, b) -> None:
x = dual_solve(A, b)
x_np = np.linalg.solve(A, b)
diff = x - x_np
assertions = [abs(diff[i, 0]) < 1e-10 for i in range(A.shape[0])]
assert all(assertions)
def test_solve_lsqrs() -> None:
A = np.array([[0, 1], [1, 1], [2, 1], [3, 1]])
b = np.array([[-1, 0.2, 0.9, 2.1]]).T
result = dual_solve(A, b, allow_lsq=True, types=(float, float))
assert abs(result[0, 0] - 1.0) < 1e-9
assert abs(result[1, 0] + 0.95) < 1e-9
def test_sparse_solve(A_sparse) -> None:
b = np.array(
[0, 0.90929743, 0.14112001, -0.7568025, -0.95892427, -0.2794155, 0.6569866, 0.98935825, 0],
)
b = b[:, np.newaxis]
x = dual_solve(A_sparse, b)
x_np = np.linalg.solve(A_sparse, b)
diff = x - x_np
assertions = [abs(diff[i, 0]) < 1e-10 for i in range(A_sparse.shape[0])]
assert all(assertions)
# Test numpy compat
def test_numpy_isclose(y_2) -> None:
# np.isclose not supported for non-numeric dtypes
a = np.array([y_2, y_2])
b = np.array([y_2, y_2])
with pytest.raises(TypeError):
assert np.isclose(a, b)
def test_numpy_equality(y_2) -> None:
# instead of isclose use == (which uses math.isclose elementwise) and then np.all
a = np.array([y_2, y_2])
b = np.array([y_2, y_2])
result = a == b
assert np.all(result)
@pytest.mark.parametrize(
"z",
[
Dual(2.0, ["y"], []),
Dual2(3.0, ["x"], [1], [2]),
],
)
@pytest.mark.parametrize(
"arg",
[
2.2,
Dual(3, ["x"], []),
Dual2(3, ["x"], [2], [3]),
],
)
@pytest.mark.parametrize(
"op_str",
[
"add",
"sub",
"mul",
"truediv",
],
)
def test_numpy_broadcast_ops_types(z, arg, op_str) -> None:
op = "__" + op_str + "__"
if type(z) in [Dual, Dual2] and type(arg) in [Dual, Dual2] and type(arg) is not type(z):
pytest.skip("Cannot operate Dual and Dual2 together.")
result = getattr(np.array([z, z]), op)(arg)
expected = np.array([getattr(z, op)(arg), getattr(z, op)(arg)])
assert np.all(result == expected)
result = getattr(arg, op)(np.array([z, z]))
if result is NotImplemented:
opr = "__r" + op_str + "__"
result = getattr(np.array([z, z]), opr)(arg)
expected = np.array([getattr(z, opr)(arg), getattr(z, opr)(arg)])
else:
expected = np.array([getattr(arg, op)(z), getattr(arg, op)(z)])
assert np.all(result == expected)
@pytest.mark.parametrize(
"z",
[
Dual(2.0, ["y"], []),
Dual2(3.0, ["x"], [1], [2]),
],
)
def test_numpy_broadcast_pow_types(z) -> None:
result = np.array([z, z]) ** 3
expected = np.array([z**3, z**3])
assert np.all(result == expected)
result = z ** np.array([3, 4])
expected = np.array([z**3, z**4])
assert np.all(result == expected)
def test_numpy_matmul(y_2, y_1) -> None:
a = np.array([y_2, y_1])
result = np.matmul(a[:, np.newaxis], a[np.newaxis, :])
expected = np.array([[y_2 * y_2, y_2 * y_1], [y_2 * y_1, y_1 * y_1]])
assert np.all(result == expected)
@pytest.mark.skipif(
version.parse(np.__version__) >= version.parse("1.25.0"),
reason="Object dtypes accepted by NumPy in 1.25.0+",
)
def test_numpy_einsum(y_2, y_1) -> None:
# einsum does not work with object dtypes
a = np.array([y_2, y_1])
with pytest.raises(TypeError):
_ = np.einsum("i,j", a, a, optimize=True)
@pytest.mark.skipif(
version.parse(np.__version__) < version.parse("1.25.0"),
reason="Object dtypes not accepted by NumPy in <1.25.0",
)
def test_numpy_einsum_works(y_2, y_1) -> None:
a = np.array([y_2, y_1])
result = np.einsum("i,j", a, a, optimize=True)
expected = np.array([[y_2 * y_2, y_2 * y_1], [y_2 * y_1, y_1 * y_1]])
assert np.all(result == expected)
@pytest.mark.parametrize(
"z",
[
Dual(2.0, ["y"], []),
Dual2(3.0, ["x"], [1], [2]),
],
)
@pytest.mark.parametrize(
"dtype",
[
np.int8,
np.int16,
np.int32,
np.int64,
np.float16,
np.float32,
np.float64,
np.longdouble,
],
)
def test_numpy_dtypes(z, dtype) -> None:
np.array([1, 2], dtype=dtype) + z
z + np.array([1, 2], dtype=dtype)
z + dtype(2)
dtype(2) + z
================================================
FILE: python/tests/test_dualrs.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
import math
import numpy as np
import pytest
from packaging import version
from rateslib.dual import Dual, Dual2, dual_exp, dual_log, dual_solve, gradient
from rateslib.rs import ADOrder
DUAL_CORE_PY = False
@pytest.fixture
def x_1():
return Dual(1, vars=["v0", "v1"], dual=[1, 2])
@pytest.fixture
def x_2():
return Dual(2, vars=["v0", "v2"], dual=[0, 3])
def test_zero_init() -> None:
x = Dual(1, vars=["x"], dual=[])
assert np.all(x.dual == np.ones(1))
def test_dual_repr(x_1) -> None:
result = x_1.__repr__()
assert result == ""
def test_dual_repr_4vars() -> None:
x = Dual(1.23456789, ["a", "b", "c", "d"], [1.01, 2, 3.50001, 4])
result = x.__repr__()
assert result == ""
def test_dual_str(x_1) -> None:
result = x_1.__str__()
assert result == ""
@pytest.mark.skipif(DUAL_CORE_PY, reason="Gradient comparison cannot compare Py and Rs Duals.")
@pytest.mark.parametrize(
("vars_", "expected"),
[
(["v1"], 2.00),
(["v1", "v0"], np.array([2.0, 1.0])),
],
)
def test_gradient_method(vars_, expected, x_1) -> None:
result = gradient(x_1, vars_)
assert np.all(result == expected)
def test_neg(x_1) -> None:
result = -x_1
expected = Dual(-1, vars=["v0", "v1"], dual=[-1, -2])
assert result == expected
def test_eq_ne(x_1) -> None:
# non-matching types
assert Dual(0, ["single_var"], []) != 0
# floats
assert Dual(2, [], []) == 2.0
assert Dual(2, [], []) == 2.0
# equality
assert x_1 == Dual(1, vars=["v0", "v1"], dual=np.array([1, 2]))
# non-matching elements
assert x_1 != Dual(2, vars=["v0", "v1"], dual=np.array([1, 2]))
assert x_1 != Dual(1, vars=["v0", "v1"], dual=np.array([2, 2]))
assert x_1 != Dual(1, vars=["v2", "v1"], dual=np.array([1, 2]))
def test_lt() -> None:
assert Dual(1, ["x"], []) < Dual(2, ["y"], [])
assert Dual(1, ["x"], []) < 10
assert Dual(1, ["x"], []) > 0.5
def test_le() -> None:
assert Dual(1.0, ["x"], []) <= Dual(1.0, ["y"], [])
assert Dual(1, ["x"], []) <= 1.0
assert Dual(1.0, ["x"], []) >= 1.0
def test_gt() -> None:
assert Dual(3, ["x"], []) > Dual(2, ["y"], [])
assert Dual(1, ["x"], []) > 0.5
assert Dual(0.3, ["x"], []) < 0.5
def test_ge() -> None:
assert Dual(1.0, ["x"], []) >= Dual(1.0, ["y"], [])
assert Dual(1, ["x"], []) >= 1.0
assert Dual(1.0, ["x"], []) <= 1.0
@pytest.mark.parametrize(
("op", "expected"),
[
("__add__", Dual(3, vars=["v0", "v1", "v2"], dual=[1, 2, 3])),
("__sub__", Dual(-1, vars=["v0", "v1", "v2"], dual=[1, 2, -3])),
("__mul__", Dual(2, vars=["v0", "v1", "v2"], dual=[2, 4, 3])),
("__truediv__", Dual(0.5, vars=["v0", "v1", "v2"], dual=[0.5, 1, -0.75])),
],
)
def test_ops(x_1, x_2, op, expected) -> None:
result = getattr(x_1, op)(x_2)
assert result == expected
@pytest.mark.parametrize(
("op", "expected"),
[
("__add__", Dual(1 + 2.5, vars=["v0", "v1"], dual=[1, 2])),
("__sub__", Dual(1 - 2.5, vars=["v0", "v1"], dual=[1, 2])),
("__mul__", Dual(1 * 2.5, vars=["v0", "v1"], dual=[2.5, 5.0])),
("__truediv__", Dual(1 / 2.5, vars=["v0", "v1"], dual=[1 / 2.5, 2 / 2.5])),
],
)
def test_left_op_with_float(x_1, op, expected) -> None:
result = getattr(x_1, op)(2.5)
assert result == expected
def test_right_op_with_float(x_1) -> None:
assert 2.5 + x_1 == Dual(1 + 2.5, vars=["v0", "v1"], dual=[1, 2])
assert 2.5 - x_1 == Dual(2.5 - 1, vars=["v0", "v1"], dual=[-1, -2])
assert 2.5 * x_1 == x_1 * 2.5
assert 2.5 / x_1 == (x_1 / 2.5) ** -1.0
def test_op_inversions(x_1, x_2) -> None:
assert (x_1 + x_2) - (x_2 + x_1) == 0
assert (x_1 / x_2) * (x_2 / x_1) == 1
def test_inverse(x_1) -> None:
assert x_1 * x_1**-1 == 1
def test_power_identity(x_1) -> None:
result = x_1**1
assert result == x_1
@pytest.mark.parametrize(
("power", "expected"),
[
(1, (2, 1)),
(2, (4, 4)),
(3, (8, 12)),
(4, (16, 32)),
(5, (32, 80)),
(6, (64, 192)),
],
)
def test_dual_power_1d(power, expected) -> None:
x = Dual(2, vars=["x"], dual=[1])
f = x**power
assert f.real == expected[0]
assert f.dual[0] == expected[1]
def test_dual_truediv(x_1) -> None:
expected = Dual(1, [], [])
result = x_1 / x_1
assert result == expected
def test_combined_vars_sorted(x_1) -> None:
x = Dual(2, vars=["a", "v0", "z"], dual=[])
result = x_1 * x
expected = ["v0", "v1", "a", "z"]
assert result.vars == expected
# x vars are stored first
result = x * x_1
expected = ["a", "v0", "z", "v1"]
assert result.vars == expected
def test_exp(x_1) -> None:
result = x_1.__exp__()
expected = Dual(math.e, ["v0", "v1"], [math.e, 2 * math.e])
assert result == expected
def test_log(x_1) -> None:
result = x_1.__log__()
expected = Dual(0.0, ["v0", "v1"], [1.0, 2.0])
assert result == expected
# Test NumPy compat
def test_numpy_isclose(x_1) -> None:
# np.isclose not supported for non-numeric dtypes
a = np.array([x_1, x_1])
b = np.array([x_1, x_1])
with pytest.raises(TypeError):
assert np.isclose(a, b)
def test_numpy_equality(x_1) -> None:
# instead of isclose use == (which uses math.isclose elementwise) and then np.all
a = np.array([x_1, x_1])
b = np.array([x_1, x_1])
result = a == b
assert np.all(result)
@pytest.mark.parametrize(
"z",
[
Dual(2.0, ["y"], []),
# Dual2(3.0, "x", np.array([1]), np.array([[2]])),
],
)
@pytest.mark.parametrize(
"arg",
[
2.2,
Dual(3, ["x"], []),
# Dual2(3, "x", np.array([2]), np.array([[3]])),
],
)
@pytest.mark.parametrize(
"op_str",
[
"add",
"sub",
"mul",
"truediv",
],
)
def test_numpy_broadcast_ops_types(z, arg, op_str) -> None:
op = "__" + op_str + "__"
types = [Dual] # ,Dual2]
if type(z) in types and type(arg) in types and type(arg) is not type(z):
pytest.skip("Cannot operate Dual and Dual2 together.")
result = getattr(np.array([z, z]), op)(arg)
expected = np.array([getattr(z, op)(arg), getattr(z, op)(arg)])
assert np.all(result == expected)
result = getattr(arg, op)(np.array([z, z]))
if result is NotImplemented:
opr = "__r" + op_str + "__"
result = getattr(np.array([z, z]), opr)(arg)
expected = np.array([getattr(z, opr)(arg), getattr(z, opr)(arg)])
else:
expected = np.array([getattr(arg, op)(z), getattr(arg, op)(z)])
assert np.all(result == expected)
@pytest.mark.parametrize(
"z",
[
Dual(2.0, ["y"], []),
# Dual2(3.0, "x", np.array([1]), np.array([[2]])),
],
)
def test_numpy_broadcast_pow_types(z) -> None:
result = np.array([z, z]) ** 3
expected = np.array([z**3, z**3])
assert np.all(result == expected)
result = z ** np.array([3, 4])
expected = np.array([z**3, z**4])
assert np.all(result == expected)
def test_numpy_matmul(x_1) -> None:
x_2 = Dual(2.5, ["x", "y"], [3.0, -2.0])
a = np.array([x_1, x_2])
result = np.matmul(a[:, np.newaxis], a[np.newaxis, :])
expected = np.array([[x_1 * x_1, x_1 * x_2], [x_2 * x_1, x_2 * x_2]])
assert np.all(result == expected)
@pytest.mark.skipif(
version.parse(np.__version__) < version.parse("1.25.0"),
reason="Object dtypes not accepted by NumPy in <1.25.0",
)
def test_numpy_einsum_works(x_1) -> None:
x_2 = Dual(2.5, ["x", "y"], [3.0, -2.0])
a = np.array([x_1, x_2])
result = np.einsum("i,j", a, a, optimize=True)
expected = np.array([[x_1 * x_1, x_1 * x_2], [x_2 * x_1, x_2 * x_2]])
assert np.all(result == expected)
@pytest.mark.parametrize(
"z",
[
Dual(2.0, ["y"], []),
# Dual2(3.0, "x", np.array([1]), np.array([[2]])),
],
)
@pytest.mark.parametrize(
"dtype",
[
np.int8,
np.int16,
np.int32,
np.int64,
np.float16,
np.float32,
np.float64,
np.longdouble,
],
)
def test_numpy_dtypes(z, dtype) -> None:
np.array([1, 2], dtype=dtype) + z
z + np.array([1, 2], dtype=dtype)
z + dtype(2)
dtype(2) + z
def test_dual_solve() -> None:
a = np.array([[Dual(1.0, [], []), Dual(0.0, [], [])], [Dual(0.0, [], []), Dual(1.0, [], [])]])
b = np.array([Dual(2.0, ["x"], [1.0]), Dual(5.0, ["x", "y"], [1.0, 1.0])])
result = dual_solve(a, b[:, None], types=(Dual, Dual))[:, 0]
expected = np.array([Dual(2.0, ["x", "y"], [1.0, 0.0]), Dual(5.0, ["x", "y"], [1.0, 1.0])])
assert np.all(result == expected)
@pytest.mark.parametrize(
"obj",
[
Dual(1.0, ["x", "y"], [1.0, 2.0]),
Dual2(2.0, ["x", "y"], [1.0, 2.0], [1.0, 2.0, 2.0, 3.0]),
],
)
def test_pickle(obj) -> None:
import pickle
pickled_obj = pickle.dumps(obj)
reloaded = pickle.loads(pickled_obj)
assert obj == reloaded
@pytest.mark.parametrize("z", [2.0, Dual(2.0, ["z"], [])])
@pytest.mark.parametrize("p", [2.0, Dual(2.0, ["p"], [])])
def test_dual_powers_finite_diff(z, p):
if isinstance(z, float) and isinstance(p, float):
return None # float power not in scope
result = z**p
if isinstance(z, Dual):
# Finite diff test
z_diff = ((z + 0.00001) ** p - result) / 0.00001
assert abs(gradient(result, ["z"])[0] - z_diff) < 1e-4
if isinstance(p, Dual):
# Finite diff test
p_diff = (z ** (p + 0.00001) - result) / 0.00001
assert abs(gradient(result, ["p"])[0] - p_diff) < 1e-4
def test_dual_powers_operators() -> None:
z = Dual(2.3, ["x", "y", "z"], [1.0, 2.0, 3.0])
p = Dual(4.4, ["x", "y", "p"], [2.0, 3.0, 4.0])
result = z**p
expected = dual_exp(p * dual_log(z))
assert abs(result - expected) < 1e-12
assert np.all(
np.isclose(gradient(result, ["x", "y", "z", "p"]), gradient(expected, ["x", "y", "z", "p"]))
)
@pytest.mark.parametrize("z", [2.0, Dual2(2.0, ["z"], [], [])])
@pytest.mark.parametrize("p", [2.0, Dual2(2.0, ["p"], [], [])])
def test_dual2_powers_finite_diff_first_order(z, p):
if isinstance(z, float) and isinstance(p, float):
return None # float power not in scope
result = z**p
if isinstance(z, Dual2):
# Finite diff test
z_diff = ((z + 0.00001) ** p - result) / 0.00001
assert abs(gradient(result, ["z"])[0] - z_diff) < 1e-4
if isinstance(p, Dual2):
# Finite diff test
p_diff = (z ** (p + 0.00001) - result) / 0.00001
assert abs(gradient(result, ["p"])[0] - p_diff) < 1e-4
@pytest.mark.parametrize("z", [2.0, Dual2(2.0, ["z"], [], [])])
@pytest.mark.parametrize("p", [2.0, Dual2(2.0, ["p"], [], [])])
def test_dual2_powers_finite_diff_second_order(z, p):
if isinstance(z, float) and isinstance(p, float):
return None # float power not in scope
result = z**p
vars_ = (isinstance(z, Dual2), isinstance(p, Dual2))
if vars_[0]:
z_up = (z + 0.00001) ** p
z_dw = (z - 0.00001) ** p
diff = (z_up + z_dw - 2 * result) / 1e-10
assert abs(gradient(result, ["z"], order=2)[0][0] - diff) < 1e-4
if vars_[1]:
p_up = z ** (p + 0.00001)
p_dw = z ** (p - 0.00001)
diff = (p_up + p_dw - 2 * result) / 1e-10
assert abs(gradient(result, ["p"], order=2)[0][0] - diff) < 1e-4
if vars_[1] and vars_[0]:
upup = (z + 0.00001) ** (p + 0.00001)
dwdw = (z - 0.00001) ** (p - 0.00001)
updw = (z + 0.00001) ** (p - 0.00001)
dwup = (z - 0.00001) ** (p + 0.00001)
diff = (upup + dwdw - updw - dwup) / 4e-10
assert abs(gradient(result, ["z", "p"], order=2)[0, 1] - diff) < 1e-4
def test_dual2_powers_operators() -> None:
z = Dual2(2.3, ["x", "y", "z"], [1.0, 2.0, 3.0], [1, 2, 3, 4, 5, 6, 7, 8, 9])
p = Dual2(4.4, ["x", "y", "p"], [2.0, 3.0, 4.0], [2, 3, 4, 5, 2, 3, 4, 3, 4])
result = z**p
expected = dual_exp(p * dual_log(z))
assert abs(result - expected) < 1e-12
assert np.all(
np.isclose(gradient(result, ["x", "y", "z", "p"]), gradient(expected, ["x", "y", "z", "p"]))
)
assert np.all(
np.isclose(
gradient(result, ["x", "y", "z", "p"], order=2),
gradient(expected, ["x", "y", "z", "p"], order=2),
)
)
================================================
FILE: python/tests/test_enums.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from rateslib.enums import FloatFixingMethod
def test_method_param():
a1 = FloatFixingMethod.RFRPaymentDelay()
a2 = FloatFixingMethod.RFRPaymentDelayAverage()
for obj in [a1, a2]:
assert obj.method_param() == 0
b1 = FloatFixingMethod.IBOR(6)
b2 = FloatFixingMethod.RFRLookback(6)
for obj in [b1, b2]:
assert obj.method_param() == 6
================================================
FILE: python/tests/test_fixings.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
import os
import pytest
from pandas import Series
from rateslib import dt, fixings
from rateslib.curves import Curve
from rateslib.data.fixings import (
FloatRateIndex,
FloatRateSeries,
FXFixing,
FXIndex,
IRSFixing,
RFRFixing,
_FXFixingMajor,
_UnitFixing,
)
from rateslib.enums.generics import NoInput
from rateslib.enums.parameters import FloatFixingMethod, SwaptionSettlementMethod
from rateslib.instruments import IRS
from rateslib.scheduling import Adjuster, get_calendar
@pytest.mark.parametrize("name", ["estr", "sonia", "sofr", "swestr", "nowa"])
def test_fixings(name) -> None:
result = fixings[name]
assert isinstance(result[1], Series)
def test_calendar_matches_fixings_corra() -> None:
# this should run without warnings or errors if the "tro" calendar matches the fixings.
swap = IRS(
effective=dt(2017, 1, 1),
termination=dt(2023, 7, 1),
frequency="A",
leg2_rate_fixings=fixings["corra"][1],
calendar="tro",
fixed_rate=1.0,
)
curve = Curve({dt(2017, 1, 1): 1.0, dt(2027, 1, 1): 1.0}, calendar="tro")
swap.npv(curves=curve)
def test_fixings_raises_file_error() -> None:
with pytest.raises(ValueError, match="Fixing data for the index "):
fixings["nofile"]
def test_add_fixings_directly() -> None:
s = Series(
index=[dt(2000, 2, 1), dt(2000, 3, 1), dt(2000, 1, 1)],
data=[200.0, 300.0, 100.0],
)
fixings.add("my_values", s)
assert fixings["my_values"][1].is_monotonic_increasing
assert fixings["my_values"][1].name == "rate"
assert fixings["my_values"][1].index.name == "reference_date"
fixings.pop("my_values")
def test_add_fixings_directly_with_specific_state() -> None:
s = Series(
index=[dt(2000, 2, 1), dt(2000, 3, 1), dt(2000, 1, 1)],
data=[200.0, 300.0, 100.0],
)
fixings.add("my_values", s, 10103)
assert fixings["my_values"][0] == 10103
fixings.pop("my_values")
def test_get_stub_ibor_fixings() -> None:
s = Series(
index=[dt(2000, 2, 1), dt(2000, 3, 1), dt(2000, 1, 1)],
data=[200.0, 300.0, 100.0],
)
fixings.add("usd_IBOR_3w", s)
fixings.add("usd_IBOR_1m", s)
fixings.add("usd_IBOR_2m", s)
fixings.add("USD_ibor_3M", s)
s, _, _ = fixings.get_stub_ibor_fixings(
value_start_date=dt(2000, 1, 1),
value_end_date=dt(2000, 2, 15),
fixing_calendar=get_calendar("nyc"),
fixing_modifier=Adjuster.Following(),
fixing_identifier="USD_IBOR",
fixing_date=dt(1999, 12, 30),
)
fixings.pop("usd_IBOR_3w")
fixings.pop("usd_IBOR_1m")
fixings.pop("usd_IBOR_2m")
fixings.pop("USD_ibor_3M")
assert s == ["1M", "2M"]
@pytest.mark.parametrize(("fixing"), [True, False])
def test_get_stub_ibor_fixings_no_left(fixing) -> None:
s = Series(
index=[dt(2000, 2, 1), dt(2000, 3, 1), dt(2000, 1, 1)],
data=[200.0, 300.0, 100.0],
)
if fixing:
s[dt(1999, 12, 30)] = 12345.0
fixings.add("usd_IBOR_2w", s)
fixings.add("usd_IBOR_3w", s)
s, _, f = fixings.get_stub_ibor_fixings(
value_start_date=dt(2000, 1, 1),
value_end_date=dt(2000, 1, 8),
fixing_calendar=get_calendar("nyc"),
fixing_modifier=Adjuster.Following(),
fixing_identifier="USD_IBOR",
fixing_date=dt(1999, 12, 30),
)
fixings.pop("usd_IBOR_2w")
fixings.pop("usd_IBOR_3w")
assert s == ["2W"]
assert f == [12345.0 if fixing else None]
@pytest.mark.parametrize(("fixing"), [True, False])
def test_get_stub_ibor_fixings_no_right(fixing) -> None:
s = Series(
index=[dt(2000, 2, 1), dt(2000, 3, 1), dt(2000, 1, 1)],
data=[200.0, 300.0, 100.0],
)
if fixing:
s[dt(1999, 12, 30)] = 12345.0
fixings.add("usd_IBOR_2m", s)
fixings.add("USD_ibor_3M", s)
s, _, f = fixings.get_stub_ibor_fixings(
value_start_date=dt(2000, 1, 1),
value_end_date=dt(2000, 7, 8),
fixing_calendar=get_calendar("nyc"),
fixing_modifier=Adjuster.Following(),
fixing_identifier="USD_IBOR",
fixing_date=dt(1999, 12, 30),
)
fixings.pop("usd_IBOR_2m")
fixings.pop("USD_ibor_3M")
assert s == ["3M"]
assert f == [12345.0 if fixing else None]
def test_get_stub_ibor_fixings_no_left_no_right() -> None:
s, _, _ = fixings.get_stub_ibor_fixings(
value_start_date=dt(2000, 1, 1),
value_end_date=dt(2000, 7, 8),
fixing_calendar=get_calendar("nyc"),
fixing_modifier=Adjuster.Following(),
fixing_identifier="USD_NONE",
fixing_date=dt(1999, 12, 30),
)
assert s == []
def test_state_id():
s = Series(
index=[dt(2000, 2, 1), dt(2000, 3, 1), dt(2000, 1, 1)],
data=[200.0, 300.0, 100.0],
)
fixings.add("usd_IBOR_3w", s)
before = fixings["usd_IBOR_3w"][0]
fixings.pop("usd_IBOR_3w")
fixings.add("usd_IBOR_3w", s)
assert before != fixings["usd_IBOR_3w"][0]
def test_series_combine():
from rateslib.periods.protocols.fixings import _s2_before_s1
s1 = Series(index=[2, 3], data=[100.0, 200.0])
s2 = Series(index=[1, 2], data=[300.0, 400.0])
result = s1.combine(s2, _s2_before_s1)
assert all(result == Series(index=[1, 2, 3], data=[300.0, 400.0, 200.0]))
def test_reset_doc():
fx_fixing1 = FXFixing(delivery=dt(2021, 1, 1), fx_index="eurusd", identifier="A")
fx_fixing2 = FXFixing(delivery=dt(2021, 1, 1), fx_index="gbpusd", identifier="B")
fixings.add("A_eurusd", Series(index=[dt(2020, 12, 30)], data=[1.1]), state=100)
fixings.add("B_gbpusd", Series(index=[dt(2020, 12, 30)], data=[1.4]), state=200)
# data is populated from the available Series
assert fx_fixing1.value == 1.1
assert fx_fixing2.value == 1.4
# fixings are reset according to the data state
fx_fixing1.reset(state=100)
fx_fixing2.reset(state=100)
# only the private data for fixing1 is removed because of its link to the data state
assert fx_fixing1._value == NoInput.blank
assert fx_fixing2._value == 1.4
fixings.pop("A_eurusd")
fixings.pop("B_gbpusd")
class TestRFRFixing:
def test_rfr_lockout(self) -> None:
name = str(hash(os.urandom(8))) + "_1B"
estr_1b = Series(
index=[dt(2025, 9, 12), dt(2025, 9, 15), dt(2025, 9, 16)], data=[1.91, 1.92, 1.93]
)
fixings.add(name, estr_1b)
rfr_fixing = RFRFixing(
accrual_start=dt(2025, 9, 12),
accrual_end=dt(2025, 9, 19),
identifier=name,
spread_compound_method="NoneSimple",
fixing_method=FloatFixingMethod.RFRLockout(2),
float_spread=100.0,
rate_index=FloatRateIndex(frequency="1B", series="eur_rfr"),
)
result = rfr_fixing.value
assert abs(result - 2.9202637862854033) < 1e-10
assert len(rfr_fixing.populated) == 5
class TestFXFixing:
def test_direct(self) -> None:
name = str(hash(os.urandom(8)))
fixings.add(name + "_USDRUB", Series(index=[dt(2000, 1, 1)], data=[2.0]))
fx_fixing = FXFixing(
publication=dt(2000, 1, 1),
fx_index=FXIndex("usdrub", "fed", 2),
identifier=name,
)
assert fx_fixing.value == 2.0
fixings.pop(name + "_USDRUB")
def test_inverted(self) -> None:
name = str(hash(os.urandom(8)))
fixings.add(name + "_USDRUB", Series(index=[dt(2000, 1, 1)], data=[2.0]))
fx_fixing = FXFixing(
publication=dt(2000, 1, 1),
fx_index=FXIndex("rubusd", "fed", 2),
identifier=name,
)
assert fx_fixing.value == 0.5
fixings.pop(name + "_USDRUB")
def test_cross1(self) -> None:
name = str(hash(os.urandom(8)))
fixings.add(name + "_USDRUB", Series(index=[dt(2000, 1, 1)], data=[2.0]))
fixings.add(name + "_USDINR", Series(index=[dt(2000, 1, 1)], data=[4.0]))
fx_fixing = FXFixing(
publication=dt(2000, 1, 1),
fx_index=FXIndex("rubinr", "fed", 2),
identifier=name,
)
assert fx_fixing.value == 1 / 2.0 * 4.0
fixings.pop(name + "_USDRUB")
fixings.pop(name + "_USDINR")
def test_cross2(self) -> None:
name = str(hash(os.urandom(8)))
fixings.add(name + "_RUBUSD", Series(index=[dt(2000, 1, 1)], data=[2.0]))
fixings.add(name + "_INRUSD", Series(index=[dt(2000, 1, 1)], data=[4.0]))
fx_fixing = FXFixing(
publication=dt(2000, 1, 1),
fx_index=FXIndex("rubinr", "fed", 2),
identifier=name,
)
assert fx_fixing.value == 2.0 * 1 / 4.0
fixings.pop(name + "_RUBUSD")
fixings.pop(name + "_INRUSD")
def test_reset(self):
fx_fixing = FXFixing(
publication=dt(2000, 1, 1), fx_index=FXIndex("rubusd", "fed", 1), identifier="test"
)
fixings.add("test_USDRUB", Series(index=[dt(2000, 1, 1)], data=[2.0]))
assert fx_fixing.value == 0.5
fx_fixing.reset(state=1)
assert fx_fixing._value == 0.5
fx_fixing.reset(state=fixings["TEST_USDRUB"][0])
assert fx_fixing._value == NoInput(0)
fixings.pop("test_USDRUB")
def test_no_state_update(self):
# test that the fixing value and state is updated at the appropriate times.
fx_fixing = FXFixing(
delivery=dt(2000, 1, 1),
fx_index=FXIndex("rubusd", "fed", 1, "all", 0),
identifier="test",
)
fixings.add("test_USDRUB", Series(index=[dt(2000, 1, 1)], data=[2.0]))
assert fx_fixing.value == 0.5
old_state = fx_fixing._state
fixings.pop("test_USDRUB")
fixings.add("test_USDRUB", Series(index=[dt(2000, 1, 1)], data=[5.0]))
# value and state are unchanged
assert fx_fixing.value == 0.5
assert fx_fixing._state == old_state
fx_fixing.reset()
# value are state are now set after reset
assert fx_fixing.value == 0.20
assert fx_fixing._state == hash(fixings["TEST_USDRUB"][0])
fixings.pop("test_USDRUB")
# test all cross constructions
def test_construct_1_major_usd(self):
fx_fixing = FXFixing(fx_index="usdeur", publication=dt(2000, 1, 1))
assert isinstance(fx_fixing.fx_fixing1, _FXFixingMajor)
assert isinstance(fx_fixing.fx_fixing2, _UnitFixing)
fx_fixing = FXFixing(fx_index="eurusd", publication=dt(2000, 1, 1))
assert isinstance(fx_fixing.fx_fixing1, _FXFixingMajor)
assert isinstance(fx_fixing.fx_fixing2, _UnitFixing)
fx_fixing = FXFixing(fx_index=FXIndex("usdbrl", "fed", 2), publication=dt(2000, 1, 1))
assert isinstance(fx_fixing.fx_fixing1, _FXFixingMajor)
assert isinstance(fx_fixing.fx_fixing2, _UnitFixing)
fx_fixing = FXFixing(fx_index=FXIndex("brlusd", "fed", 2), publication=dt(2000, 1, 1))
assert isinstance(fx_fixing.fx_fixing1, _FXFixingMajor)
assert isinstance(fx_fixing.fx_fixing2, _UnitFixing)
def test_construct_1_major_eur(self):
fx_fixing = FXFixing(fx_index="eursek", publication=dt(2000, 1, 1))
assert isinstance(fx_fixing.fx_fixing1, _FXFixingMajor)
assert isinstance(fx_fixing.fx_fixing2, _UnitFixing)
fx_fixing = FXFixing(fx_index="sekeur", publication=dt(2000, 1, 1))
assert isinstance(fx_fixing.fx_fixing1, _FXFixingMajor)
assert isinstance(fx_fixing.fx_fixing2, _UnitFixing)
def test_construct_2_major_eur(self):
fx_fixing = FXFixing(fx_index="usdsek", publication=dt(2000, 1, 1))
assert isinstance(fx_fixing.fx_fixing1, _FXFixingMajor)
assert fx_fixing.fx_fixing1.pair == "usdeur"
assert isinstance(fx_fixing.fx_fixing2, _FXFixingMajor)
assert isinstance(fx_fixing.fx_fixing3, _UnitFixing)
fx_fixing = FXFixing(fx_index="sekusd", publication=dt(2000, 1, 1))
assert isinstance(fx_fixing.fx_fixing1, _FXFixingMajor)
assert fx_fixing.fx_fixing1.pair == "sekeur"
assert isinstance(fx_fixing.fx_fixing2, _FXFixingMajor)
assert isinstance(fx_fixing.fx_fixing3, _UnitFixing)
fx_fixing = FXFixing(fx_index="seknok", publication=dt(2000, 1, 1))
assert isinstance(fx_fixing.fx_fixing1, _FXFixingMajor)
assert fx_fixing.fx_fixing1.pair == "sekeur"
assert isinstance(fx_fixing.fx_fixing2, _FXFixingMajor)
assert isinstance(fx_fixing.fx_fixing3, _UnitFixing)
def test_construct_2_major_usd(self):
fx_fixing = FXFixing(fx_index="eurgbp", publication=dt(2000, 1, 1))
assert isinstance(fx_fixing.fx_fixing1, _FXFixingMajor)
assert fx_fixing.fx_fixing1.pair == "eurusd"
assert isinstance(fx_fixing.fx_fixing2, _FXFixingMajor)
assert isinstance(fx_fixing.fx_fixing3, _UnitFixing)
fx_fixing = FXFixing(fx_index="gbpeur", publication=dt(2000, 1, 1))
assert isinstance(fx_fixing.fx_fixing1, _FXFixingMajor)
assert fx_fixing.fx_fixing1.pair == "gbpusd"
assert isinstance(fx_fixing.fx_fixing2, _FXFixingMajor)
assert isinstance(fx_fixing.fx_fixing3, _UnitFixing)
fx_fixing = FXFixing(fx_index="gbpcad", publication=dt(2000, 1, 1))
assert isinstance(fx_fixing.fx_fixing1, _FXFixingMajor)
assert fx_fixing.fx_fixing1.pair == "gbpusd"
assert isinstance(fx_fixing.fx_fixing2, _FXFixingMajor)
assert isinstance(fx_fixing.fx_fixing3, _UnitFixing)
def test_construct_3_major(self):
fx_fixing = FXFixing(fx_index=FXIndex("nokcad", "tro", 2), publication=dt(2000, 1, 1))
assert isinstance(fx_fixing.fx_fixing1, _FXFixingMajor)
assert fx_fixing.fx_fixing1.pair == "nokeur"
assert isinstance(fx_fixing.fx_fixing2, _FXFixingMajor)
assert fx_fixing.fx_fixing2.pair == "eurusd"
assert isinstance(fx_fixing.fx_fixing3, _FXFixingMajor)
assert fx_fixing.fx_fixing3.pair == "usdcad"
fx_fixing = FXFixing(fx_index=FXIndex("cadnok", "tro", 2), publication=dt(2000, 1, 1))
assert isinstance(fx_fixing.fx_fixing1, _FXFixingMajor)
assert fx_fixing.fx_fixing1.pair == "cadusd"
assert isinstance(fx_fixing.fx_fixing2, _FXFixingMajor)
assert fx_fixing.fx_fixing2.pair == "usdeur"
assert isinstance(fx_fixing.fx_fixing3, _FXFixingMajor)
assert fx_fixing.fx_fixing3.pair == "eurnok"
class TestFXFixingMajor:
def test_direct(self) -> None:
name = str(hash(os.urandom(8)))
fixings.add(name + "_USDRUB", Series(index=[dt(2000, 1, 1)], data=[2.0]))
fx_fixing = _FXFixingMajor(
publication=dt(2000, 1, 1),
fx_index=FXIndex("usdrub", "fed", 2),
identifier=name,
)
assert fx_fixing.value == 2.0
fixings.pop(name + "_USDRUB")
def test_inverted(self) -> None:
name = str(hash(os.urandom(8)))
fixings.add(name + "_USDRUB", Series(index=[dt(2000, 1, 1)], data=[2.0]))
fx_fixing = _FXFixingMajor(
publication=dt(2000, 1, 1),
fx_index=FXIndex("rubusd", "fed", 2),
identifier=name,
)
assert fx_fixing.value == 0.5
fixings.pop(name + "_USDRUB")
def test_cross1(self) -> None:
name = str(hash(os.urandom(8)))
fixings.add(name + "_INRRUB", Series(index=[dt(2000, 1, 1)], data=[2.0]))
fx_fixing = _FXFixingMajor(
publication=dt(2000, 1, 1),
fx_index=FXIndex("rubinr", "fed", 2),
identifier=name,
)
assert fx_fixing.value == 1 / 2.0
fixings.pop(name + "_INRRUB")
def test_reset(self):
fx_fixing = _FXFixingMajor(
publication=dt(2000, 1, 1), fx_index=FXIndex("rubusd", "fed", 1), identifier="test"
)
fixings.add("test_USDRUB", Series(index=[dt(2000, 1, 1)], data=[2.0]))
assert fx_fixing.value == 0.5
fx_fixing.reset(state=1)
assert fx_fixing._value == 0.5
fx_fixing.reset(state=fixings["TEST_USDRUB"][0])
assert fx_fixing._value == NoInput(0)
fixings.pop("test_USDRUB")
def test_no_state_update(self):
# test that the fixing value and state is updated at the appropriate times.
fx_fixing = _FXFixingMajor(
delivery=dt(2000, 1, 1),
fx_index=FXIndex("rubusd", "fed", 1, "all", 0),
identifier="test",
)
fixings.add("test_USDRUB", Series(index=[dt(2000, 1, 1)], data=[2.0]))
assert fx_fixing.value == 0.5
old_state = fx_fixing._state
fixings.pop("test_USDRUB")
fixings.add("test_USDRUB", Series(index=[dt(2000, 1, 1)], data=[5.0]))
# value and state are unchanged
assert fx_fixing.value == 0.5
assert fx_fixing._state == old_state
fx_fixing.reset()
# value are state are now set after reset
assert fx_fixing.value == 0.20
assert fx_fixing._state == fixings["TEST_USDRUB"][0]
fixings.pop("test_USDRUB")
class TestIRSFixing:
@pytest.mark.parametrize(
("method", "expected"),
[
(SwaptionSettlementMethod.Physical, 192.8729663786536),
(SwaptionSettlementMethod.CashParTenor, 189.90825721068495),
(SwaptionSettlementMethod.CashCollateralized, 192.8729663786536),
],
)
def test_annuity(self, method, expected) -> None:
fixing = IRSFixing(
irs_series="usd_irs",
publication=dt(2026, 2, 18),
tenor="2Y",
)
curve = Curve(nodes={dt(2026, 2, 18): 1.0, dt(2029, 2, 18): 0.9})
result = fixing.annuity(
settlement_method=method,
rate_curve=curve,
index_curve=curve,
)
assert abs(result - expected) < 1e-6
================================================
FILE: python/tests/test_fx.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
from random import choice, shuffle
import numpy as np
import pytest
from matplotlib import pyplot as plt
from pandas import DataFrame, Series
from pandas.testing import assert_frame_equal, assert_series_equal
from rateslib.curves import CompositeCurve, Curve, LineCurve, MultiCsaCurve
from rateslib.data.fixings import FXIndex
from rateslib.default import NoInput
from rateslib.dual import Dual, Dual2, gradient
from rateslib.fx import (
FXForwards,
FXRates,
forward_fx,
)
from rateslib.fx.fx_forwards import _recursive_pair_population
from rateslib.serialization import from_json
class TestStateAndCache:
def test_method_state_chg(self):
fxr = FXRates({"eurusd": 1.0, "usdgbp": 1.0})
original = fxr._state
fxr.update({"eurusd": 2.0})
new = fxr._state
assert new != original
def test_method_does_not_chg_state(self):
fxr = FXRates({"eurusd": 1.0, "usdgbp": 1.0})
original = fxr._state
fxr._set_ad_order(2)
new = fxr._state
assert new == original
def test_cached_property_fxarray(self):
fxr = FXRates({"eurusd": 1.0, "usdgbp": 1.0})
original = fxr.rate("eurgbp")
fxr.update({"eurusd": 2.0}) # clear the FXarray cached property
new = fxr.rate("eurgbp")
assert new != original
@pytest.mark.parametrize(
("meth", "args"), [("update", ([{"eurusd": 1.0}],)), ("_set_ad_order", (2,))]
)
def test_fxforwards_cache_clearing(self, meth, args):
fxr1 = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxf = FXForwards(
fx_rates=[fxr1], # FXRates as list
fx_curves={
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"usdeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
},
)
fxf._cache[(dt(2000, 1, 1), "eurusd")] = 100.0
getattr(fxf, meth)(*args)
assert fxf._cache == {}
@pytest.mark.parametrize(
"fx_rates",
[
{"eurusd": 1.0, "seknok": 1.0},
{"eurusd": 1.0, "usdeur": 1.0, "usdgbp": 1.0},
{"eurusd": 1.0, "usdeur": 1.0, "seknok": 1.0},
],
)
def test_ill_constrained(fx_rates) -> None:
with pytest.raises(ValueError, match="FX Array cannot be solved."):
FXRates(fx_rates)
def test_avoid_recursion_error() -> None:
pairs = ["jpymxp", "usdnok", "usdgbp", "audmxp", "gbpsek", "eurnok", "eursek"]
with pytest.raises(ValueError, match="FX Array cannot be solved. There are degenerate"):
FXRates(dict.fromkeys(pairs, 1.2))
def test_rates() -> None:
fxr = FXRates({"usdeur": 2.0, "usdgbp": 2.5})
assert fxr.currencies == {"usd": 0, "eur": 1, "gbp": 2}
assert fxr.currencies_list == ["usd", "eur", "gbp"]
assert fxr.pairs == ["usdeur", "usdgbp"]
assert fxr.q == 3
assert fxr.fx_array[1, 2].real == 1.25
assert fxr.fx_array[1, 2] == Dual(1.25, ["fx_usdeur", "fx_usdgbp"], [-0.625, 0.50])
assert fxr.rate("eurgbp") == Dual(1.25, ["fx_usdeur", "fx_usdgbp"], [-0.625, 0.50])
assert fxr.rate(FXIndex("eurgbp", "tgt", 2)) == Dual(
1.25, ["fx_usdeur", "fx_usdgbp"], [-0.625, 0.50]
)
def test_fxrates_multi_single_currency() -> None:
fxr = FXRates({"eurusd": 0.5, "usdgbp": 1.25, "usdjpy": 100.0, "usdnok": 10.0, "usdbrl": 50.0})
fxr._set_ad_order(0)
expected = np.array(
[
[1.0, 2.0, 1.25, 100.0, 10.0, 50.0],
[0.5, 1.0, 0.625, 50.0, 5.0, 25.0],
[0.8, 1.6, 1.0, 80.0, 8.0, 40.0],
[0.01, 0.02, 0.0125, 1.0, 0.1, 0.5],
[0.1, 0.2, 0.125, 10.0, 1.0, 5.0],
[0.02, 0.04, 0.025, 2.0, 0.2, 1.0],
]
)
for i in range(6):
for j in range(6):
assert abs(fxr.fx_array[i, j] - expected[i, j]) < 1e-8
def test_fxrates_multi_chain() -> None:
fxr = FXRates({"eurusd": 0.5, "usdgbp": 1.25, "gbpjpy": 100.0, "nokjpy": 10.0, "nokbrl": 5.0})
fxr._set_ad_order(0)
expected = np.array(
[
[1.0, 2.0, 1.25, 125.0, 12.5, 62.5],
[0.5, 1.0, 0.625, 62.5, 6.25, 31.25],
[0.8, 1.6, 1.0, 100.0, 10.0, 50.0],
[0.008, 0.016, 0.01, 1.0, 0.1, 0.5],
[0.08, 0.16, 0.10, 10.0, 1.0, 5.0],
[0.016, 0.032, 0.02, 2.0, 0.2, 1.0],
]
)
for i in range(6):
for j in range(6):
assert abs(fxr.fx_array[i, j] - expected[i, j]) < 1e-8
def test_fxrates_pickle():
fxr = FXRates({"usdeur": 2.0, "usdgbp": 2.5}, settlement=dt(2002, 1, 1))
import pickle
pickled = pickle.dumps(fxr)
result = pickle.loads(pickled)
assert result == fxr
def test_rates_repr():
fxr = FXRates({"usdeur": 2.0, "usdgbp": 2.5})
result = fxr.__repr__()
expected = f""
assert result == expected
fxr = FXRates({"usdeur": 2.0, "usdgbp": 2.5, "audcad": 2.6, "usdaud": 1.2, "cadjpy": 100})
result = fxr.__repr__()
expected = f""
assert result == expected
def test_fx_update_blank() -> None:
fxr = FXRates({"usdeur": 2.0, "usdgbp": 2.5})
result = fxr.update()
assert result is None
def test_convert_and_base() -> None:
fxr = FXRates({"usdnok": 8.0})
expected = Dual(125000, ["fx_usdnok"], [-15625])
result = fxr.convert(1e6, "nok", "usd")
result2 = fxr.convert_positions([0, 1e6], "usd")
assert result == expected
assert result2 == expected
result3 = fxr.positions(expected, "usd")
assert np.all(result3 == np.array([0, 1e6]))
def test_convert_none() -> None:
fxr = FXRates({"usdnok": 8.0})
assert fxr.convert(1, "usd", "gbp") is None
def test_convert_warn() -> None:
fxr = FXRates({"usdnok": 8.0})
with pytest.warns(UserWarning):
fxr.convert(1, "usd", "gbp", on_error="warn")
def test_convert_error() -> None:
fxr = FXRates({"usdnok": 8.0})
with pytest.raises(ValueError):
fxr.convert(1, "usd", "gbp", on_error="raise")
def test_positions_value() -> None:
fxr = FXRates({"usdnok": 8.0})
result = fxr.positions(80, "nok")
assert all(result == np.array([0, 80.0]))
def test_fxrates_set_order() -> None:
fxr = FXRates({"usdnok": 8.0})
fxr._set_ad_order(order=2)
expected = np.array(
[Dual2(1.0, ["fx_usdnok"], [0.0], []), Dual2(8.0, ["fx_usdnok"], [1.0], [])],
)
assert all(fxr.fx_vector == expected)
def test_update_raises() -> None:
fxr = FXRates({"usdnok": 8.0})
with pytest.raises(
ValueError,
match="The given `fx_rates` pairs are not contained in the `FXRates` object.",
):
fxr.update({"usdnok": 9.0, "gbpnok": 10.0})
def test_restate() -> None:
fxr = FXRates({"usdnok": 8.0, "gbpnok": 10})
fxr2 = fxr.restate(["gbpusd", "usdnok"])
assert fxr2.pairs == ["gbpusd", "usdnok"]
assert fxr2.rate("gbpusd") == Dual(1.25, ["fx_gbpusd"], [1.0])
assert fxr2.rate("usdnok") == Dual(8.0, ["fx_usdnok"], [1.0])
def test_restate_return_self() -> None:
# test a new object is always returned even if nothing is restated
fxr = FXRates({"usdnok": 8.0, "gbpnok": 10})
assert id(fxr) != id(fxr.restate(["gbpnok", "usdnok"], True))
def test_rates_table() -> None:
fxr = FXRates({"EURNOK": 10.0})
result = fxr.rates_table()
expected = DataFrame([[1.0, 10.0], [0.1, 1.0]], index=["eur", "nok"], columns=["eur", "nok"])
assert_frame_equal(result, expected)
def test_fxrates_to_json() -> None:
fxr = FXRates({"usdnok": 8.0, "eurusd": 1.05})
result = fxr.to_json()
expected = (
'{"PyWrapped":{"FXRates":{"fx_rates":['
'{"pair":[{"name":"usd"},{"name":"nok"}],"rate":{"F64":8.0},"settlement":null},'
'{"pair":[{"name":"eur"},{"name":"usd"}],"rate":{"F64":1.05},"settlement":null}'
'],"currencies":[{"name":"usd"},{"name":"nok"},{"name":"eur"}]}}}'
)
assert result == expected
fxr = FXRates({"usdnok": 8.0, "eurusd": 1.05}, dt(2022, 1, 3))
result = fxr.to_json()
expected = (
'{"PyWrapped":{"FXRates":{"fx_rates":['
'{"pair":[{"name":"usd"},{"name":"nok"}],"rate":{"F64":8.0},"settlement":"2022-01-03T00:00:00"},'
'{"pair":[{"name":"eur"},{"name":"usd"}],"rate":{"F64":1.05},"settlement":"2022-01-03T00:00:00"}'
'],"currencies":[{"name":"usd"},{"name":"nok"},{"name":"eur"}]}}}'
)
assert result == expected
def test_from_json_and_equality() -> None:
fxr1 = FXRates({"usdnok": 8.0, "eurusd": 1.05})
fxr2 = FXRates({"usdnok": 2.0, "eurusd": 4.0})
assert fxr1 != fxr2
fxr3 = from_json(
'{"PyWrapped":{"FXRates":{"fx_rates":[{"pair":[{"name":"usd"},{"name":"nok"}],"rate":{"F64":2.0},"settlement":null},{"pair":[{"name":"eur"},{"name":"usd"}],"rate":{"F64":4.0},"settlement":null}],"currencies":[{"name":"usd"},{"name":"nok"},{"name":"eur"}],"fx_array":{"Dual":{"v":1,"dim":[3,3],"data":[{"real":1.0,"vars":[],"dual":{"v":1,"dim":[0],"data":[]}},{"real":2.0,"vars":["fx_usdnok"],"dual":{"v":1,"dim":[1],"data":[1.0]}},{"real":0.25,"vars":["fx_eurusd"],"dual":{"v":1,"dim":[1],"data":[-0.0625]}},{"real":0.5,"vars":["fx_usdnok"],"dual":{"v":1,"dim":[1],"data":[-0.25]}},{"real":1.0,"vars":[],"dual":{"v":1,"dim":[0],"data":[]}},{"real":0.125,"vars":["fx_usdnok","fx_eurusd"],"dual":{"v":1,"dim":[2],"data":[-0.0625,-0.03125]}},{"real":4.0,"vars":["fx_eurusd"],"dual":{"v":1,"dim":[1],"data":[1.0]}},{"real":8.0,"vars":["fx_usdnok","fx_eurusd"],"dual":{"v":1,"dim":[2],"data":[4.0,2.0]}},{"real":1.0,"vars":[],"dual":{"v":1,"dim":[0],"data":[]}}]}}}}}',
)
assert fxr2 == fxr3
fxr4 = FXRates({"usdnok": 2.0, "eurusd": 4.0}, base="NOK")
assert fxr3 != fxr4 # base is different
def test_copy() -> None:
fxr1 = FXRates({"usdnok": 8.0, "eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxr2 = fxr1.__copy__()
assert fxr1 == fxr2
assert id(fxr1) != id(fxr2)
def test_set_ad_order() -> None:
fxr = FXRates({"usdnok": 10.0})
fxr._set_ad_order(1)
fxr._set_ad_order(2)
assert fxr._ad == 2
assert type(fxr.fx_vector[0]) is Dual2
assert type(fxr.fx_vector[1]) is Dual2
fxr._set_ad_order(0)
assert fxr._ad == 0
assert fxr.fx_vector[0] == 1.0
assert fxr.fx_vector[1] == 10.0
with pytest.raises(ValueError, match="Order for AD can only be in {0,1,2}"):
fxr._set_ad_order("bad arg")
def test_set_ad_order_second_order_gradients() -> None:
# test ensures that FX Array is consecutively constructed passing correct 2nd order gradients.
# Versions <1.3.0 failed to correctly handle this becuase they simply upcast FX rates vector.
fxr = FXRates({"usdnok": 10.0, "eurnok": 8.0})
un = Dual2(10, ["fx_usdnok"], [], [])
en = Dual2(8.0, ["fx_eurnok"], [], [])
expected = un / en
row, col = fxr.currencies["usd"], fxr.currencies["eur"]
fxr._set_ad_order(2)
assert fxr._ad == 2
assert type(fxr.fx_vector[0]) is Dual2
assert type(fxr.fx_vector[1]) is Dual2
assert np.all(
np.isclose(
gradient(fxr.fx_array[row, col], ["fx_usdnok", "fx_eurnok"]),
gradient(expected, ["fx_usdnok", "fx_eurnok"]),
),
)
assert np.all(
np.isclose(
gradient(fxr.fx_array[row, col], ["fx_usdnok", "fx_eurnok"], order=2),
gradient(expected, ["fx_usdnok", "fx_eurnok"], order=2),
),
)
@pytest.fixture
def usdusd():
nodes = {dt(2022, 1, 1): 1.00, dt(2022, 4, 1): 0.99}
return Curve(nodes=nodes, interpolation="log_linear")
@pytest.fixture
def eureur():
nodes = {dt(2022, 1, 1): 1.00, dt(2022, 4, 1): 0.997}
return Curve(nodes=nodes, interpolation="log_linear")
@pytest.fixture
def usdeur():
nodes = {dt(2022, 1, 1): 1.00, dt(2022, 4, 1): 0.996}
return Curve(nodes=nodes, interpolation="log_linear")
@pytest.fixture
def cadcad():
nodes = {dt(2022, 1, 1): 1.00, dt(2022, 4, 1): 0.987}
return Curve(nodes=nodes, interpolation="log_linear")
@pytest.fixture
def cadcol():
nodes = {dt(2022, 1, 1): 1.00, dt(2022, 4, 1): 0.984}
return Curve(nodes=nodes, interpolation="log_linear")
def test_fxforwards_repr(usdusd, eureur, usdeur) -> None:
fxf = FXForwards(
FXRates({"usdeur": 2.0}, settlement=dt(2022, 1, 3)),
{"usdusd": usdusd, "eureur": eureur, "usdeur": usdeur},
)
result = fxf.__repr__()
expected = f""
assert result == expected
fxf = FXForwards(
FXRates(
{
"usdeur": 2.0,
"usdgbp": 3.0,
"usdaud": 4.0,
"usdnok": 5.0,
"usdsek": 6.0,
},
settlement=dt(2022, 1, 3),
),
{
"usdusd": usdusd,
"eureur": eureur,
"gbpgbp": usdusd,
"audaud": eureur,
"noknok": usdusd,
"seksek": eureur,
"usdeur": usdeur,
"usdaud": usdusd,
"eurnok": eureur,
"eursek": usdeur,
"eurgbp": usdusd,
},
)
result = fxf.__repr__()
expected = f""
assert result == expected
def test_fxforwards_rates_unequal(usdusd, eureur, usdeur) -> None:
fxf = FXForwards(
FXRates({"usdeur": 2.0}, settlement=dt(2022, 1, 3)),
{"usdusd": usdusd, "eureur": eureur, "usdeur": usdeur},
)
fxr = FXRates({"usdeur": 2.0}, settlement=dt(2022, 1, 3))
assert fxf != fxr
assert fxr != fxf
fxf_other = FXForwards(
FXRates({"usdeur": 3.0}, settlement=dt(2022, 1, 3)),
{"usdusd": usdusd, "eureur": eureur, "usdeur": usdeur},
)
assert fxf != fxf_other
fxf2 = fxf.copy()
assert fxf2 == fxf
fxf2.base = "eur"
assert fxf2 != fxf
def test_fxforwards_without_settlement_raise() -> None:
fxr = FXRates({"usdeur": 1.0})
crv = Curve({dt(2022, 1, 1): 1.0})
with pytest.raises(ValueError, match="`fx_rates` as FXRates supplied to FXForwards must cont"):
FXForwards(fx_rates=fxr, fx_curves={"usdusd": crv, "usdeur": crv, "eureur": crv})
def test_fxforwards_set_order(usdusd, eureur, usdeur) -> None:
fxf = FXForwards(
FXRates({"usdeur": 2.0}, settlement=dt(2022, 1, 3)),
{"usdusd": usdusd, "eureur": eureur, "usdeur": usdeur},
)
fxf._set_ad_order(order=2)
expected = np.array(
[Dual2(1.0, ["fx_usdeur"], [0.0], []), Dual2(2.0, ["fx_usdeur"], [1.0], [])],
)
assert all(fxf.fx_rates.fx_vector == expected)
assert usdusd.ad == 2
assert eureur.ad == 2
assert usdeur.ad == 2
def test_fxforwards_set_order_list(usdusd, eureur, usdeur) -> None:
fxf = FXForwards(
[
FXRates({"usdeur": 2.0}, settlement=dt(2022, 1, 3)),
FXRates({"usdgbp": 3.0}, settlement=dt(2022, 1, 4)),
],
{
"usdusd": usdusd,
"eureur": eureur,
"usdeur": usdeur,
"usdgbp": usdeur.copy(),
"gbpgbp": eureur.copy(),
},
)
fxf._set_ad_order(order=2)
# expected = np.array(
# [
# Dual2(1.0, "fx_usdeur", [0.0]),
# Dual2(2.0, "fx_usdeur", [1.0]),
# ]
# )
assert type(fxf.fx_rates_immediate.fx_vector[0]) is Dual2
assert usdusd.ad == 2
assert eureur.ad == 2
assert usdeur.ad == 2
assert fxf.curve("usd", "gbp").ad == 2
def test_fxforwards_and_swap(usdusd, eureur, usdeur) -> None:
fxf = FXForwards(
FXRates({"usdeur": 0.9}, settlement=dt(2022, 1, 3)),
{"usdusd": usdusd, "eureur": eureur, "usdeur": usdeur},
)
result = fxf.rate(FXIndex("usdeur", "tgt", 2), dt(2022, 3, 25))
expected = Dual(0.8991875219289739, ["fx_usdeur"], [0.99909725])
assert abs(result - expected) < 1e-10
assert np.isclose(result.dual, expected.dual)
# test fx_swap price
result = fxf.swap("usdeur", [dt(2022, 1, 3), dt(2022, 3, 25)])
expected = (expected - fxf.rate("usdeur", dt(2022, 1, 3))) * 10000
assert abs(result - expected) < 1e-10
assert np.isclose(result.dual, expected.dual)
result2 = fxf.swap(FXIndex("usdeur", "fed", 2), [dt(2022, 1, 3), dt(2022, 3, 25)])
assert abs(result2 - result) < 1e-12
result = fxf.rate("eurusd", dt(2022, 3, 25))
expected = Dual(1.1121150767915007, ["fx_usdeur"], [-1.23568342])
assert abs(result - expected) < 1e-10
assert np.isclose(result.dual, expected.dual)
def test_fxforwards2() -> None:
fx_rates = FXRates({"usdeur": 0.9, "eurnok": 8.888889}, dt(2022, 1, 3))
fx_curves = {
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.96}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}),
"eurusd": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.991}),
"noknok": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}),
"nokeur": Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.978}),
}
fxf = FXForwards(fx_rates, fx_curves)
# First check the Immediate rates are correct:
d = dt(2022, 1, 3)
v, w = fxf.curve("usd", "usd"), fxf.curve("eur", "usd")
F_usdeur_exp = Dual(0.9, ["fx_usdeur"], []) * w[d] / v[d]
F_usdeur_res = fxf.rate("usdeur", dt(2022, 1, 1))
assert abs(F_usdeur_exp - F_usdeur_res) < 1e-14
# And the other
v2, w2 = fxf.curve("eur", "eur"), fxf.curve("nok", "eur")
F_eurnok_exp = Dual(8.888889, ["fx_eurnok"], []) * w2[d] / v2[d]
F_eurnok_res = fxf.rate("eurnok", dt(2022, 1, 1))
assert abs(F_eurnok_exp - F_eurnok_res) < 1e-14
# Now we will look to evaluate a cross forward rate
d = dt(2022, 8, 16)
f_usdnok_res = fxf.rate("usdnok", dt(2022, 8, 16))
f_usdnok_exp = F_usdeur_exp * F_eurnok_exp * v[d] * v2[d] / (w[d] * w2[d])
# expected = Dual(7.9039924628096845, ["fx_eurnok", "fx_usdeur"], [0.88919914, 8.78221385])
assert abs(f_usdnok_res - f_usdnok_exp) < 1e-14
assert all(
np.isclose(
gradient(f_usdnok_res, ["fx_eurnok", "fx_usdeur"]),
gradient(f_usdnok_exp, ["fx_eurnok", "fx_usdeur"]),
),
)
def test_fxforwards_immediate() -> None:
fx_rates = FXRates({"usdeur": 0.95}, dt(2022, 1, 3))
fx_curves = {
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 3): 0.95}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 3): 1.0}),
"eurusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 3): 1.0}),
}
fxf = FXForwards(fx_rates, fx_curves)
F0_usdeur = 0.95 * 1.0 / 0.95 # f_usdeur * w_eurusd / v_usdusd
assert abs(fxf.fx_rates_immediate.fx_array[0, 1].real - F0_usdeur) < 1e-15
assert abs(fxf.rate("usdeur").real - F0_usdeur) < 1e-15
result = fxf.rate("usdeur", dt(2022, 1, 1))
expected = Dual(1, ["fx_usdeur"], [1 / 0.95])
assert abs(result - expected) < 1e-10
assert np.isclose(result.dual, expected.dual)
result = fxf.rate("usdeur", dt(2022, 1, 3))
expected = Dual(0.95, ["fx_usdeur"], [1.0])
assert abs(result - expected) < 1e-10
assert np.isclose(result.dual, expected.dual)
def test_fxforwards_immediate2() -> None:
fx_rates = FXRates({"usdeur": 0.9, "eurnok": 8.888889}, dt(2022, 1, 3))
fx_curves = {
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 3): 0.999}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 3): 0.998}),
"eurusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 3): 0.997}),
"noknok": Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 3): 0.996}),
"nokeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 3): 0.995}),
}
fxf = FXForwards(fx_rates, fx_curves)
F0_usdeur = 0.9 * 0.997 / 0.999 # f_usdeur * v_eurusd / w_usdusd
F0_eurnok = 8.888889 * 0.995 / 0.998 # f_eurnok * w_nokeur / v_eureur
assert abs(fxf.fx_rates_immediate.fx_array[0, 1].real - F0_usdeur) < 1e-14
assert abs(fxf.fx_rates_immediate.fx_array[1, 2].real - F0_eurnok) < 1e-14
def test_fxforwards_bad_curves_raises(usdusd, eureur, usdeur) -> None:
bad_curve = Curve({dt(2000, 1, 1): 1.00, dt(2023, 1, 1): 0.99})
with pytest.raises(ValueError, match="`fx_curves` do not have the same initial"):
FXForwards(
FXRates({"usdeur": 0.9}, settlement=dt(2022, 1, 3)),
{"usdusd": usdusd, "eureur": eureur, "usdeur": bad_curve},
)
bad_curve = LineCurve({dt(2022, 1, 1): 1.00, dt(2023, 1, 1): 0.99})
with pytest.raises(TypeError, match="`fx_curves` must be DF based, not type Line"):
FXForwards(
FXRates({"usdeur": 0.9}, settlement=dt(2022, 1, 3)),
{"usdusd": usdusd, "eureur": eureur, "usdeur": bad_curve},
)
# SHOULD NOT NECESSARILY FAIL
# with pytest.raises(ValueError):
# FXForwards(
# FXRates({"usdeur": 0.9, "eurgbp": 0.9}, fx_settlement=dt(2022, 1, 3)),
# {"usdusd": usdusd,
# "eureur": eureur,
# "usdeur": usdeur,
# "usdgbp": usdeur,
# "gbpgbp": eureur
# }
# )
def test_fxforwards_convert(usdusd, eureur, usdeur) -> None:
fxf = FXForwards(
FXRates({"usdeur": 0.9}, settlement=dt(2022, 1, 3)),
{"usdusd": usdusd, "eureur": eureur, "usdeur": usdeur},
)
result = fxf.convert(
100,
domestic="usd",
foreign="eur",
settlement=dt(2022, 1, 15),
value_date=dt(2022, 1, 30),
)
expected = Dual(90.12374519723947, ["fx_usdeur"], [100.13749466359941])
assert abs(result - expected) < 1e-13
assert np.isclose(expected.dual, result.dual)
result = fxf.convert(
100,
domestic="usd",
foreign="eur",
settlement=NoInput(0), # should imply immediate settlement
value_date=NoInput(0), # should imply same as settlement
)
expected = Dual(90.00200704713323, ["fx_usdeur"], [100.00223005237025])
assert abs(result - expected) < 1e-13
assert np.isclose(expected.dual, result.dual)
def test_fxforwards_convert_not_in_ccys(usdusd, eureur, usdeur) -> None:
fxf = FXForwards(
FXRates({"usdeur": 0.9}, settlement=dt(2022, 1, 3)),
{"usdusd": usdusd, "eureur": eureur, "usdeur": usdeur},
)
ccy = "gbp"
with pytest.raises(ValueError, match=f"'{ccy}' not in FXForwards.currencies"):
fxf.convert(
100,
domestic=ccy,
foreign="eur",
settlement=dt(2022, 1, 15),
value_date=dt(2022, 1, 30),
on_error="raise",
)
result = fxf.convert(
100,
domestic=ccy,
foreign="eur",
settlement=dt(2022, 1, 15),
value_date=dt(2022, 1, 30),
on_error="ignore",
)
assert result is None
with pytest.warns(UserWarning):
result = fxf.convert(
100,
domestic=ccy,
foreign="eur",
settlement=dt(2022, 1, 15),
value_date=dt(2022, 1, 30),
on_error="warn",
)
assert result is None
def test_fxforwards_position_not_dual(usdusd, eureur, usdeur) -> None:
fxf = FXForwards(
FXRates({"usdeur": 0.9}, settlement=dt(2022, 1, 3)),
{"usdusd": usdusd, "eureur": eureur, "usdeur": usdeur},
)
result = fxf.positions(100)
expected = DataFrame(
{dt(2022, 1, 1): [100.0, 0.0], dt(2022, 1, 3): [0.0, 0.0]},
index=["usd", "eur"],
)
assert_frame_equal(result, expected)
result = fxf.positions(100, aggregate=True)
expected = Series(
[100.0, 0.0],
index=["usd", "eur"],
name=dt(2022, 1, 1),
)
assert_series_equal(result, expected)
def test_fx_curves_locals_raises():
with pytest.raises(ValueError, match="`fx_curves` must contain local cash-collateral curves"):
FXForwards(
fx_rates=FXRates({"eurusd": 1.0, "usdnok": 1.0}, settlement=dt(2000, 1, 1)),
fx_curves={
"eurusd": Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.99}),
"usdnok": Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.99}),
"usdeur": Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.99}),
"eureur": Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.99}),
"noknok": Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.99}),
},
)
def test_multiple_currencies_number_raises(usdusd) -> None:
fxr1 = FXRates({"eurusd": 0.95}, settlement=dt(2022, 1, 3))
fxr2 = FXRates({"gbpcad": 1.1}, settlement=dt(2022, 1, 2))
with pytest.raises(ValueError, match="`fx_curves` is underspecified."):
FXForwards([fxr1, fxr2], {})
with pytest.raises(ValueError, match="`fx_curves` is overspecified."):
FXForwards(
fxr1,
{
"eureur": usdusd,
"usdusd": usdusd,
"usdeur": usdusd,
"eurusd": usdusd,
},
)
def test_forwards_unexpected_curve_raise(usdusd) -> None:
fxr = FXRates({"eurusd": 0.95}, settlement=dt(2022, 1, 3))
with pytest.raises(ValueError, match="`fx_curves` contains an unexpected currency"):
FXForwards(
fxr,
{
"eureur": usdusd,
"usdusd": usdusd,
"usdeur": usdusd,
"usdcad": usdusd,
},
)
def test_forwards_codependent_curve_raise(usdusd) -> None:
fxr = FXRates({"eurusd": 0.95, "usdnok": 10.0}, settlement=dt(2022, 1, 3))
with pytest.raises(ValueError, match="`fx_curves` contains co-dependent rates"):
FXForwards(
fxr,
{
"eureur": usdusd,
"usdusd": usdusd,
"usdeur": usdusd,
"eurusd": usdusd,
"noknok": usdusd,
},
)
class TestFXForwardsBase:
# these tests will validate the base argument supplied to the FXForwards object
# in different framework type constructions
def test_single_system(self, usdusd, eureur):
# test that creating 2 currencies setting base as either yields the same FX rates.
fxr = FXRates({"eurusd": 200.0}, settlement=dt(2022, 1, 3))
fxf1 = FXForwards(fxr, {"eureur": eureur, "eurusd": eureur, "usdusd": usdusd}, base="usd")
fxf2 = FXForwards(fxr, {"eureur": eureur, "eurusd": eureur, "usdusd": usdusd}, base="eur")
res1 = fxf1.rate("eurusd", dt(2022, 3, 1))
res2 = fxf2.rate("eurusd", dt(2022, 3, 1))
assert res1 == res2
@pytest.mark.parametrize("base", ["usd", "eur", "cad", NoInput(0)])
@pytest.mark.parametrize("idx", [0, 1])
def test_multi_currency_system(self, base, idx, usdusd, eureur, cadcad, cadcol, usdeur):
ccys = ["usd", "eur", "cad"]
shuffle(ccys)
pairs = [f"{ccys[0]}{ccys[1]}", f"{ccys[idx]}{ccys[2]}"]
fxr = FXRates(dict(zip(pairs, [5.0, 15.0])), base=base, settlement=dt(2022, 1, 3))
shuffle(ccys)
curv_pairs = [f"{ccys[0]}{ccys[1]}", f"{ccys[idx]}{ccys[2]}"]
fxc = {
"eureur": eureur,
"cadcad": cadcad,
"usdusd": usdusd,
**dict(zip(curv_pairs, [cadcol, usdeur])),
}
fxf1 = FXForwards(fxr, fxc, base="usd")
fxf2 = FXForwards(fxr, fxc, base="eur")
fxf3 = FXForwards(fxr, fxc, base="cad")
fxf4 = FXForwards(fxr, fxc, base=NoInput(0))
shuffle(ccys)
r1 = fxf1.rate(f"{ccys[0]}{ccys[1]}", dt(2022, 2, 27))
r2 = fxf2.rate(f"{ccys[0]}{ccys[1]}", dt(2022, 2, 27))
r3 = fxf3.rate(f"{ccys[0]}{ccys[1]}", dt(2022, 2, 27))
r4 = fxf4.rate(f"{ccys[0]}{ccys[1]}", dt(2022, 2, 27))
assert r1 == r2
assert r1 == r3
assert r1 == r4
@pytest.mark.parametrize("base1", [NoInput(0), "usd", "cad"])
@pytest.mark.parametrize("base2", [NoInput(0), "eur", "usd"])
@pytest.mark.parametrize("pair1", ["cadusd", "usdcad"])
@pytest.mark.parametrize("pair2", ["usdeur", "eurusd"])
def test_separable_system(
self, usdusd, eureur, usdeur, cadcad, cadcol, base1, base2, pair1, pair2
):
fxr1 = FXRates({pair1: 1.25}, settlement=dt(2022, 1, 3), base=base1)
fxr2 = FXRates({pair2: 2.0}, settlement=dt(2022, 1, 2), base=base2)
curves = {
"usdusd": usdusd,
"eureur": eureur,
"cadcad": cadcad,
"cadusd": cadcol,
"usdeur": usdeur,
}
fxf1 = FXForwards([fxr2, fxr1], curves, base="usd")
fxf2 = FXForwards([fxr2, fxr1], curves, base="eur")
fxf3 = FXForwards([fxr2, fxr1], curves, base="cad")
for pair in ["usdcad", "cadeur", "eurusd"]:
assert fxf1.rate(pair, dt(2022, 3, 20)) == fxf2.rate(pair, dt(2022, 3, 20))
assert fxf1.rate(pair, dt(2022, 3, 20)) == fxf3.rate(pair, dt(2022, 3, 20))
def test_dependent_acyclic_system(self, usdusd, eureur, usdeur, cadcad, cadcol):
pair = choice(["usdcad", "cadusd"])
pair2 = choice(["eurusd", "usdeur"])
fxr1 = FXRates({pair2: 1.25}, settlement=dt(2022, 1, 3))
fxr2 = FXRates({pair: 2.0}, settlement=dt(2022, 1, 2))
curves = {
"usdusd": usdusd,
"eureur": eureur,
"cadcad": cadcad,
"cadeur": cadcol,
"usdeur": usdeur,
}
fxf1 = FXForwards([fxr1, fxr2], curves, base="usd")
fxf2 = FXForwards([fxr1, fxr2], curves, base="eur")
fxf3 = FXForwards([fxr1, fxr2], curves, base="cad")
for pair in ["usdcad", "cadeur", "eurusd"]:
assert fxf1.rate(pair, dt(2022, 3, 20)) == fxf2.rate(pair, dt(2022, 3, 20))
assert fxf1.rate(pair, dt(2022, 3, 20)) == fxf3.rate(pair, dt(2022, 3, 20))
def test_multiple_settlement_forwards() -> None:
fxr1 = FXRates({"usdeur": 0.95}, dt(2022, 1, 3))
fxr2 = FXRates({"usdcad": 1.1}, dt(2022, 1, 2))
fxf = FXForwards(
[fxr1, fxr2],
{
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 3): 0.95}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 3): 1.0}),
"eurusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 3): 1.0}),
"cadusd": Curve({dt(2022, 1, 1): 1.00, dt(2022, 10, 1): 0.97}),
"cadcad": Curve({dt(2022, 1, 1): 1.00, dt(2022, 10, 1): 0.969}),
},
)
F0_usdeur = 0.95 * 1.0 / 0.95 # f_usdeur * w_eurusd / v_usdusd
F0_usdeur_result = fxf.rate("usdeur", dt(2022, 1, 1))
assert abs(F0_usdeur_result.real - F0_usdeur) < 1e-13
expected = Dual(0.95, ["fx_usdeur"], [1.0])
result = fxf.rate("usdeur", dt(2022, 1, 3))
assert abs(result - expected) < 1e-13
assert np.isclose(gradient(result, ["fx_usdeur"]), expected.dual)
def test_generate_proxy_curve() -> None:
fxr1 = FXRates({"usdeur": 0.95}, dt(2022, 1, 3))
fxr2 = FXRates({"usdcad": 1.1}, dt(2022, 1, 2))
fxf = FXForwards(
[fxr1, fxr2],
{
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 0.95}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 1.0}),
"eurusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 0.99}),
"cadusd": Curve({dt(2022, 1, 1): 1.00, dt(2022, 10, 1): 0.97}),
"cadcad": Curve({dt(2022, 1, 1): 1.00, dt(2022, 10, 1): 0.969}),
},
)
c1 = fxf.curve("cad", "cad")
assert c1[dt(2022, 10, 1)] == 0.969
c2 = fxf.curve("cad", "usd")
assert c2[dt(2022, 10, 1)] == 0.97
c3 = fxf.curve("cad", "eur")
assert type(c3) is not Curve # should be ProxyCurve
expected = Dual(0.9797979797979798, ["fx_usdcad", "fx_usdeur"], [0, 0])
result = c3[dt(2022, 10, 1)]
assert abs(result - expected) < 1e-12
assert all(np.isclose(gradient(expected, result.vars), gradient(result)))
def test_generate_multi_csa_curve() -> None:
fxr1 = FXRates({"usdeur": 0.95}, dt(2022, 1, 3))
fxr2 = FXRates({"usdcad": 1.1}, dt(2022, 1, 2))
fxf = FXForwards(
[fxr1, fxr2],
{
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 0.95}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 1.0}),
"eurusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 0.99}),
"cadusd": Curve({dt(2022, 1, 1): 1.00, dt(2022, 10, 1): 0.97}),
"cadcad": Curve({dt(2022, 1, 1): 1.00, dt(2022, 10, 1): 0.969}),
},
)
c1 = fxf.curve("cad", ["cad", "usd", "eur"])
assert isinstance(c1, MultiCsaCurve)
def test_proxy_curves_update_with_underlying() -> None:
# Test ProxyCurves update after construction and underlying update
fxr1 = FXRates({"usdeur": 0.95}, dt(2022, 1, 3))
fxr2 = FXRates({"usdcad": 1.1}, dt(2022, 1, 2))
fxf = FXForwards(
[fxr1, fxr2],
{
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 0.95}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 1.0}),
"eurusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 10, 1): 0.99}),
"cadusd": Curve({dt(2022, 1, 1): 1.00, dt(2022, 10, 1): 0.97}),
"cadcad": Curve({dt(2022, 1, 1): 1.00, dt(2022, 10, 1): 0.969}),
},
)
proxy_curve = fxf.curve("cad", "eur")
prev_value = proxy_curve[dt(2022, 10, 1)]
fxf.fx_curves["eureur"].update_node(dt(2022, 10, 1), 0.90)
new_value = proxy_curve[dt(2022, 10, 1)]
assert prev_value != new_value
def test_full_curves(usdusd, eureur, usdeur) -> None:
usdusd = Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 10): 0.999})
eureur = Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 10): 0.998})
eurusd = Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 10): 0.9985})
noknok = Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 10): 0.997})
nokeur = Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 10): 0.9965})
fxr = FXRates({"usdnok": 8.0, "eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxf = FXForwards(
fxr,
{
"usdusd": usdusd,
"eureur": eureur,
"eurusd": eurusd,
"noknok": noknok,
"nokeur": nokeur,
},
)
curve = fxf._full_curve("usd", "nok")
assert type(curve) is Curve
assert curve.nodes.n == 10 # constructed with DF on every date
def test_rate_dynamic_path_calculation() -> None:
# test that a path is dynamically determined for regular settle dates
usdusd = Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 10): 0.999})
eureur = Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 10): 0.998})
eurusd = Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 10): 0.9985})
noknok = Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 10): 0.997})
nokeur = Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 10): 0.9965})
fxr = FXRates({"eurusd": 1.05, "usdnok": 8.0}, settlement=dt(2022, 1, 3), base="usd")
fxf = FXForwards(
fxr,
{
"usdusd": usdusd,
"eureur": eureur,
"eurusd": eurusd,
"noknok": noknok,
"nokeur": nokeur,
},
)
_ = fxf.rate("nokusd", dt(2022, 1, 7))
assert fxf.currencies_list == ["usd", "eur", "nok"]
assert fxf._paths[(2, 0)] == 1
@pytest.mark.parametrize("settlement", [dt(2022, 1, 3), dt(2022, 1, 1)])
def test_no_rate_path_on_immediate(settlement) -> None:
# test that a path is not dynamically determined for an immediate calculation
usdusd = Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 10): 0.999})
eureur = Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 10): 0.998})
eurusd = Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 10): 0.9985})
noknok = Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 10): 0.997})
nokeur = Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 10): 0.9965})
fxr = FXRates({"eurusd": 1.05, "usdnok": 8.0}, settlement=dt(2022, 1, 3), base="usd")
fxf = FXForwards(
fxr,
{
"usdusd": usdusd,
"eureur": eureur,
"eurusd": eurusd,
"noknok": noknok,
"nokeur": nokeur,
},
)
_ = fxf.rate("nokusd", settlement)
assert fxf.currencies_list == ["usd", "eur", "nok"]
assert (2, 0) not in fxf._paths
@pytest.mark.parametrize(
"left",
[
NoInput(0),
dt(2022, 1, 1),
"0d",
],
)
@pytest.mark.parametrize(
"right",
[
NoInput(0),
dt(2022, 1, 10),
"9d",
],
)
def test_fx_plot(left, right) -> None:
usdusd = Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 10): 0.999})
eureur = Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 10): 0.998})
eurusd = Curve({dt(2022, 1, 1): 1.0, dt(2022, 1, 10): 0.9985})
fxr = FXRates({"usdeur": 1.05}, settlement=dt(2022, 1, 3))
fxf = FXForwards(
fxr,
{
"usdusd": usdusd,
"eureur": eureur,
"eurusd": eurusd,
},
)
result = fxf.plot("eurusd", left=left, right=right)
assert len(result) == 3
y_data = result[2][0].get_data()[1]
assert abs(float(y_data[8]) - 0.9520631477714822) < 1e-10
plt.close("all")
def test_delta_risk_equivalence() -> None:
start, end = dt(2022, 1, 1), dt(2023, 1, 1)
fx_curves = {
"usdusd": Curve({start: 1.0, end: 0.96}, id="uu", ad=1),
"eureur": Curve({start: 1.0, end: 0.99}, id="ee", ad=1),
"eurusd": Curve({start: 1.0, end: 0.991}, id="eu", ad=1),
"noknok": Curve({start: 1.0, end: 0.98}, id="nn", ad=1),
"nokeur": Curve({start: 1.0, end: 0.978}, id="ne", ad=1),
}
fx_rates = FXRates({"usdeur": 0.9, "eurnok": 8.888889}, dt(2022, 1, 3))
fxf = FXForwards(fx_rates, fx_curves)
discounted_nok = fx_curves["nokeur"][dt(2022, 8, 15)] * 1000
result1 = discounted_nok * fxf.rate("nokusd", dt(2022, 1, 1))
forward_eur = fxf.rate("nokeur", dt(2022, 8, 15)) * 1000
discounted_eur = forward_eur * fx_curves["eureur"][dt(2022, 8, 15)]
result2 = discounted_eur * fxf.rate("eurusd", dt(2022, 1, 1))
forward_usd = fxf.rate("nokusd", dt(2022, 8, 15)) * 1000
discounted_usd = forward_usd * fxf.curve("usd", "eur")[dt(2022, 8, 15)]
result3 = discounted_usd
assert set(result1.vars) == {
"ee0",
"ee1",
"eu0",
"eu1",
"fx_eurnok",
"fx_usdeur",
"ne0",
"ne1",
"uu0",
"uu1",
}
v = result1.vars
assert abs(result1 - result2) < 1e-12
assert abs(result1 - result3) < 1e-12
assert all(np.isclose(gradient(result1, v), gradient(result3, v)))
assert all(np.isclose(gradient(result1, v), gradient(result2, v)))
def test_fx_immediate_rate_equivalence_to_forward() -> None:
# this test checks that the FX Immediate object created has the same dual values
# expected from manual calculation.
start, end = dt(2022, 1, 1), dt(2023, 1, 1)
fx_curves = {
"usdusd": Curve({start: 1.0, end: 0.96}, id="uu", ad=1),
"eureur": Curve({start: 1.0, end: 0.99}, id="ee", ad=1),
"eurusd": Curve({start: 1.0, end: 0.991}, id="eu", ad=1),
"noknok": Curve({start: 1.0, end: 0.98}, id="nn", ad=1),
"nokeur": Curve({start: 1.0, end: 0.978}, id="ne", ad=1),
}
fx_rates = FXRates({"usdeur": 0.9, "eurnok": 8.888889}, dt(2022, 1, 3))
fxf = FXForwards(fx_rates, fx_curves)
# nokeur
ne = fxf.curve("nok", "eur")
ee = fxf.curve("eur", "eur")
ne0, ne1 = ne[start], ne[dt(2022, 1, 3)]
ee0, ee1 = ee[start], ee[dt(2022, 1, 3)]
expected = 1 / Dual(8.888889, ["fx_eurnok"], []) * ne0 / ne1 * ee1 / ee0
result = fxf.fx_rates_immediate.rate("nokeur")
assert abs(result - expected) < 1e-12
assert all(np.isclose(gradient(result, result.vars), gradient(expected, result.vars)))
def test_rates_update_empty_dict() -> None:
# test updating an FXRates with empty dict does nothing.
fxr = FXRates({"usdeur": 2.0, "usdgbp": 2.5})
fxr.update({})
assert float(fxr.rate("usdeur")) == 2.0
assert float(fxr.rate("usdgbp")) == 2.5
def test_oo_update_rates_and_id() -> None:
# Test the FXRates object can be updated with new FX Rates without creating new
fxr = FXRates({"usdeur": 2.0, "usdgbp": 2.5})
id_ = id(fxr)
assert fxr.rate("eurgbp") == Dual(1.25, ["fx_usdeur", "fx_usdgbp"], [-0.625, 0.5])
fxr.update({"usdGBP": 3.0})
assert fxr.rate("eurgbp") == Dual(1.5, ["fx_usdeur", "fx_usdgbp"], [-0.75, 0.5])
assert id(fxr) == id_
@pytest.mark.parametrize(
("fx_rates", "err"),
[
({"usdeur": 1.2}, "`fx_rates` must be a list of dicts"),
([{"usdjpy": 100.5}, {"eursek": 3.0}], "The given `fx_rates` pairs are not contained"),
([{"usdeur": 100.5}], "`fx_rates` must be a list of dicts with length"),
],
)
def test_fx_forwards_update_list(fx_rates, err):
start, end = dt(2022, 1, 1), dt(2023, 1, 1)
fx_curves = {
"usdusd": Curve({start: 1.0, end: 0.96}, id="uu", ad=1),
"eureur": Curve({start: 1.0, end: 0.99}, id="ee", ad=1),
"eurusd": Curve({start: 1.0, end: 0.991}, id="eu", ad=1),
"noknok": Curve({start: 1.0, end: 0.98}, id="nn", ad=1),
"nokeur": Curve({start: 1.0, end: 0.978}, id="ne", ad=1),
}
fxr1 = FXRates({"usdeur": 0.9}, settlement=dt(2022, 1, 2))
fxr2 = FXRates({"eurnok": 8.888889}, settlement=dt(2022, 1, 3))
fxf = FXForwards([fxr1, fxr2], fx_curves)
with pytest.raises(ValueError, match=err):
fxf.update(fx_rates)
def test_oo_update_forwards_rates() -> None:
# Test the FXForwards object update method will react to an update of FXRates
start, end = dt(2022, 1, 1), dt(2023, 1, 1)
fx_curves = {
"usdusd": Curve({start: 1.0, end: 0.96}, id="uu", ad=1),
"eureur": Curve({start: 1.0, end: 0.99}, id="ee", ad=1),
"eurusd": Curve({start: 1.0, end: 0.991}, id="eu", ad=1),
"noknok": Curve({start: 1.0, end: 0.98}, id="nn", ad=1),
"nokeur": Curve({start: 1.0, end: 0.978}, id="ne", ad=1),
}
fx_rates = FXRates({"usdeur": 0.9, "eurnok": 8.888889}, dt(2022, 1, 3))
fxf = FXForwards(fx_rates, fx_curves)
original_fwd = fxf.rate("usdnok", dt(2022, 7, 15)) # 7.917 = 0.9 * 8.888
fx_rates.update({"usdeur": 1.0})
fxf.update()
updated_fwd = fxf.rate("usdnok", dt(2022, 7, 15)) # 8.797 = 1.0 * 8.888
assert original_fwd != updated_fwd
@pytest.mark.parametrize("curve_up", [True, False])
@pytest.mark.parametrize("fxr_up", [True, False])
def test_oo_update_forwards(curve_up, fxr_up) -> None:
# FXForwards.update() has dependencies to FXRates and Curve.
# If either is updated then the immediates FXRates should change
start, end = dt(2022, 1, 1), dt(2023, 1, 1)
curve = Curve({start: 1.0, end: 0.96}, id="uu", ad=1)
fx_curves = {
"usdusd": curve,
"eureur": Curve({start: 1.0, end: 0.99}, id="ee", ad=1),
"eurusd": Curve({start: 1.0, end: 0.991}, id="eu", ad=1),
"noknok": Curve({start: 1.0, end: 0.98}, id="nn", ad=1),
"nokeur": Curve({start: 1.0, end: 0.978}, id="ne", ad=1),
}
fx_rates1 = FXRates({"usdeur": 0.9}, dt(2022, 1, 2))
fx_rates2 = FXRates({"eurnok": 8.888889}, dt(2022, 1, 3))
fxf = FXForwards([fx_rates1, fx_rates2], fx_curves)
original_fwd = fxf.rate("usdnok", dt(2022, 7, 15))
if curve_up:
curve._set_node_vector([0.94], 1)
if fxr_up:
fx_rates1.update({"usdeur": 0.8})
fxf.update()
new_fwd = fxf.rate("usdnok", dt(2022, 7, 15))
assert (new_fwd != original_fwd) is (curve_up or fxr_up)
def test_oo_update_forwards_rates_list() -> None:
# Test the FXForwards object update method will react to an update of FXRates
start, end = dt(2022, 1, 1), dt(2023, 1, 1)
fx_curves = {
"usdusd": Curve({start: 1.0, end: 0.96}, id="uu", ad=1),
"eureur": Curve({start: 1.0, end: 0.99}, id="ee", ad=1),
"eurusd": Curve({start: 1.0, end: 0.991}, id="eu", ad=1),
"noknok": Curve({start: 1.0, end: 0.98}, id="nn", ad=1),
"nokeur": Curve({start: 1.0, end: 0.978}, id="ne", ad=1),
}
fx_rates1 = FXRates({"usdeur": 0.9}, dt(2022, 1, 2))
fx_rates2 = FXRates({"eurnok": 8.888889}, dt(2022, 1, 3))
fxf = FXForwards([fx_rates1, fx_rates2], fx_curves)
original_fwd = fxf.rate("usdnok", dt(2022, 7, 15)) # 7.917 = 0.9 * 8.888
assert abs(original_fwd - 7.917) < 1e-3
fx_rates1.update({"usdeur": 1.0})
fxf.update()
updated_fwd = fxf.rate("usdnok", dt(2022, 7, 15)) # 8.797 = 1.0 * 8.888
assert abs(updated_fwd - 8.797) < 1e-3
assert original_fwd != updated_fwd
def test_oo_update_forwards_rates_equivalence() -> None:
# Test the FXForwards object update method is equivalent to an FXRates update
start, end = dt(2022, 1, 1), dt(2023, 1, 1)
fx_curves = {
"usdusd": Curve({start: 1.0, end: 0.96}, id="uu", ad=1),
"eureur": Curve({start: 1.0, end: 0.99}, id="ee", ad=1),
"eurusd": Curve({start: 1.0, end: 0.991}, id="eu", ad=1),
"noknok": Curve({start: 1.0, end: 0.98}, id="nn", ad=1),
"nokeur": Curve({start: 1.0, end: 0.978}, id="ne", ad=1),
}
fx_rates1 = FXRates({"usdeur": 0.9, "eurnok": 8.888889}, dt(2022, 1, 3))
fx_rates2 = FXRates({"usdeur": 0.9, "eurnok": 8.888889}, dt(2022, 1, 3))
fxf1 = FXForwards(fx_rates1, fx_curves)
fxf2 = FXForwards(fx_rates2, fx_curves)
fx_rates1.update({"usdeur": 1.0})
fxf1.update()
fxf2.update([{"usdeur": 1.0}])
assert fxf1.rate("usdnok", dt(2022, 7, 15)) == fxf2.rate("usdnok", dt(2022, 7, 15))
@pytest.mark.parametrize(
"fxr",
[
FXRates({"usdeur": 0.9}, settlement=dt(2022, 1, 3)),
[
FXRates({"usdeur": 0.9}, settlement=dt(2022, 1, 3)),
],
],
)
def test_fxforwards_to_json_round_trip(fxr, usdusd, eureur, usdeur) -> None:
fxc = {"usdusd": usdusd, "eureur": eureur, "usdeur": usdeur}
fxf = FXForwards(fxr, fxc)
result = fxf.to_json()
fxf1 = FXForwards.from_json(result)
fxr1, fxc1 = fxf1.fx_rates, fxf1.fx_curves
assert fxc1 == fxc
assert fxr1 == fxr
assert fxf1 == fxf
def test_bad_settlement_date(usdusd, usdeur, eureur) -> None:
fxf = FXForwards(
FXRates({"usdeur": 0.9}, settlement=dt(2022, 1, 3)),
{"usdusd": usdusd, "eureur": eureur, "usdeur": usdeur},
)
with pytest.raises(ValueError, match="`settlement` cannot"):
fxf.rate("usdeur", dt(1999, 1, 1)) # < date before curves
def test_fxforwards_separable_system() -> None:
fxr1 = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxr2 = FXRates({"usdcad": 1.1}, settlement=dt(2022, 1, 2))
fxf = FXForwards(
fx_rates=[fxr1, fxr2],
fx_curves={
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadcad": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"usdeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
},
)
result = fxf.rate("eurcad", dt(2022, 2, 1))
expected = 1.05 * 1.10
assert abs(result - expected) < 1e-2
def test_fxforwards_acyclic_system() -> None:
fxr1 = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxr2 = FXRates({"usdcad": 1.1}, settlement=dt(2022, 1, 2))
fxf = FXForwards(
fx_rates=[fxr1, fxr2],
fx_curves={
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadcad": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"usdeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
},
)
result = fxf.rate("eurcad", dt(2022, 2, 1))
expected = 1.05 * 1.10
assert abs(result - expected) < 1e-2
def test_fxforwards_cyclic_system_fails() -> None:
fxr1 = FXRates({"eurusd": 1.05, "gbpusd": 1.2}, settlement=dt(2022, 1, 3))
fxr2 = FXRates({"usdcad": 1.1}, settlement=dt(2022, 1, 2))
with pytest.raises(ValueError, match="`fx_curves` is underspecified."):
FXForwards(
fx_rates=[fxr1, fxr2],
fx_curves={
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadcad": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"usdeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"gbpcad": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"gbpgbp": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
},
)
def test_fxforwards_cyclic_system_restructured() -> None:
# this system as reported in the book has two settlement dates but must be adjusted
# given the curve currency one-hot matrix
fxr1 = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxr2 = FXRates({"usdcad": 1.1}, settlement=dt(2022, 1, 2))
fxr3 = FXRates({"gbpusd": 1.2}, settlement=dt(2022, 1, 3))
fxf = FXForwards(
fx_rates=[fxr1, fxr2, fxr3],
fx_curves={
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadcad": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"usdeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"gbpcad": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"gbpgbp": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
},
)
result = fxf.rate("eurcad", dt(2022, 2, 1))
expected = 1.05 * 1.10
assert abs(result - expected) < 1e-2
def test_fxforwards_cyclic_system_restructured2() -> None:
fxr1 = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3), base="eur")
fxr2 = FXRates({"usdcad": 1.1}, settlement=dt(2022, 1, 2), base="cad")
fxr3 = FXRates({"gbpusd": 1.2}, settlement=dt(2022, 1, 3), base="gbp")
fxf = FXForwards(
fx_rates=[fxr1, fxr2, fxr3],
fx_curves={
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadcad": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"usdeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"gbpcad": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"gbpgbp": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
},
)
result = fxf.rate("eurcad", dt(2022, 2, 1))
expected = 1.05 * 1.10
assert abs(result - expected) < 1e-2
def test_fxforwards_settlement_pairs() -> None:
fxr1 = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxr2 = FXRates({"usdcad": 1.1}, settlement=dt(2022, 1, 2))
fxr3 = FXRates({"gbpusd": 1.2}, settlement=dt(2022, 1, 3))
fxf = FXForwards(
fx_rates=[fxr1, fxr2, fxr3], # FXRates as list
fx_curves={
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadcad": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"usdeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"gbpcad": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"gbpgbp": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
},
)
assert fxf.pairs_settlement["eurusd"] == dt(2022, 1, 3)
assert fxf.pairs_settlement["usdcad"] == dt(2022, 1, 2)
assert fxf.pairs_settlement["gbpusd"] == dt(2022, 1, 3)
fxf = FXForwards(
fx_rates=fxr1, # FXRates as list
fx_curves={
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"usdeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
},
)
assert fxf.pairs_settlement["eurusd"] == dt(2022, 1, 3)
def test_fxforwards_positions_when_immediate_aligns_with_settlement() -> None:
fxr1 = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 1))
fxr2 = FXRates({"usdcad": 1.1}, settlement=dt(2022, 1, 1))
fxf = FXForwards(
fx_rates=[fxr1, fxr2],
fx_curves={
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadcad": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"usdeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
},
)
pv = Dual(100000, ["fx_eurusd", "fx_usdcad"], [-100000, -150000])
result = fxf.positions(pv, base="usd")
expected = DataFrame(
index=["cad", "eur", "usd"],
columns=[dt(2022, 1, 1)],
data=[[181500.0], [-100000.0], [40000]],
)
assert_frame_equal(result, expected)
def test_fxforwards_positions_multiple_fx_rates() -> None:
fxr1 = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxr2 = FXRates({"usdcad": 1.1}, settlement=dt(2022, 1, 2))
fxf = FXForwards(
fx_rates=[fxr1, fxr2],
fx_curves={
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadcad": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"usdeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
},
)
pv = Dual(100000, ["fx_eurusd", "fx_usdcad"], [-100000, -150000])
result = fxf.positions(pv, base="usd")
expected = DataFrame(
index=["cad", "eur", "usd"],
columns=[dt(2022, 1, 1), dt(2022, 1, 2), dt(2022, 1, 3)],
data=[[0.0, 181500.0, 0.0], [0.0, 0.0, -100000.0], [100000, -165000, 105000]],
)
assert_frame_equal(result, expected)
def test_forward_fx_immediate() -> None:
d_curve = Curve(nodes={dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, interpolation="log_linear")
f_curve = Curve(nodes={dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.95})
result = forward_fx(dt(2022, 4, 1), d_curve, f_curve, 10.0)
assert abs(result - 10.102214) < 1e-6
result = forward_fx(dt(2022, 1, 1), d_curve, f_curve, 10.0, dt(2022, 1, 1))
assert abs(result - 10.0) < 1e-6
result = forward_fx(dt(2022, 1, 1), d_curve, f_curve, 10.0)
assert abs(result - 10.0) < 1e-6
def test_forward_fx_spot_equivalent() -> None:
d_curve = Curve(nodes={dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, interpolation="log_linear")
f_curve = Curve(nodes={dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.95})
result = forward_fx(dt(2022, 7, 1), d_curve, f_curve, 10.102214, dt(2022, 4, 1))
assert abs(result - 10.206626) < 1e-6
class TestFXForwards:
@pytest.mark.parametrize(
("method", "args"),
[
("rate", ("cadeur", dt(2022, 1, 12))),
("convert", (100, "cad")),
("positions", (100, "cad")),
("convert_positions", ([100, -100, 100, -100],)),
("swap", ("cadeur", [dt(2022, 1, 10), dt(2022, 1, 16)])),
("to_json", tuple()),
],
)
def test_hash_update_on_fxr_update(self, method, args):
# test validate cache works correctly on various methods after FXRates update
fxr1 = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxr2 = FXRates({"usdcad": 1.1}, settlement=dt(2022, 1, 2))
fxr3 = FXRates({"gbpusd": 1.2}, settlement=dt(2022, 1, 3))
fxf = FXForwards(
fx_rates=[fxr1, fxr2, fxr3], # FXRates as list
fx_curves={
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadcad": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"usdeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"gbpcad": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"gbpgbp": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
},
)
before = fxf._state
getattr(fxf, method)(*args)
# no cache update is necessary
assert before == fxf._state
fxr1.update({"eurusd": 2.0})
getattr(fxf, method)(*args)
# cache update should have occurred
assert before != fxf._state
@pytest.mark.parametrize(
("method", "args"),
[
("rate", ("cadeur", dt(2022, 1, 12))),
("convert", (100, "cad")),
("positions", (100, "cad")),
("convert_positions", ([100, -100, 100, -100],)),
("swap", ("cadeur", [dt(2022, 1, 10), dt(2022, 1, 16)])),
("to_json", tuple()),
],
)
def test_hash_update_on_curve_update(self, method, args):
# test validate cache works correctly on various methods after Curve update
fxr1 = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxr2 = FXRates({"usdcad": 1.1}, settlement=dt(2022, 1, 2))
fxr3 = FXRates({"gbpusd": 1.2}, settlement=dt(2022, 1, 3))
fxf = FXForwards(
fx_rates=[fxr1, fxr2, fxr3], # FXRates as list
fx_curves={
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadcad": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"usdeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"gbpcad": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"gbpgbp": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
},
)
before = fxf._state
getattr(fxf, method)(*args)
# no cache update is necessary
assert before == fxf._state
fxf.curve("eur", "eur")._set_node_vector([0.998], 1)
getattr(fxf, method)(*args)
# cache update should have occurred
assert before != fxf._state
def test_update_does_nothing_with_same_hashes(self):
fxr1 = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxf = FXForwards(
fx_rates=[fxr1], # FXRates as list
fx_curves={
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"usdeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
},
)
before = fxf._state
fxf.update()
after = fxf._state
assert before == after
before = fxf._state
fxf.update([{"eurusd": 2.0}])
after = fxf._state
assert before != after
def test_cache_population(self):
fxr1 = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxr2 = FXRates({"usdcad": 1.1}, settlement=dt(2022, 1, 2))
fxr3 = FXRates({"gbpusd": 1.2}, settlement=dt(2022, 1, 3))
fxf = FXForwards(
fx_rates=[fxr1, fxr2, fxr3], # FXRates as list
fx_curves={
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadcad": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"usdeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"cadeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"gbpcad": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"gbpgbp": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
},
)
fxf._set_ad_order(0)
assert fxf._cache == {}
fxf.rate("gbpeur", dt(2022, 1, 11))
assert fxf._cache == {
("gbpcad", dt(2022, 1, 11)): 1.3199999999999998,
("cadeur", dt(2022, 1, 11)): 0.8658008658008657,
("gbpeur", dt(2022, 1, 11)): 1.1428571428571426,
}
def test_proxy_curve_cache(self):
fxr1 = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxf = FXForwards(
fx_rates=[fxr1], # FXRates as list
fx_curves={
"usdusd": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"eureur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
"usdeur": Curve({dt(2022, 1, 1): 1.0, dt(2022, 2, 1): 0.999}),
},
)
c = fxf.curve("eur", "usd")
assert "eurusd" in fxf.fx_proxy_curves
c2 = fxf.curve("eur", "usd")
assert id(c) == id(c2)
def test_creation_composite_curve(self):
c1 = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98})
c2 = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.99})
cc = CompositeCurve([c1, c2])
mc = MultiCsaCurve([c1, c2])
fxr = FXRates({"eurusd": 1.5}, settlement=dt(2000, 1, 1))
fxf = FXForwards(
fx_rates=fxr,
fx_curves={"eureur": mc, "eurusd": c2, "usdusd": cc},
)
pc = fxf.curve("usd", "eur")
result = pc[dt(2000, 1, 15)]
assert abs(result - 0.998456) < 1e-6
def test_creation_proxy_curve(self):
c1 = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98})
c2 = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.99})
cc = CompositeCurve([c1, c2])
mc = MultiCsaCurve([c1, c2])
fxr = FXRates({"eurusd": 1.5}, settlement=dt(2000, 1, 1))
fxf = FXForwards(
fx_rates=fxr,
fx_curves={"eureur": mc, "eurusd": c2, "usdusd": cc},
)
pc = fxf.curve("usd", "eur")
fxf2 = FXForwards(
fx_rates=fxr,
fx_curves={"eureur": pc, "eurusd": pc, "usdusd": cc},
)
pc = fxf2.curve("usd", "eur")
result = pc[dt(2000, 1, 15)]
assert abs(result - 0.998843) < 1e-6
def test_creation_operations_curve(self):
c1 = Curve({dt(2000, 1, 2): 1.0, dt(2001, 1, 1): 0.98})
c2 = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.99})
rc = c1.roll("3d")
sc = c1.shift(10.0)
tc = c2.translate(dt(2000, 1, 2))
fxr = FXRates({"eurusd": 1.5}, settlement=dt(2000, 1, 2))
fxf = FXForwards(
fx_rates=fxr,
fx_curves={"eureur": rc, "eurusd": sc, "usdusd": tc},
)
pc = fxf.curve("usd", "eur")
result = pc[dt(2000, 1, 15)]
assert abs(result - 0.999679) < 1e-6
def test_recursive_pair_population1():
arr = np.array(
[
[1, 0, 1],
[0, 1, 0],
[0, 1, 1],
]
)
result = _recursive_pair_population(arr)
expected = {
(0, 1): 2,
(0, 2): -1,
(1, 0): 2,
(1, 2): -1,
(2, 0): -1,
(2, 1): -1,
}
assert result[1] == expected
def test_recursive_pair_population2():
# 5 currency example in 'Coding Interest Rates'
arr = np.array(
[
[1, 0, 0, 0, 0],
[1, 1, 0, 0, 0],
[0, 1, 1, 0, 0],
[1, 0, 0, 1, 0],
[0, 0, 0, 1, 1],
]
)
result = _recursive_pair_population(arr)
expected = {
(0, 1): -1,
(0, 2): 1,
(0, 3): -1,
(0, 4): 3,
(1, 0): -1,
(1, 2): -1,
(1, 3): 0,
(1, 4): 3,
(2, 0): 1,
(2, 1): -1,
(2, 3): 1,
(2, 4): 3,
(3, 0): -1,
(3, 1): 0,
(3, 2): 1,
(3, 4): -1,
(4, 0): 3,
(4, 1): 3,
(4, 2): 3,
(4, 3): -1,
}
assert result[1] == expected
================================================
FILE: python/tests/test_fx_volatility.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
from itertools import combinations
import numpy as np
import pytest
from matplotlib import pyplot as plt
from pandas import DataFrame, Series
from pandas.testing import assert_frame_equal, assert_series_equal
from rateslib import default_context
from rateslib.curves import CompositeCurve, Curve, LineCurve
from rateslib.default import NoInput
from rateslib.dual import Dual, Dual2, Variable, gradient
from rateslib.enums.parameters import _get_fx_delta_type
from rateslib.fx import (
FXForwards,
FXRates,
forward_fx,
)
from rateslib.periods import FXCallPeriod
from rateslib.scheduling import get_calendar
from rateslib.volatility import (
FXDeltaVolSmile,
FXDeltaVolSurface,
FXSabrSmile,
FXSabrSurface,
)
from rateslib.volatility.utils import (
_SabrModel,
_SabrSmileNodes,
)
@pytest.fixture
def fxfo():
# FXForwards for FX Options tests
eureur = Curve(
{dt(2023, 3, 16): 1.0, dt(2023, 9, 16): 0.9851909811629752},
calendar="tgt",
id="eureur",
)
usdusd = Curve(
{dt(2023, 3, 16): 1.0, dt(2023, 9, 16): 0.976009366603271},
calendar="nyc",
id="usdusd",
)
eurusd = Curve({dt(2023, 3, 16): 1.0, dt(2023, 9, 16): 0.987092591908283}, id="eurusd")
fxr = FXRates({"eurusd": 1.0615}, settlement=dt(2023, 3, 20))
fxf = FXForwards(fx_curves={"eureur": eureur, "eurusd": eurusd, "usdusd": usdusd}, fx_rates=fxr)
# fxf.swap("eurusd", [dt(2023, 3, 20), dt(2023, 6, 20)]) = 60.10
return fxf
class TestFXDeltaVolSmile:
@pytest.mark.parametrize("k", [0.2, 0.8, 0.9, 1.0, 1.05, 1.10, 1.25, 1.5, 9.0])
def test_get_from_strike(self, fxfo, k) -> None:
fxvs = FXDeltaVolSmile(
nodes={
0.25: 10.15,
0.5: 7.8,
0.75: 8.9,
},
delta_type="forward",
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
)
put_vol = fxvs.get_from_strike(
k=k,
f=fxfo.rate("eurusd", dt(2023, 6, 20)),
z_w=fxfo.curve("eur", "usd")[dt(2023, 6, 20)]
/ fxfo.curve("eur", "usd")[dt(2023, 3, 20)],
)
call_vol = fxvs.get_from_strike(
k=k,
f=fxfo.rate("eurusd", dt(2023, 6, 20)),
z_w=fxfo.curve("eur", "usd")[dt(2023, 6, 20)]
/ fxfo.curve("eur", "usd")[dt(2023, 3, 20)],
)
assert abs(put_vol[1] - call_vol[1]) < 1e-9
@pytest.mark.parametrize(
("var", "idx", "val"),
[("vol0", 0.25, 10.15), ("vol1", 0.5, 7.8), ("vol2", 0.75, 8.9)],
)
@pytest.mark.parametrize("k", [0.9, 1.0, 1.05, 1.10, 1.4])
def test_get_from_strike_ad(self, fxfo, var, idx, val, k) -> None:
fxvs = FXDeltaVolSmile(
nodes={
0.25: 10.15,
0.5: 7.8,
0.75: 8.9,
},
delta_type="forward",
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
id="vol",
ad=1,
)
kwargs = dict(
k=k,
f=fxfo.rate("eurusd", dt(2023, 6, 20)),
z_w=fxfo.curve("eur", "usd")[dt(2023, 6, 20)]
/ fxfo.curve("eur", "usd")[dt(2023, 3, 20)],
)
put_vol = fxvs.get_from_strike(**kwargs)
fxvs.update_node(idx, Dual(val + 0.0000001, [var], []))
put_vol_plus = fxvs.get_from_strike(**kwargs)
finite_diff = (put_vol_plus[1] - put_vol[1]) * 10000000.0
ad_grad = gradient(put_vol[1], [var])[0]
assert abs(finite_diff - ad_grad) < 1e-7
@pytest.mark.parametrize("k", [0.9, 1.0, 1.05, 1.10, 1.4])
@pytest.mark.parametrize(
"cross",
[
(["vol0", 10.15, 0.25], ["vol1", 7.8, 0.5]),
(["vol0", 10.15, 0.25], ["vol2", 8.9, 0.75]),
(["vol1", 7.8, 0.5], ["vol2", 8.9, 0.75]),
],
)
def test_get_from_strike_ad_2(self, fxfo, k, cross) -> None:
fxvs = FXDeltaVolSmile(
nodes={
0.25: 10.15,
0.5: 7.8,
0.75: 8.9,
},
delta_type="forward",
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
id="vol",
ad=2,
)
fxfo._set_ad_order(2)
kwargs = dict(
k=k,
f=fxfo.rate("eurusd", dt(2023, 6, 20)),
z_w=fxfo.curve("eur", "usd")[dt(2023, 6, 20)]
/ fxfo.curve("eur", "usd")[dt(2023, 3, 20)],
)
pv00 = fxvs.get_from_strike(**kwargs)
fxvs.update_node(cross[0][2], Dual2(cross[0][1] + 0.00001, [cross[0][0]], [], []))
fxvs.update_node(cross[1][2], Dual2(cross[1][1] + 0.00001, [cross[1][0]], [], []))
pv11 = fxvs.get_from_strike(**kwargs)
fxvs.update_node(cross[0][2], Dual2(cross[0][1] + 0.00001, [cross[0][0]], [], []))
fxvs.update_node(cross[1][2], Dual2(cross[1][1] - 0.00001, [cross[1][0]], [], []))
pv1_1 = fxvs.get_from_strike(**kwargs)
fxvs.update_node(cross[0][2], Dual2(cross[0][1] - 0.00001, [cross[0][0]], [], []))
fxvs.update_node(cross[1][2], Dual2(cross[1][1] - 0.00001, [cross[1][0]], [], []))
pv_1_1 = fxvs.get_from_strike(**kwargs)
fxvs.update_node(cross[0][2], Dual2(cross[0][1] - 0.00001, [cross[0][0]], [], []))
fxvs.update_node(cross[1][2], Dual2(cross[1][1] + 0.00001, [cross[1][0]], [], []))
pv_11 = fxvs.get_from_strike(**kwargs)
finite_diff = (pv11[1] + pv_1_1[1] - pv1_1[1] - pv_11[1]) * 1e10 / 4.0
ad_grad = gradient(pv00[1], [cross[0][0], cross[1][0]], 2)[0, 1]
assert abs(finite_diff - ad_grad) < 5e-5
def test_get_from_unsimilar_delta(self) -> None:
fxvs = FXDeltaVolSmile(
nodes={0.25: 10.0, 0.5: 10.0, 0.75: 11.0},
delta_type="forward",
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
id="vol",
)
result = fxvs.get(0.65, "spot_pa", 1.0, 0.99 / 0.999)
expected = 10.0
assert (result - expected) < 0.01
@pytest.mark.parametrize(("delta_type", "exp"), [("spot", 10.00000489), ("forward", 10.0)])
def test_get_from_similar_delta(self, delta_type, exp) -> None:
fxvs = FXDeltaVolSmile(
nodes={0.25: 11.0, 0.5: 10.0, 0.75: 11.0},
delta_type="forward",
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
id="vol",
)
result = fxvs.get(0.5, delta_type, 1.0, 0.99 / 0.991)
assert abs(result - exp) < 1e-6
@pytest.mark.parametrize(
("delta_type", "exp"), [("spot_pa", 10.000085036853598), ("forward_pa", 10.0)]
)
def test_get_from_similar_delta_pa(self, delta_type, exp) -> None:
fxvs = FXDeltaVolSmile(
nodes={0.25: 11.0, 0.5: 10.0, 0.75: 11.0},
delta_type="forward_pa",
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
id="vol",
)
result = fxvs.get(-0.5, delta_type, -1.0, 0.99 / 0.991)
assert abs(result - exp) < 1e-6
def test_get_from_unsimilar_delta2(self):
# GH 730
fdvs = FXDeltaVolSmile(
nodes={
0.1: 5,
0.25: 4,
0.5: 3,
0.75: 4,
0.9: 5,
},
expiry=dt(2025, 5, 10),
eval_date=dt(2025, 4, 10),
delta_type="forward",
)
result = fdvs.get(delta=0.1, delta_type="forward_pa", phi=1, z_w=1.0)
expected = 4.995304045589985
assert abs(result - expected) < 1e-9
def test_set_same_ad_order(self) -> None:
fxvs = FXDeltaVolSmile(
nodes={0.25: 10.0, 0.5: 10.0, 0.75: 11.0},
delta_type="forward",
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
id="vol",
ad=1,
)
assert fxvs._set_ad_order(1) is None
assert fxvs.nodes.nodes[0.25] == Dual(10.0, ["vol0"], [])
def test_set_ad_order_raises(self) -> None:
fxvs = FXDeltaVolSmile(
nodes={0.25: 10.0, 0.5: 10.0, 0.75: 11.0},
delta_type="forward",
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
id="vol",
ad=1,
)
with pytest.raises(ValueError, match="`order` can only be in"):
fxvs._set_ad_order(10)
def test_iter_raises(self) -> None:
fxvs = FXDeltaVolSmile(
nodes={0.5: 1.0},
delta_type="forward",
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
)
with pytest.raises(TypeError, match="`Smile` types are not iterable."):
fxvs.__iter__()
def test_update_node(self):
fxvs = FXDeltaVolSmile(
nodes={0.5: 1.0},
delta_type="forward",
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
)
with pytest.raises(KeyError, match=r"`key`: '0.4' is not in Curve ``nodes``"):
fxvs.update_node(0.4, 10.0)
fxvs.update_node(0.5, 12.0)
assert fxvs[0.5] == 12.0
@pytest.mark.parametrize(
"nodes", [{0.5: 10.0}, {0.35: 10.0, 0.65: 9.0}, {0.25: 10.0, 0.5: 8.0, 0.75: 11.0}]
)
def test_delta_index_range_for_spot(self, nodes):
# spot delta type can lead to a delta index greater than 1.0
# test ensures extrapolation of a DeltaVolSmile is possible, but it is a flat function
fxv = FXDeltaVolSmile(
eval_date=dt(2000, 1, 1),
expiry=dt(2001, 1, 1),
nodes=nodes,
delta_type="spot",
)
result = fxv[1.025]
assert result == fxv[1.0]
def test_update_csolve(self):
import rateslib
anchor = rateslib.dt(2025, 5, 22)
expiry = rateslib.dt(2025, 6, 24)
test_smile = rateslib.FXDeltaVolSmile(
nodes={
0.1: 5,
0.25: 4,
0.5: 3,
0.75: 4,
0.9: 5,
},
expiry=expiry,
eval_date=anchor,
delta_type="forward",
id="test_vol",
)
prior_c = test_smile.nodes.spline.spline.c
# update node
nodes_bump = {k: v + 0.5 for k, v in test_smile.nodes.nodes.items()}
test_smile.update(nodes_bump)
after_c = test_smile.nodes.spline.spline.c
assert after_c != prior_c
def test_flat_smile_with_zero_delta_index_input(self):
smile = FXDeltaVolSmile(
nodes={0.0: 10.0},
delta_type="forward",
eval_date=dt(2023, 3, 16),
id="vol",
expiry=dt(2023, 6, 16),
)
assert abs(smile[0.5] - 10.0) < 1e-14
class TestFXDeltaVolSurface:
def test_expiry_before_eval(self) -> None:
fxvs = FXDeltaVolSurface(
delta_indexes=[0.25, 0.5, 0.75],
expiries=[dt(2024, 1, 1), dt(2025, 1, 1)],
node_values=[[11, 10, 12], [8, 7, 9]],
eval_date=dt(2023, 1, 1),
delta_type="forward",
)
with pytest.raises(ValueError, match="`expiry` before the `eval_date` of"):
fxvs.get_smile(dt(2022, 1, 1))
def test_smile_0_no_interp(self) -> None:
fxvs = FXDeltaVolSurface(
delta_indexes=[0.25, 0.5, 0.75],
expiries=[dt(2024, 1, 1), dt(2025, 1, 1)],
node_values=[[11, 10, 12], [8, 7, 9]],
eval_date=dt(2023, 1, 1),
delta_type="forward",
)
result = fxvs.get_smile(dt(2023, 2, 1))
expected = FXDeltaVolSmile(
nodes={0.25: 11, 0.5: 10, 0.75: 12},
eval_date=dt(2023, 1, 1),
expiry=dt(2023, 2, 1),
delta_type="forward",
)
assert result.nodes == expected.nodes
assert result.meta.expiry == expected.meta.expiry
assert result.meta.delta_type == expected.meta.delta_type
assert result.meta.eval_date == expected.meta.eval_date
def test_smile_end_no_interp(self) -> None:
fxvs = FXDeltaVolSurface(
delta_indexes=[0.25, 0.5, 0.75],
expiries=[dt(2024, 1, 1), dt(2025, 1, 1)],
node_values=[[11, 10, 12], [8, 7, 9]],
eval_date=dt(2023, 1, 1),
delta_type="forward",
)
result = fxvs.get_smile(dt(2029, 2, 1))
expected = FXDeltaVolSmile(
nodes={0.25: 8, 0.5: 7, 0.75: 9},
eval_date=dt(2023, 1, 1),
expiry=dt(2029, 2, 1),
delta_type="forward",
)
assert result.nodes == expected.nodes
assert result.meta.expiry == expected.meta.expiry
assert result.meta.delta_type == expected.meta.delta_type
assert result.meta.eval_date == expected.meta.eval_date
def test_smile_tot_var_lin_interp(self) -> None:
# See Foreign Exchange Option Pricing: Iain Clarke Table 4.5
fxvs = FXDeltaVolSurface(
delta_indexes=[0.25, 0.5, 0.75],
expiries=[dt(2024, 1, 1), dt(2025, 1, 1)],
node_values=[[19.590, 18.250, 18.967], [18.801, 17.677, 18.239]],
eval_date=dt(2023, 1, 1),
delta_type="forward",
)
result = fxvs.get_smile(dt(2024, 7, 1))
expected = FXDeltaVolSmile(
nodes={0.25: 19.0693, 0.5: 17.8713, 0.75: 18.4864},
eval_date=dt(2023, 1, 1),
expiry=dt(2024, 7, 1),
delta_type="forward",
)
for v1, v2 in zip(result.nodes.values, expected.nodes.values):
assert abs(v1 - v2) < 0.0001
assert result.meta.expiry == expected.meta.expiry
assert result.meta.delta_type == expected.meta.delta_type
assert result.meta.eval_date == expected.meta.eval_date
def test_smile_from_exact_expiry(self) -> None:
fxvs = FXDeltaVolSurface(
delta_indexes=[0.25, 0.5, 0.75],
expiries=[dt(2024, 1, 1), dt(2025, 1, 1)],
node_values=[[19.590, 18.250, 18.967], [18.801, 17.677, 18.239]],
eval_date=dt(2023, 1, 1),
delta_type="forward",
id="surf",
)
expected = FXDeltaVolSmile(
nodes={0.25: 19.590, 0.5: 18.25, 0.75: 18.967},
eval_date=dt(2023, 1, 1),
expiry=dt(2024, 1, 1),
delta_type="forward",
id="surf_0_",
)
result = fxvs.get_smile(dt(2024, 1, 1))
for v1, v2 in zip(result.nodes.values, expected.nodes.values):
assert abs(v1 - v2) < 0.0001
assert result.meta.expiry == expected.meta.expiry
assert result.meta.delta_type == expected.meta.delta_type
assert result.meta.eval_date == expected.meta.eval_date
assert result.id == expected.id
def test_get_vol_from_strike(self) -> None:
# from a surface creates a smile and then re-uses methods
fxvs = FXDeltaVolSurface(
delta_indexes=[0.25, 0.5, 0.75],
expiries=[dt(2024, 1, 1), dt(2025, 1, 1)],
node_values=[[19.590, 18.250, 18.967], [18.801, 17.677, 18.239]],
eval_date=dt(2023, 1, 1),
delta_type="forward",
)
result = fxvs.get_from_strike(k=1.05, f=1.03, z_w=0.99 / 0.999, expiry=dt(2024, 7, 1))[1]
# expected close to delta index of 0.5 i.e around 17.87% vol
expected = 17.882603173
assert abs(result - expected) < 1e-8
def test_get_vol_from_strike_raises(self) -> None:
# from a surface creates a smile and then re-uses methods
fxvs = FXDeltaVolSurface(
delta_indexes=[0.25, 0.5, 0.75],
expiries=[dt(2024, 1, 1), dt(2025, 1, 1)],
node_values=[[19.590, 18.250, 18.967], [18.801, 17.677, 18.239]],
eval_date=dt(2023, 1, 1),
delta_type="forward",
)
with pytest.raises(ValueError, match="`expiry` required to get cross-section"):
fxvs.get_from_strike(k=1.05, f=1.03, z_w=0.99 / 0.999)
def test_set_node_vector(self) -> None:
fxvs = FXDeltaVolSurface(
delta_indexes=[0.25, 0.5, 0.75],
expiries=[dt(2024, 1, 1), dt(2025, 1, 1)],
node_values=[[19.590, 18.250, 18.967], [18.801, 17.677, 18.239]],
eval_date=dt(2023, 1, 1),
delta_type="forward",
)
vec = np.array([3, 2, 4, 5, 4, 6])
fxvs._set_node_vector(vec, 1)
for v1, v2 in zip(vec[:3], fxvs.smiles[0].nodes.values):
assert abs(v1 - v2) < 1e-10
for v1, v2 in zip(vec[3:], fxvs.smiles[1].nodes.values):
assert abs(v1 - v2) < 1e-10
def test_expiries_unsorted(self) -> None:
with pytest.raises(ValueError, match="Surface `expiries` are not sorted or"):
FXDeltaVolSurface(
delta_indexes=[0.25, 0.5, 0.75],
expiries=[dt(2024, 1, 1), dt(2024, 1, 1)],
node_values=[[19.590, 18.250, 18.967], [18.801, 17.677, 18.239]],
eval_date=dt(2023, 1, 1),
delta_type="forward",
)
def test_set_weights(self) -> None:
fxvs = FXDeltaVolSurface(
delta_indexes=[0.25, 0.5, 0.75],
expiries=[dt(2024, 1, 1), dt(2024, 2, 1), dt(2024, 3, 1)],
node_values=[[11, 10, 12], [8, 7, 9], [9, 7.5, 10]],
eval_date=dt(2023, 12, 1),
delta_type="forward",
weights=Series(2.0, index=[dt(2024, 1, 5), dt(2024, 1, 12), dt(2024, 2, 5)]),
)
assert fxvs.meta.weights.loc[dt(2023, 12, 15)] == 1.0
assert fxvs.meta.weights.loc[dt(2024, 1, 4)] == 0.9393939393939394
assert fxvs.meta.weights.loc[dt(2024, 1, 5)] == 1.878787878787879
assert fxvs.meta.weights.loc[dt(2024, 2, 2)] == 0.9666666666666667
assert fxvs.meta.weights.loc[dt(2024, 2, 5)] == 1.9333333333333333
assert fxvs.meta.weights.loc[dt(2027, 12, 15)] == 1.0
# test that the sum of weights to each expiry node is as expected.
for e in fxvs.meta.expiries:
assert (
abs(
fxvs.meta.weights[fxvs.meta.eval_date : e].sum()
- (e - fxvs.meta.eval_date).days
)
< 1e-13
)
@pytest.mark.parametrize("scalar", [1.0, 0.5])
def test_weights_get_vol(self, scalar) -> None:
# from a surface creates a smile and then re-uses methods
fxvs = FXDeltaVolSurface(
delta_indexes=[0.25, 0.5, 0.75],
expiries=[dt(2023, 2, 1), dt(2023, 3, 1)],
node_values=[[19.590, 18.250, 18.967], [18.801, 17.677, 18.239]],
eval_date=dt(2023, 1, 1),
delta_type="forward",
)
fxvs_weights = FXDeltaVolSurface(
delta_indexes=[0.25, 0.5, 0.75],
expiries=[dt(2023, 2, 1), dt(2023, 3, 1)],
node_values=[[19.590, 18.250, 18.967], [18.801, 17.677, 18.239]],
eval_date=dt(2023, 1, 1),
delta_type="forward",
weights=Series(scalar, index=[dt(2023, 2, 2), dt(2023, 2, 3)]),
)
kwargs = dict(k=1.03, f=1.03, z_w=0.99 / 0.999, expiry=dt(2023, 2, 3))
result = fxvs.get_from_strike(**kwargs)
result2 = fxvs_weights.get_from_strike(**kwargs)
w = fxvs_weights.meta.weights
expected = result[1] * (w[: dt(2023, 2, 3)].sum() / 33.0) ** 0.5
# This result is not exact because the shape of the spline changes
assert abs(expected - result2[1]) < 5e-2
def test_weights_get_vol_clark(self) -> None:
cal = get_calendar("bus")
weights = Series(0.0, index=cal.cal_date_range(dt(2024, 2, 9), dt(2024, 3, 9)))
weights.update(Series(1.0, index=cal.bus_date_range(dt(2024, 2, 9), dt(2024, 3, 8))))
fxvs_weights = FXDeltaVolSurface(
delta_indexes=[0.5],
expiries=[
dt(2024, 2, 12),
dt(2024, 2, 16),
dt(2024, 2, 23),
dt(2024, 3, 1),
dt(2024, 3, 8),
],
node_values=[[8.15], [11.95], [11.97], [11.75], [11.80]],
eval_date=dt(2024, 2, 9),
delta_type="forward",
weights=weights,
)
# Clark FX Option Pricing Table 4.7
expected = [
0.0,
0.0,
8.15,
9.99,
10.95,
11.54,
11.95,
11.18,
10.54,
10.96,
11.29,
11.56,
11.78,
11.97,
11.56,
11.20,
11.34,
11.46,
11.57,
11.66,
11.75,
11.48,
11.23,
11.36,
11.49,
11.60,
11.70,
11.80,
11.59,
]
for i, date in enumerate(cal.cal_date_range(dt(2024, 2, 10), dt(2024, 3, 9))):
smile = fxvs_weights.get_smile(date)
assert abs(smile.nodes.nodes[0.5] - expected[i]) < 5e-3
def test_cache_clear_and_defaults(self):
fxvs = FXDeltaVolSurface(
delta_indexes=[0.25, 0.5, 0.75],
expiries=[dt(2024, 1, 1), dt(2025, 1, 1)],
node_values=[[19.590, 18.250, 18.967], [18.801, 17.677, 18.239]],
eval_date=dt(2023, 1, 1),
delta_type="forward",
)
fxvs.get_smile(dt(2024, 7, 1))
assert dt(2024, 7, 1) in fxvs._cache
fxvs._clear_cache()
assert dt(2024, 7, 1) not in fxvs._cache
with default_context("curve_caching", False):
fxvs.get_smile(dt(2024, 7, 1))
# no clear cache required, but value will re-calc anyway
assert dt(2024, 7, 1) not in fxvs._cache
@pytest.mark.parametrize("smile_expiry", [dt(2026, 5, 1), dt(2026, 6, 9), dt(2026, 7, 1)])
def test_flat_surface_and_get_smile_one_expiry(self, smile_expiry):
# gh 911
anchor = dt(2025, 6, 9)
expiry = dt(2026, 6, 9)
surf = FXDeltaVolSurface(
eval_date=anchor,
expiries=[expiry],
delta_indexes=[0.5],
node_values=[[10]],
delta_type="forward",
)
smile = surf.get_smile(smile_expiry)
assert abs(smile[0.3] - 10.0) < 1e-13
class TestFXSabrSmile:
@pytest.mark.parametrize(
("strike", "vol"),
[
(1.2034, 19.49),
(1.2050, 19.47),
(1.3395, 18.31), # f == k
(1.3620, 18.25),
(1.5410, 18.89),
(1.5449, 18.93),
],
)
def test_sabr_vol(self, strike, vol):
# test the SABR function using Clark 'FX Option Pricing' Table 3.7 as benchmark.
fxss = FXSabrSmile(
nodes={
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
},
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
id="vol",
)
# F_0,T is stated in section 3.5.4 as 1.3395
result = fxss.get_from_strike(strike, 1.3395)[1]
assert abs(result - vol) < 1e-2
@pytest.mark.parametrize(("k", "f"), [(1.34, 1.34), (1.33, 1.35), (1.35, 1.33)])
def test_sabr_vol_finite_diff_first_order(self, k, f):
# Test all of the first order gradients using finite diff, for the case when f != k and
# when f == k, which is a branched calculation to handle a undefined point.
fxss = FXSabrSmile(
nodes={
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
},
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
id="vol",
ad=2,
)
# F_0,T is stated in section 3.5.4 as 1.3395
base = fxss.get_from_strike(Dual2(k, ["k"], [], []), Dual2(f, ["f"], [], []))[1]
a = fxss.nodes.alpha
p = fxss.nodes.rho
v = fxss.nodes.nu
def inc_(key1, inc1):
in_ = {"k": k, "f": f, "alpha": a, "rho": p, "nu": v}
in_[key1] += inc1
fxss._nodes = _SabrSmileNodes(
_alpha=in_["alpha"], _beta=1.0, _rho=in_["rho"], _nu=in_["nu"]
)
_ = (
fxss._d_sabr_d_k_or_f(
Dual2(in_["k"], ["k"], [], []),
Dual2(in_["f"], ["f"], [], []),
dt(2002, 1, 1),
False,
1,
)[0]
* 100.0
)
# reset
fxss._nodes = _SabrSmileNodes(_alpha=a, _beta=1.0, _rho=p, _nu=v)
return _
for key in ["k", "f", "alpha", "rho", "nu"]:
map_ = {"k": "k", "f": "f", "alpha": "vol0", "rho": "vol1", "nu": "vol2"}
up_ = inc_(key, 1e-5)
dw_ = inc_(key, -1e-5)
assert abs((up_ - dw_) / 2e-5 - gradient(base, [map_[key]])[0]) < 1e-5
@pytest.mark.parametrize(
("k", "f"), [(1.34, 1.34), (1.33, 1.35), (1.35, 1.33), (1.3399, 1.34), (1.34, 1.3401)]
)
@pytest.mark.parametrize("pair", list(combinations(["k", "f", "alpha", "rho", "nu"], 2)))
def test_sabr_vol_cross_finite_diff_second_order(self, k, f, pair):
# Test all of the second order cross gradients using finite diff,
# for the case when f != k and
# when f == k, which is a branched calculation to handle a undefined point.
fxss = FXSabrSmile(
nodes={
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
},
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
id="v",
ad=2,
)
a = fxss.nodes.alpha
p = fxss.nodes.rho
v = fxss.nodes.nu
# F_0,T is stated in section 3.5.4 as 1.3395
base = fxss.get_from_strike(Dual2(k, ["k"], [], []), Dual2(f, ["f"], [], []))[1]
def inc_(key1, key2, inc1, inc2):
in_ = {"k": k, "f": f, "alpha": a, "rho": p, "nu": v}
in_[key1] += inc1
in_[key2] += inc2
fxss._nodes = _SabrSmileNodes(
_alpha=in_["alpha"], _beta=1.0, _rho=in_["rho"], _nu=in_["nu"]
)
_ = (
fxss._d_sabr_d_k_or_f(
Dual2(in_["k"], ["k"], [], []),
Dual2(in_["f"], ["f"], [], []),
dt(2002, 1, 1),
False,
1,
)[0]
* 100.0
)
# reset
fxss._nodes = _SabrSmileNodes(_alpha=a, _beta=1.0, _rho=p, _nu=v)
return _
v_map = {"k": "k", "f": "f", "alpha": "v0", "rho": "v1", "nu": "v2"}
upup = inc_(pair[0], pair[1], 1e-3, 1e-3)
updown = inc_(pair[0], pair[1], 1e-3, -1e-3)
downup = inc_(pair[0], pair[1], -1e-3, 1e-3)
downdown = inc_(pair[0], pair[1], -1e-3, -1e-3)
expected = (upup + downdown - updown - downup) / 4e-6
result = gradient(base, [v_map[pair[0]], v_map[pair[1]]], order=2)[0][1]
assert abs(result - expected) < 1e-2
@pytest.mark.parametrize(
("k", "f"), [(1.34, 1.34), (1.33, 1.35), (1.35, 1.33), (1.3399, 1.34), (1.34, 1.3401)]
)
@pytest.mark.parametrize("var", ["k", "f", "alpha", "rho", "nu"])
def test_sabr_vol_same_finite_diff_second_order(self, k, f, var):
# Test all of the second order cross gradients using finite diff,
# for the case when f != k and
# when f == k, which is a branched calculation to handle a undefined point.
fxss = FXSabrSmile(
nodes={
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
},
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
id="v",
ad=2,
)
a = fxss.nodes.alpha
p = fxss.nodes.rho
v = fxss.nodes.nu
# F_0,T is stated in section 3.5.4 as 1.3395
base = fxss.get_from_strike(Dual2(k, ["k"], [], []), Dual2(f, ["f"], [], []))[1]
def inc_(key1, inc1):
in_ = {"k": k, "f": f, "alpha": a, "rho": p, "nu": v}
in_[key1] += inc1
fxss._nodes = _SabrSmileNodes(
_alpha=in_["alpha"], _beta=1.0, _rho=in_["rho"], _nu=in_["nu"]
)
_ = (
fxss._d_sabr_d_k_or_f(
Dual2(in_["k"], ["k"], [], []),
Dual2(in_["f"], ["f"], [], []),
dt(2002, 1, 1),
False,
1,
)[0]
* 100.0
)
# reset
fxss._nodes = _SabrSmileNodes(_alpha=a, _beta=1.0, _rho=p, _nu=v)
return _
v_map = {"k": "k", "f": "f", "alpha": "v0", "rho": "v1", "nu": "v2"}
up = inc_(var, 1e-4)
down = inc_(var, -1e-4)
expected = (up + down - 2 * base) / 1e-8
result = gradient(base, [v_map[var]], order=2)[0][0]
assert abs(result - expected) < 5e-3
def test_sabr_vol_root_multi_duals_neighbourhood(self):
# test the SABR function when regular arithmetic operations produce an undefined 0/0
# value so AD has to be hard coded into the solution. This occurs when f == k.
# test by comparing derivatives with those captured at a nearby valid point
fxss = FXSabrSmile(
nodes={
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
},
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
id="vol",
ad=2,
)
# F_0,T is stated in section 3.5.4 as 1.3395
base = fxss.get_from_strike(Dual2(1.34, ["k"], [], []), Dual2(1.34, ["f"], [], []))[1]
comparison1 = fxss.get_from_strike(Dual2(1.341, ["k"], [], []), Dual2(1.34, ["f"], [], []))[
1
]
assert np.all(abs(base.dual - comparison1.dual) < 1e-1)
diff = base.dual2 - comparison1.dual2
dual2 = abs(diff) < 5e-1
assert np.all(dual2)
@pytest.mark.parametrize("param", ["alpha", "beta", "rho", "nu"])
def test_missing_param_raises(self, param):
nodes = {
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
}
nodes.pop(param)
with pytest.raises(ValueError):
FXSabrSmile(
nodes=nodes,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
id="vol",
)
def test_non_iterable(self):
fxss = FXSabrSmile(
nodes={
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
},
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
id="vol",
)
with pytest.raises(TypeError):
_ = list(fxss)
def test_update_node_raises(self):
fxss = FXSabrSmile(
nodes={
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
},
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
id="vol",
)
with pytest.raises(KeyError, match="`key` is not in ``nodes``."):
fxss.update_node("bananas", 12.0)
def test_set_ad_order_raises(self):
fxss = FXSabrSmile(
nodes={
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
},
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
id="vol",
)
with pytest.raises(ValueError, match="`order` can only be in {0, 1, 2} "):
fxss._set_ad_order(12)
def test_get_node_vars_and_vector(self):
fxss = FXSabrSmile(
nodes={
"alpha": 0.20,
"beta": 1.0,
"rho": -0.10,
"nu": 0.80,
},
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
id="myid",
)
result = fxss._get_node_vars()
expected = ("myid0", "myid1", "myid2")
assert result == expected
result = fxss._get_node_vector()
expected = np.array([0.20, -0.1, 0.80])
assert np.all(result == expected)
def test_get_from_strike_expiry_raises(self):
fxss = FXSabrSmile(
nodes={
"alpha": 0.20,
"beta": 1.0,
"rho": -0.10,
"nu": 0.80,
},
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
id="vol",
ad=2,
)
with pytest.raises(ValueError, match="`expiry` of VolSmile and OptionPeriod do not match"):
fxss.get_from_strike(k=1.0, f=1.0, z_w=1.0, expiry=(1999, 1, 1))
@pytest.mark.parametrize("k", [1.2034, 1.2050, 1.3620, 1.5410, 1.5449])
def test_get_from_strike_ad_2(self, fxfo, k) -> None:
# Use finite diff to validate the 2nd order AD of the SABR function in alpha and rho.
fxss = FXSabrSmile(
nodes={
"alpha": 0.20,
"beta": 1.0,
"rho": -0.10,
"nu": 0.80,
},
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
id="vol",
ad=2,
)
fxfo._set_ad_order(2)
args = (
k,
fxfo.rate("eurusd", dt(2023, 6, 20)),
)
pv00 = fxss.get_from_strike(*args)
fxss.update_node("alpha", 0.20 + 0.00001)
fxss.update_node("rho", -0.10 + 0.00001)
pv11 = fxss.get_from_strike(*args)
fxss.update_node("alpha", 0.20 + 0.00001)
fxss.update_node("rho", -0.10 - 0.00001)
pv1_1 = fxss.get_from_strike(*args)
fxss.update_node("alpha", 0.20 - 0.00001)
fxss.update_node("rho", -0.10 - 0.00001)
pv_1_1 = fxss.get_from_strike(*args)
fxss.update_node("alpha", 0.20 - 0.00001)
fxss.update_node("rho", -0.10 + 0.00001)
pv_11 = fxss.get_from_strike(*args)
finite_diff = (pv11[1] + pv_1_1[1] - pv1_1[1] - pv_11[1]) * 1e10 / 4.0
ad_grad = gradient(pv00[1], ["vol0", "vol1"], 2)[0, 1]
assert abs(finite_diff - ad_grad) < 1e-4
@pytest.mark.parametrize("p", [-0.1, 0.15])
@pytest.mark.parametrize("a", [0.05, 0.2])
@pytest.mark.parametrize("k_", [1.15, 1.3620, 1.45, 1.3395])
def test_sabr_derivative(self, a, p, k_):
# test the analytic derivative of the SABR function with respect to k created by sympy
b = 1.0
v = 0.8
f = 1.3395
t = 1.0
k = Dual(k_, ["k"], [1.0])
sabr_vol, result = _SabrModel._d_sabr_d_k_or_f(k, f, t, a, b, p, v, 1)
expected = gradient(sabr_vol, ["k"])[0]
assert abs(result - expected) < 1e-13
@pytest.mark.parametrize("p", [-0.1, 0.15])
@pytest.mark.parametrize("a", [0.05, 0.2])
@pytest.mark.parametrize("f_", [1.15, 1.3620, 1.45, 1.3395])
def test_sabr_derivative_f(self, a, p, f_):
# test the analytic derivative of the SABR function with respect to f created by sympy
# tests the regular case as well as the limit z->0 where a separate AD calculation o
# is branched.
b = 1.0
v = 0.8
k = 1.3395
t = 1.0
f = Dual(f_, ["f"], [1.0])
sabr_vol, result = _SabrModel._d_sabr_d_k_or_f(k, f, t, a, b, p, v, 2)
expected = gradient(sabr_vol, ["f"])[0]
assert abs(result - expected) < 1e-13
@pytest.mark.parametrize(("k", "f"), [(1.34, 1.34), (1.33, 1.35), (1.35, 1.33)])
def test_sabr_derivative_finite_diff_first_order(self, k, f):
# Test all of the first order gradients using finite diff, for the case when f != k and
# when f == k, which is a branched calculation to handle a undefined point.
fxss = FXSabrSmile(
nodes={
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
},
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
id="vol",
ad=2,
)
t = dt(2002, 1, 1)
base = fxss._d_sabr_d_k_or_f(
Dual2(k, ["k"], [1.0], []), Dual2(f, ["f"], [1.0], []), t, False, 1
)[1]
a = fxss.nodes.alpha
p = fxss.nodes.rho
v = fxss.nodes.nu
def inc_(key1, inc1):
in_ = {"k": k, "f": f, "alpha": a, "rho": p, "nu": v}
in_[key1] += inc1
fxss._nodes = _SabrSmileNodes(
_alpha=in_["alpha"], _beta=1.0, _rho=in_["rho"], _nu=in_["nu"]
)
_ = fxss._d_sabr_d_k_or_f(
Dual2(in_["k"], ["k"], [], []),
Dual2(in_["f"], ["f"], [], []),
dt(2002, 1, 1),
False,
1,
)[1]
# reset
fxss._nodes = _SabrSmileNodes(_alpha=a, _beta=1.0, _rho=p, _nu=v)
return _
for key in ["k", "f", "alpha", "rho", "nu"]:
map_ = {"k": "k", "f": "f", "alpha": "vol0", "rho": "vol1", "nu": "vol2"}
up_ = inc_(key, 1e-5)
dw_ = inc_(key, -1e-5)
assert abs((up_ - dw_) / 2e-5 - gradient(base, [map_[key]])[0]) < 2e-3
@pytest.mark.parametrize(
("k", "f"), [(1.34, 1.34), (1.33, 1.35), (1.35, 1.33), (1.3395, 1.34), (1.34, 1.3405)]
)
@pytest.mark.parametrize("pair", list(combinations(["k", "f", "alpha", "rho", "nu"], 2)))
def test_sabr_derivative_cross_finite_diff_second_order(self, k, f, pair):
# Test all of the second order cross gradients using finite diff,
# for the case when f != k and
# when f == k, which is a branched calculation to handle a undefined point.
fxss = FXSabrSmile(
nodes={
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
},
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
id="v",
ad=2,
)
a = fxss.nodes.alpha
p = fxss.nodes.rho
v = fxss.nodes.nu
# F_0,T is stated in section 3.5.4 as 1.3395
base = fxss._d_sabr_d_k_or_f(
Dual2(k, ["k"], [], []), Dual2(f, ["f"], [], []), dt(2002, 1, 1), False, 1
)[1]
def inc_(key1, key2, inc1, inc2):
in_ = {"k": k, "f": f, "alpha": a, "rho": p, "nu": v}
in_[key1] += inc1
in_[key2] += inc2
fxss._nodes = _SabrSmileNodes(
_alpha=in_["alpha"], _beta=1.0, _rho=in_["rho"], _nu=in_["nu"]
)
_ = fxss._d_sabr_d_k_or_f(
Dual2(in_["k"], ["k"], [], []),
Dual2(in_["f"], ["f"], [], []),
dt(2002, 1, 1),
False,
1,
)[1]
# reset
fxss._nodes = _SabrSmileNodes(_alpha=a, _beta=1.0, _rho=p, _nu=v)
return _
v_map = {"k": "k", "f": "f", "alpha": "v0", "rho": "v1", "nu": "v2"}
upup = inc_(pair[0], pair[1], 1e-3, 1e-3)
updown = inc_(pair[0], pair[1], 1e-3, -1e-3)
downup = inc_(pair[0], pair[1], -1e-3, 1e-3)
downdown = inc_(pair[0], pair[1], -1e-3, -1e-3)
expected = (upup + downdown - updown - downup) / 4e-6
result = gradient(base, [v_map[pair[0]], v_map[pair[1]]], order=2)[0][1]
assert abs(result - expected) < 5e-3
@pytest.mark.parametrize(
("k", "f"),
[(1.34, 1.34), (1.33, 1.35), (1.35, 1.33), (1.3395, 1.34), (1.34, 1.3405)],
)
@pytest.mark.parametrize("var", ["k", "f", "alpha", "rho", "nu"])
def test_sabr_derivative_same_finite_diff_second_order(self, k, f, var):
# Test all of the second order cross gradients using finite diff,
# for the case when f != k and
# when f == k, which is a branched calculation to handle a undefined point.
fxss = FXSabrSmile(
nodes={
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
},
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
id="v",
ad=2,
)
a = fxss.nodes.alpha
p = fxss.nodes.rho
v = fxss.nodes.nu
# F_0,T is stated in section 3.5.4 as 1.3395
base = fxss._d_sabr_d_k_or_f(
Dual2(k, ["k"], [], []), Dual2(f, ["f"], [], []), dt(2002, 1, 1), False, 1
)[1]
def inc_(key1, inc1):
k_ = k
f_ = f
if key1 == "k":
k_ = k + inc1
elif key1 == "f":
f_ = f + inc1
else:
fxss.update_node(key1, getattr(fxss.nodes, key1) + inc1)
# fxss.nodes[key1] = fxss.nodes[key1] + inc1
_ = fxss._d_sabr_d_k_or_f(
Dual2(k_, ["k"], [], []), Dual2(f_, ["f"], [], []), dt(2002, 1, 1), False, 1
)[1]
fxss._nodes = _SabrSmileNodes(_alpha=a, _beta=1.0, _rho=p, _nu=v)
return _
v_map = {"k": "k", "f": "f", "alpha": "v0", "rho": "v1", "nu": "v2"}
up = inc_(var, 1e-3)
down = inc_(var, -1e-3)
expected = (up + down - 2 * base) / 1e-6
result = gradient(base, [v_map[var]], order=2)[0][0]
assert abs(result - expected) < 3e-3
def test_sabr_derivative_root_multi_duals_neighbourhood(self):
# test the SABR function when regular arithmetic operations produce an undefined 0/0
# value so AD has to be hard coded into the solution. This occurs when f == k.
# test by comparing derivatives with those captured at a nearby valid point
fxss = FXSabrSmile(
nodes={
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
},
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
id="vol",
ad=2,
)
# F_0,T is stated in section 3.5.4 as 1.3395
base = fxss._d_sabr_d_k_or_f(
Dual2(1.34, ["k"], [], []), Dual2(1.34, ["f"], [], []), dt(2002, 1, 1), False, 1
)[1]
comparison1 = fxss._d_sabr_d_k_or_f(
Dual2(1.341, ["k"], [], []), Dual2(1.34, ["f"], [], []), dt(2002, 1, 1), False, 1
)[1]
assert np.all(abs(base.dual - comparison1.dual) < 5e-3)
diff = base.dual2 - comparison1.dual2
dual2 = abs(diff) < 3e-2
assert np.all(dual2)
def test_sabr_derivative_ad(self):
# Test is probably superceded by test_sabr_derivative_same/cross_finite_diff
# test the analytic derivative of the SABR function and its preservation of AD.
a = 0.10
b = 1.0
p = Dual2(-0.20, ["p"], [1.0], [0.0])
v = 0.8
f = 1.3395
t = 1.0
k = Dual2(1.45, ["k"], [1.0], [0.0])
_, result = _SabrModel._d_sabr_d_k_or_f(k, f, t, a, b, p, v, 1)
_, r1 = _SabrModel._d_sabr_d_k_or_f(k, f, t, a, b, p + 1e-4, v, 1)
_, r_1 = _SabrModel._d_sabr_d_k_or_f(k, f, t, a, b, p - 1e-4, v, 1)
expected = (r1 - r_1) / (2e-4)
result = gradient(result, ["p"])[0]
assert abs(result - expected) < 1e-9
_, result = _SabrModel._d_sabr_d_k_or_f(k, f, t, a, b, p, v, 1)
_, r1 = _SabrModel._d_sabr_d_k_or_f(k, f, t, a, b, p + 1e-4, v, 1)
_, r_1 = _SabrModel._d_sabr_d_k_or_f(k, f, t, a, b, p - 1e-4, v, 1)
expected = (r1 - 2 * result + r_1) / (1e-8)
result = gradient(result, ["p"], order=2)[0][0]
assert abs(result - expected) < 1e-8
def test_sabr_derivative_root(self):
# Test is probably superceded by test_sabr_derivative_same/cross_finite_diff
# test the analytic derivative of the SABR function when f == k
a = 0.10
b = 1.0
p = -0.20
v = 0.8
f = 1.3395
t = 1.0
k = Dual(1.3395, ["k"], [1.0])
sabr_vol, result = _SabrModel._d_sabr_d_k_or_f(k, f, t, a, b, p, v, 1)
expected = gradient(sabr_vol, ["k"])[0]
assert abs(result - expected) < 1e-13
def test_sabr_derivative_root_ad(self):
# Test is probably superceded by test_sabr_derivative_same/cross_finite_diff
# test the analytic derivative of the SABR function when f == k, and its preservation of AD.
a = 0.10
b = 1.0
p = Dual2(-0.20, ["p"], [1.0], [0.0])
v = 0.8
f = 1.3395
t = 1.0
k = Dual2(1.3395, ["k"], [1.0], [0.0])
_, result = _SabrModel._d_sabr_d_k_or_f(k, f, t, a, b, p, v, 1)
_, r1 = _SabrModel._d_sabr_d_k_or_f(k, f, t, a, b, p + 1e-4, v, 1)
_, r_1 = _SabrModel._d_sabr_d_k_or_f(k, f, t, a, b, p - 1e-4, v, 1)
expected = (r1 - r_1) / (2e-4)
result = gradient(result, ["p"])[0]
assert abs(result - expected) < 1e-9
_, result = _SabrModel._d_sabr_d_k_or_f(k, f, t, a, b, p, v, 1)
_, r1 = _SabrModel._d_sabr_d_k_or_f(k, f, t, a, b, p + 1e-4, v, 1)
_, r_1 = _SabrModel._d_sabr_d_k_or_f(k, f, t, a, b, p - 1e-4, v, 1)
expected = (r1 - 2 * result + r_1) / (1e-8)
result = gradient(result, ["p"], order=2)[0][0]
assert abs(result - expected) < 1e-8
def test_f_with_fxforwards(self, fxfo):
fxss = FXSabrSmile(
nodes={
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 4, 16),
id="v",
ad=1,
pair="eurusd",
calendar="tgt|fed",
)
result = fxss.get_from_strike(1.02, fxfo)[1]
expected = 17.803563
assert abs(result - expected) < 1e-6
def test_f_with_fxrates_raises(self, fxfo):
fxss = FXSabrSmile(
nodes={
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 4, 16),
id="v",
ad=1,
pair="eurusd",
calendar="tgt|fed",
)
with pytest.raises(ValueError):
fxss.get_from_strike(1.02, FXRates({"eurusd": 1.06}))
def test_plot_domain(self):
ss = FXSabrSmile(
eval_date=dt(2024, 5, 28),
expiry=dt(2054, 5, 28),
nodes={"alpha": 0.02, "beta": 1.0, "rho": 0.01, "nu": 0.05},
)
ax, fig, lines = ss.plot(f=1.60)
assert abs(lines[0]._x[0] - 1.3427) < 1e-4
assert abs(lines[0]._x[-1] - 1.9299) < 1e-4
assert abs(lines[0]._y[0] - 2.0698) < 1e-4
assert abs(lines[0]._y[-1] - 2.0865) < 1e-4
def test_get_from_strike_raises_fx(self, fxfo):
fxss = FXSabrSmile(
nodes={
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 4, 16),
id="v",
ad=1,
calendar="tgt|fed",
)
with pytest.raises(ValueError, match="`FXSabrSmile` must be specified with a `pair` arg"):
fxss.get_from_strike(1.02, fxfo)
def test_solver_variable_numbers(self):
from rateslib import IRS, FXBrokerFly, FXCall, FXRiskReversal, FXStraddle, FXSwap, Solver
usdusd = Curve({dt(2024, 5, 7): 1.0, dt(2024, 5, 30): 1.0}, calendar="nyc", id="usdusd")
eureur = Curve({dt(2024, 5, 7): 1.0, dt(2024, 5, 30): 1.0}, calendar="tgt", id="eureur")
eurusd = Curve({dt(2024, 5, 7): 1.0, dt(2024, 5, 30): 1.0}, id="eurusd")
# Create an FX Forward market with spot FX rate data
fxr = FXRates({"eurusd": 1.0760}, settlement=dt(2024, 5, 9))
fxf = FXForwards(
fx_rates=fxr,
fx_curves={"eureur": eureur, "usdusd": usdusd, "eurusd": eurusd},
)
pre_solver = Solver(
curves=[eureur, eurusd, usdusd],
instruments=[
IRS(dt(2024, 5, 9), "3W", spec="eur_irs", curves="eureur"),
IRS(dt(2024, 5, 9), "3W", spec="usd_irs", curves="usdusd"),
FXSwap(
dt(2024, 5, 9), "3W", pair="eurusd", curves=[None, "eurusd", None, "usdusd"]
),
],
s=[3.90, 5.32, 8.85],
fx=fxf,
id="rates_sv",
)
dv_smile = FXSabrSmile(
nodes={"alpha": 0.05, "beta": 1.0, "rho": 0.01, "nu": 0.03},
eval_date=dt(2024, 5, 7),
expiry=dt(2024, 5, 28),
id="eurusd_3w_smile",
pair="eurusd",
)
option_args = dict(
pair="eurusd",
expiry=dt(2024, 5, 28),
calendar="tgt|fed",
delta_type="spot",
curves=["eurusd", "usdusd"],
vol="eurusd_3w_smile",
)
dv_solver = Solver(
pre_solvers=[pre_solver],
curves=[dv_smile],
instruments=[
FXStraddle(strike="atm_delta", **option_args),
FXRiskReversal(strike=("-25d", "25d"), **option_args),
FXRiskReversal(strike=("-10d", "10d"), **option_args),
FXBrokerFly(strike=(("-25d", "25d"), "atm_delta"), **option_args),
FXBrokerFly(strike=(("-10d", "10d"), "atm_delta"), **option_args),
],
s=[5.493, -0.157, -0.289, 0.071, 0.238],
fx=fxf,
id="dv_solver",
)
fc = FXCall(
expiry=dt(2024, 5, 28),
pair="eurusd",
strike=1.07,
notional=100e6,
curves=["eurusd", "usdusd"],
vol="eurusd_3w_smile",
premium=98.216647 * 1e8 / 1e4,
premium_ccy="usd",
delta_type="spot",
)
fc.delta(solver=dv_solver)
@pytest.mark.parametrize("a", [0.02, 0.06])
@pytest.mark.parametrize("b", [0.0, 0.4, 0.65, 1.0])
@pytest.mark.parametrize("p", [-0.1, 0.1])
@pytest.mark.parametrize("v", [0.05, 0.15])
@pytest.mark.parametrize("k", [1.05, 1.25, 1.6])
def test_sabr_function_values(self, a, b, p, v, k):
fxs = FXSabrSmile(
nodes={"alpha": a, "beta": b, "rho": p, "nu": v},
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
ad=0,
)
# this code is taken from PySabr, another library implementing SABR.
# it is used as a benchmark
def _x(rho, z):
"""Return function x used in Hagan's 2002 SABR lognormal vol expansion."""
a = (1 - 2 * rho * z + z**2) ** 0.5 + z - rho
b = 1 - rho
return np.log(a / b)
def lognormal_vol(k, f, t, alpha, beta, rho, volvol):
"""
Hagan's 2002 SABR lognormal vol expansion.
The strike k can be a scalar or an array, the function will return an array
of lognormal vols.
"""
# Negative strikes or forwards
if k <= 0 or f <= 0:
return 0.0
eps = 1e-07
logfk = np.log(f / k)
fkbeta = (f * k) ** (1 - beta)
a = (1 - beta) ** 2 * alpha**2 / (24 * fkbeta)
b = 0.25 * rho * beta * volvol * alpha / fkbeta**0.5
c = (2 - 3 * rho**2) * volvol**2 / 24
d = fkbeta**0.5
v = (1 - beta) ** 2 * logfk**2 / 24
w = (1 - beta) ** 4 * logfk**4 / 1920
z = volvol * fkbeta**0.5 * logfk / alpha
# if |z| > eps
if abs(z) > eps:
vz = alpha * z * (1 + (a + b + c) * t) / (d * (1 + v + w) * _x(rho, z))
return vz
# if |z| <= eps
else:
v0 = alpha * (1 + (a + b + c) * t) / (d * (1 + v + w))
return v0
expected = lognormal_vol(k, 1.25, 1.0, a, b, p, v)
result = fxs.get_from_strike(k, 1.25)[1] / 100.0
assert abs(result - expected) < 1e-4
class TestFXSabrSurface:
@pytest.mark.parametrize(
"expiries",
[
[dt(2024, 5, 29), dt(2024, 7, 29), dt(2024, 6, 29)],
[dt(2024, 5, 29), dt(2024, 6, 29), dt(2024, 6, 29)],
],
)
def test_unsorted_expiries(self, expiries):
with pytest.raises(ValueError, match="Surface `expiries` are not sorted or contain dupl"):
FXSabrSurface(
eval_date=dt(2024, 5, 28),
expiries=expiries,
node_values=[[0.05, 1.0, 0.01, 0.15]] * 3,
pair="eurusd",
delivery_lag=2,
calendar="tgt|fed",
id="eurusd_vol",
)
def test_z_eurusd_surface_cookbook(self):
from rateslib import (
IRS,
XCS,
FXBrokerFly,
FXRiskReversal,
FXStraddle,
FXSwap,
Solver,
add_tenor,
)
fxr = FXRates({"eurusd": 1.0867}, settlement=dt(2024, 5, 30))
mkt_data = DataFrame(
data=[
[
"1w",
3.9035,
5.3267,
3.33,
],
[
"2w",
3.9046,
5.3257,
6.37,
],
[
"3w",
3.8271,
5.3232,
9.83,
],
[
"1m",
3.7817,
5.3191,
13.78,
],
[
"2m",
3.7204,
5.3232,
30.04,
],
["3m", 3.667, 5.3185, 45.85, -2.5],
[
"4m",
3.6252,
5.3307,
61.95,
],
[
"5m",
3.587,
5.3098,
78.1,
],
["6m", 3.5803, 5.3109, 94.25, -3.125],
[
"7m",
3.5626,
5.301,
110.82,
],
[
"8m",
3.531,
5.2768,
130.45,
],
["9m", 3.5089, 5.2614, 145.6, -7.25],
[
"10m",
3.4842,
5.2412,
162.05,
],
[
"11m",
3.4563,
5.2144,
178,
],
["1y", 3.4336, 5.1936, None, -6.75],
["15m", 3.3412, 5.0729, None, -6.75],
["18m", 3.2606, 4.9694, None, -6.75],
["21m", 3.1897, 4.8797, None, -7.75],
["2y", 3.1283, 4.8022, None, -7.875],
["3y", 2.9254, 4.535, None, -9],
["4y", 2.81, 4.364, None, -10.125],
["5y", 2.7252, 4.256, None, -11.125],
["6y", 2.6773, 4.192, None, -12.125],
["7y", 2.6541, 4.151, None, -13],
["8y", 2.6431, 4.122, None, -13.625],
["9y", 2.6466, 4.103, None, -14.25],
["10y", 2.6562, 4.091, None, -14.875],
["12y", 2.6835, 4.084, None, -16.125],
["15y", 2.7197, 4.08, None, -17],
["20y", 2.6849, 4.04, None, -16],
["25y", 2.6032, 3.946, None, -12.75],
["30y", 2.5217, 3.847, None, -9.5],
],
columns=["tenor", "estr", "sofr", "fx_swap", "xccy"],
)
eur = Curve(
nodes={
dt(2024, 5, 28): 1.0,
**{add_tenor(dt(2024, 5, 30), _, "F", "tgt"): 1.0 for _ in mkt_data["tenor"]},
},
calendar="tgt",
interpolation="log_linear",
convention="act360",
id="estr",
)
usd = Curve(
nodes={
dt(2024, 5, 28): 1.0,
**{add_tenor(dt(2024, 5, 30), _, "F", "nyc"): 1.0 for _ in mkt_data["tenor"]},
},
calendar="nyc",
interpolation="log_linear",
convention="act360",
id="sofr",
)
eurusd = Curve(
nodes={
dt(2024, 5, 28): 1.0,
**{add_tenor(dt(2024, 5, 30), _, "F", "tgt"): 1.0 for _ in mkt_data["tenor"]},
},
interpolation="log_linear",
convention="act360",
id="eurusd",
)
fxf = FXForwards(fx_rates=fxr, fx_curves={"eureur": eur, "eurusd": eurusd, "usdusd": usd})
estr_swaps = [
IRS(dt(2024, 5, 30), _, spec="eur_irs", curves="estr") for _ in mkt_data["tenor"]
]
estr_rates = mkt_data["estr"].tolist()
labels = mkt_data["tenor"].to_list()
sofr_swaps = [
IRS(dt(2024, 5, 30), _, spec="usd_irs", curves="sofr") for _ in mkt_data["tenor"]
]
sofr_rates = mkt_data["sofr"].tolist()
eur_solver = Solver(
curves=[eur],
instruments=estr_swaps,
s=estr_rates,
fx=fxf,
instrument_labels=labels,
id="eur",
)
usd_solver = Solver(
curves=[usd],
instruments=sofr_swaps,
s=sofr_rates,
fx=fxf,
instrument_labels=labels,
id="usd",
)
fxswaps = [
FXSwap(dt(2024, 5, 30), _, pair="eurusd", curves=["eurusd", "sofr"])
for _ in mkt_data["tenor"][0:14]
]
fxswap_rates = mkt_data["fx_swap"][0:14].tolist()
xcs = [
XCS(dt(2024, 5, 30), _, spec="eurusd_xcs", curves=["estr", "eurusd", "sofr", "sofr"])
for _ in mkt_data["tenor"][14:]
]
xcs_rates = mkt_data["xccy"][14:].tolist()
fx_solver = Solver(
pre_solvers=[eur_solver, usd_solver],
curves=[eurusd],
instruments=fxswaps + xcs,
s=fxswap_rates + xcs_rates,
fx=fxf,
instrument_labels=labels,
id="eurusd_xccy",
)
vol_data = DataFrame(
data=[
["1w", 4.535, -0.047, 0.07, -0.097, 0.252],
["2w", 5.168, -0.082, 0.077, -0.165, 0.24],
["3w", 5.127, -0.175, 0.07, -0.26, 0.233],
["1m", 5.195, -0.2, 0.07, -0.295, 0.235],
["2m", 5.237, -0.28, 0.087, -0.535, 0.295],
["3m", 5.257, -0.363, 0.1, -0.705, 0.35],
["4m", 5.598, -0.47, 0.123, -0.915, 0.422],
["5m", 5.776, -0.528, 0.133, -1.032, 0.463],
["6m", 5.92, -0.565, 0.14, -1.11, 0.49],
["9m", 6.01, -0.713, 0.182, -1.405, 0.645],
["1y", 6.155, -0.808, 0.23, -1.585, 0.795],
["18m", 6.408, -0.812, 0.248, -1.588, 0.868],
["2y", 6.525, -0.808, 0.257, -1.58, 0.9],
["3y", 6.718, -0.733, 0.265, -1.45, 0.89],
["4y", 7.025, -0.665, 0.265, -1.31, 0.885],
["5y", 7.26, -0.62, 0.26, -1.225, 0.89],
["6y", 7.508, -0.516, 0.27, -0.989, 0.94],
["7y", 7.68, -0.442, 0.278, -0.815, 0.975],
["10y", 8.115, -0.267, 0.288, -0.51, 1.035],
["15y", 8.652, -0.325, 0.362, -0.4, 1.195],
["20y", 8.651, -0.078, 0.343, -0.303, 1.186],
["25y", 8.65, -0.029, 0.342, -0.218, 1.178],
["30y", 8.65, 0.014, 0.341, -0.142, 1.171],
],
columns=["tenor", "atm", "25drr", "25dbf", "10drr", "10dbf"],
)
vol_data["expiry"] = [add_tenor(dt(2024, 5, 28), _, "MF", "tgt") for _ in vol_data["tenor"]]
surface = FXSabrSurface(
eval_date=dt(2024, 5, 28),
expiries=list(vol_data["expiry"]),
node_values=[[0.05, 1.0, 0.01, 0.15]] * 23,
pair="eurusd",
delivery_lag=2,
calendar="tgt|fed",
id="eurusd_vol",
)
fx_args = dict(
pair="eurusd",
curves=["eurusd", "sofr"],
calendar="tgt",
delivery_lag=2,
payment_lag=2,
eval_date=dt(2024, 5, 28),
modifier="MF",
premium_ccy="usd",
vol="eurusd_vol",
)
instruments_le_1y, rates_le_1y, labels_le_1y = [], [], []
for row in range(11):
instruments_le_1y.extend(
[
FXStraddle(
strike="atm_delta",
expiry=vol_data["expiry"][row],
delta_type="spot",
**fx_args,
),
FXRiskReversal(
strike=("-25d", "25d"),
expiry=vol_data["expiry"][row],
delta_type="spot",
**fx_args,
),
FXBrokerFly(
strike=(("-25d", "25d"), "atm_delta"),
expiry=vol_data["expiry"][row],
delta_type="spot",
**fx_args,
),
FXRiskReversal(
strike=("-10d", "10d"),
expiry=vol_data["expiry"][row],
delta_type="spot",
**fx_args,
),
FXBrokerFly(
strike=(("-10d", "10d"), "atm_delta"),
expiry=vol_data["expiry"][row],
delta_type="spot",
**fx_args,
),
]
)
rates_le_1y.extend(
[
vol_data["atm"][row],
vol_data["25drr"][row],
vol_data["25dbf"][row],
vol_data["10drr"][row],
vol_data["10dbf"][row],
]
)
labels_le_1y.extend(
[f"atm_{row}", f"25drr_{row}", f"25dbf_{row}", f"10drr_{row}", f"10dbf_{row}"]
)
instruments_gt_1y, rates_gt_1y, labels_gt_1y = [], [], []
for row in range(11, 23):
instruments_gt_1y.extend(
[
FXStraddle(
strike="atm_delta",
expiry=vol_data["expiry"][row],
delta_type="forward",
**fx_args,
),
FXRiskReversal(
strike=("-25d", "25d"),
expiry=vol_data["expiry"][row],
delta_type="forward",
**fx_args,
),
FXBrokerFly(
strike=(("-25d", "25d"), "atm_delta"),
expiry=vol_data["expiry"][row],
delta_type="forward",
**fx_args,
),
FXRiskReversal(
strike=("-10d", "10d"),
expiry=vol_data["expiry"][row],
delta_type="forward",
**fx_args,
),
FXBrokerFly(
strike=(("-10d", "10d"), "atm_delta"),
expiry=vol_data["expiry"][row],
delta_type="forward",
**fx_args,
),
]
)
rates_gt_1y.extend(
[
vol_data["atm"][row],
vol_data["25drr"][row],
vol_data["25dbf"][row],
vol_data["10drr"][row],
vol_data["10dbf"][row],
]
)
labels_gt_1y.extend(
[f"atm_{row}", f"25drr_{row}", f"25dbf_{row}", f"10drr_{row}", f"10dbf_{row}"]
)
Solver(
surfaces=[surface],
instruments=instruments_le_1y + instruments_gt_1y,
s=rates_le_1y + rates_gt_1y,
instrument_labels=labels_le_1y + labels_gt_1y,
fx=fxf,
pre_solvers=[fx_solver],
id="eurusd_vol",
)
def test_k_derivative_interpolation(self, fxfo):
# test the derivative of the k-interpolated volatility of a SabrSurface against Fwd diff
# and AD.
surface = FXSabrSurface(
eval_date=dt(2023, 3, 16),
expiries=[dt(2025, 5, 28), dt(2026, 5, 28)],
node_values=[
[0.05, 1.0, 0.01, 0.15],
[0.06, 1.0, 0.02, 0.20],
],
pair="eurusd",
delivery_lag=2,
calendar="tgt|fed",
id="eurusd_vol",
)
k = Dual(1.10, ["k"], [1.0])
base = surface.get_from_strike(k, fxfo, dt(2025, 12, 12))[1]
expected_ad = gradient(base, vars=["k"])[0]
expected_fwd_diff = (
surface.get_from_strike(k + 0.0001, fxfo, dt(2025, 12, 12))[1] - base
) / 1e-4
result = surface._d_sabr_d_k_or_f(k, fxfo, dt(2025, 12, 12), False, 1)[1] * 100.0
assert abs(expected_fwd_diff - result) < 1e-3
assert abs(expected_ad - result) < 1e-3
@pytest.mark.parametrize(
("k", "expiry", "expected"),
[
(1.10, dt(2023, 4, 15), 5.011351023668074),
(1.10, dt(2023, 6, 28), 5.011351023668074),
(1.10, dt(2023, 7, 15), 5.333915841859923),
(1.10, dt(2023, 9, 28), 6.021827601466909),
(1.10, dt(2023, 10, 28), 6.022252380963102),
],
)
def test_get_from_strike(self, fxfo, k, expiry, expected):
# test different branches for expiry
surface = FXSabrSurface(
eval_date=dt(2023, 3, 16),
expiries=[dt(2023, 6, 28), dt(2023, 9, 28)],
node_values=[
[0.05, 1.0, 0.01, 0.15],
[0.06, 1.0, 0.02, 0.20],
],
pair="eurusd",
delivery_lag=2,
calendar="tgt|fed",
id="eurusd_vol",
)
result = surface.get_from_strike(k, fxfo, expiry)
assert result[0] == 0.0
assert abs(result[1] - expected) < 1e-14
assert result[2] == k
def test_variables_on_extrapolated_sabr_smiles_before(self, fxfo):
# assert that vars on extrapolated smiles reference the underlying smiles vars
fxss = FXSabrSurface(
eval_date=dt(2023, 3, 16),
expiries=[dt(2023, 7, 15), dt(2023, 9, 15)],
node_values=[[0.05, 1.0, 0.01, 0.15]] * 2,
pair="eurusd",
delivery_lag=2,
calendar="tgt|fed",
id="v",
ad=1,
)
result = fxss.get_from_strike(1.10, fxfo, dt(2023, 4, 14))[1]
assert result.vars == ["v_0_0", "v_0_1", "v_0_2", "fx_eurusd"]
def test_variables_on_extrapolated_sabr_smiles_after(self, fxfo):
# assert that vars on extrapolated smiles reference the underlying smiles vars
fxss = FXSabrSurface(
eval_date=dt(2023, 3, 16),
expiries=[dt(2023, 7, 15), dt(2023, 9, 15)],
node_values=[[0.05, 1.0, 0.01, 0.15]] * 2,
pair="eurusd",
delivery_lag=2,
calendar="tgt|fed",
id="v",
ad=1,
)
result = fxss.get_from_strike(1.10, fxfo, dt(2024, 4, 14))[1]
assert result.vars == ["v_1_0", "v_1_1", "v_1_2", "fx_eurusd"]
def test_update_state(self):
fxss = FXSabrSurface(
eval_date=dt(2023, 3, 16),
expiries=[dt(2023, 7, 15), dt(2023, 9, 15)],
node_values=[[0.05, 1.0, 0.01, 0.15]] * 2,
pair="eurusd",
delivery_lag=2,
calendar="tgt|fed",
id="v",
ad=1,
)
state_ = fxss._state
fxss.smiles[1].update_node("alpha", 0.06)
assert state_ != fxss._get_composited_state()
# calling get from strike will validate
fxss.get_from_strike(1.1, 1.1, dt(2023, 7, 15))
assert fxss._state == fxss._get_composited_state()
@pytest.mark.parametrize("smile_expiry", [dt(2026, 5, 1), dt(2026, 6, 9), dt(2026, 7, 1)])
def test_flat_surface_and_get_smile_one_expiry(self, smile_expiry):
# gh 911
anchor = dt(2025, 6, 9)
expiry = dt(2026, 6, 9)
surf = FXSabrSurface(
eval_date=anchor,
expiries=[expiry],
node_values=[[0.10, 1.0, 0.0, 0.0]],
)
result = surf.get_from_strike(1.0, 1.10, smile_expiry)[1]
assert abs(result - 10.0) < 1e-13
@pytest.mark.parametrize("option_expiry", [dt(2026, 5, 1), dt(2026, 6, 9), dt(2026, 7, 1)])
def test_flat_surface_option_strike_delta(self, option_expiry):
surf = FXSabrSurface(
eval_date=dt(2025, 6, 9),
expiries=[dt(2026, 6, 9)],
node_values=[[0.10, 1.0, 0.0, 0.0]],
)
fxo = FXCallPeriod(
pair="eurusd",
expiry=option_expiry,
delivery=option_expiry,
strike=NoInput(0),
delta_type="forward",
)
result = fxo._index_vol_and_strike_from_delta_sabr(0.25, "forward", surf, 1, 1.10)
assert abs(result[1] - 10.0) < 1e-13
result = fxo._index_vol_and_strike_from_atm_sabr(1.10, 0.50, surf)
assert abs(result[1] - 10.0) < 1e-13
class TestStateAndCache:
@pytest.mark.parametrize(
"curve",
[
FXDeltaVolSmile(
nodes={0.25: 10.0, 0.5: 10.0, 0.75: 11.0},
delta_type="forward",
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
id="vol",
),
FXSabrSmile(
nodes={
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
id="vol",
),
],
)
@pytest.mark.parametrize(("method", "args"), [("_set_ad_order", (1,))])
def test_method_does_not_change_state(self, curve, method, args):
before = curve._state
getattr(curve, method)(*args)
after = curve._state
assert before == after
@pytest.mark.parametrize(
"curve",
[
FXDeltaVolSmile(
nodes={0.25: 10.0, 0.5: 10.0, 0.75: 11.0},
delta_type="forward",
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
id="vol",
),
],
)
@pytest.mark.parametrize(
("method", "args"),
[
("_set_node_vector", ([0.99, 0.98, 0.99], 1)),
("update_node", (0.25, 0.98)),
("update", ({0.25: 10.0, 0.5: 10.0, 0.75: 10.1},)),
],
)
def test_method_changes_state(self, curve, method, args):
before = curve._state
getattr(curve, method)(*args)
after = curve._state
assert before != after
@pytest.mark.parametrize(
"curve",
[
FXSabrSmile(
nodes={
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
id="vol",
)
],
)
@pytest.mark.parametrize(
("method", "args"),
[
("_set_node_vector", ([0.99, 0.98, 0.99], 1)),
("update_node", ("alpha", 0.98)),
],
)
def test_method_changes_state_sabr(self, curve, method, args):
before = curve._state
getattr(curve, method)(*args)
after = curve._state
assert before != after
def test_populate_cache(self):
# objects have yet to implement cache handling
pass
def test_method_clears_cache(self):
# objects have yet to implement cache handling
pass
@pytest.mark.parametrize(
("method", "args"),
[
("_set_node_vector", ([0.99, 0.98], 1)),
("_set_ad_order", (2,)),
],
)
def test_surface_clear_cache(self, method, args):
surf = FXDeltaVolSurface(
expiries=[dt(2000, 1, 1), dt(2001, 1, 1)],
delta_indexes=[0.5],
node_values=[[10.0], [9.0]],
eval_date=dt(1999, 1, 1),
delta_type="forward",
)
surf.get_smile(dt(2000, 3, 1))
assert dt(2000, 3, 1) in surf._cache
getattr(surf, method)(*args)
assert len(surf._cache) == 0
@pytest.mark.parametrize(
("method", "args"),
[
("get_from_strike", (1.0, 1.0, dt(2000, 5, 3), NoInput(0))),
("_get_index", (0.9, dt(2000, 5, 3))),
("get_smile", (dt(2000, 5, 3),)),
],
)
def test_surface_populate_cache(self, method, args):
surf = FXDeltaVolSurface(
expiries=[dt(2000, 1, 1), dt(2001, 1, 1)],
delta_indexes=[0.5],
node_values=[[10.0], [9.0]],
eval_date=dt(1999, 1, 1),
delta_type="forward",
)
before = surf._cache_len
getattr(surf, method)(*args)
assert surf._cache_len == before + 1
@pytest.mark.parametrize(
("method", "args"),
[
("_set_node_vector", ([0.99, 0.98, 0.99, 0.99, 0.98, 0.99], 1)),
],
)
@pytest.mark.parametrize(
"surface",
[
FXDeltaVolSurface(
expiries=[dt(2000, 1, 1), dt(2001, 1, 1)],
delta_indexes=[0.25, 0.5, 0.75],
node_values=[[10.0, 9.0, 8.0], [9.0, 8.0, 7.0]],
eval_date=dt(1999, 1, 1),
delta_type="forward",
),
FXSabrSurface(
expiries=[dt(2000, 1, 1), dt(2001, 1, 1)],
node_values=[[10.0, 1.0, 8.0, 9.0], [9.0, 1.0, 8.0, 7.0]],
eval_date=dt(1999, 1, 1),
),
],
)
def test_surface_change_state(self, method, args, surface):
pre_state = surface._state
getattr(surface, method)(*args)
assert surface._state != pre_state
@pytest.mark.parametrize(
("method", "args"),
[
("_set_ad_order", (2,)),
],
)
@pytest.mark.parametrize(
"surface",
[
FXDeltaVolSurface(
expiries=[dt(2000, 1, 1), dt(2001, 1, 1)],
delta_indexes=[0.25, 0.5, 0.75],
node_values=[[10.0, 9.0, 8.0], [9.0, 8.0, 7.0]],
eval_date=dt(1999, 1, 1),
delta_type="forward",
),
FXSabrSurface(
expiries=[dt(2000, 1, 1), dt(2001, 1, 1)],
node_values=[[10.0, 1.0, 8.0, 9.0], [9.0, 1.0, 8.0, 7.0]],
eval_date=dt(1999, 1, 1),
),
],
)
def test_surface_maintain_state(self, method, args, surface):
pre_state = surface._state
getattr(surface, method)(*args)
assert surface._state == pre_state
def test_surface_validate_states(self):
# test the get_smile method validates the states after a mutation
surf = FXDeltaVolSurface(
expiries=[dt(2000, 1, 1), dt(2001, 1, 1)],
delta_indexes=[0.5],
node_values=[[10.0], [9.0]],
eval_date=dt(1999, 1, 1),
delta_type="forward",
)
pre_state = surf._state
surf.smiles[0].update_node(0.5, 11.0)
surf.get_smile(dt(2000, 1, 9))
post_state = surf._state
assert pre_state != post_state # validate states has been run and updated the state.
@pytest.mark.parametrize(
"smile",
[
FXDeltaVolSmile(
nodes={0.25: 10.0, 0.5: 10.0, 0.75: 11.0},
delta_type="forward",
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
id="vol",
),
FXSabrSmile(
nodes={
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
id="vol",
),
],
)
def test_initialisation_state_smile(self, smile):
assert smile._state != 0
def test_initialisation_state_surface(self):
surf = FXDeltaVolSurface(
expiries=[dt(2000, 1, 1), dt(2001, 1, 1)],
delta_indexes=[0.5],
node_values=[[10.0], [9.0]],
eval_date=dt(1999, 1, 1),
delta_type="forward",
)
assert surf._state != 0
def test_validate_delta_type() -> None:
with pytest.raises(ValueError, match="`delta_type` as string: 'BAD_TYPE' i"):
_get_fx_delta_type("BAD_TYPE")
================================================
FILE: python/tests/test_fxrs.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
import pytest
from rateslib.fx import FXRates
from rateslib.rs import Ccy, Dual, Dual2, FXRate
from rateslib.serialization import from_json
def test_ccy_creation() -> None:
c1 = Ccy("usd")
c2 = Ccy("USD")
assert c1 == c2
@pytest.mark.parametrize("val", [0.99, Dual(0.99, ["x"], []), Dual2(0.99, ["x"], [], [])])
def test_fx_rate_creation(val) -> None:
fxr = FXRate("usd", "eur", val, dt(2001, 1, 1))
assert fxr.rate == val
assert fxr.pair == "usdeur"
assert fxr.settlement == dt(2001, 1, 1)
def test_json_round_trip() -> None:
fxr = FXRates({"eurusd": 1.08, "usdjpy": 110.0}, dt(2004, 1, 1))
json = fxr.to_json()
fxr2 = from_json(json)
assert fxr == fxr2
def test_equality() -> None:
fxr = FXRates({"eurusd": 1.08, "usdjpy": 110.0}, dt(2004, 1, 1))
fxr2 = FXRates({"eurusd": 1.08, "usdjpy": 110.0}, dt(2004, 1, 1))
assert fxr == fxr2
================================================
FILE: python/tests/test_ir_volatility.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
import sys
from datetime import datetime as dt
from itertools import combinations, product
import numpy as np
import pytest
from matplotlib import pyplot as plt
from pandas import DataFrame, Index, IndexSlice, Series
from pandas.testing import assert_frame_equal, assert_series_equal
from rateslib import calendars, default_context
from rateslib.curves import CompositeCurve, Curve, LineCurve
from rateslib.data.fixings import IRSSeries
from rateslib.default import NoInput
from rateslib.dual import Dual, Dual2, Variable, gradient
from rateslib.instruments import IRS, IRSCall, IRSPut, IRSStraddle, IRVolValue
from rateslib.solver import Solver
from rateslib.splines import PPSplineF64
from rateslib.volatility import (
IRSabrCube,
IRSabrSmile,
IRSplineCube,
IRSplineSmile,
)
from rateslib.volatility.ir.utils import _bilinear_interp, _scale_weights
from rateslib.volatility.utils import _OptionModelBachelier, _OptionModelBlack76, _SabrSmileNodes
@pytest.mark.parametrize(
("h", "v", "expected"),
[
((1, 1), (1, 1), 10),
((0.5, 0.5), (0.5, 0.5), 5.0),
((0.0, 0.0), (0.0, 0.0), 0.0),
((0.0, 0.5), (0.0, 0.0), 0.0),
((0.0, 0.0), (0.8, 0.4), 4.80),
((0.1, 0.2), (0.4, 0.5), 4.0 * 0.1 * 0.5 + 6.0 * 0.8 * 0.4 + 10.0 * 0.2 * 0.5),
],
)
def test_bilinear_interp(h, v, expected):
result = _bilinear_interp(0.0, 4.0, 6.0, 10.0, h, v)
assert abs(result - expected) < 1e-10
def test_numpy_ravel_for_dates_posix():
a = np.array([[1, 1, 2], [3, 4, 5]])
b = np.reshape(list(a.ravel()), (2, 3))
assert np.all(a == b)
@pytest.fixture
def curve():
return Curve(
nodes={
dt(2022, 3, 1): 1.00,
dt(2032, 3, 31): 0.50,
},
interpolation="log_linear",
id="v",
convention="Act360",
ad=1,
)
class TestIRSabrSmile:
@pytest.mark.parametrize(
("strike", "vol"),
[
(1.2034, 19.49),
(1.2050, 19.47),
(1.3395, 18.31), # f == k
(1.3620, 18.25),
(1.5410, 18.89),
(1.5449, 18.93),
],
)
def test_sabr_vol(self, strike, vol):
# repeat the same test developed for FXSabrSmile
irss = IRSabrSmile(
nodes={
"alpha": 0.17431060,
"rho": -0.11268306,
"nu": 0.81694072,
},
beta=1.0,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="vol",
)
result = irss.get_from_strike(k=strike, f=1.3395).vol
assert abs(result - vol) < 1e-2
def test_sabr_vol_plot(self):
# repeat the same test developed for FXSabrSmile
irss = IRSabrSmile(
nodes={
"alpha": 0.17431060,
"rho": -0.11268306,
"nu": 0.81694072,
},
beta=1.0,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="vol",
)
result = irss.plot(f=1.0)
_x = result[2][0]._x
_y = result[2][0]._y
assert (_x[0], _y[0]) == (0.7524348790033292, 23.108399874378378)
assert (_x[-1], _y[-1]) == (1.3743407823531082, 21.950871667495214)
def test_sabr_vol_plot_fail(self):
# repeat the same test developed for FXSabrSmile
irss = IRSabrSmile(
nodes={
"alpha": 0.17431060,
"rho": -0.11268306,
"nu": 0.81694072,
},
beta=1.0,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="vol",
)
with pytest.raises(
ValueError,
match=r"`f` \(ATM-forward interest rate\) is required by `_BaseIRSmile.plot`.",
):
irss.plot()
@pytest.mark.parametrize(("k", "f"), [(1.34, 1.34), (1.33, 1.35), (1.35, 1.33)])
def test_sabr_vol_finite_diff_first_order(self, k, f):
# Test all of the first order gradients using finite diff, for the case when f != k and
# when f == k, which is a branched calculation to handle a undefined point.
irss = IRSabrSmile(
nodes={
"alpha": 0.17431060,
"rho": -0.11268306,
"nu": 0.81694072,
},
beta=1.0,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="vol",
ad=2,
)
# F_0,T is stated in section 3.5.4 as 1.3395
base = irss.get_from_strike(k=Dual2(k, ["k"], [], []), f=Dual2(f, ["f"], [], [])).vol
a = irss.nodes.alpha
p = irss.nodes.rho
v = irss.nodes.nu
def inc_(key1, inc1):
in_ = {"k": k, "f": f, "alpha": a, "rho": p, "nu": v}
in_[key1] += inc1
irss._nodes = _SabrSmileNodes(
_alpha=in_["alpha"], _beta=1.0, _rho=in_["rho"], _nu=in_["nu"]
)
_ = (
irss._d_sabr_d_k_or_f(
Dual2(in_["k"], ["k"], [], []),
Dual2(in_["f"], ["f"], [], []),
dt(2002, 1, 1),
False,
1,
)[0]
* 100.0
)
# reset
irss._nodes = _SabrSmileNodes(_alpha=a, _beta=1.0, _rho=p, _nu=v)
return _
for key in ["k", "f", "alpha", "rho", "nu"]:
map_ = {"k": "k", "f": "f", "alpha": "vol0", "rho": "vol1", "nu": "vol2"}
up_ = inc_(key, 1e-5)
dw_ = inc_(key, -1e-5)
assert abs((up_ - dw_) / 2e-5 - gradient(base, [map_[key]])[0]) < 1e-5
@pytest.mark.parametrize(
("k", "f"), [(1.34, 1.34), (1.33, 1.35), (1.35, 1.33), (1.3399, 1.34), (1.34, 1.3401)]
)
@pytest.mark.parametrize("pair", list(combinations(["k", "f", "alpha", "rho", "nu"], 2)))
def test_sabr_vol_cross_finite_diff_second_order(self, k, f, pair):
# Test all of the second order cross gradients using finite diff,
# for the case when f != k and
# when f == k, which is a branched calculation to handle a undefined point.
irss = IRSabrSmile(
nodes={
"alpha": 0.17431060,
"rho": -0.11268306,
"nu": 0.81694072,
},
beta=1.0,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="v",
ad=2,
)
a = irss.nodes.alpha
p = irss.nodes.rho
v = irss.nodes.nu
# F_0,T is stated in section 3.5.4 as 1.3395
base = irss.get_from_strike(k=Dual2(k, ["k"], [], []), f=Dual2(f, ["f"], [], [])).vol
def inc_(key1, key2, inc1, inc2):
in_ = {"k": k, "f": f, "alpha": a, "rho": p, "nu": v}
in_[key1] += inc1
in_[key2] += inc2
irss._nodes = _SabrSmileNodes(
_alpha=in_["alpha"], _beta=1.0, _rho=in_["rho"], _nu=in_["nu"]
)
_ = (
irss._d_sabr_d_k_or_f(
Dual2(in_["k"], ["k"], [], []),
Dual2(in_["f"], ["f"], [], []),
dt(2002, 1, 1),
False,
1,
)[0]
* 100.0
)
# reset
irss._nodes = _SabrSmileNodes(_alpha=a, _beta=1.0, _rho=p, _nu=v)
return _
v_map = {"k": "k", "f": "f", "alpha": "v0", "rho": "v1", "nu": "v2"}
upup = inc_(pair[0], pair[1], 1e-3, 1e-3)
updown = inc_(pair[0], pair[1], 1e-3, -1e-3)
downup = inc_(pair[0], pair[1], -1e-3, 1e-3)
downdown = inc_(pair[0], pair[1], -1e-3, -1e-3)
expected = (upup + downdown - updown - downup) / 4e-6
result = gradient(base, [v_map[pair[0]], v_map[pair[1]]], order=2)[0][1]
assert abs(result - expected) < 1e-2
@pytest.mark.parametrize(
("k", "f"), [(1.34, 1.34), (1.33, 1.35), (1.35, 1.33), (1.3399, 1.34), (1.34, 1.3401)]
)
@pytest.mark.parametrize("var", ["k", "f", "alpha", "rho", "nu"])
def test_sabr_vol_same_finite_diff_second_order(self, k, f, var):
# Test all of the second order cross gradients using finite diff,
# for the case when f != k and
# when f == k, which is a branched calculation to handle a undefined point.
irss = IRSabrSmile(
nodes={
"alpha": 0.17431060,
"rho": -0.11268306,
"nu": 0.81694072,
},
beta=1.0,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="v",
ad=2,
)
a = irss.nodes.alpha
p = irss.nodes.rho
v = irss.nodes.nu
# F_0,T is stated in section 3.5.4 as 1.3395
base = irss.get_from_strike(k=Dual2(k, ["k"], [], []), f=Dual2(f, ["f"], [], [])).vol
def inc_(key1, inc1):
in_ = {"k": k, "f": f, "alpha": a, "rho": p, "nu": v}
in_[key1] += inc1
irss._nodes = _SabrSmileNodes(
_alpha=in_["alpha"], _beta=1.0, _rho=in_["rho"], _nu=in_["nu"]
)
_ = (
irss._d_sabr_d_k_or_f(
Dual2(in_["k"], ["k"], [], []),
Dual2(in_["f"], ["f"], [], []),
dt(2002, 1, 1),
False,
1,
)[0]
* 100.0
)
# reset
irss._nodes = _SabrSmileNodes(_alpha=a, _beta=1.0, _rho=p, _nu=v)
return _
v_map = {"k": "k", "f": "f", "alpha": "v0", "rho": "v1", "nu": "v2"}
up = inc_(var, 1e-4)
down = inc_(var, -1e-4)
expected = (up + down - 2 * base) / 1e-8
result = gradient(base, [v_map[var]], order=2)[0][0]
assert abs(result - expected) < 5e-3
def test_sabr_vol_root_multi_duals_neighbourhood(self):
# test the SABR function when regular arithmetic operations produce an undefined 0/0
# value so AD has to be hard coded into the solution. This occurs when f == k.
# test by comparing derivatives with those captured at a nearby valid point
irss = IRSabrSmile(
nodes={
"alpha": 0.17431060,
"rho": -0.11268306,
"nu": 0.81694072,
},
beta=1.0,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="v",
ad=2,
)
# F_0,T is stated in section 3.5.4 as 1.3395
base = irss.get_from_strike(k=Dual2(1.34, ["k"], [], []), f=Dual2(1.34, ["f"], [], [])).vol
comparison1 = irss.get_from_strike(
k=Dual2(1.341, ["k"], [], []), f=Dual2(1.34, ["f"], [], [])
).vol
assert np.all(abs(base.dual - comparison1.dual) < 1e-1)
diff = base.dual2 - comparison1.dual2
dual2 = abs(diff) < 5e-1
assert np.all(dual2)
@pytest.mark.parametrize("param", ["alpha", "rho", "nu"])
def test_missing_param_raises(self, param):
nodes = {
"alpha": 0.17431060,
"rho": -0.11268306,
"nu": 0.81694072,
}
nodes.pop(param)
with pytest.raises(ValueError):
IRSabrSmile(
nodes=nodes,
beta=1.0,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="v",
ad=2,
)
def test_non_iterable(self):
irss = IRSabrSmile(
nodes={
"alpha": 0.17431060,
"rho": -0.11268306,
"nu": 0.81694072,
},
beta=1.0,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="v",
ad=2,
)
with pytest.raises(TypeError):
list(irss)
def test_update_node_raises(self):
irss = IRSabrSmile(
nodes={
"alpha": 0.17431060,
"rho": -0.11268306,
"nu": 0.81694072,
},
beta=1.0,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="v",
ad=2,
)
with pytest.raises(KeyError, match="'bananas' is not in `nodes`."):
irss.update_node("bananas", 12.0)
def test_set_ad_order_raises(self):
irss = IRSabrSmile(
nodes={
"alpha": 0.17431060,
"rho": -0.11268306,
"nu": 0.81694072,
},
beta=1.0,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="v",
ad=2,
)
with pytest.raises(ValueError, match="`order` can only be in {0, 1, 2} "):
irss._set_ad_order(12)
def test_get_node_vars_and_vector(self):
irss = IRSabrSmile(
nodes={
"alpha": 0.20,
"rho": -0.10,
"nu": 0.80,
},
beta=1.0,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="myid",
)
result = irss._get_node_vars()
expected = ("myid0", "myid1", "myid2")
assert result == expected
result = irss._get_node_vector()
expected = np.array([0.20, -0.1, 0.80])
assert np.all(result == expected)
def test_get_from_strike_expiry_raises(self):
irss = IRSabrSmile(
nodes={
"alpha": 0.20,
"rho": -0.10,
"nu": 0.80,
},
beta=1.0,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="myid",
)
with pytest.raises(
ValueError, match="`expiry` of _BaseIRSmile and intended price do not match"
):
irss.get_from_strike(k=1.0, f=1.0, expiry=dt(1999, 1, 1))
@pytest.mark.parametrize("k", [1.2034, 1.2050, 1.3620, 1.5410, 1.5449])
def test_get_from_strike_ad_2(self, k) -> None:
# Use finite diff to validate the 2nd order AD of the SABR function in alpha and rho.
irss = IRSabrSmile(
nodes={
"alpha": 0.20,
"rho": -0.10,
"nu": 0.80,
},
beta=1.0,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="vol",
ad=2,
)
kwargs = dict(
k=k,
f=1.350,
)
pv00 = irss.get_from_strike(**kwargs)
irss.update_node("alpha", 0.20 + 0.00001)
irss.update_node("rho", -0.10 + 0.00001)
pv11 = irss.get_from_strike(**kwargs)
irss.update_node("alpha", 0.20 + 0.00001)
irss.update_node("rho", -0.10 - 0.00001)
pv1_1 = irss.get_from_strike(**kwargs)
irss.update_node("alpha", 0.20 - 0.00001)
irss.update_node("rho", -0.10 - 0.00001)
pv_1_1 = irss.get_from_strike(**kwargs)
irss.update_node("alpha", 0.20 - 0.00001)
irss.update_node("rho", -0.10 + 0.00001)
pv_11 = irss.get_from_strike(**kwargs)
finite_diff = (pv11.vol + pv_1_1.vol - pv1_1.vol - pv_11.vol) * 1e10 / 4.0
ad_grad = gradient(pv00.vol, ["vol0", "vol1"], 2)[0, 1]
assert abs(finite_diff - ad_grad) < 1e-4
@pytest.mark.parametrize(("k", "f"), [(1.34, 1.34), (1.33, 1.35), (1.35, 1.33)])
def test_sabr_derivative_finite_diff_first_order(self, k, f):
# Test all of the first order gradients using finite diff, for the case when f != k and
# when f == k, which is a branched calculation to handle a undefined point.
irss = IRSabrSmile(
nodes={
"alpha": 0.20,
"rho": -0.10,
"nu": 0.80,
},
beta=1.0,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="vol",
ad=2,
)
t = dt(2002, 1, 1)
base = irss._d_sabr_d_k_or_f(
Dual2(k, ["k"], [1.0], []), Dual2(f, ["f"], [1.0], []), t, False, 1
)[1]
a = irss.nodes.alpha
p = irss.nodes.rho
v = irss.nodes.nu
def inc_(key1, inc1):
in_ = {"k": k, "f": f, "alpha": a, "rho": p, "nu": v}
in_[key1] += inc1
irss._nodes = _SabrSmileNodes(
_alpha=in_["alpha"], _beta=1.0, _rho=in_["rho"], _nu=in_["nu"]
)
_ = irss._d_sabr_d_k_or_f(
Dual2(in_["k"], ["k"], [], []),
Dual2(in_["f"], ["f"], [], []),
dt(2002, 1, 1),
False,
1,
)[1]
# reset
irss._nodes = _SabrSmileNodes(_alpha=a, _beta=1.0, _rho=p, _nu=v)
return _
for key in ["k", "f", "alpha", "rho", "nu"]:
map_ = {"k": "k", "f": "f", "alpha": "vol0", "rho": "vol1", "nu": "vol2"}
up_ = inc_(key, 1e-5)
dw_ = inc_(key, -1e-5)
expected = (up_ - dw_) / 2e-5
result = gradient(base, [map_[key]])[0]
assert abs(expected - result) < 7e-3
@pytest.mark.parametrize(
("k", "f"), [(1.34, 1.34), (1.33, 1.35), (1.35, 1.33), (1.3395, 1.34), (1.34, 1.3405)]
)
@pytest.mark.parametrize("pair", list(combinations(["k", "f", "alpha", "rho", "nu"], 2)))
def test_sabr_derivative_cross_finite_diff_second_order(self, k, f, pair):
# Test all of the second order cross gradients using finite diff,
# for the case when f != k and
# when f == k, which is a branched calculation to handle a undefined point.
irss = IRSabrSmile(
nodes={
"alpha": 0.20,
"rho": -0.10,
"nu": 0.80,
},
beta=1.0,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="v",
ad=2,
)
a = irss.nodes.alpha
p = irss.nodes.rho
v = irss.nodes.nu
# F_0,T is stated in section 3.5.4 as 1.3395
base = irss._d_sabr_d_k_or_f(
Dual2(k, ["k"], [], []), Dual2(f, ["f"], [], []), dt(2002, 1, 1), False, 1
)[1]
def inc_(key1, key2, inc1, inc2):
in_ = {"k": k, "f": f, "alpha": a, "rho": p, "nu": v}
in_[key1] += inc1
in_[key2] += inc2
irss._nodes = _SabrSmileNodes(
_alpha=in_["alpha"], _beta=1.0, _rho=in_["rho"], _nu=in_["nu"]
)
_ = irss._d_sabr_d_k_or_f(
Dual2(in_["k"], ["k"], [], []),
Dual2(in_["f"], ["f"], [], []),
dt(2002, 1, 1),
False,
1,
)[1]
# reset
irss._nodes = _SabrSmileNodes(_alpha=a, _beta=1.0, _rho=p, _nu=v)
return _
v_map = {"k": "k", "f": "f", "alpha": "v0", "rho": "v1", "nu": "v2"}
upup = inc_(pair[0], pair[1], 1e-3, 1e-3)
updown = inc_(pair[0], pair[1], 1e-3, -1e-3)
downup = inc_(pair[0], pair[1], -1e-3, 1e-3)
downdown = inc_(pair[0], pair[1], -1e-3, -1e-3)
expected = (upup + downdown - updown - downup) / 4e-6
result = gradient(base, [v_map[pair[0]], v_map[pair[1]]], order=2)[0][1]
assert abs(result - expected) < 5e-3
@pytest.mark.parametrize(
("k", "f"),
[(1.34, 1.34), (1.33, 1.35), (1.35, 1.33), (1.3395, 1.34), (1.34, 1.3405)],
)
@pytest.mark.parametrize("var", ["k", "f", "alpha", "rho", "nu"])
def test_sabr_derivative_same_finite_diff_second_order(self, k, f, var):
# Test all of the second order cross gradients using finite diff,
# for the case when f != k and
# when f == k, which is a branched calculation to handle a undefined point.
irss = IRSabrSmile(
nodes={
"alpha": 0.20,
"rho": -0.10,
"nu": 0.80,
},
beta=1.0,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="v",
ad=2,
)
a = irss.nodes.alpha
p = irss.nodes.rho
v = irss.nodes.nu
# F_0,T is stated in section 3.5.4 as 1.3395
base = irss._d_sabr_d_k_or_f(
Dual2(k, ["k"], [], []), Dual2(f, ["f"], [], []), dt(2002, 1, 1), False, 1
)[1]
def inc_(key1, inc1):
k_ = k
f_ = f
if key1 == "k":
k_ = k + inc1
elif key1 == "f":
f_ = f + inc1
else:
irss.update_node(key1, getattr(irss.nodes, key1) + inc1)
# irss.nodes[key1] = irss.nodes[key1] + inc1
_ = irss._d_sabr_d_k_or_f(
Dual2(k_, ["k"], [], []), Dual2(f_, ["f"], [], []), dt(2002, 1, 1), False, 1
)[1]
irss._nodes = _SabrSmileNodes(_alpha=a, _beta=1.0, _rho=p, _nu=v)
return _
v_map = {"k": "k", "f": "f", "alpha": "v0", "rho": "v1", "nu": "v2"}
up = inc_(var, 1e-3)
down = inc_(var, -1e-3)
expected = (up + down - 2 * base) / 1e-6
result = gradient(base, [v_map[var]], order=2)[0][0]
assert abs(result - expected) < 3e-3
def test_sabr_derivative_root_multi_duals_neighbourhood(self):
# test the SABR function when regular arithmetic operations produce an undefined 0/0
# value so AD has to be hard coded into the solution. This occurs when f == k.
# test by comparing derivatives with those captured at a nearby valid point
irss = IRSabrSmile(
nodes={
"alpha": 0.20,
"rho": -0.10,
"nu": 0.80,
},
beta=1.0,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="v",
ad=2,
)
# F_0,T is stated in section 3.5.4 as 1.3395
base = irss._d_sabr_d_k_or_f(
Dual2(1.34, ["k"], [], []), Dual2(1.34, ["f"], [], []), dt(2002, 1, 1), False, 1
)[1]
comparison1 = irss._d_sabr_d_k_or_f(
Dual2(1.341, ["k"], [], []), Dual2(1.34, ["f"], [], []), dt(2002, 1, 1), False, 1
)[1]
assert np.all(abs(base.dual - comparison1.dual) < 5e-3)
diff = base.dual2 - comparison1.dual2
dual2 = abs(diff) < 3e-2
assert np.all(dual2)
#
# def test_plot_domain(self):
# ss = FXSabrSmile(
# eval_date=dt(2024, 5, 28),
# expiry=dt(2054, 5, 28),
# nodes={"alpha": 0.02, "beta": 1.0, "rho": 0.01, "nu": 0.05},
# )
# ax, fig, lines = ss.plot(f=1.60)
# assert abs(lines[0]._x[0] - 1.3427) < 1e-4
# assert abs(lines[0]._x[-1] - 1.9299) < 1e-4
# assert abs(lines[0]._y[0] - 2.0698) < 1e-4
# assert abs(lines[0]._y[-1] - 2.0865) < 1e-4
#
#
# def test_solver_variable_numbers(self):
# from rateslib import IRS, FXBrokerFly, FXCall, FXRiskReversal, FXStraddle, FXSwap, Solver
#
# usdusd = Curve({dt(2024, 5, 7): 1.0, dt(2024, 5, 30): 1.0}, calendar="nyc", id="usdusd")
# eureur = Curve({dt(2024, 5, 7): 1.0, dt(2024, 5, 30): 1.0}, calendar="tgt", id="eureur")
# eurusd = Curve({dt(2024, 5, 7): 1.0, dt(2024, 5, 30): 1.0}, id="eurusd")
#
# # Create an FX Forward market with spot FX rate data
# fxr = FXRates({"eurusd": 1.0760}, settlement=dt(2024, 5, 9))
# fxf = FXForwards(
# fx_rates=fxr,
# fx_curves={"eureur": eureur, "usdusd": usdusd, "eurusd": eurusd},
# )
#
# pre_solver = Solver(
# curves=[eureur, eurusd, usdusd],
# instruments=[
# IRS(dt(2024, 5, 9), "3W", spec="eur_irs", curves="eureur"),
# IRS(dt(2024, 5, 9), "3W", spec="usd_irs", curves="usdusd"),
# FXSwap(
# dt(2024, 5, 9), "3W", pair="eurusd", curves=[None, "eurusd", None, "usdusd"]
# ),
# ],
# s=[3.90, 5.32, 8.85],
# fx=fxf,
# id="rates_sv",
# )
#
# dv_smile = FXSabrSmile(
# nodes={"alpha": 0.05, "beta": 1.0, "rho": 0.01, "nu": 0.03},
# eval_date=dt(2024, 5, 7),
# expiry=dt(2024, 5, 28),
# id="eurusd_3w_smile",
# pair="eurusd",
# )
# option_args = dict(
# pair="eurusd",
# expiry=dt(2024, 5, 28),
# calendar="tgt|fed",
# delta_type="spot",
# curves=["eurusd", "usdusd"],
# vol="eurusd_3w_smile",
# )
#
# dv_solver = Solver(
# pre_solvers=[pre_solver],
# curves=[dv_smile],
# instruments=[
# FXStraddle(strike="atm_delta", **option_args),
# FXRiskReversal(strike=("-25d", "25d"), **option_args),
# FXRiskReversal(strike=("-10d", "10d"), **option_args),
# FXBrokerFly(strike=(("-25d", "25d"), "atm_delta"), **option_args),
# FXBrokerFly(strike=(("-10d", "10d"), "atm_delta"), **option_args),
# ],
# s=[5.493, -0.157, -0.289, 0.071, 0.238],
# fx=fxf,
# id="dv_solver",
# )
#
# fc = FXCall(
# expiry=dt(2024, 5, 28),
# pair="eurusd",
# strike=1.07,
# notional=100e6,
# curves=["eurusd", "usdusd"],
# vol="eurusd_3w_smile",
# premium=98.216647 * 1e8 / 1e4,
# premium_ccy="usd",
# delta_type="spot",
# )
# fc.delta(solver=dv_solver)
#
@pytest.mark.parametrize("a", [0.02, 0.06])
@pytest.mark.parametrize("b", [0.0, 0.4, 0.65, 1.0])
@pytest.mark.parametrize("p", [-0.1, 0.1])
@pytest.mark.parametrize("v", [0.05, 0.15])
@pytest.mark.parametrize("k", [1.05, 1.25, 1.6])
def test_sabr_function_values(self, a, b, p, v, k):
irss = IRSabrSmile(
nodes={
"alpha": a,
"rho": p,
"nu": v,
},
beta=b,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="v",
ad=2,
)
# this code is taken from PySabr, another library implementing SABR.
# it is used as a benchmark
def _x(rho, z):
"""Return function x used in Hagan's 2002 SABR lognormal vol expansion."""
a = (1 - 2 * rho * z + z**2) ** 0.5 + z - rho
b = 1 - rho
return np.log(a / b)
def lognormal_vol(k, f, t, alpha, beta, rho, volvol):
"""
Hagan's 2002 SABR lognormal vol expansion.
The strike k can be a scalar or an array, the function will return an array
of lognormal vols.
"""
# Negative strikes or forwards
if k <= 0 or f <= 0:
return 0.0
eps = 1e-07
logfk = np.log(f / k)
fkbeta = (f * k) ** (1 - beta)
a = (1 - beta) ** 2 * alpha**2 / (24 * fkbeta)
b = 0.25 * rho * beta * volvol * alpha / fkbeta**0.5
c = (2 - 3 * rho**2) * volvol**2 / 24
d = fkbeta**0.5
v = (1 - beta) ** 2 * logfk**2 / 24
w = (1 - beta) ** 4 * logfk**4 / 1920
z = volvol * fkbeta**0.5 * logfk / alpha
# if |z| > eps
if abs(z) > eps:
vz = alpha * z * (1 + (a + b + c) * t) / (d * (1 + v + w) * _x(rho, z))
return vz
# if |z| <= eps
else:
v0 = alpha * (1 + (a + b + c) * t) / (d * (1 + v + w))
return v0
expected = lognormal_vol(k, 1.25, 1.0, a, b, p, v)
result = irss.get_from_strike(k=k, f=1.25).vol / 100.0
assert abs(result - expected) < 1e-4
def test_init_raises_key(self):
with pytest.raises(
ValueError, match=r"'nu' is a required SABR parameter that must be inclu"
):
IRSabrSmile(
nodes={
"alpha": 0.05,
"rho": 0.1,
"bad": 0.1,
},
beta=-0.03,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="v",
ad=2,
)
def test_attributes(self):
irss = IRSabrSmile(
nodes={
"alpha": 0.05,
"rho": 0.1,
"nu": 0.1,
},
beta=1.0,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="v",
ad=2,
)
assert irss._n == 4
def test_get_from_strike_with_curves(self):
curve = Curve({dt(2001, 1, 1): 1.0, dt(2003, 1, 1): 0.94})
irss = IRSabrSmile(
nodes={
"alpha": 0.05,
"rho": 0.1,
"nu": 0.1,
},
beta=-0.03,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="v",
)
result = irss.get_from_strike(k=3.0, curves=[curve])
assert abs(result.f - 3.142139380) < 1e-6
assert abs(result.vol - 1.575277) < 1e-4
def test_set_node_vector(self):
irss = IRSabrSmile(
nodes={
"alpha": 0.05,
"rho": 0.1,
"nu": 0.1,
},
beta=-0.03,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
ad=2,
id="v",
)
irss._set_node_vector(np.array([1.0, 2.0, 3.0]), ad=1)
assert irss.nodes.alpha == Dual(1.0, ["v0"], [])
assert irss.nodes.rho == Dual(2.0, ["v1"], [])
assert irss.nodes.nu == Dual(3.0, ["v2"], [])
@pytest.mark.skip(reason="SABR Smile cannot solve to parameters matching the target")
def test_plot_normal_from_black_shift(self):
# test that smiles with shift equate to the same normal vol graph
smile1 = IRSabrSmile(
eval_date=dt(2000, 1, 1),
expiry=dt(2000, 7, 1),
tenor="1y",
irs_series="usd_irs",
nodes={
"alpha": 0.20,
"rho": -0.05,
"nu": 1.5,
},
beta=0.5,
id="sofr_vol",
shift=0.0,
)
smile2 = IRSabrSmile(
eval_date=dt(2000, 1, 1),
expiry=dt(2000, 7, 1),
tenor="1y",
irs_series="usd_irs",
nodes={
"alpha": 0.20,
"rho": -0.06,
"nu": 1.5,
},
beta=0.5,
id="sofr_vol",
shift=10.0,
)
from rateslib import IRS, IRSCall, Solver
curve = Curve(nodes={dt(2000, 1, 1): 1.0, dt(2003, 1, 1): 0.90}, id="sofr")
curve_solver = Solver(
curves=[curve],
instruments=[IRS(dt(2000, 1, 1), "1y", spec="usd_irs", curves="sofr")],
s=[3.0],
instrument_labels=["1Y IRS"],
)
option_args = dict(
expiry=dt(2000, 7, 1),
tenor="1y",
irs_series="usd_irs",
metric="NormalVol",
curves="sofr",
vol="sofr_vol",
)
instruments = [
IRSCall(strike="-20bps", **option_args),
IRSCall(strike="atm", **option_args),
IRSCall(strike="+20bps", **option_args),
]
def solver_factory(smile):
solver = Solver(
pre_solvers=[curve_solver],
curves=[smile],
instruments=instruments,
s=[50.0, 47.0, 49.0],
instrument_labels=["-20bps Vol", "ATM Vol", "+20bps Vol"],
ini_lambda=(20000, 0.4, 2),
conv_tol=1e-6,
)
return solver
s2 = solver_factory(smile2)
s1 = solver_factory(smile1)
_res1_nvol = [_.rate(solver=s1) for _ in instruments]
_res2_nvol = [_.rate(solver=s2) for _ in instruments]
_res1_lnvol = [_.rate(solver=s1, metric="black_vol_shift_0") for _ in instruments]
_res2_lnvol = [_.rate(solver=s2, metric="black_vol_shift_10") for _ in instruments]
fig, ax, lines = smile1.plot(curves=curve, y_axis="normal_vol", comparators=[smile2])
pp1 = PPSplineF64(k=2, t=[lines[0]._x[0]] + lines[0]._x.tolist() + [lines[0]._x[-1]])
pp1.csolve(tau=lines[0]._x, y=lines[0]._y, left_n=0, right_n=0, allow_lsq=False)
pp2 = PPSplineF64(k=2, t=[lines[1]._x[0]] + lines[1]._x.tolist() + [lines[1]._x[-1]])
pp2.csolve(tau=lines[1]._x, y=lines[1]._y, left_n=0, right_n=0, allow_lsq=False)
x = np.linspace(2.54, 2.83, 101)
eps = [abs(pp1.ppev_single(_) - pp2.ppev_single(_)) for _ in x]
assert all(_ < 0.001 for _ in eps)
@pytest.mark.parametrize(
"klass",
[
(IRSStraddle, IRSPut, IRSCall),
(IRVolValue, IRVolValue, IRVolValue),
],
)
def test_plot_normal_from_black_shift2_with_IROption_Solving(self, klass):
# klass denotes the instruments used in the solving process
from rateslib import IRS, IRSCall, IRSPut, IRSStraddle, Solver
# test that smiles with shift equate to the same normal vol graph
smile_args = dict(
eval_date=dt(2026, 3, 2),
expiry="6m",
tenor="1y",
irs_series="usd_irs",
id="sofr_vol",
)
curve = Curve(
nodes={dt(2026, 3, 2): 1.0, dt(2029, 3, 2): 0.90},
calendar="nyc",
convention="act360",
id="sofr",
)
curve_solver = Solver(
curves=[curve],
instruments=[IRS(dt(2026, 3, 4), "2y", spec="usd_irs", curves=["sofr"])],
s=[3.90],
instrument_labels=["US_2y"],
)
def smile_solver_factory(smile):
_solver = Solver(
pre_solvers=[curve_solver], # <- contains the US SOFR Curve
curves=[smile], # <- mutates only the smile
instruments=[
klass[0](
dt(2026, 9, 2),
"1y",
"atm",
"usd_irs",
curves="sofr",
vol="sofr_vol",
metric="normal_vol",
),
klass[1](
dt(2026, 9, 2),
"1y",
"-20bps",
"usd_irs",
curves="sofr",
vol="sofr_vol",
metric="normal_vol",
),
klass[2](
dt(2026, 9, 2),
"1y",
"+20bps",
"usd_irs",
curves="sofr",
vol="sofr_vol",
metric="normal_vol",
),
],
s=[50, 62, 60],
instrument_labels=["ATM", "-20bps", "20bps"],
id="sofr_sv",
)
smile1 = IRSabrSmile(
shift=0, beta=0.5, nodes={"alpha": 0.2, "rho": -0.05, "nu": 0.5}, **smile_args
)
smile2 = IRSabrSmile(
shift=0, beta=0.75, nodes={"alpha": 0.2, "rho": -0.05, "nu": 0.5}, **smile_args
)
smile3 = IRSabrSmile(
shift=0, beta=0.25, nodes={"alpha": 0.2, "rho": -0.05, "nu": 0.5}, **smile_args
)
smile4 = IRSabrSmile(
shift=100, beta=0.5, nodes={"alpha": 0.2, "rho": -0.05, "nu": 0.5}, **smile_args
)
smile5 = IRSabrSmile(
shift=200, beta=0.5, nodes={"alpha": 0.2, "rho": -0.05, "nu": 0.5}, **smile_args
)
# calibrate each smile similarly
smile_solver_factory(smile1)
smile_solver_factory(smile2)
smile_solver_factory(smile3)
smile_solver_factory(smile4)
smile_solver_factory(smile5)
fig, ax, lines = smile1.plot(
curves=curve, y_axis="normal_vol", comparators=[smile2, smile3, smile4, smile5]
)
pp1 = PPSplineF64(k=2, t=[lines[0]._x[0]] + lines[0]._x.tolist() + [lines[0]._x[-1]])
pp1.csolve(tau=lines[0]._x, y=lines[0]._y, left_n=0, right_n=0, allow_lsq=False)
pp2 = PPSplineF64(k=2, t=[lines[1]._x[0]] + lines[1]._x.tolist() + [lines[1]._x[-1]])
pp2.csolve(tau=lines[1]._x, y=lines[1]._y, left_n=0, right_n=0, allow_lsq=False)
pp3 = PPSplineF64(k=2, t=[lines[2]._x[0]] + lines[2]._x.tolist() + [lines[2]._x[-1]])
pp3.csolve(tau=lines[2]._x, y=lines[2]._y, left_n=0, right_n=0, allow_lsq=False)
pp4 = PPSplineF64(k=2, t=[lines[3]._x[0]] + lines[3]._x.tolist() + [lines[3]._x[-1]])
pp4.csolve(tau=lines[3]._x, y=lines[3]._y, left_n=0, right_n=0, allow_lsq=False)
pp5 = PPSplineF64(k=2, t=[lines[4]._x[0]] + lines[4]._x.tolist() + [lines[4]._x[-1]])
pp5.csolve(tau=lines[4]._x, y=lines[4]._y, left_n=0, right_n=0, allow_lsq=False)
x = np.linspace(3.50, 4.40, 101)
comparators = [pp2, pp3, pp4, pp5]
for pp in comparators:
eps = np.array([abs(pp1.ppev_single(_) - pp.ppev_single(_)) for _ in x])
assert eps.max() < 0.3
assert eps.mean() < 0.08
def test_d_sigma_d_f(self):
irss = IRSabrSmile(
eval_date=dt(2000, 1, 1),
expiry=dt(2000, 7, 1),
tenor="1y",
irs_series="usd_irs",
beta=0.5,
nodes=dict(alpha=0.2, rho=-0.05, nu=0.65),
shift=0.0,
)
result = irss._d_sigma_d_f(k=0.8, f=1.0)
manual = irss.get_from_strike(k=0.8, f=Dual(1.0, ["f"], []))
manual_gradient = gradient(manual.vol, ["f"])[0] / 100.0
assert abs(result - manual_gradient) < 2e-3
def test_time_scalar(self):
irss = IRSabrSmile(
eval_date=dt(2000, 1, 1),
expiry=dt(2000, 7, 1),
tenor="1y",
irs_series="usd_irs",
beta=0.5,
nodes=dict(alpha=0.2, rho=-0.05, nu=0.65),
shift=0.0,
time_scalar=0.9,
)
assert irss.meta.t_expiry == 0.9 * (31 + 29 + 31 + 30 + 31 + 30) / 365
class TestIRSabrCube:
def test_init(self):
IRSabrCube(
eval_date=dt(2026, 2, 16),
expiries=["1m", "3m"],
tenors=["1Y", "2y", "3y"],
irs_series="usd_irs",
id="usd_ir_vol",
beta=0.5,
alpha=np.array([[0.1, 0.2, 0.3], [0.11, 0.12, 0.13]]),
rho=np.array([[0.1, 0.2, 0.3], [0.11, 0.12, 0.13]]),
nu=np.array([[0.1, 0.2, 0.3], [0.11, 0.12, 0.13]]),
)
pass
@pytest.mark.parametrize(("ad", "klass"), [(1, Dual), (2, Dual2)])
def test_constructed_sabr_smile_vars(self, ad, klass):
irsc = IRSabrCube(
eval_date=dt(2026, 2, 20),
expiries=["1m", "3m"],
tenors=["2y", "5y"],
irs_series="usd_irs",
beta=0.5,
alpha=0.05,
rho=-0.01,
nu=0.01,
ad=ad,
id="my-c",
)
_ = irsc.get_from_strike(k=1.0, f=1.02, expiry=dt(2026, 3, 30), tenor=dt(2028, 8, 12))
smile = irsc._cache[(dt(2026, 3, 30), dt(2028, 8, 12))]
assert smile.nodes.alpha.vars == ["my-c_a_0_0", "my-c_a_0_1", "my-c_a_1_0", "my-c_a_1_1"]
assert smile.nodes.rho.vars == ["my-c_p_0_0", "my-c_p_0_1", "my-c_p_1_0", "my-c_p_1_1"]
assert smile.nodes.nu.vars == ["my-c_v_0_0", "my-c_v_0_1", "my-c_v_1_0", "my-c_v_1_1"]
assert isinstance(smile.nodes.alpha, klass)
@pytest.mark.parametrize(
("expiry", "tenor", "expected"),
[
# tests on a node directly
(dt(2001, 1, 1), dt(2002, 1, 1), (0.1, 1.0, 10.0)),
(dt(2002, 1, 1), dt(2003, 1, 1), (0.3, 3.0, 30.0)),
(dt(2001, 1, 1), dt(2003, 1, 1), (0.2, 2.0, 20.0)),
(dt(2002, 1, 1), dt(2004, 1, 1), (0.4, 4.0, 40.0)),
# test within bounds
(
dt(2001, 4, 1),
dt(2002, 7, 1),
(0.17424657534246576, 1.7424657534246577, 17.424657534246577),
),
(
dt(2001, 4, 1),
dt(2003, 1, 1),
(0.22465753424657536, 2.2465753424657535, 22.46575342465753),
),
(
dt(2001, 10, 1),
dt(2003, 1, 1),
(0.27479452054794523, 2.747945205479452, 27.47945205479452),
),
(
dt(2001, 10, 1),
dt(2003, 7, 1),
(0.32438356164383564, 3.243835616438356, 32.43835616438356),
),
# test out of bounds
(dt(2000, 7, 1), dt(2001, 1, 1), (0.1, 1.0, 10.0)), # 6m6m
(
dt(2000, 7, 1),
dt(2002, 1, 1),
(0.1504109589041096, 1.504109589041096, 15.04109589041096),
), # 6m18m
(dt(2000, 7, 1), dt(2003, 7, 1), (0.2, 2.0, 20.0)), # 6m3y
(
dt(2001, 7, 1),
dt(2002, 1, 1),
(0.1991780821917808, 1.9917808219178081, 19.91780821917808),
), # 18m6m
(
dt(2001, 7, 1),
dt(2004, 7, 1),
(0.2991780821917808, 2.991780821917808, 29.91780821917808),
), # 18m3y
(dt(2003, 1, 1), dt(2003, 7, 1), (0.30, 3.0, 30.0)), # 3y6m
(
dt(2003, 1, 1),
dt(2004, 7, 1),
(0.34986301369863015, 3.4986301369863018, 34.986301369863014),
), # 3y18m
(dt(2003, 1, 1), dt(2006, 1, 1), (0.4, 4.0, 40.0)), # 3y3y
],
)
def test_interpolation_boundaries(self, expiry, tenor, expected):
# test that the SabrCube will interpolate the parameters if the expiry and tenors are
# - exactly falling on node dates
# - some elements within the node-mesh
# - some elements outside the node-mesh which are mapped to nearest components.
irsc = IRSabrCube(
eval_date=dt(2000, 1, 1),
expiries=["1y", "2y"],
tenors=["1y", "2y"],
irs_series=IRSSeries(
currency="usd",
settle=0,
frequency="A",
convention="Act360",
calendar="all",
leg2_fixing_method="ibor(2)",
),
beta=0.5,
alpha=np.array([[0.1, 0.2], [0.3, 0.4]]),
rho=np.array([[1.0, 2.0], [3.0, 4.0]]),
nu=np.array([[10.0, 20.0], [30.0, 40.0]]),
id="my-c",
)
result = tuple(irsc._bilinear_interpolation(expiry=expiry, tenor=tenor))
assert result == expected
@pytest.mark.parametrize(
("expiry", "tenor", "expected"),
[
(dt(2000, 7, 1), dt(2001, 1, 1), (0.1, 1.0, 10.0)),
(dt(2000, 7, 1), dt(2001, 7, 1), (0.1, 1.0, 10.0)),
(
dt(2000, 7, 1),
dt(2002, 1, 1),
(0.1504109589041096, 1.504109589041096, 15.04109589041096),
),
(dt(2000, 7, 1), dt(2003, 7, 1), (0.2, 2.0, 20.0)),
(dt(2001, 1, 1), dt(2001, 7, 1), (0.1, 1.0, 10.0)),
(dt(2001, 1, 1), dt(2002, 1, 1), (0.1, 1.0, 10.0)),
(
dt(2001, 1, 1),
dt(2002, 7, 1),
(0.1495890410958904, 1.495890410958904, 14.95890410958904),
),
(dt(2001, 1, 1), dt(2003, 7, 1), (0.2, 2.0, 20.0)),
(dt(2002, 1, 1), dt(2002, 7, 1), (0.1, 1.0, 10.0)),
(dt(2002, 1, 1), dt(2003, 1, 1), (0.1, 1.0, 10.0)),
(
dt(2002, 1, 1),
dt(2003, 7, 1),
(0.1495890410958904, 1.495890410958904, 14.95890410958904),
),
(dt(2002, 1, 1), dt(2004, 7, 1), (0.2, 2.0, 20.0)),
],
)
def test_interpolation_single_expiry(self, expiry, tenor, expected):
# test that the SabrCube will interpolate the parameters if the expiry and tenors are
# - exactly falling on node dates
# - some elements within the node-mesh
# - some elements outside the node-mesh which are mapped to nearest components.
irsc = IRSabrCube(
eval_date=dt(2000, 1, 1),
expiries=["1y"],
tenors=["1y", "2y"],
irs_series=IRSSeries(
currency="usd",
settle=0,
frequency="A",
convention="Act360",
calendar="all",
leg2_fixing_method="ibor(2)",
),
beta=0.5,
alpha=np.array([[0.1, 0.2]]),
rho=np.array([[1.0, 2.0]]),
nu=np.array([[10.0, 20.0]]),
id="my-c",
)
result = tuple(irsc._bilinear_interpolation(expiry=expiry, tenor=tenor))
assert result == expected
@pytest.mark.parametrize(
("expiry", "tenor", "expected"),
[
(dt(2000, 7, 1), dt(2001, 1, 1), (0.1, 1.0, 10.0)),
(dt(2000, 7, 1), dt(2001, 7, 1), (0.1, 1.0, 10.0)),
(dt(2000, 7, 1), dt(2002, 1, 1), (0.1, 1.0, 10.0)),
(dt(2001, 1, 1), dt(2001, 7, 1), (0.1, 1.0, 10.0)),
(dt(2001, 1, 1), dt(2002, 1, 1), (0.1, 1.0, 10.0)),
(dt(2001, 1, 1), dt(2002, 7, 1), (0.1, 1.0, 10.0)),
(
dt(2001, 7, 1),
dt(2002, 1, 1),
(0.1495890410958904, 1.495890410958904, 14.95890410958904),
),
(
dt(2001, 7, 1),
dt(2002, 7, 1),
(0.1495890410958904, 1.495890410958904, 14.95890410958904),
),
(
dt(2001, 7, 1),
dt(2003, 1, 1),
(0.1495890410958904, 1.495890410958904, 14.95890410958904),
),
(dt(2002, 7, 1), dt(2003, 1, 1), (0.2, 2.0, 20.0)),
(dt(2002, 7, 1), dt(2003, 7, 1), (0.2, 2.0, 20.0)),
(dt(2002, 7, 1), dt(2004, 7, 1), (0.2, 2.0, 20.0)),
],
)
def test_interpolation_single_tenor(self, expiry, tenor, expected):
# test that the SabrCube will interpolate the parameters if the expiry and tenors are
# - exactly falling on node dates
# - some elements within the node-mesh
# - some elements outside the node-mesh which are mapped to nearest components.
irsc = IRSabrCube(
eval_date=dt(2000, 1, 1),
expiries=["1y", "2y"],
tenors=["1y"],
irs_series=IRSSeries(
currency="usd",
settle=0,
frequency="A",
convention="Act360",
calendar="all",
leg2_fixing_method="ibor(2)",
),
beta=0.5,
alpha=np.array([[0.1], [0.2]]),
rho=np.array([[1.0], [2.0]]),
nu=np.array([[10.0], [20.0]]),
id="my-c",
)
result = tuple(irsc._bilinear_interpolation(expiry=expiry, tenor=tenor).tolist())
assert result == expected
def test_alpha(self):
irsc = IRSabrCube(
eval_date=dt(2026, 2, 16),
expiries=["1m", "3m"],
tenors=["1Y", "2Y"],
irs_series="usd_irs",
id="usd_ir_vol",
beta=0.5,
alpha=np.array([[0.1, 0.2], [0.11, 0.12]]),
rho=np.array([[0.1, 0.3], [0.11, 0.12]]),
nu=np.array([[0.1, 0.4], [0.11, 0.12]]),
)
expected = DataFrame(
index=Index(["1m", "3m"], name="expiry"),
columns=Index(["1Y", "2Y"], name="tenor"),
data=[[0.1, 0.2], [0.11, 0.12]],
dtype=object,
)
assert_frame_equal(expected, irsc.alpha)
expected = DataFrame(
index=Index(["1m", "3m"], name="expiry"),
columns=Index(["1Y", "2Y"], name="tenor"),
data=[[0.1, 0.3], [0.11, 0.12]],
dtype=object,
)
assert_frame_equal(expected, irsc.rho)
expected = DataFrame(
index=Index(["1m", "3m"], name="expiry"),
columns=Index(["1Y", "2Y"], name="tenor"),
data=[[0.1, 0.4], [0.11, 0.12]],
dtype=object,
)
assert_frame_equal(expected, irsc.nu)
assert irsc._n == 12
def test_cache(self):
irsc = IRSabrCube(
eval_date=dt(2026, 2, 16),
expiries=["1m", "3m"],
tenors=["1Y", "2Y"],
irs_series="usd_irs",
id="usd_ir_vol",
beta=0.5,
alpha=np.array([[0.1, 0.2], [0.11, 0.12]]),
rho=np.array([[0.1, 0.3], [0.11, 0.12]]),
nu=np.array([[0.1, 0.4], [0.11, 0.12]]),
)
irsc.get_from_strike(k=1.02, f=1.04, expiry=dt(2026, 3, 30), tenor=dt(2027, 8, 12))
assert (dt(2026, 3, 30), dt(2027, 8, 12)) in irsc._cache
def test_get_node_vector(self):
irsc = IRSabrCube(
eval_date=dt(2000, 1, 1),
expiries=["1y", "2y"],
tenors=["1y", "2y"],
irs_series=IRSSeries(
currency="usd",
settle=0,
frequency="A",
convention="Act360",
calendar="all",
leg2_fixing_method="ibor(2)",
),
beta=0.5,
alpha=np.array([[0.1, 0.2], [0.3, 0.4]]),
rho=np.array([[1.0, 2.0], [3.0, 4.0]]),
nu=np.array([[10.0, 20.0], [30.0, 40.0]]),
id="X",
)
result = irsc._get_node_vector()
expected = np.array([0.1, 0.2, 0.3, 0.4, 1.0, 2.0, 3, 4, 10, 20, 30, 40])
assert np.all(result == expected)
def test_get_node_vector_ad1(self):
irsc = IRSabrCube(
eval_date=dt(2000, 1, 1),
expiries=["1y", "2y"],
tenors=["1y", "2y"],
irs_series=IRSSeries(
currency="usd",
settle=0,
frequency="A",
convention="Act360",
calendar="all",
leg2_fixing_method="ibor(2)",
),
beta=0.5,
alpha=np.array([[0.1, 0.2], [0.3, 0.4]]),
rho=np.array([[1.0, 2.0], [3.0, 4.0]]),
nu=np.array([[10.0, 20.0], [30.0, 40.0]]),
id="X",
ad=1,
)
result = irsc._get_node_vector()
assert result[2] == Dual(0.30, ["X_a_1_0"], [])
assert result[9] == Dual(20.0, ["X_v_0_1"], [])
def test_set_node_vector(self):
irsc = IRSabrCube(
eval_date=dt(2000, 1, 1),
expiries=["1y", "2y"],
tenors=["1y", "2y"],
irs_series=IRSSeries(
currency="usd",
settle=0,
frequency="A",
convention="Act360",
calendar="all",
leg2_fixing_method="ibor(2)",
),
beta=0.5,
alpha=np.array([[0.1, 0.2], [0.3, 0.4]]),
rho=np.array([[1.0, 2.0], [3.0, 4.0]]),
nu=np.array([[10.0, 20.0], [30.0, 40.0]]),
id="X",
)
irsc._set_node_vector(np.array([0.1, 0.2, 0.3, 0.4, 1.0, 2.0, 3, 4, 10, 20, 30, 40]), ad=1)
result = irsc._get_node_vector()
assert result[2] == Dual(0.30, ["X_a_1_0"], [])
assert result[9] == Dual(20.0, ["X_v_0_1"], [])
@pytest.mark.parametrize(
("weights", "expiries"),
[
(
Series(index=[dt(2000, 1, 3), dt(2000, 1, 8), dt(2000, 1, 4)], data=0.0),
[dt(2000, 1, 5), dt(2000, 1, 10), dt(2000, 1, 15)],
),
(
Series(index=[dt(2000, 1, 3), dt(2000, 1, 20), dt(2000, 1, 4)], data=0.0),
[dt(2000, 1, 5), dt(2000, 1, 10), dt(2000, 1, 15)],
),
],
)
def test_weights_implementation(self, weights, expiries):
result = _scale_weights(
eval_date=dt(2000, 1, 1),
weights=weights,
expiries=expiries,
)
c = result.cumsum()
for expiry in expiries:
if expiry > c.index[-1]:
assert c.iloc[-1] == (c.index[-1] - dt(2000, 1, 1)).days
else:
assert c[expiry] == (expiry - dt(2000, 1, 1)).days
assert c.iloc[-1] == (c.index[-1] - dt(2000, 1, 1)).days
def test_weights(self):
nyc = calendars.get("nyc")
irsc = IRSabrCube(
eval_date=dt(2000, 1, 1),
expiries=["1y", "2y"],
tenors=["1y", "2y"],
irs_series=IRSSeries(
currency="usd",
settle=0,
frequency="A",
convention="Act360",
calendar="all",
leg2_fixing_method="ibor(2)",
),
beta=0.5,
alpha=np.array([[0.1, 0.2], [0.3, 0.4]]),
rho=np.array([[1.0, 2.0], [3.0, 4.0]]),
nu=np.array([[10.0, 20.0], [30.0, 40.0]]),
id="X",
weights=Series(
index=[
_
for _ in nyc.cal_date_range(dt(2000, 1, 1), dt(2001, 2, 3))
if nyc.is_non_bus_day(_)
],
data=0.0,
),
)
result = irsc.meta.time_scalars
assert abs(result.iloc[-1] - 1.0) < 1e-14
class TestIRSplineSmile:
@pytest.mark.parametrize(
("strike", "vol"),
[
(1.2034, 51.0888),
(1.2050, 51.07599999999999),
(1.3395, 50.0), # f == k
(1.3620, 50.2475),
(1.5410, 52.216499999999996),
(1.5449, 52.2594),
],
)
def test_spline_vol(self, strike, vol):
# repeat the same test developed for FXSabrSmile
irss = IRSplineSmile(
nodes={-200.0: 70.0, -100.0: 58, 0: 50.0, 100.0: 61, 200.0: 75.0},
k=2,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="vol",
)
result = irss.get_from_strike(k=strike, f=1.3395).vol
assert abs(result - vol) < 1e-2
@pytest.mark.parametrize(
("strike", "vol"),
[
(1.01, 50.0),
(1.85, 50.0),
(1.3395, 50.0), # f == k
],
)
@pytest.mark.parametrize("k", [2, 4])
def test_spline_vol_flat(self, strike, vol, k):
# repeat the same test developed for FXSabrSmile
irss = IRSplineSmile(
nodes={0: 50.0},
k=k,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="vol",
)
result = irss.get_from_strike(k=strike, f=1.3395).vol
assert abs(result - vol) < 1e-2
@pytest.mark.parametrize("k", [2, 4])
@pytest.mark.parametrize(
("nodes", "expected_k"),
[
({0.0: 100.0}, 2),
({-10.0: 49.0, 10.0: 53.0}, 2),
({-25.0: 62, 0: 59, 25: 65}, None),
({-25.0: 64, -10: 60, 10: 61, 25: 66}, None),
],
)
def test_spline_construction(self, k, nodes, expected_k):
irss = IRSplineSmile(
nodes=nodes,
k=k,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="vol",
)
expected_k = expected_k or k
for key, v in nodes.items():
result = irss.get_from_strike(k=key / 100.0, f=0.0).vol
assert abs(result - v) < 1e-6
assert irss.nodes.spline.k == expected_k
@pytest.mark.parametrize(
("model", "metric"), [("black76", "black_vol_shift_0"), ("bachelier", "normal_vol")]
)
def test_pricing_model(self, model, metric):
irss = IRSplineSmile(
nodes={0: 20.0},
k=2,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="usd_irs",
tenor="3m",
id="vol",
pricing_model=model,
)
curve = Curve({dt(2001, 1, 1): 1.0, dt(2003, 1, 1): 0.94})
iro = IRSCall(
expiry=dt(2002, 1, 1),
tenor="3m",
irs_series="usd_irs",
strike=3.0,
)
result = iro.rate(vol=irss, curves=curve, metric=metric)
expected = 20.0
assert abs(result - expected) < 1e-6
@pytest.mark.parametrize("model", ["black76", "bachelier"])
@pytest.mark.parametrize("k", [2, 4])
def test_d_sigma_d_f(self, model, k):
irss = IRSplineSmile(
nodes={-200.0: 70.0, -100.0: 58, 0: 50.0, 100.0: 61, 200.0: 75.0},
k=k,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="vol",
pricing_model=model,
)
result = irss._d_sigma_d_f(k=0.8, f=1.0)
dual = irss.nodes.spline.evaluate(x=(0.8 - Dual(1.0, ["f"], [])) * 100.0, m=0)
manual_gradient = gradient(dual, ["f"])[0] / 100.0
assert abs(result - manual_gradient) < 1e-10
def test_time_scalar(self):
irss = IRSplineSmile(
nodes={-200.0: 70.0, -100.0: 58, 0: 50.0, 100.0: 61, 200.0: 75.0},
k=2,
eval_date=dt(2000, 1, 1),
expiry=dt(2000, 7, 1),
irs_series="eur_irs6",
tenor="2y",
id="vol",
time_scalar=0.9,
)
assert irss.meta.t_expiry == 0.9 * (31 + 29 + 31 + 30 + 31 + 30) / 365
class TestIRSplineCube:
def test_init(self):
IRSplineCube(
eval_date=dt(2026, 2, 16),
expiries=["1m", "3m"],
tenors=["1Y", "2y", "3y"],
strikes=[-100.0, 0.0, 100.0],
irs_series="usd_irs",
id="usd_ir_vol",
parameters=20.0,
)
pass
@pytest.mark.parametrize(("ad", "klass"), [(1, Dual), (2, Dual2)])
def test_constructed_spline_smile_vars(self, ad, klass):
irsc = IRSplineCube(
eval_date=dt(2026, 2, 20),
expiries=["1m", "3m"],
tenors=["2y", "5y"],
strikes=[-10.0],
irs_series="usd_irs",
parameters=10.0,
ad=ad,
id="my-c",
)
_ = irsc.get_from_strike(k=1.0, f=1.02, expiry=dt(2026, 3, 30), tenor=dt(2028, 8, 12))
smile = irsc._cache[(dt(2026, 3, 30), dt(2028, 8, 12))]
vars_ = smile.pricing_params[0].vars
assert vars_ == ["my-c0", "my-c1", "my-c2", "my-c3"]
assert isinstance(smile.pricing_params[0], klass)
@pytest.mark.parametrize(
("expiry", "tenor", "expected"),
[
# tests on a node directly
(dt(2001, 1, 1), dt(2002, 1, 1), (10.0,)),
(dt(2002, 1, 1), dt(2003, 1, 1), (30.0,)),
(dt(2001, 1, 1), dt(2003, 1, 1), (20.0,)),
(dt(2002, 1, 1), dt(2004, 1, 1), (40.0,)),
# test within bounds
(dt(2001, 4, 1), dt(2002, 7, 1), (17.424657534246577,)),
(
dt(2001, 4, 1),
dt(2003, 1, 1),
(22.46575342465753,),
),
(
dt(2001, 10, 1),
dt(2003, 1, 1),
(27.47945205479452,),
),
(
dt(2001, 10, 1),
dt(2003, 7, 1),
(32.43835616438356,),
),
# test out of bounds
(dt(2000, 7, 1), dt(2001, 1, 1), (10.0,)), # 6m6m
(
dt(2000, 7, 1),
dt(2002, 1, 1),
(15.04109589041096,),
), # 6m18m
(dt(2000, 7, 1), dt(2003, 7, 1), (20.0,)), # 6m3y
(
dt(2001, 7, 1),
dt(2002, 1, 1),
(19.91780821917808,),
), # 18m6m
(
dt(2001, 7, 1),
dt(2004, 7, 1),
(29.91780821917808,),
), # 18m3y
(dt(2003, 1, 1), dt(2003, 7, 1), (30.0,)), # 3y6m
(
dt(2003, 1, 1),
dt(2004, 7, 1),
(34.986301369863014,),
), # 3y18m
(dt(2003, 1, 1), dt(2006, 1, 1), (40.0,)), # 3y3y
],
)
def test_interpolation_boundaries(self, expiry, tenor, expected):
# test that the SplineCube will interpolate the parameters if the expiry and tenors are
# - exactly falling on node dates
# - some elements within the node-mesh
# - some elements outside the node-mesh which are mapped to nearest components.
irsc = IRSplineCube(
eval_date=dt(2000, 1, 1),
expiries=["1y", "2y"],
tenors=["1y", "2y"],
strikes=[0.0],
irs_series=IRSSeries(
currency="usd",
settle=0,
frequency="A",
convention="Act360",
calendar="all",
leg2_fixing_method="ibor(2)",
),
parameters=np.reshape(np.array([10.0, 20.0, 30.0, 40.0]), (2, 2, 1)),
id="my-c",
)
result = tuple(irsc._bilinear_interpolation(expiry=expiry, tenor=tenor))
assert result == expected
@pytest.mark.parametrize(
("expiry", "tenor", "expected"),
[
(dt(2000, 7, 1), dt(2001, 1, 1), (10.0,)),
(dt(2000, 7, 1), dt(2001, 7, 1), (10.0,)),
(
dt(2000, 7, 1),
dt(2002, 1, 1),
(15.04109589041096,),
),
(dt(2000, 7, 1), dt(2003, 7, 1), (20.0,)),
(dt(2001, 1, 1), dt(2001, 7, 1), (10.0,)),
(dt(2001, 1, 1), dt(2002, 1, 1), (10.0,)),
(
dt(2001, 1, 1),
dt(2002, 7, 1),
(14.95890410958904,),
),
(dt(2001, 1, 1), dt(2003, 7, 1), (20.0,)),
(dt(2002, 1, 1), dt(2002, 7, 1), (10.0,)),
(dt(2002, 1, 1), dt(2003, 1, 1), (10.0,)),
(
dt(2002, 1, 1),
dt(2003, 7, 1),
(14.95890410958904,),
),
(dt(2002, 1, 1), dt(2004, 7, 1), (20.0,)),
],
)
def test_interpolation_single_expiry(self, expiry, tenor, expected):
# test that the SplineCube will interpolate the parameters if the expiry and tenors are
# - exactly falling on node dates
# - some elements within the node-mesh
# - some elements outside the node-mesh which are mapped to nearest components.
irsc = IRSplineCube(
eval_date=dt(2000, 1, 1),
expiries=["1y"],
tenors=["1y", "2y"],
strikes=[0.0],
irs_series=IRSSeries(
currency="usd",
settle=0,
frequency="A",
convention="Act360",
calendar="all",
leg2_fixing_method="ibor(2)",
),
parameters=np.reshape(np.array([10.0, 20.0]), (1, 2, 1)),
id="my-c",
)
result = tuple(irsc._bilinear_interpolation(expiry=expiry, tenor=tenor))
assert result == expected
@pytest.mark.parametrize(
("expiry", "tenor", "expected"),
[
(dt(2000, 7, 1), dt(2001, 1, 1), (10.0,)),
(dt(2000, 7, 1), dt(2001, 7, 1), (10.0,)),
(dt(2000, 7, 1), dt(2002, 1, 1), (10.0,)),
(dt(2001, 1, 1), dt(2001, 7, 1), (10.0,)),
(dt(2001, 1, 1), dt(2002, 1, 1), (10.0,)),
(dt(2001, 1, 1), dt(2002, 7, 1), (10.0,)),
(
dt(2001, 7, 1),
dt(2002, 1, 1),
(14.95890410958904,),
),
(
dt(2001, 7, 1),
dt(2002, 7, 1),
(14.95890410958904,),
),
(
dt(2001, 7, 1),
dt(2003, 1, 1),
(14.95890410958904,),
),
(dt(2002, 7, 1), dt(2003, 1, 1), (20.0,)),
(dt(2002, 7, 1), dt(2003, 7, 1), (20.0,)),
(dt(2002, 7, 1), dt(2004, 7, 1), (20.0,)),
],
)
def test_interpolation_single_tenor(self, expiry, tenor, expected):
# test that the SplineCube will interpolate the parameters if the expiry and tenors are
# - exactly falling on node dates
# - some elements within the node-mesh
# - some elements outside the node-mesh which are mapped to nearest components.
irsc = IRSplineCube(
eval_date=dt(2000, 1, 1),
expiries=["1y", "2y"],
tenors=["1y"],
strikes=[0.0],
irs_series=IRSSeries(
currency="usd",
settle=0,
frequency="A",
convention="Act360",
calendar="all",
leg2_fixing_method="ibor(2)",
),
parameters=np.reshape(np.array([10.0, 20.0]), (2, 1, 1)),
id="my-c",
)
result = tuple(irsc._bilinear_interpolation(expiry=expiry, tenor=tenor).tolist())
assert result == expected
def test_cache(self):
irsc = IRSplineCube(
eval_date=dt(2026, 2, 16),
expiries=["1m", "3m"],
tenors=["1Y", "2Y"],
strikes=[-10.0, 0.0, 10.0],
irs_series="usd_irs",
id="usd_ir_vol",
parameters=10.0,
)
irsc.get_from_strike(k=1.02, f=1.04, expiry=dt(2026, 3, 30), tenor=dt(2027, 8, 12))
assert (dt(2026, 3, 30), dt(2027, 8, 12)) in irsc._cache
def test_get_node_vector(self):
irsc = IRSplineCube(
eval_date=dt(2000, 1, 1),
expiries=["1y", "2y"],
tenors=["1y", "2y"],
strikes=[-10.0, 0.0],
irs_series=IRSSeries(
currency="usd",
settle=0,
frequency="A",
convention="Act360",
calendar="all",
leg2_fixing_method="ibor(2)",
),
parameters=np.reshape(np.array([1, 2, 3, 4, 5, 6, 7, 8]), (2, 2, 2)),
id="X",
)
result = irsc._get_node_vector()
expected = np.array([1, 2, 3, 4, 5, 6, 7, 8])
assert irsc.get_smile("1y", "1y").pricing_params == [np.float64(1.0), np.float64(2.0)]
assert np.all(result == expected)
def test_get_node_vector_ad1(self):
irsc = IRSplineCube(
eval_date=dt(2000, 1, 1),
expiries=["1y", "2y"],
tenors=["1y", "2y"],
strikes=[0.0, 10.0],
irs_series=IRSSeries(
currency="usd",
settle=0,
frequency="A",
convention="Act360",
calendar="all",
leg2_fixing_method="ibor(2)",
),
parameters=10.0,
id="X",
ad=1,
)
result = irsc._get_node_vector()
assert result[2] == Dual(10.0, ["X2"], [])
assert result[7] == Dual(10.0, ["X7"], [])
def test_set_node_vector(self):
irsc = IRSplineCube(
eval_date=dt(2000, 1, 1),
expiries=["1y", "2y"],
tenors=["1y", "2y"],
strikes=[-10.0, 0.0],
irs_series=IRSSeries(
currency="usd",
settle=0,
frequency="A",
convention="Act360",
calendar="all",
leg2_fixing_method="ibor(2)",
),
parameters=10.0,
id="X",
)
irsc._set_node_vector(np.array([0.1, 0.2, 0.3, 0.4, 1.0, 2.0, 3, 4]), ad=1)
result = irsc._get_node_vector()
assert result[2] == Dual(0.30, ["X2"], [])
assert result[7] == Dual(4, ["X7"], [])
@pytest.mark.skip(reason="no decision on how to use _set_ad_order for manually updated nodes.")
def test_update_single_key(self):
# TODO need to decide how _set_or_ad should work with update nodes.
irsc = IRSplineCube(
eval_date=dt(2000, 1, 1),
expiries=["1y", "2y"],
tenors=["1y", "2y"],
strikes=[-10.0, 0.0],
irs_series=IRSSeries(
currency="usd",
settle=0,
frequency="A",
convention="Act360",
calendar="all",
leg2_fixing_method="ibor(2)",
),
parameters=10.0,
id="X",
ad=1,
)
irsc.update_node(("1y", "1y", -10.0), 20.0)
result = irsc._get_node_vector()
assert result[0] == Dual(20.0, ["X0"], [])
@pytest.mark.parametrize(
("model", "metric"), [("black76", "black_vol_shift_0"), ("bachelier", "normal_vol")]
)
def test_pricing_model(self, model, metric):
irss = IRSplineCube(
parameters=[[[20.0]]],
k=2,
eval_date=dt(2001, 1, 1),
irs_series="usd_irs",
expiries=["1y"],
tenors=["3m"],
strikes=[0.0],
id="vol",
pricing_model=model,
)
curve = Curve({dt(2001, 1, 1): 1.0, dt(2003, 1, 1): 0.94})
iro = IRSCall(
expiry=dt(2002, 1, 1),
tenor="3m",
irs_series="usd_irs",
strike=3.0,
)
result = iro.rate(vol=irss, curves=curve, metric=metric)
expected = 20.0
assert abs(result - expected) < 1e-6
def test_business_day_time_and_weights(self):
nyc = calendars.get("nyc")
irsc = IRSplineCube(
eval_date=dt(2000, 1, 3),
expiries=["1m", "3m", "6m"],
tenors=["1y"],
strikes=[0],
parameters=[[[30.0]], [[35.0]], [[38.0]]],
irs_series="usd_irs",
)
irsc2 = IRSplineCube(
eval_date=dt(2000, 1, 3),
expiries=["1m", "3m", "6m"],
tenors=["1y"],
strikes=[0],
parameters=[[[30.0]], [[35.0]], [[38.0]]],
irs_series="usd_irs",
weights=Series(
index=[
_
for _ in nyc.cal_date_range(dt(2000, 1, 7), dt(2000, 7, 15))
if nyc.is_non_bus_day(_)
],
data=0.0,
),
)
curve = Curve(
nodes={dt(2000, 1, 3): 1.0, dt(2002, 1, 3): 0.93},
convention="act360",
calendar="nyc",
)
for expiry in irsc.meta.expiry_dates:
# test at expiries time remapping does not exist because these are the natural pillars
iro = IRSCall(
expiry=expiry,
strike="atm",
irs_series="usd_irs",
tenor="1y",
)
r1 = iro.rate(curves=curve, vol=irsc, metric="percentnotional") * 100.0
r2 = iro.rate(curves=curve, vol=irsc2, metric="percentnotional") * 100.0
assert abs(r1 - r2) < 1e-8
for expiry in [dt(2000, 1, 14), dt(2000, 2, 18), dt(2000, 5, 12)]:
# test at expiries inbetween the time remapping exists
iro = IRSCall(
expiry=expiry,
strike="atm",
irs_series="usd_irs",
tenor="1y",
)
r1 = iro.rate(curves=curve, vol=irsc, metric="percentnotional") * 100.0
r2 = iro.rate(curves=curve, vol=irsc2, metric="percentnotional") * 100.0
assert abs(r1 - r2) > 1e-3
for expiry in [dt(2000, 7, 20), dt(2000, 7, 25)]:
# test after weights stop being defined
iro = IRSCall(
expiry=expiry,
strike="atm",
irs_series="usd_irs",
tenor="1y",
)
r1 = iro.rate(curves=curve, vol=irsc, metric="percentnotional") * 100.0
r2 = iro.rate(curves=curve, vol=irsc2, metric="percentnotional") * 100.0
assert abs(r1 - r2) < 1e-8
class TestStateAndCache:
@pytest.mark.parametrize(
"obj",
[
IRSabrSmile(
nodes={
"alpha": 0.1,
"rho": -0.05,
"nu": 0.1,
},
beta=0.5,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="v",
ad=2,
),
IRSabrCube(
eval_date=dt(2026, 2, 16),
expiries=["1m", "3m"],
tenors=["1Y", "2y", "3y"],
irs_series="usd_irs",
id="usd_ir_vol",
beta=0.5,
alpha=np.array([[0.1, 0.2, 0.3], [0.11, 0.12, 0.13]]),
rho=np.array([[0.1, 0.2, 0.3], [0.11, 0.12, 0.13]]),
nu=np.array([[0.1, 0.2, 0.3], [0.11, 0.12, 0.13]]),
),
],
)
@pytest.mark.parametrize(("method", "args"), [("_set_ad_order", (1,))])
def test_method_does_not_change_state(self, obj, method, args):
before = obj._state
getattr(obj, method)(*args)
after = obj._state
assert before == after
@pytest.mark.parametrize(
"obj",
[
IRSabrSmile(
nodes={
"alpha": 0.1,
"rho": -0.05,
"nu": 0.1,
},
beta=0.5,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="v",
ad=2,
),
],
)
@pytest.mark.parametrize(
("method", "args"),
[
("_set_node_vector", ([0.99, 0.98, 0.99], 1)),
("update_node", ("alpha", 0.98)),
],
)
def test_method_changes_state(self, obj, method, args):
before = obj._state
getattr(obj, method)(*args)
after = obj._state
assert before != after
@pytest.mark.parametrize(
"curve",
[
IRSabrSmile(
nodes={
"alpha": 0.1,
"rho": -0.05,
"nu": 0.1,
},
beta=0.5,
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
irs_series="eur_irs6",
tenor="2y",
id="v",
ad=2,
),
],
)
@pytest.mark.parametrize(
("method", "args"),
[
("_set_node_vector", ([0.99, 0.98, 0.99], 1)),
("update_node", ("alpha", 0.98)),
],
)
def test_method_changes_state_sabr(self, curve, method, args):
before = curve._state
getattr(curve, method)(*args)
after = curve._state
assert before != after
@pytest.mark.parametrize(
"curve",
[
IRSabrCube(
eval_date=dt(2026, 2, 16),
expiries=["1m"],
tenors=["1Y"],
irs_series="usd_irs",
id="usd_ir_vol",
beta=0.5,
alpha=np.array([[0.1]]),
rho=np.array([[0.2]]),
nu=np.array([[0.3]]),
),
],
)
@pytest.mark.parametrize(
("method", "args"),
[
("_set_node_vector", ([0.99, 0.98, 0.99], 1)),
("update_node", ((dt(2026, 3, 16), dt(2027, 3, 18), "alpha"), 0.98)),
],
)
def test_method_changes_state_sabr_cube(self, curve, method, args):
before = curve._state
getattr(curve, method)(*args)
after = curve._state
assert before != after
#
# def test_populate_cache(self):
# # objects have yet to implement cache handling
# pass
#
# def test_method_clears_cache(self):
# # objects have yet to implement cache handling
# pass
#
@pytest.mark.parametrize(
("method", "args"),
[
("_set_node_vector", ([0.99, 0.98, 1.0], 1)),
("_set_ad_order", (2,)),
],
)
def test_surface_clear_cache(self, method, args):
surf = IRSabrCube(
eval_date=dt(2026, 2, 16),
expiries=["1m"],
tenors=["1Y"],
irs_series="usd_irs",
id="usd_ir_vol",
beta=0.5,
alpha=np.array([[0.1]]),
rho=np.array([[0.2]]),
nu=np.array([[0.3]]),
)
surf.get_from_strike(f=1.0, k=1.01, expiry=dt(2026, 3, 1), tenor=dt(2027, 3, 1))
assert (dt(2026, 3, 1), dt(2027, 3, 1)) in surf._cache
getattr(surf, method)(*args)
assert len(surf._cache) == 0
class TestPricingModelConversion:
class TestBachelier:
@pytest.mark.parametrize(
("vol", "k", "shift", "expected"),
[
(25.0, 2.99, 0.0, 8.3496780104),
(25.0, 2.99, 50.0, 7.15460637959775),
(25.0, 2.99, 200.0, 5.005529190687043),
(25.0, 1.50, 0.0, 11.615241673583585),
(25.0, 1.50, 50.0, 9.312911744191437),
(25.0, 1.50, 200.0, 5.9394076088397645),
(25.0, 4.50, 0.0, 6.753315378082834),
(25.0, 4.50, 50.0, 5.9394076088397645),
(25.0, 4.50, 200.0, 4.368303987428187),
],
)
def test_convert_to_black_no_shift(self, vol, k, shift, expected):
result = _OptionModelBachelier.convert_to_black76(
f=3.0, k=k, shift=shift, vol=vol, t_e=1.0
)
assert abs(result - expected) < 1e-6
class TestBlack76:
@pytest.mark.parametrize(
("vol", "k", "shift", "expected"),
[
(25.0, 2.99, 0.0, 74.68039981110007),
(25.0, 2.99, 50.0, 87.14793380301037),
(25.0, 2.99, 200.0, 124.55052385921005),
(25.0, 1.50, 0.0, 53.96106256666565),
(25.0, 1.50, 50.0, 66.8366143175683),
(25.0, 1.50, 200.0, 104.86487953597288),
(25.0, 4.50, 0.0, 92.24642085914786),
(25.0, 4.50, 50.0, 104.86487953597292),
(25.0, 4.50, 200.0, 142.55991748648242),
],
)
def test_convert_to_bachelier(self, vol, k, shift, expected):
result = _OptionModelBlack76.convert_to_bachelier(
f=3.0, k=k, shift=shift, vol=vol, t_e=1.0
)
assert abs(result - expected) < 1e-9
@pytest.mark.parametrize(
("vol", "k", "shift", "tgt", "expected"),
[
(25.0, 2.99, 0.0, 50.0, 21.40861097419223),
(25.0, 2.99, 50.0, 100.0, 21.85769609359381),
(25.0, 2.99, 200.0, 100.0, 31.30396613960251),
(25.0, 1.50, 0.0, 50.0, 20.16566976523089),
(25.0, 1.50, 50.0, 100.0, 20.980647995758154),
(25.0, 1.50, 200.0, 100.0, 33.00686423510773),
(25.0, 4.50, 0.0, 50.0, 21.9787696869096),
(25.0, 4.50, 50.0, 100.0, 22.309213489533068),
(25.0, 4.50, 200.0, 100.0, 30.382178316599756),
],
)
def test_convert_to_new_shift(self, vol, k, shift, tgt, expected):
result = _OptionModelBlack76.convert_to_new_shift(
f=3.0, k=k, old_shift=shift, target_shift=tgt, vol=vol, t_e=1.0
)
assert abs(result - expected) < 1e-9
@pytest.mark.skipif(
sys.version_info[:2] == (3, 10), reason="This test is incompatible with Python 3.10"
)
class TestCookbokReplicators:
def test_z_ir_vol_risks(self):
curve = Curve(
nodes={
dt(2026, 3, 17): 1.0,
dt(2026, 9, 17): 1.0,
dt(2027, 3, 17): 1.0,
dt(2028, 3, 17): 1.0,
dt(2029, 3, 17): 1.0,
dt(2030, 3, 17): 1.0,
dt(2031, 4, 17): 1.0,
},
convention="act360",
calendar="nyc",
interpolation="log_linear",
id="sofr",
)
swap_tenors = ["6m", "1y", "2y", "3y", "4y", "5y"]
curve_solver = Solver(
curves=[curve],
instruments=[
IRS(dt(2026, 3, 17), _, spec="usd_irs", curves="sofr") for _ in swap_tenors
],
s=[4.10, 4.02, 4.08, 4.12, 4.18, 4.22],
instrument_labels=swap_tenors,
id="us_rates",
)
expiries = ["6m", "1y", "2y"]
tenors = ["3m", "1y", "2y"]
pricing_cube = IRSabrCube(
eval_date=dt(2026, 3, 17),
expiries=expiries,
tenors=tenors,
irs_series="usd_irs",
beta=0.5, # <- beta is a hyper-parameter and applies globally to this Cube
alpha=0.5, # <- alpha as scalar applies the same value to each gridpoint automatically
rho=[ # <- rho is provided in array format with a value at each gridpoint
[0.4, 0.45, 0.29],
[0.4, 0.4, 0.26],
[0.3, 0.3, 0.25],
],
nu=[ # <- nu is provided in array format with a value at each gridpoint
[1.0, 0.98, 0.87],
[0.9, 0.875, 0.7],
[0.63, 0.6, 0.56],
],
id="usd_cube",
)
pricing_solver = Solver(surfaces=[pricing_cube], pre_solvers=[curve_solver])
iro = IRSPut(
expiry=dt(2027, 3, 3),
tenor="1y",
irs_series="usd_irs",
notional=125e6,
strike=3.99,
premium=400000,
curves="sofr",
vol="usd_cube",
metric="normal_vol",
)
result = iro.npv(solver=pricing_solver)
assert abs(result - 12988.135) < 1e-2
result = iro.rate(solver=pricing_solver)
assert abs(result - 103.889) < 1e-2
expiries = ["3m", "1y", "2y"] # <- expiries are different to those above
tenors = ["1y", "2y"] # <- tenors are also different
strikes = [-100.0, -50.0, -25.0, 0.0, 25.0, 50.0, 100.0] # <- strikes are bps to ATM
risk_cube = IRSplineCube(
eval_date=dt(2026, 3, 17),
expiries=expiries,
tenors=tenors,
strikes=strikes,
irs_series="usd_irs",
parameters=25.0, # <- all normal vol values are initialised at 25bps
id="usd_cube",
)
strikes_str = [f"{_}bps" for _ in strikes]
args = dict(
irs_series="eur_irs3",
eval_date=dt(2026, 3, 11),
metric="normal_vol",
curves="sofr",
vol="usd_cube",
)
instruments = [
IRVolValue(e, t, k, **args) for e, t, k in product(expiries, tenors, strikes_str)
]
instrument_labels = [f"{e}{t}_{k}" for e, t, k in product(expiries, tenors, strikes_str)]
risk_solver = Solver.from_other(
pricing_solver=pricing_solver, # <- will determine our ``s`` rates directly
surfaces=[risk_cube],
instruments=instruments,
instrument_labels=instrument_labels,
id="us_vol",
pre_solvers=[
curve_solver
], # <- the curve_solver is still needed to pass the SOFR Curve.
grad_tol=1e-5,
func_tol=1e-5,
step_tol=1e-5,
)
df = iro.delta(solver=risk_solver)
ix = IndexSlice
delta = df.loc[ix[:, "us_rates"], :].sum(axis=None)
vega = df.loc[ix[:, "us_vol"], :].sum(axis=None)
gf = iro.gamma(solver=risk_solver)
gamma = gf.loc[ix[:, :, :, "us_rates"], ix[:, "us_rates"]].sum(axis=None)
vomma = gf.loc[ix[:, :, :, "us_vol"], ix[:, "us_vol"]].sum(axis=None)
vanna = gf.loc[ix[:, :, :, "us_rates"], ix[:, "us_vol"]].sum(axis=None)
agks = iro.analytic_greeks(solver=risk_solver)
assert abs(agks["gamma_usd"] - gamma) < 5.5
assert abs(agks["vega_usd"] - vega) < 1e-3
assert abs(agks["vomma_usd"] - vomma) < 1e-3
assert abs(agks["vanna_usd"] - vanna) < 1.1
assert abs(agks["delta_sticky_usd"] - delta) < 42.0
================================================
FILE: python/tests/test_serialization.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
import pytest
from rateslib.curves import Curve, LineCurve
from rateslib.curves.utils import (
_CurveInterpolator,
_CurveMeta,
_CurveNodes,
_CurveSpline,
_CurveType,
)
from rateslib.default import NoInput
from rateslib.dual import Dual, Dual2, Variable
from rateslib.scheduling import Convention, get_calendar
from rateslib.serialization import from_json
from rateslib.serialization.utils import _enum_to_json
@pytest.mark.parametrize("calendar", [get_calendar("tgt"), get_calendar(NoInput(0))])
@pytest.mark.parametrize(
"index_base",
[
100.0,
Dual(100.0, ["v"], []),
Dual2(100.0, ["v"], [], []),
NoInput(0),
],
)
@pytest.mark.parametrize("collateral", [None, "usd"])
def test_curvemeta_json_round_trip(calendar, index_base, collateral):
obj = _CurveMeta(
_calendar=calendar,
_convention=Convention.Act360,
_modifier="MF",
_index_base=index_base,
_index_lag=1,
_collateral=collateral,
_credit_discretization=20,
_credit_recovery_rate=Variable(2.5, ["x"]),
)
json_text = obj.to_json()
round_trip = from_json(json_text)
assert round_trip == obj
@pytest.mark.parametrize(
"obj",
[
_CurveSpline(t=[dt(2000, 1, 1), dt(2002, 1, 1)], endpoints=("natural", "natural")),
_CurveNodes({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 0.98}),
# _CurveNodes({dt(2000,1,1): Dual(1.0, ["x"], []), dt(2001, 1, 1): Dual(0.98, ["s"], [])}),
],
)
def test_curvespline_json_round_trip(obj):
json_text = obj.to_json()
round_trip = from_json(json_text)
assert round_trip == obj
@pytest.mark.parametrize("local", ["linear", "spline"])
@pytest.mark.parametrize("t", [NoInput(0), [dt(2000, 1, 1), dt(2002, 1, 1)]])
def test_curveinterpolator_json_round_trip(local, t):
if not isinstance(t, NoInput) and local == "spline":
with pytest.raises(ValueError, match="When defining 'spline' interpola"):
_CurveInterpolator(local, t, None, None, None, None)
return None
obj = _CurveInterpolator(
local=local,
t=t,
endpoints=("natural", "natural"),
node_dates=[dt(2000, 1, 1), dt(2002, 1, 1)],
convention="act365f",
curve_type=_CurveType.dfs,
)
json_text = obj.to_json()
round_trip = from_json(json_text)
assert round_trip == obj
@pytest.mark.parametrize("value", [-1, 0, 1])
def test_no_input_round_trip(value):
obj = NoInput(value)
json = _enum_to_json(obj)
result = from_json(json)
assert result == obj
@pytest.fixture
def curve():
return Curve(
nodes={
dt(2022, 3, 1): 1.00,
dt(2022, 3, 31): 0.99,
},
interpolation="linear",
index_lag=3,
id="v",
convention="Act360",
ad=1,
)
@pytest.fixture
def line_curve():
return LineCurve(
nodes={
dt(2022, 3, 1): 2.00,
dt(2022, 3, 31): 2.01,
},
interpolation="linear",
id="v",
ad=1,
)
@pytest.fixture
def index_curve():
return Curve(
nodes={
dt(2022, 3, 1): 1.00,
dt(2022, 3, 31): 0.999,
},
interpolation="linear_index",
id="v",
ad=1,
index_base=110.0,
index_lag=3,
)
class TestCurve:
def test_serialization(self, curve) -> None:
expected = (
r'{"PyNative": '
r'{"Curve": {"meta": "{\"PyNative\": '
r"{\"_CurveMeta\": {\"calendar\": "
r"\"{\\\"NamedCal\\\":{\\\"name\\\":\\\"all\\\"}}\", "
r"\"convention\": \"{\\\"Convention\\\":\\\"Act360\\\"}\", "
r"\"modifier\": \"MF\", \"index_base\": \"{\\\"PyNative\\\":"
r"{\\\"NoInput\\\":0}}\", \"index_lag\": 3, \"collateral\": "
r"null, \"credit_discretization\": 23, \"credit_recovery_rate\": "
r'\"0.4\"}}}", "interpolator": "{\"PyNative\": {\"_CurveInterpolator\": '
r"{\"local\": \"linear\", \"spline\": \"null\", \"convention\": "
r'\"{\\\"Convention\\\":\\\"Act360\\\"}\"}}}", "id": "v", '
r'"ad": 1, "nodes": "{\"PyNative\": {\"_CurveNodes\": {\"_nodes\": '
r'{\"2022-03-01\": 1.0, \"2022-03-31\": 0.99}}}}"}}}'
)
result = curve.to_json()
assert result == expected
@pytest.mark.parametrize("c", ["curve", "line_curve", "index_curve"])
def test_serialization_round_trip(self, c, curve, line_curve, index_curve) -> None:
if c == "curve":
obj = curve
elif c == "line_curve":
obj = line_curve
elif c == "index_curve":
obj = index_curve
serial = obj.to_json()
constructed = from_json(serial)
assert constructed == obj
def test_serialization_round_trip_spline(self) -> None:
curve = Curve(
nodes={
dt(2022, 3, 1): 1.00,
dt(2022, 3, 31): 0.99,
dt(2022, 5, 1): 0.98,
dt(2022, 6, 4): 0.97,
dt(2022, 7, 4): 0.96,
},
interpolation="linear",
id="v",
convention="Act360",
ad=1,
t=[
dt(2022, 5, 1),
dt(2022, 5, 1),
dt(2022, 5, 1),
dt(2022, 5, 1),
dt(2022, 6, 4),
dt(2022, 7, 4),
dt(2022, 7, 4),
dt(2022, 7, 4),
dt(2022, 7, 4),
],
)
serial = curve.to_json()
constructed = from_json(serial)
assert constructed == curve
def test_serialization_curve_str_calendar(self) -> None:
curve = Curve(
nodes={
dt(2022, 3, 1): 1.00,
dt(2022, 3, 31): 0.99,
},
interpolation="linear",
id="v",
convention="Act360",
modifier="F",
calendar="LDN",
ad=1,
)
serial = curve.to_json()
constructed = from_json(serial)
assert constructed == curve
def test_serialization_curve_custom_calendar(self) -> None:
calendar = get_calendar("ldn")
curve = Curve(
nodes={
dt(2022, 3, 1): 1.00,
dt(2022, 3, 31): 0.99,
},
interpolation="linear",
id="v",
convention="Act360",
modifier="F",
calendar=calendar,
ad=1,
)
serial = curve.to_json()
constructed = from_json(serial)
assert constructed == curve
================================================
FILE: python/tests/test_solver.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
import sys
import warnings
from datetime import datetime as dt
from math import cos, exp
import numpy as np
import pytest
from numpy.testing import assert_allclose
from pandas import DataFrame, MultiIndex, Series
from pandas.errors import PerformanceWarning
from pandas.testing import assert_frame_equal, assert_series_equal
from rateslib import add_tenor, calendars, default_context
from rateslib.curves import CompositeCurve, Curve, LineCurve, MultiCsaCurve
from rateslib.dual import Dual, Dual2, Variable, gradient, ift_1dim, newton_1dim, newton_ndim
from rateslib.fx import FXForwards, FXRates
from rateslib.instruments import (
IRS,
XCS,
FloatRateNote,
FXBrokerFly,
FXCall,
FXRiskReversal,
FXStraddle,
FXStrangle,
FXSwap,
Portfolio,
Value,
)
from rateslib.solver import Gradients, Solver
from rateslib.volatility import FXDeltaVolSmile, FXDeltaVolSurface, FXSabrSmile, FXSabrSurface
class TestIFTSolver:
@pytest.mark.parametrize("args", [(2.0, 3.0), (-2.0, -1.0)])
def test_failed_state(self, args):
def s(x):
return x
result = ift_1dim(s, 1.0, "bisection", args, raise_on_fail=False)
assert result["state"] == -2
def test_failed_state_raises(self):
def s(x):
return x
with pytest.raises(ValueError, match="The internal iterative function `h` has reported"):
ift_1dim(s, 1.0, "bisection", (2.0, 3.0), raise_on_fail=True)
def test_solution_func_tol_state(self):
def s(x):
return x**2
result = ift_1dim(s, 9.0, "bisection", (1.0, 5.0), func_tol=1e-10)
# function should perform 2 iterations and arrive at 3.0
assert result["state"] == 2
assert result["g"] == 3.0
def test_solution_conv_tol_state(self):
def s(x):
return x**2
result = ift_1dim(s, 9.0, "bisection", (1.15, 5.0), conv_tol=1e-5)
# function should perform many bisections iterations and arrive close to 3.0 with conv_tol
assert result["state"] == 1
assert result["iterations"] > 16
assert abs(result["g"] - 3.0) < 1e-5
def test_solution_max_iter_state(self):
def s(x):
return x**2
result = ift_1dim(
s, 9.0, "bisection", (1.15, 5.0), conv_tol=1e-5, max_iter=5, raise_on_fail=False
)
# function should perform many bisections iterations and arrive close to 3.0 with conv_tol
assert result["state"] == -1
def test_dual_returns(self):
def s(x):
return 3.0 / (1 + x / 100.0) + (100.0 + 3.0) / (1 + x / 100.0) ** 2
result = ift_1dim(s, Dual(101.0, ["s"], []), "bisection", (2.0, 4.0), conv_tol=1e-5)
# ds_dx = -3 / (1+g)**2 - 2*(103) / (1+g)**3
g = result["g"]
ds_dx = -3.0 / (1.0 + g / 100.0) ** 2 - 2.0 * (103.0) / (1.0 + g / 100.0) ** 3
dg_ds_analytic = 1 / ds_dx * 100.0
dg_ds_ad = gradient(g, ["s"])[0]
assert abs(dg_ds_ad - dg_ds_analytic) < 1e-10
def test_dual2_returns(self):
# second part of dual returns
def s(x):
return 3.0 / (1 + x / 100.0) + (100.0 + 3.0) / (1 + x / 100.0) ** 2
result = ift_1dim(s, Dual2(101.0, ["s"], [], []), "bisection", (2.0, 4.0), conv_tol=1e-5)
# d2s_dx2 = 2.3 / (1+g)**3 + 6*(103) / (1+g)**4
g = result["g"]
ds_dx = -3.0 / (1.0 + g / 100.0) ** 2 - 2.0 * (103.0) / (1.0 + g / 100.0) ** 3
d2s_dx2 = 6.0 / (1.0 + g / 100.0) ** 3 + 6.0 * (103.0) / (1.0 + g / 100.0) ** 4
d2g_ds2_analytic = -100 * d2s_dx2 / ds_dx**3
d2g_ds2_ad = gradient(g, ["s"], order=2)[0][0]
assert abs(d2g_ds2_ad - d2g_ds2_analytic) < 1e-10
class TestDekker:
def test_simple_linear(self):
# test should converge in one secant iteration
def s(g):
return g
s_tgt = s(2.0)
result = ift_1dim(s, s_tgt, "modified_dekker", (0, 4), conv_tol=1e-12)
assert result["g"] == 2.0
assert result["iterations"] == 1
def test_cubic_with_bracketed_intervals(self):
# test converge to different roots withing the bracketed interval
def s(g):
return g**3 - 6 * g**2 + 11 * g - 6
s_tgt = 0.0
# roots at 1, 2, 3
result = ift_1dim(s, s_tgt, "modified_dekker", (0, 1.5), conv_tol=1e-12, func_tol=1e-12)
assert abs(result["g"] - 1.0) < 1e-12
assert result["iterations"] < 10
result = ift_1dim(
s, s_tgt, "modified_dekker", (1.1, 2.9), conv_tol=1e-12, func_tol=1e-12
)
assert abs(result["g"] - 2.0) < 1e-12
assert result["iterations"] < 10
result = ift_1dim(
s, s_tgt, "modified_dekker", (2.1, 25.0), conv_tol=1e-12, func_tol=1e-12
)
assert abs(result["g"] - 3.0) < 1e-12
assert result["iterations"] < 15
@pytest.mark.parametrize("bracket", [(0.0, 1.0), (1.0, 10.0)])
def test_root_in_bracket(self, bracket):
# test converge to different roots withing the bracketed interval
def s(g):
return g**3 - 6 * g**2 + 11 * g - 6
s_tgt = 0.0
# roots at 1, 2, 3
result = ift_1dim(s, s_tgt, "modified_dekker", bracket, conv_tol=1e-12, func_tol=1e-12)
assert abs(result["g"] - 1.0) < 1e-12
assert result["iterations"] == 1
def test_both_roots_in_bracket_takes_left_side(self):
# test converge to different roots withing the bracketed interval
def s(g):
return g**3 - 6 * g**2 + 11 * g - 6
s_tgt = 0.0
# roots at 1, 2, 3
result = ift_1dim(
s, s_tgt, "modified_dekker", (1.0, 2.0), conv_tol=1e-12, func_tol=1e-12
)
assert abs(result["g"] - 1.0) < 1e-12
assert result["iterations"] == 1
def test_horizontal_secant(self):
# the first iterate the boundaries yield the same value and the secant is div by zero
def s(g):
return g**2 - 2
s_tgt = 0.0
# roots at 1, 2, 3
result = ift_1dim(
s, s_tgt, "modified_dekker", (-2.0, 2.0), conv_tol=1e-12, func_tol=1e-12
)
assert abs(result["g"] + 2**0.5) < 1e-12
assert result["iterations"] < 10
def test_asymptote(self):
def s(g):
return 1 / (g - 3) - 6
s_tgt = 0.0
# roots at 19 / 6
result = ift_1dim(
s, s_tgt, "modified_dekker", (3.02, 4.0), conv_tol=1e-12, func_tol=1e-12
)
assert abs(result["g"] - 19 / 6) < 1e-12
assert result["iterations"] < 12
def test_dekker(self):
def s(x):
return exp(x) + x**2
s_tgt = s(2.0)
result = ift_1dim(s, s_tgt, "modified_dekker", (1.15, 5.0), conv_tol=1e-12)
assert result["g"] == 2.0
assert result["iterations"] < 12
result2 = ift_1dim(s, s_tgt, "bisection", (1.15, 5.0), conv_tol=1e-12)
assert 30 < result2["iterations"] < 50
def test_dekker_conv_tol(self):
def s(x):
return exp(x) + x**2
s_tgt = s(2.0)
result = ift_1dim(s, s_tgt, "modified_dekker", (1.15, 5.0), conv_tol=1e-3)
assert result["state"] == 1
class TestBrent:
def test_simple_linear(self):
# test should converge in one secant iteration
def s(g):
return g
s_tgt = s(2.0)
result = ift_1dim(s, s_tgt, "modified_brent", (0, 4), conv_tol=1e-12)
assert result["g"] == 2.0
assert result["iterations"] == 1
def test_cubic_with_bracketed_intervals(self):
# test converge to different roots withing the bracketed interval
def s(g):
return g**3 - 6 * g**2 + 11 * g - 6
s_tgt = 0.0
# roots at 1, 2, 3
result = ift_1dim(s, s_tgt, "modified_brent", (0, 1.5), conv_tol=1e-12, func_tol=1e-12)
assert abs(result["g"] - 1.0) < 1e-12
assert result["iterations"] < 10
result = ift_1dim(
s, s_tgt, "modified_brent", (1.1, 2.9), conv_tol=1e-12, func_tol=1e-12
)
assert abs(result["g"] - 2.0) < 1e-12
assert result["iterations"] < 10
result = ift_1dim(
s, s_tgt, "modified_brent", (2.1, 25.0), conv_tol=1e-12, func_tol=1e-12
)
assert abs(result["g"] - 3.0) < 1e-12
assert result["iterations"] < 15
@pytest.mark.parametrize("bracket", [(0.0, 1.0), (1.0, 10.0)])
def test_root_in_bracket(self, bracket):
# test converge to different roots withing the bracketed interval
def s(g):
return g**3 - 6 * g**2 + 11 * g - 6
s_tgt = 0.0
# roots at 1, 2, 3
result = ift_1dim(s, s_tgt, "modified_brent", bracket, conv_tol=1e-12, func_tol=1e-12)
assert abs(result["g"] - 1.0) < 1e-12
assert result["iterations"] == 1
def test_both_roots_in_bracket_takes_left_side(self):
# test converge to different roots withing the bracketed interval
def s(g):
return g**3 - 6 * g**2 + 11 * g - 6
s_tgt = 0.0
# roots at 1, 2, 3
result = ift_1dim(
s, s_tgt, "modified_brent", (1.0, 2.0), conv_tol=1e-12, func_tol=1e-12
)
assert abs(result["g"] - 1.0) < 1e-12
assert result["iterations"] == 1
def test_horizontal_secant(self):
# the first iterate the boundaries yield the same value and the secant is div by zero
def s(g):
return g**2 - 2
s_tgt = 0.0
# roots at 1, 2, 3
result = ift_1dim(
s, s_tgt, "modified_brent", (-2.0, 2.0), conv_tol=1e-12, func_tol=1e-12
)
assert abs(result["g"] + 2**0.5) < 1e-12
assert result["iterations"] < 10
def test_asymptote(self):
def s(g):
return 1 / (g - 3) - 6
s_tgt = 0.0
# roots at 19 / 6
result = ift_1dim(
s, s_tgt, "modified_brent", (3.02, 4.0), conv_tol=1e-12, func_tol=1e-12
)
assert abs(result["g"] - 19 / 6) < 1e-12
assert result["iterations"] < 12
def test_brent(self):
def s(x):
return exp(x) + x**2
s_tgt = s(2.0)
result = ift_1dim(s, s_tgt, "modified_brent", (1.15, 5.0), conv_tol=1e-12)
assert result["g"] == 2.0
assert result["iterations"] < 12
# result2 = ift_1dim(s, s_tgt, "bisection", (1.15, 5.0), conv_tol=1e-12)
# assert result["time"] <= result2["time"]
def test_brent_conv_tol(self):
def s(x):
return exp(x) + x**2
s_tgt = s(2.0)
result = ift_1dim(s, s_tgt, "modified_brent", (1.15, 5.0), conv_tol=1e-3)
assert result["state"] == 1
def test_paper_replication(self):
def s(g):
return exp(g**2 / -4.0) - 2 * cos(g) + g / 2.0 - 2.5
s_tgt = 0.0
# roots at 2.1584, 4.6196 and 7.255
result = ift_1dim(
s, s_tgt, "modified_brent", (1.0, 3.0), conv_tol=1e-12, func_tol=1e-12
)
assert abs(result["g"] - 2.1584212092981225) < 1e-12
assert result["iterations"] < 8
def test_another_func(self):
def s(g):
from math import cos
return cos(g) + g**3 + 2 * g**2 - 1.2
s_tgt = s(-1.5) # close to zero, 3 roots in [-4.0, 2.0]
r_bi = ift_1dim(s, s_tgt, "bisection", (-4.0, 2.0))
r_dk = ift_1dim(s, s_tgt, "modified_dekker", (-4.0, 2.0))
r_br = ift_1dim(s, s_tgt, "modified_brent", (-4.0, 2.0))
assert r_bi["status"] == "SUCCESS"
assert r_dk["status"] == "SUCCESS"
assert r_br["status"] == "SUCCESS"
class TestGradients:
@classmethod
def setup_class(cls):
class Inst:
def __init__(self, rate):
self._rate = rate
def rate(self, *args, **kwargs):
return self._rate
class SolverProxy(Gradients):
variables = ["v1", "v2", "v3"]
r = [Dual(1.0, ["v1"], []), Dual(3.0, ["v1", "v2", "v3"], [2.0, 1.0, -2.0])]
_J = None
instruments = [
[Inst(Dual2(1.0, ["v1"], [1.0], [4.0])), {}],
[
Inst(
Dual2(
3.0,
["v1", "v2", "v3"],
[2.0, 1.0, -2.0],
[-2.0, 1.0, 1.0, 1.0, -3.0, 2.0, 1.0, 2.0, -4.0],
),
),
{},
],
]
_J2 = None
_ad = 2
_grad_s_vT = np.array(
[
[1.0, 2.0, 3.0],
[2.0, 3.0, 4.0],
],
)
setattr(cls, "solver", SolverProxy())
def test_J(self) -> None:
expected = np.array(
[
[1.0, 2.0],
[0.0, 1.0],
[0.0, -2.0],
],
)
result = self.solver.J
assert_allclose(result, expected)
def test_grad_v_rT(self) -> None:
assert_allclose(self.solver.J, self.solver.grad_v_rT)
def test_J2(self) -> None:
expected = np.array(
[
[
[8.0, 0.0, 0.0],
[0.0, 0.0, 0.0],
[0.0, 0.0, 0.0],
],
[
[-4.0, 2.0, 2.0],
[2.0, -6.0, 4.0],
[2.0, 4.0, -8.0],
],
],
)
expected = np.transpose(expected, (1, 2, 0))
result = self.solver.J2
assert_allclose(expected, result)
def test_grad_v_v_rT(self) -> None:
assert_allclose(self.solver.J2, self.solver.grad_v_v_rT)
def test_grad_s_vT(self) -> None:
expected = np.array(
[
[1.0, 2.0, 3.0],
[2.0, 3.0, 4.0],
],
)
result = self.solver.grad_s_vT
assert_allclose(expected, result)
class TestDocs:
@pytest.mark.skipif(
sys.version_info[:2] == (3, 10), reason="This test is incompatible with Python 3.10"
)
def test_external_system_replicator(self):
TODAY = dt(2026, 3, 23)
SPOT = calendars.get("nyc").lag_bus_days(TODAY, 2, False)
TENORS = [
"1W",
"2W",
"3W",
"1M",
"2M",
"3M",
"4M",
"5M",
"6M",
"7M",
"8M",
"9M",
"10M",
"11M",
"12M",
"18M",
"2Y",
"3Y",
"4Y",
"5Y",
"6Y",
"7Y",
"8Y",
"9Y",
"10Y",
"12Y",
"15Y",
"20Y",
"25Y",
"30Y",
"40Y",
"50Y",
]
MATURITIES = [add_tenor(SPOT, _, "MF", "nyc") for _ in TENORS]
curve = Curve(
nodes={TODAY: 1.0, **dict.fromkeys(MATURITIES, 1.0)},
calendar="nyc",
convention="act360",
id="sofr",
)
solver = Solver(
curves=[curve],
instruments=[IRS(SPOT, _, spec="usd_irs", curves=[curve]) for _ in TENORS],
s=[
3.684,
3.6805,
3.677,
3.6786,
3.6941,
3.7059,
3.71675,
3.72315,
3.73,
3.74215,
3.7509,
3.75895,
3.7656,
3.77005,
3.7741,
3.7373,
3.6866,
3.6316,
3.63217,
3.6625,
3.706,
3.7515,
3.7968,
3.84117,
3.88475,
3.9714,
4.07703,
4.15708,
4.15165,
4.1093,
3.99425,
3.875,
],
)
_df = DataFrame(
{
"tenor": TENORS,
"maturity": MATURITIES,
"df": [float(curve[_]) for _ in MATURITIES],
"rate": [float(IRS(SPOT, _, spec="usd_irs", curves=curve).rate()) for _ in TENORS],
}
)
irs = IRS(
dt(2031, 3, 25),
dt(2036, 3, 25),
spec="usd_irs",
curves=["sofr"],
fixed_rate=4.0,
notional=-100e6,
)
pv = irs.npv(solver=solver)
_ct = irs.cashflows_table(solver=solver)
_cf = irs.cashflows(solver=solver)
dv01 = irs.delta(solver=solver).sum(axis=None)
pv01 = irs.analytic_delta(solver=solver, leg=1)
gamma = irs.gamma(solver=solver).sum(axis=None)
assert abs(pv + 579593.21) < 16.5 # <0.01% deviation
assert abs(dv01 + 37518.12) < 3 # <0.01% deviation
assert abs(pv01 + 37471.51) < 1.5 # <0.01% deviation
assert abs(gamma - 58.50) < 0.004 # <0.01% deviation
@pytest.mark.parametrize("algo", ["gauss_newton", "levenberg_marquardt", "gradient_descent"])
def test_basic_solver(algo) -> None:
curve = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
dt(2025, 1, 1): 1.0,
},
id="v",
)
instruments = [
(IRS(dt(2022, 1, 1), "1Y", "Q"), {"curves": curve}),
(IRS(dt(2022, 1, 1), "2Y", "Q"), {"curves": curve}),
(IRS(dt(2022, 1, 1), "3Y", "Q"), {"curves": curve}),
]
s = np.array([1.0, 1.6, 2.0])
solver = Solver(
curves=[curve],
instruments=instruments,
s=s,
algorithm=algo,
)
assert float(solver.g) < 1e-9
assert curve.nodes.nodes[dt(2022, 1, 1)] == Dual(1.0, ["v0"], [1])
expected = [1, 0.9899250357528555, 0.9680433953206192, 0.9407188354823821]
for i, key in enumerate(curve.nodes.nodes.keys()):
assert abs(float(curve.nodes.nodes[key]) - expected[i]) < 1e-6
def test_solver_repr():
curve = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
dt(2025, 1, 1): 1.0,
},
id="v",
)
instruments = [
(IRS(dt(2022, 1, 1), "1Y", "Q"), {"curves": curve}),
(IRS(dt(2022, 1, 1), "2Y", "Q"), {"curves": curve}),
(IRS(dt(2022, 1, 1), "3Y", "Q"), {"curves": curve}),
]
s = np.array([1.0, 1.6, 2.0])
solver = Solver(curves=[curve], instruments=instruments, s=s, id="S_ID")
result = solver.__repr__()
expected = f""
assert result == expected
@pytest.mark.parametrize("algo", ["gauss_newton", "levenberg_marquardt", "gradient_descent"])
def test_solver_reiterate(algo) -> None:
# test that curves are properly updated by a reiterate
curve = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
dt(2025, 1, 1): 1.0,
},
id="v",
)
instruments = [
IRS(dt(2022, 1, 1), "1Y", "Q", curves="v"),
IRS(dt(2022, 1, 1), "2Y", "Q", curves="v"),
IRS(dt(2022, 1, 1), "3Y", "Q", curves="v"),
]
s = np.array([1.0, 1.5, 2.0])
solver = Solver(
curves=[curve],
instruments=instruments,
s=s,
algorithm=algo,
)
assert float(solver.g) < 1e-9
solver.s[1] = 1.6
solver.iterate()
# now check that a reiteration has resolved the curve
assert curve.nodes.nodes[dt(2022, 1, 1)] == Dual(1.0, ["v0"], [1])
expected = [1, 0.9899250357528555, 0.9680433953206192, 0.9407188354823821]
for i, key in enumerate(curve.nodes.nodes.keys()):
assert abs(float(curve.nodes.nodes[key]) - expected[i]) < 1e-6
@pytest.mark.parametrize("algo", ["gauss_newton", "levenberg_marquardt", "gradient_descent"])
def test_basic_solver_line_curve(algo) -> None:
curve = LineCurve(
{
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
},
id="v",
)
instruments = [
(Value(dt(2022, 1, 1)), {"curves": curve}),
(Value(dt(2023, 1, 1)), {"curves": curve}),
(Value(dt(2024, 1, 1)), {"curves": curve}),
]
s = np.array([3.0, 3.6, 4.0])
solver = Solver(
curves=[curve],
instruments=instruments,
s=s,
algorithm=algo,
)
assert float(solver.g) < 1e-9
for i, key in enumerate(curve.nodes.nodes.keys()):
assert abs(float(curve.nodes.nodes[key]) - s[i]) < 1e-5
def test_basic_spline_solver() -> None:
spline_curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.99,
dt(2024, 1, 1): 0.965,
dt(2025, 1, 1): 0.93,
},
interpolation="log_linear",
t=[
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2023, 1, 1),
dt(2024, 1, 1),
dt(2025, 1, 3),
dt(2025, 1, 3),
dt(2025, 1, 3),
dt(2025, 1, 3),
],
id="v",
)
instruments = [
(IRS(dt(2022, 1, 1), "1Y", "Q"), {"curves": spline_curve}),
(IRS(dt(2022, 1, 1), "2Y", "Q"), {"curves": spline_curve}),
(IRS(dt(2022, 1, 1), "3Y", "Q"), {"curves": spline_curve}),
]
s = np.array([1.0, 1.6, 2.0])
solver = Solver(
curves=[spline_curve],
instruments=instruments,
s=s,
algorithm="gauss_newton",
)
assert float(solver.g) < 1e-12
assert spline_curve.nodes.nodes[dt(2022, 1, 1)] == Dual(1.0, ["v0"], [1])
expected = [1, 0.98992503575307, 0.9680377261843034, 0.9407048036486593]
for i, key in enumerate(spline_curve.nodes.nodes.keys()):
assert abs(float(spline_curve.nodes.nodes[key]) - expected[i]) < 1e-11
def test_large_spline_solver() -> None:
dates = [
dt(2000, 1, 3),
dt(2001, 1, 3),
dt(2002, 1, 3),
dt(2003, 1, 3),
dt(2004, 1, 3),
dt(2005, 1, 3),
dt(2006, 1, 3),
dt(2007, 1, 3),
dt(2008, 1, 3),
dt(2009, 1, 3),
dt(2010, 1, 3),
dt(2012, 1, 3),
dt(2015, 1, 3),
dt(2020, 1, 3),
dt(2025, 1, 3),
dt(2030, 1, 3),
dt(2035, 1, 3),
dt(2040, 1, 3),
dt(2050, 1, 3),
]
curve = Curve(
nodes=dict.fromkeys(dates, 1.0),
t=[dt(2000, 1, 3)] * 3 + dates[:-1] + [dt(2050, 1, 5)] * 4,
calendar="nyc",
)
solver = Solver(
curves=[curve],
instruments=[IRS(dt(2000, 1, 3), _, spec="usd_irs", curves=curve) for _ in dates[1:]],
s=[1.0 + _ / 25 for _ in range(18)],
)
assert solver.result["status"] == "SUCCESS"
def test_solver_raises_len() -> None:
with pytest.raises(ValueError, match=r"`s: 2` \(rates\) must be same length as"):
Solver(
instruments=[1],
s=[1, 2],
)
with pytest.raises(ValueError, match=r"`instrument_labels: 2` must be same length as"):
Solver(
instruments=[1],
s=[1],
instrument_labels=[1, 2],
)
with pytest.raises(ValueError, match=r"`weights: 1` must be same length as"):
Solver(
instruments=[1, 2],
s=[1, 2],
instrument_labels=[1, 2],
weights=[1],
)
def test_basic_solver_weights() -> None:
# This test replicates test_basic_solver with the 3Y rate at two different rates.
# We vary the weights argument to selectively decide which one to use.
curve = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
dt(2025, 1, 1): 1.0,
},
id="v",
)
instruments = [
(IRS(dt(2022, 1, 1), "1Y", "Q"), {"curves": curve}),
(IRS(dt(2022, 1, 1), "2Y", "Q"), {"curves": curve}),
(IRS(dt(2022, 1, 1), "3Y", "Q"), {"curves": curve}),
(IRS(dt(2022, 1, 1), "3Y", "Q"), {"curves": curve}),
]
s = np.array([1.0, 1.6, 2.02, 1.98]) # average 3Y at approximately 2.0%
with default_context("algorithm", "gauss_newton"):
solver = Solver(
curves=[curve],
instruments=instruments,
s=s,
func_tol=0.00085,
)
assert float(solver.g) < 0.00085
assert curve.nodes.nodes[dt(2022, 1, 1)] == Dual(1.0, ["v0"], [1])
expected = [1, 0.9899250357528555, 0.9680433953206192, 0.9407188354823821]
for i, key in enumerate(curve.nodes.nodes.keys()):
assert abs(float(curve.nodes.nodes[key]) - expected[i]) < 1e-6
solver = Solver(
curves=[curve],
instruments=instruments,
s=s,
weights=[1, 1, 1, 1e-6],
func_tol=1e-7,
algorithm="gauss_newton",
)
assert abs(float(instruments[2][0].rate(curves=curve)) - 2.02) < 1e-4
solver = Solver(
curves=[curve],
instruments=instruments,
s=s,
weights=[1, 1, 1e-6, 1],
func_tol=1e-7,
algorithm="gauss_newton",
)
assert abs(float(instruments[2][0].rate(curves=curve)) - 1.98) < 1e-4
def test_solver_independent_curve() -> None:
# Test that a solver can use an independent curve as a static object and solve
# without mutating that un-referenced object.
independent_curve = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.98,
dt(2024, 1, 1): 0.96,
dt(2025, 1, 1): 0.94,
},
)
expected = independent_curve.copy()
var_curve = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 0.99,
dt(2024, 1, 1): 0.98,
dt(2025, 1, 1): 0.97,
},
)
instruments = [
(IRS(dt(2022, 1, 1), "1Y", "Q"), {"curves": [var_curve, independent_curve]}),
(IRS(dt(2022, 1, 1), "2Y", "Q"), {"curves": [var_curve, independent_curve]}),
(IRS(dt(2022, 1, 1), "3Y", "Q"), {"curves": [var_curve, independent_curve]}),
]
s = np.array([2.00, 2.00, 2.00])
with default_context("curve_not_in_solver", "ignore"):
Solver(
curves=[var_curve],
instruments=instruments,
s=s,
func_tol=1e-13,
conv_tol=1e-13,
)
for i, instrument in enumerate(instruments):
assert abs(float(instrument[0].rate(**instrument[1]) - s[i])) < 1e-7
assert independent_curve == expected
class TestSolverCompositeCurve:
def test_solver_composite_curve(self) -> None:
# this test creates a solver with a composite curve
# for the purpose of adding a turn
c_base = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 1.0, dt(2024, 1, 1): 1.0, dt(2025, 1, 1): 1.0},
id="sek_base",
)
c_turns = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2022, 12, 30): 1.0,
dt(2023, 1, 1): 1.0,
dt(2025, 1, 1): 1.0,
},
id="sek_turns",
)
composite_curve = CompositeCurve([c_base, c_turns], id="sek")
instruments_turns = [
IRS(dt(2022, 1, 1), "1d", "A", curves="sek_turns"),
IRS(dt(2022, 12, 30), "1d", "A", curves="sek_turns"),
IRS(dt(2023, 1, 1), "1d", "A", curves="sek_turns"),
]
s_turns = [0.0, -0.50, 0.0]
labels_turns = ["NA1", "Turn1", "NA2"]
instruments_base = [
IRS(dt(2022, 1, 1), "1Y", "A", curves="sek"),
IRS(dt(2022, 1, 1), "2Y", "A", curves="sek"),
IRS(dt(2022, 1, 1), "3Y", "A", curves="sek"),
]
s_base = [2.0, 2.3, 2.4]
labels_base = ["1Y", "2Y", "3Y"]
solver = Solver(
curves=[c_base, c_turns, composite_curve],
instruments=instruments_turns + instruments_base,
s=s_turns + s_base,
instrument_labels=labels_turns + labels_base,
id="solv",
)
test_irs = IRS(dt(2022, 6, 1), "15M", "A", notional=1e6, curves="sek")
expected = 2.31735564
result = test_irs.rate(solver=solver)
assert (result - expected) < 1e-8
delta = test_irs.delta(solver=solver)
expected = DataFrame(
data=[
-0.22582768057036448,
0.22571855114358436,
0.00010912854804701055,
-9.15902876400274,
131.75543312,
0.0033383280,
],
columns=MultiIndex.from_tuples([("usd", "usd")], names=["local_ccy", "display_ccy"]),
index=MultiIndex.from_tuples(
[
("instruments", "solv", "NA1"),
("instruments", "solv", "Turn1"),
("instruments", "solv", "NA2"),
("instruments", "solv", "1Y"),
("instruments", "solv", "2Y"),
("instruments", "solv", "3Y"),
],
names=["type", "solver", "label"],
),
)
assert_frame_equal(delta, expected)
def test_non_unique_curves() -> None:
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}, id="A")
curve2 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}, id="A")
solver = Solver(
curves=[curve],
instruments=[(IRS(dt(2022, 1, 1), "1Y", "Q"), {"curves": curve})],
s=[1],
)
with pytest.raises(ValueError, match="`curves` must each have their own unique"):
Solver(
curves=[curve2],
instruments=[(IRS(dt(2022, 1, 1), "1Y", "Q"), {"curves": curve})],
s=[2],
pre_solvers=[solver],
)
with pytest.raises(ValueError, match="`curves` must each have their own unique"):
Solver(
curves=[curve, curve2],
instruments=[(IRS(dt(2022, 1, 1), "1Y", "Q"), {"curves": curve})],
s=[2],
)
def test_max_iterations() -> None:
# This test replicates has an oscillatory solution between the different 3y rates.
curve = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
dt(2025, 1, 1): 1.0,
},
id="v",
)
instruments = [
(IRS(dt(2022, 1, 1), "1Y", "Q"), {"curves": curve}),
(IRS(dt(2022, 1, 1), "2Y", "Q"), {"curves": curve}),
(IRS(dt(2022, 1, 1), "3Y", "Q"), {"curves": curve}),
(IRS(dt(2022, 1, 1), "3Y", "Q"), {"curves": curve}),
]
s = np.array([1.0, 1.6, 2.02, 1.98]) # average 3Y at approximately 2.0%
with default_context("algorithm", "gauss_newton"):
solver = Solver(
curves=[curve],
instruments=instruments,
s=s,
func_tol=1e-10,
max_iter=30,
step_tol=0.0,
grad_tol=0.0,
)
assert len(solver.g_list) == 31
def test_step_tol() -> None:
curve = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
dt(2025, 1, 1): 1.0,
},
id="v",
)
instruments = [
(IRS(dt(2022, 1, 1), "1Y", "Q"), {"curves": curve}),
(IRS(dt(2022, 1, 1), "2Y", "Q"), {"curves": curve}),
(IRS(dt(2022, 1, 1), "3Y", "Q"), {"curves": curve}),
(IRS(dt(2022, 1, 1), "3Y", "Q"), {"curves": curve}),
]
s = np.array([1.0, 1.6, 2.02, 1.98]) # average 3Y at approximately 2.0%
solver = Solver(
curves=[curve],
instruments=instruments,
s=s,
max_iter=30,
func_tol=0.0,
step_tol=1.0,
grad_tol=0.0,
conv_tol=0.0,
)
assert solver.result["state"] == 4
def test_grad_tol() -> None:
curve = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
dt(2025, 1, 1): 1.0,
},
id="v",
)
instruments = [
(IRS(dt(2022, 1, 1), "1Y", "Q"), {"curves": curve}),
(IRS(dt(2022, 1, 1), "2Y", "Q"), {"curves": curve}),
(IRS(dt(2022, 1, 1), "3Y", "Q"), {"curves": curve}),
(IRS(dt(2022, 1, 1), "3Y", "Q"), {"curves": curve}),
]
s = np.array([1.0, 1.6, 2.02, 1.98]) # average 3Y at approximately 2.0%
solver = Solver(
curves=[curve],
instruments=instruments,
s=s,
max_iter=30,
func_tol=0.0,
step_tol=0.0,
grad_tol=0.01,
conv_tol=0.0,
)
assert solver.result["state"] == 5
def test_solver_pre_solver_dependency_generates_same_delta() -> None:
"""
Build an ESTR curve with solver1.
Build an IBOR curve with solver2 dependent upon solver1.
Build an ESTR and IBOR curve simultaneously inside the same solver3.
Test the delta and the instrument calibration error
"""
eur_disc_curve = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 1.0, dt(2024, 1, 1): 1.0},
id="eur",
)
eur_instruments = [
(IRS(dt(2022, 1, 1), "8M", "A"), {"curves": eur_disc_curve}),
(IRS(dt(2022, 1, 1), "16M", "A"), {"curves": eur_disc_curve}),
(IRS(dt(2022, 1, 1), "2Y", "A"), {"curves": eur_disc_curve}),
]
eur_disc_s = [2.01, 2.22, 2.55]
eur_disc_solver = Solver([eur_disc_curve], [], eur_instruments, eur_disc_s, id="estr")
eur_ibor_curve = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 1.0, dt(2024, 1, 1): 1.0},
id="eur_ibor",
)
eur_ibor_instruments = [
(IRS(dt(2022, 1, 1), "1Y", "A"), {"curves": [eur_ibor_curve, eur_disc_curve]}),
(IRS(dt(2022, 1, 1), "2Y", "A"), {"curves": [eur_ibor_curve, eur_disc_curve]}),
]
eur_ibor_s = [2.25, 2.65]
eur_solver2 = Solver(
[eur_ibor_curve],
[],
eur_ibor_instruments,
eur_ibor_s,
pre_solvers=[eur_disc_solver],
id="ibor",
)
eur_disc_curve2 = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 1.0, dt(2024, 1, 1): 1.0},
id="eur",
)
eur_ibor_curve2 = Curve(
{dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 1.0, dt(2024, 1, 1): 1.0},
id="eur_ibor",
)
eur_instruments2 = [
(IRS(dt(2022, 1, 1), "8M", "A"), {"curves": eur_disc_curve2}),
(IRS(dt(2022, 1, 1), "16M", "A"), {"curves": eur_disc_curve2}),
(IRS(dt(2022, 1, 1), "2Y", "A"), {"curves": eur_disc_curve2}),
(IRS(dt(2022, 1, 1), "1Y", "A"), {"curves": [eur_ibor_curve2, eur_disc_curve2]}),
(IRS(dt(2022, 1, 1), "2Y", "A"), {"curves": [eur_ibor_curve2, eur_disc_curve2]}),
]
eur_disc_s2 = [2.01, 2.22, 2.55, 2.25, 2.65]
eur_solver_sim = Solver(
[eur_disc_curve2, eur_ibor_curve2],
[],
eur_instruments2,
eur_disc_s2,
id="eur_sol_sim",
instrument_labels=["estr0", "estr1", "estr2", "ibor0", "ibor1"],
)
eur_swap = IRS(
dt(2022, 3, 1),
"16M",
"M",
fixed_rate=3.0,
)
delta_sim = eur_swap.delta(curves=[eur_ibor_curve2, eur_disc_curve2], solver=eur_solver_sim)
delta_pre = eur_swap.delta(curves=[eur_ibor_curve, eur_disc_curve], solver=eur_solver2)
delta_pre.index = delta_sim.index
assert_frame_equal(delta_sim, delta_pre)
error_sim = eur_solver_sim.error
error_pre = eur_solver2.error
assert_series_equal(error_pre, error_sim, check_index=False, rtol=1e-5, atol=1e-3)
def test_delta_gamma_calculation() -> None:
estr_curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2032, 1, 1): 1.0, dt(2042, 1, 1): 1.0},
id="estr_curve",
)
estr_instruments = [
(IRS(dt(2022, 1, 1), "10Y", "A"), {"curves": estr_curve}),
(IRS(dt(2022, 1, 1), "20Y", "A"), {"curves": estr_curve}),
]
estr_solver = Solver(
[estr_curve],
[],
estr_instruments,
[2.0, 1.5],
id="estr",
instrument_labels=["10Y", "20Y"],
)
# Mechanism 1: dynamic
eur_swap = IRS(dt(2032, 1, 1), "10Y", "A", notional=100e6)
assert (
74430 < float(eur_swap.delta(curves=estr_curve, solver=estr_solver).sum().iloc[0]) < 74432
)
assert -229 < float(eur_swap.gamma(curves=estr_curve, solver=estr_solver).sum().sum()) < -228
# Mechanism 1: dynamic names
assert (
74430 < float(eur_swap.delta(curves="estr_curve", solver=estr_solver).sum().iloc[0]) < 74432
)
assert -229 < float(eur_swap.gamma(curves="estr_curve", solver=estr_solver).sum().sum()) < -228
# Mechanism 1: fails on None curve specification
with pytest.raises(TypeError, match="`curves` have not been supplied correctly"):
assert eur_swap.delta(solver=estr_solver)
with pytest.raises(TypeError, match="`curves` have not been supplied correctly"):
assert eur_swap.gamma(solver=estr_solver)
# Mechanism 2: static specific
eur_swap = IRS(dt(2032, 1, 1), "10Y", "A", notional=100e6, curves=estr_curve)
assert 74430 < float(eur_swap.delta(solver=estr_solver).sum().iloc[0]) < 74432
assert -229 < float(eur_swap.gamma(solver=estr_solver).sum().sum()) < -228
# Mechanism 2: static named
eur_swap = IRS(dt(2032, 1, 1), "10Y", "A", notional=100e6, curves="estr_curve")
assert 74430 < float(eur_swap.delta(solver=estr_solver).sum().iloc[0]) < 74432
assert -229 < float(eur_swap.gamma(solver=estr_solver).sum().sum()) < -228
def test_solver_delta_fx_noinput() -> None:
estr_curve = Curve(
{dt(2022, 1, 1): 1.0, dt(2032, 1, 1): 1.0, dt(2042, 1, 1): 1.0},
id="estr_curve",
)
estr_instruments = [
(IRS(dt(2022, 1, 1), "10Y", "A"), {"curves": estr_curve}),
(IRS(dt(2022, 1, 1), "20Y", "A"), {"curves": estr_curve}),
]
estr_solver = Solver(
[estr_curve],
[],
estr_instruments,
[2.0, 1.5],
id="estr",
instrument_labels=["10Y", "20Y"],
)
eur_swap = IRS(dt(2032, 1, 1), "10Y", "A", notional=100e6, fixed_rate=2)
npv = eur_swap.npv(curves=estr_curve, solver=estr_solver, local=True)
result = estr_solver.delta(npv)
assert type(result) is DataFrame
def test_solver_pre_solver_dependency_generates_same_gamma() -> None:
estr_curve = Curve({dt(2022, 1, 1): 1.0, dt(2032, 1, 1): 1.0, dt(2042, 1, 1): 1.0})
estr_instruments = [
(IRS(dt(2022, 1, 1), "7Y", "A"), {"curves": estr_curve}),
(IRS(dt(2022, 1, 1), "15Y", "A"), {"curves": estr_curve}),
(IRS(dt(2022, 1, 1), "20Y", "A"), {"curves": estr_curve}),
]
estr_s = [2.0, 1.75, 1.5]
estr_labels = ["7ye", "15ye", "20ye"]
estr_solver = Solver(
[estr_curve],
[],
estr_instruments,
estr_s,
id="estr",
instrument_labels=estr_labels,
algorithm="gauss_newton",
)
ibor_curve = Curve({dt(2022, 1, 1): 1.0, dt(2032, 1, 1): 1.0, dt(2042, 1, 1): 1.0})
ibor_instruments = [
(IRS(dt(2022, 1, 1), "10Y", "A"), {"curves": [ibor_curve, estr_curve]}),
(IRS(dt(2022, 1, 1), "20Y", "A"), {"curves": [ibor_curve, estr_curve]}),
]
ibor_s = [2.1, 1.65]
ibor_labels = ["10Yi", "20Yi"]
ibor_solver = Solver(
[ibor_curve],
[],
ibor_instruments,
ibor_s,
id="ibor",
instrument_labels=ibor_labels,
pre_solvers=[estr_solver],
algorithm="gauss_newton",
)
eur_swap = IRS(dt(2032, 1, 1), "10Y", "A", notional=100e6)
gamma_pre = eur_swap.gamma(curves=[ibor_curve, estr_curve], solver=ibor_solver)
delta_pre = eur_swap.delta(curves=[ibor_curve, estr_curve], solver=ibor_solver)
estr_curve2 = Curve({dt(2022, 1, 1): 1.0, dt(2032, 1, 1): 1.0, dt(2042, 1, 1): 1.0})
ibor_curve2 = Curve({dt(2022, 1, 1): 1.0, dt(2032, 1, 1): 1.0, dt(2042, 1, 1): 1.0})
sim_instruments = [
(IRS(dt(2022, 1, 1), "7Y", "A"), {"curves": estr_curve2}),
(IRS(dt(2022, 1, 1), "15Y", "A"), {"curves": estr_curve2}),
(IRS(dt(2022, 1, 1), "20Y", "A"), {"curves": estr_curve2}),
(IRS(dt(2022, 1, 1), "10Y", "A"), {"curves": [ibor_curve2, estr_curve2]}),
(IRS(dt(2022, 1, 1), "20Y", "A"), {"curves": [ibor_curve2, estr_curve2]}),
]
simultaneous_solver = Solver(
[estr_curve2, ibor_curve2],
[],
sim_instruments,
estr_s + ibor_s,
id="simul",
instrument_labels=estr_labels + ibor_labels,
algorithm="gauss_newton",
)
gamma_sim = eur_swap.gamma(curves=[ibor_curve2, estr_curve2], solver=simultaneous_solver)
delta_sim = eur_swap.delta(curves=[ibor_curve2, estr_curve2], solver=simultaneous_solver)
# check arrays in construction of gamma
grad_s_vT_sim = simultaneous_solver.grad_s_vT_pre
grad_s_vT_pre = ibor_solver.grad_s_vT_pre
assert_allclose(grad_s_vT_pre, grad_s_vT_sim, atol=1e-14, rtol=1e-10)
simultaneous_solver._set_ad_order(2)
J2_sim = simultaneous_solver.J2_pre
ibor_solver._set_ad_order(2)
J2_pre = ibor_solver.J2_pre
assert_allclose(J2_pre, J2_sim, atol=1e-14, rtol=1e-10)
grad_s_s_vT_sim = simultaneous_solver.grad_s_s_vT_pre
grad_s_s_vT_pre = ibor_solver.grad_s_s_vT_pre
assert_allclose(grad_s_s_vT_pre, grad_s_s_vT_sim, atol=1e-14, rtol=1e-10)
gamma_pre.index = gamma_sim.index
gamma_pre.columns = gamma_sim.columns
delta_pre.index = delta_sim.index
assert_frame_equal(delta_sim, delta_pre)
assert_frame_equal(gamma_sim, gamma_pre)
def test_nonmutable_presolver_defaults() -> None:
estr_curve = Curve({dt(2022, 1, 1): 1.0, dt(2032, 1, 1): 1.0})
estr_instruments = [
(IRS(dt(2022, 1, 1), "10Y", "A"), {"curves": estr_curve}),
]
estr_s = [2.0]
estr_labels = ["10ye"]
estr_solver = Solver(
[estr_curve],
[],
estr_instruments,
estr_s,
id="estr",
instrument_labels=estr_labels,
)
with pytest.raises(AttributeError, match="'tuple' object has no attribute"):
estr_solver.pre_solvers.extend([1, 2, 3])
def test_solver_grad_s_vT_methods_equivalent() -> None:
curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
dt(2025, 1, 1): 1.0,
dt(2026, 1, 1): 1.0,
dt(2027, 1, 1): 1.0,
},
)
instruments = [
(IRS(dt(2022, 1, 1), "2Y", "A"), {"curves": curve}),
(IRS(dt(2023, 1, 1), "1Y", "A"), {"curves": curve}),
(IRS(dt(2023, 1, 1), "2Y", "A"), {"curves": curve}),
(IRS(dt(2022, 5, 1), "4Y", "A"), {"curves": curve}),
(IRS(dt(2023, 1, 1), "4Y", "A"), {"curves": curve}),
]
s = [1.2, 1.4, 1.6, 1.7, 1.9]
solver = Solver([curve], [], instruments, s, algorithm="gauss_newton")
solver._grad_s_vT_method = "_grad_s_vT_final_iteration_analytical"
grad_s_vT_final_iter_anal = solver.grad_s_vT
solver._grad_s_vT_method = "_grad_s_vT_final_iteration_dual"
solver._grad_s_vT_final_iteration_algo = "gauss_newton_final"
solver._reset_properties_()
grad_s_vT_final_iter_dual = solver.grad_s_vT
solver._grad_s_vT_method = "_grad_s_vT_fixed_point_iteration"
solver._reset_properties_()
grad_s_vT_fixed_point_iter = solver.grad_s_vT
assert_allclose(grad_s_vT_final_iter_dual, grad_s_vT_final_iter_anal, atol=1e-12)
assert_allclose(grad_s_vT_fixed_point_iter, grad_s_vT_final_iter_anal, atol=1e-12)
assert_allclose(grad_s_vT_final_iter_dual, grad_s_vT_fixed_point_iter, atol=1e-12)
def test_solver_grad_s_vT_methods_equivalent_overspecified_curve() -> None:
curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
dt(2025, 1, 1): 1.0,
# dt(2026, 1, 1): 1.0,
dt(2027, 1, 1): 1.0,
},
)
instruments = [
(IRS(dt(2022, 1, 1), "2Y", "A"), {"curves": curve}),
(IRS(dt(2023, 1, 1), "1Y", "A"), {"curves": curve}),
(IRS(dt(2023, 1, 1), "2Y", "A"), {"curves": curve}),
(IRS(dt(2022, 5, 1), "4Y", "A"), {"curves": curve}),
(IRS(dt(2023, 1, 1), "4Y", "A"), {"curves": curve}),
]
s = [1.2, 1.4, 1.6, 1.7, 1.9]
solver = Solver([curve], [], instruments, s, algorithm="gauss_newton")
solver._grad_s_vT_method = "_grad_s_vT_final_iteration_analytical"
grad_s_vT_final_iter_anal = solver.grad_s_vT
solver._grad_s_vT_method = "_grad_s_vT_final_iteration_dual"
solver._grad_s_vT_final_iteration_algo = "gauss_newton_final"
solver._reset_properties_()
grad_s_vT_final_iter_dual = solver.grad_s_vT
solver._grad_s_vT_method = "_grad_s_vT_fixed_point_iteration"
solver._reset_properties_()
grad_s_vT_fixed_point_iter = solver.grad_s_vT
assert_allclose(grad_s_vT_final_iter_dual, grad_s_vT_final_iter_anal, atol=1e-6)
assert_allclose(grad_s_vT_fixed_point_iter, grad_s_vT_final_iter_anal, atol=1e-6)
assert_allclose(grad_s_vT_final_iter_dual, grad_s_vT_fixed_point_iter, atol=1e-6)
def test_solver_second_order_vars_raise_on_first_order() -> None:
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}, id="A")
solver = Solver(
curves=[curve],
instruments=[(IRS(dt(2022, 1, 1), "1Y", "Q"), {"curves": curve})],
s=[1],
)
with pytest.raises(ValueError, match="Cannot perform second derivative calc"):
solver.J2
with pytest.raises(ValueError, match="Cannot perform second derivative calc"):
solver.grad_s_s_vT
def test_solver_second_order_vars_raise_on_first_order_pre_solvers() -> None:
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}, id="A")
solver = Solver(
curves=[curve],
instruments=[IRS(dt(2022, 1, 1), "1Y", "Q", curves=curve)],
s=[1],
)
curve2 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}, id="B")
solver2 = Solver(
curves=[curve2],
instruments=[IRS(dt(2022, 1, 1), "1Y", "Q", curves=curve2)],
s=[1],
pre_solvers=[solver],
)
with pytest.raises(ValueError, match="Cannot perform second derivative calc"):
solver2.J2_pre
with pytest.raises(ValueError, match="Cannot perform second derivative calc"):
solver.grad_s_s_vT_pre
def test_bad_algo_raises() -> None:
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}, id="A")
with pytest.raises(NotImplementedError, match="`algorithm`: bad_algo"):
Solver(
curves=[curve],
instruments=[IRS(dt(2022, 1, 1), "1Y", "Q", curves=curve)],
s=[1],
algorithm="bad_algo",
)
def test_solver_float_rate_bond() -> None:
"""
This test checks the rate method of FloatRateNote when using complex rate spread
calculations (which artificially introduces Dual2 and then removes it)
"""
d_c = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2022, 7, 1): 0.94,
dt(2023, 1, 1): 0.92,
dt(2024, 1, 1): 0.9,
},
id="credit",
)
f_c = d_c.copy()
f_c._id = "rfr"
instruments = [
(
FloatRateNote(
dt(2022, 1, 1),
"6M",
"Q",
spread_compound_method="isda_compounding",
settle=2,
),
{"metric": "spread", "curves": [f_c, d_c]},
),
(
FloatRateNote(
dt(2022, 1, 1),
"1y",
"Q",
spread_compound_method="isda_compounding",
settle=2,
curves=[f_c, d_c],
),
{"metric": "spread"},
),
(
FloatRateNote(
dt(2022, 1, 1),
"18m",
"Q",
spread_compound_method="isda_compounding",
settle=2,
curves=[f_c, d_c],
),
{"metric": "spread"},
),
]
Solver([d_c], [], instruments, [25, 25, 25])
result = d_c.rate(dt(2022, 7, 1), "1D")
expected = f_c.rate(dt(2022, 7, 1), "1D") + 0.25
assert abs(result - expected) < 3e-4
def test_solver_grad_s_s_vt_methods_equivalent() -> None:
curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
dt(2025, 1, 1): 1.0,
dt(2026, 1, 1): 1.0,
dt(2027, 1, 1): 1.0,
dt(2028, 1, 1): 1.0,
dt(2029, 1, 1): 1.0,
},
id="curve",
)
instruments = [
IRS(dt(2022, 1, 1), "1y", "A", curves="curve"),
IRS(dt(2022, 1, 1), "2y", "A", curves="curve"),
IRS(dt(2022, 1, 1), "3y", "A", curves="curve"),
IRS(dt(2022, 1, 1), "4y", "A", curves="curve"),
IRS(dt(2022, 1, 1), "5y", "A", curves="curve"),
IRS(dt(2022, 1, 1), "6y", "A", curves="curve"),
IRS(dt(2022, 1, 1), "7y", "A", curves="curve"),
]
with default_context("algorithm", "gauss_newton"):
solver = Solver(
curves=[curve],
instruments=instruments,
s=[1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7],
)
grad_s_s_vt_fwddiff = solver._grad_s_s_vT_fwd_difference_method()
solver._set_ad_order(order=2)
grad_s_s_vt_final = solver._grad_s_s_vT_final_iteration_analytical()
solver._set_ad_order(order=1)
assert_allclose(grad_s_s_vt_final, grad_s_s_vt_fwddiff, atol=5e-7)
def test_gamma_raises() -> None:
curve = Curve(
{
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
dt(2025, 1, 1): 1.0,
},
id="v",
)
instruments = [
IRS(dt(2022, 1, 1), "1Y", "Q", curves=curve),
IRS(dt(2022, 1, 1), "2Y", "Q", curves=curve),
IRS(dt(2022, 1, 1), "3Y", "Q", curves=curve),
]
s = np.array([1.0, 1.6, 2.0])
solver = Solver(
curves=[curve],
instruments=instruments,
s=s,
)
with pytest.raises(ValueError, match="`Solver` must be in ad order 2"):
solver.gamma(100)
def test_delta_irs_guide() -> None:
# this mirrors the delta user guide page
usd_curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2022, 2, 1): 1.0,
dt(2022, 4, 1): 1.0,
dt(2023, 1, 1): 1.0,
},
id="sofr",
)
instruments = [
IRS(dt(2022, 1, 1), "1m", "A", curves="sofr"),
IRS(dt(2022, 1, 1), "3m", "A", curves="sofr"),
IRS(dt(2022, 1, 1), "1y", "A", curves="sofr"),
]
usd_solver = Solver(
curves=[usd_curve],
id="usd_sofr",
instruments=instruments,
s=[2.5, 3.25, 4.0],
instrument_labels=["1m", "3m", "1y"],
)
irs = IRS(
effective=dt(2022, 1, 1),
termination="6m",
frequency="A",
currency="usd",
fixed_rate=6.0,
curves="sofr",
)
result = irs.delta(solver=usd_solver) # local overrides base to USD
# result = irs.delta(solver=usd_solver, base="eur", local=True) # local overrides base to USD
expected = DataFrame(
[[0], [16.77263], [32.60487]],
index=MultiIndex.from_product(
[["instruments"], ["usd_sofr"], ["1m", "3m", "1y"]],
names=["type", "solver", "label"],
),
columns=MultiIndex.from_tuples([("usd", "usd")], names=["local_ccy", "display_ccy"]),
)
assert_frame_equal(result, expected)
def test_delta_irs_guide_fx_base() -> None:
# this mirrors the delta user guide page
usd_curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2022, 2, 1): 1.0,
dt(2022, 4, 1): 1.0,
dt(2023, 1, 1): 1.0,
},
id="sofr",
)
instruments = [
IRS(dt(2022, 1, 1), "1m", "A", curves="sofr"),
IRS(dt(2022, 1, 1), "3m", "A", curves="sofr"),
IRS(dt(2022, 1, 1), "1y", "A", curves="sofr"),
]
usd_solver = Solver(
curves=[usd_curve],
id="usd_sofr",
instruments=instruments,
s=[2.5, 3.25, 4.0],
instrument_labels=["1m", "3m", "1y"],
)
irs = IRS(
effective=dt(2022, 1, 1),
termination="6m",
frequency="A",
currency="usd",
fixed_rate=6.0,
curves="sofr",
)
fxr = FXRates({"eurusd": 1.1})
result = irs.delta(solver=usd_solver, base="eur", fx=fxr)
expected = DataFrame(
[
[0, 0, 0],
[15.247847, 15.247847, 16.772632],
[29.640788, 29.640788, 32.60487],
[0.926514, 0.926514, 0.0],
],
index=MultiIndex.from_tuples(
[
("instruments", "usd_sofr", "1m"),
("instruments", "usd_sofr", "3m"),
("instruments", "usd_sofr", "1y"),
("fx", "fx", "eurusd"),
],
names=["type", "solver", "label"],
),
columns=MultiIndex.from_tuples(
[
("all", "eur"),
("usd", "eur"),
("usd", "usd"),
],
names=["local_ccy", "display_ccy"],
),
)
assert_frame_equal(result, expected)
# def test_irs_delta_curves_undefined():
# # the IRS is not constructed under best practice.
# # The delta solver does not know how to price the irs
# curve = Curve({dt(2022, 1, 1): 1.0, dt(2027, 1, 1): 0.99, dt(2032, 1, 1): 0.98},
# id="sonia")
# instruments = [
# IRS(dt(2022, 1, 1), "5y", "A", curves="sonia"),
# IRS(dt(2027, 1, 1), "5y", "A", curves="sonia"),
# ]
# solver = Solver(
# curves=[curve],
# instruments=instruments,
# s=[2.0, 2.5],
# )
# irs = IRS(dt(2022, 1, 1), "10y", "S", fixed_rate=2.38)
# with pytest.raises(TypeError, match="`curves` have not been supplied"):
# irs.delta(solver=solver)
def test_mechanisms_guide_gamma() -> None:
instruments = [
IRS(dt(2022, 1, 1), "4m", "Q", curves="sofr"),
IRS(dt(2022, 1, 1), "8m", "Q", curves="sofr"),
]
s = [1.85, 2.10]
ll_curve = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2022, 5, 1): 1.0, dt(2022, 9, 1): 1.0},
interpolation="log_linear",
id="sofr",
)
ll_solver = Solver(
curves=[ll_curve],
instruments=instruments,
s=s,
instrument_labels=["4m", "8m"],
id="sofr",
)
instruments = [
IRS(dt(2022, 1, 1), "3m", "Q", curves="estr"),
IRS(dt(2022, 1, 1), "9m", "Q", curves="estr"),
]
s = [0.75, 1.65]
ll_curve = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2022, 4, 1): 1.0, dt(2022, 10, 1): 1.0},
interpolation="log_linear",
id="estr",
)
combined_solver = Solver(
curves=[ll_curve],
instruments=instruments,
s=s,
instrument_labels=["3m", "9m"],
pre_solvers=[ll_solver],
id="estr",
)
irs = IRS(
effective=dt(2022, 1, 1),
termination="6m",
frequency="Q",
currency="usd",
notional=500e6,
fixed_rate=2.0,
curves="sofr",
)
irs2 = IRS(
effective=dt(2022, 1, 1),
termination="6m",
frequency="Q",
currency="eur",
notional=-300e6,
fixed_rate=1.0,
curves="estr",
)
pf = Portfolio([irs, irs2])
pf.npv(solver=combined_solver, local=True)
pf.delta(solver=combined_solver)
fxr = FXRates({"eurusd": 1.10})
fxr._set_ad_order(2)
result = pf.gamma(solver=combined_solver, fx=fxr, base="eur")
expected = DataFrame(
data=[
[0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.13769, 0.28088, 0.0],
[0.0, 0.0, 0.28088, 0.44493, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0],
[-0.28930, -0.45081, 0.0, 0.0, -0.68937],
[-0.45081, -0.47449, 0.0, 0.0, -1.37372],
[0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0],
[-0.68937, -1.37372, 0.0, 0.0, 0.00064],
[-0.31823, -0.49590, 0.0, 0.0, 0.0],
[-0.49590, -0.52194, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0],
[-0.28930, -0.45081, 0.0, 0.0, -0.68937],
[-0.45081, -0.47449, 0.0, 0.0, -1.37372],
[0.0, 0.0, 0.13770, 0.28088, 0.0],
[0.0, 0.0, 0.28088, 0.44493, 0.0],
[-0.68937, -1.37372, 0.0, 0.0, 0.00064],
],
index=MultiIndex.from_tuples(
[
("eur", "eur", "instruments", "sofr", "4m"),
("eur", "eur", "instruments", "sofr", "8m"),
("eur", "eur", "instruments", "estr", "3m"),
("eur", "eur", "instruments", "estr", "9m"),
("eur", "eur", "fx", "fx", "eurusd"),
("usd", "eur", "instruments", "sofr", "4m"),
("usd", "eur", "instruments", "sofr", "8m"),
("usd", "eur", "instruments", "estr", "3m"),
("usd", "eur", "instruments", "estr", "9m"),
("usd", "eur", "fx", "fx", "eurusd"),
("usd", "usd", "instruments", "sofr", "4m"),
("usd", "usd", "instruments", "sofr", "8m"),
("usd", "usd", "instruments", "estr", "3m"),
("usd", "usd", "instruments", "estr", "9m"),
("usd", "usd", "fx", "fx", "eurusd"),
("all", "eur", "instruments", "sofr", "4m"),
("all", "eur", "instruments", "sofr", "8m"),
("all", "eur", "instruments", "estr", "3m"),
("all", "eur", "instruments", "estr", "9m"),
("all", "eur", "fx", "fx", "eurusd"),
],
names=["local_ccy", "display_ccy", "type", "solver", "label"],
),
columns=MultiIndex.from_tuples(
[
("instruments", "sofr", "4m"),
("instruments", "sofr", "8m"),
("instruments", "estr", "3m"),
("instruments", "estr", "9m"),
("fx", "fx", "eurusd"),
],
names=["type", "solver", "label"],
),
)
assert_frame_equal(result, expected, atol=1e-2, rtol=1e-4)
def test_solver_gamma_pnl_explain() -> None:
instruments = [
IRS(dt(2022, 1, 1), "10y", "A", currency="usd", curves="sofr"),
IRS(dt(2032, 1, 1), "10y", "A", currency="usd", curves="sofr"),
IRS(dt(2022, 1, 1), "10y", "A", currency="eur", curves="estr"),
IRS(dt(2032, 1, 1), "10y", "A", currency="eur", curves="estr"),
XCS(
dt(2022, 1, 1),
"10y",
"A",
currency="eur",
pair="eurusd",
curves=["estr", "eurusd", "sofr", "sofr"],
),
XCS(
dt(2032, 1, 1),
"10y",
"A",
currency="usd",
pair="usdeur",
curves=["estr", "eurusd", "sofr", "sofr"],
),
]
# s_base = np.array([3.45, 2.85, 2.25, 0.9, -15, -10])
sofr = Curve(nodes={dt(2022, 1, 1): 1.0, dt(2032, 1, 1): 1.0, dt(2042, 1, 1): 1.0}, id="sofr")
estr = Curve(nodes={dt(2022, 1, 1): 1.0, dt(2032, 1, 1): 1.0, dt(2042, 1, 1): 1.0}, id="estr")
eurusd = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2032, 1, 1): 1.0, dt(2042, 1, 1): 1.0},
id="eurusd",
)
fxr = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxf = FXForwards(fxr, {"eureur": estr, "eurusd": eurusd, "usdusd": sofr})
sofr_solver = Solver(
curves=[sofr],
instruments=instruments[:2],
s=[3.45, 2.85],
instrument_labels=["10y", "10y10y"],
id="sofr",
fx=fxf,
)
estr_solver = Solver(
curves=[estr],
instruments=instruments[2:4],
s=[2.25, 0.90],
instrument_labels=["10y", "10y10y"],
id="estr",
fx=fxf,
)
solver = Solver(
curves=[eurusd],
instruments=instruments[4:],
s=[-10, -15],
instrument_labels=["10y", "10y10y"],
id="xccy",
fx=fxf,
pre_solvers=[sofr_solver, estr_solver],
)
pf = Portfolio(
[
IRS(
dt(2022, 1, 1),
"20Y",
"A",
currency="eur",
fixed_rate=2.0,
notional=1e8,
curves="estr",
),
],
)
npv_base = pf.npv(solver=solver, base="eur")
expected_npv = -6230451.035973
assert (npv_base - expected_npv) < 1e-5
delta_base = pf.delta(solver=solver, base="usd")
# this expectation is directly input from reviewed output.
expected_delta = DataFrame(
data=[
[3.51021, 0.0, 3.51021],
[-0.00005, 0.0, -0.00005],
[101841.37433, 97001.98184, 101841.37433],
[85750.45235, 81672.83139, 85750.45235],
[-3.55593, 0.0, -3.55593],
[0.00004, 0.0, 0.00004],
[-623.00136, 0.0, -623.00136],
],
index=MultiIndex.from_tuples(
[
("instruments", "sofr", "10y"),
("instruments", "sofr", "10y10y"),
("instruments", "estr", "10y"),
("instruments", "estr", "10y10y"),
("instruments", "xccy", "10y"),
("instruments", "xccy", "10y10y"),
("fx", "fx", "eurusd"),
],
names=["type", "solver", "label"],
),
columns=MultiIndex.from_tuples(
[("all", "usd"), ("eur", "eur"), ("eur", "usd")],
names=["local_ccy", "display_ccy"],
),
)
assert_frame_equal(delta_base, expected_delta, atol=1e-2, rtol=1e-4)
gamma_base = pf.gamma(solver=solver, base="eur")
expected_gamma = DataFrame(
data=[
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, -102.972447, -81.00807888, 0.0, 0.0, 0.0],
[0.0, 0.0, -81.00807888, -87.84105303, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
],
index=MultiIndex.from_tuples(
[
("eur", "eur", "instruments", "sofr", "10y"),
("eur", "eur", "instruments", "sofr", "10y10y"),
("eur", "eur", "instruments", "estr", "10y"),
("eur", "eur", "instruments", "estr", "10y10y"),
("eur", "eur", "instruments", "xccy", "10y"),
("eur", "eur", "instruments", "xccy", "10y10y"),
("eur", "eur", "fx", "fx", "eurusd"),
],
names=["local_ccy", "display_ccy", "type", "solver", "label"],
),
columns=MultiIndex.from_tuples(
[
("instruments", "sofr", "10y"),
("instruments", "sofr", "10y10y"),
("instruments", "estr", "10y"),
("instruments", "estr", "10y10y"),
("instruments", "xccy", "10y"),
("instruments", "xccy", "10y10y"),
("fx", "fx", "eurusd"),
],
names=["type", "solver", "label"],
),
)
with warnings.catch_warnings():
# TODO: pandas 3.0.0 can optionally turn off these PerformanceWarnings
warnings.simplefilter(action="ignore", category=PerformanceWarning)
assert_frame_equal(
gamma_base.loc[("all", "eur")], expected_gamma.loc[("eur", "eur")], atol=1e-2, rtol=1e-4
)
def test_gamma_with_fxrates_ad_order_1_raises() -> None:
# when calculating gamma, AD order 2 is needed, the fx rates object passed
# must also be converted. TODO
pass
def test_error_labels() -> None:
solver_with_error = Solver(
curves=[
Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2022, 7, 1): 1.0, dt(2023, 1, 1): 1.0},
id="curve1",
),
],
instruments=[
IRS(dt(2022, 1, 1), "1M", "A", curves="curve1"),
IRS(dt(2022, 1, 1), "2M", "A", curves="curve1"),
IRS(dt(2022, 1, 1), "3M", "A", curves="curve1"),
IRS(dt(2022, 1, 1), "4M", "A", curves="curve1"),
IRS(dt(2022, 1, 1), "8M", "A", curves="curve1"),
IRS(dt(2022, 1, 1), "12M", "A", curves="curve1"),
],
s=[2.0, 2.2, 2.3, 2.4, 2.45, 2.55],
id="rates",
)
result = solver_with_error.error
assert abs(result.loc[("rates", "rates0")] - 22.798) < 1e-2
def test_solver_non_unique_id_raises() -> None:
curve = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}, id="A")
solver = Solver(
curves=[curve],
instruments=[IRS(dt(2022, 1, 1), "1Y", "Q", curves=curve)],
s=[1],
id="bad",
)
curve2 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.98}, id="B")
with pytest.raises(ValueError, match="Solver `id`s must be unique"):
Solver(
curves=[curve2],
instruments=[IRS(dt(2022, 1, 1), "1Y", "Q", curves=curve2)],
s=[1],
id="bad",
pre_solvers=[solver],
)
def test_solving_indirect_parameters_from_proxy_composite() -> None:
eureur = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 1.0}, id="eureur")
eurspd = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.999}, id="eurspd")
eur3m = CompositeCurve([eureur, eurspd], id="eur3m")
usdusd = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 1.0}, id="usdusd")
eurusd = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 1.0}, id="eurusd")
fxr = FXRates({"eurusd": 1.1}, settlement=dt(2022, 1, 3))
fxf = FXForwards(
fx_rates=fxr,
fx_curves={
"eureur": eureur,
"usdusd": usdusd,
"eurusd": eurusd,
},
)
usdeur = fxf.curve("usd", "eur", id="usdeur")
instruments = [
IRS(dt(2022, 1, 1), "1Y", "A", currency="eur", curves=["eur3m", "eureur"]),
IRS(dt(2022, 1, 1), "1Y", "A", currency="usd", curves="usdusd"),
XCS(
dt(2022, 1, 1),
"1Y",
"A",
currency="eur",
pair="eurusd",
curves=["eureur", "eureur", "usdusd", "usdeur"],
),
]
Solver(
curves=[eureur, eur3m, usdusd, eurusd, usdeur],
instruments=instruments,
s=[2.0, 2.7, -15],
fx=fxf,
)
def test_solver_dimensions_of_matmul() -> None:
swaps = [
IRS(dt(2023, 7, 21), "9m", "A", fixed_rate=2.0, curves="chf", currency="chf"),
IRS(dt(2023, 7, 21), "9m", "A", fixed_rate=2.0, curves="gbp", currency="gbp"),
IRS(dt(2023, 7, 21), "9m", "A", fixed_rate=2.0, curves="usd", currency="usd"),
]
chf_inst = [
IRS(dt(2023, 7, 21), "6m", "A", curves="chf", currency="chf"),
IRS(dt(2023, 7, 21), "1y", "A", curves="chf", currency="chf"),
]
gbp_inst = [
IRS(dt(2023, 7, 21), "6m", "A", curves="gbp", currency="gbp"),
IRS(dt(2023, 7, 21), "1y", "A", curves="gbp", currency="gbp"),
]
usd_inst = [
IRS(dt(2023, 7, 21), "6m", "A", curves="usd", currency="usd"),
IRS(dt(2023, 7, 21), "1y", "A", curves="usd", currency="usd"),
]
usd = Curve(
{dt(2023, 7, 21): 1.0, dt(2024, 1, 21): 1.0, dt(2024, 7, 21): 1.0},
id="usd",
)
gbp = Curve(
{dt(2023, 7, 21): 1.0, dt(2024, 1, 21): 1.0, dt(2024, 7, 21): 1.0},
id="gbp",
)
chf = Curve(
{dt(2023, 7, 21): 1.0, dt(2024, 1, 21): 1.0, dt(2024, 7, 21): 1.0},
id="chf",
)
fxr = FXRates({"gbpusd": 1.25, "chfgbp": 1.1})
solver1 = Solver(curves=[chf], instruments=chf_inst, s=[1.5, 1.8], id="CHF")
solver2 = Solver(
curves=[gbp],
instruments=gbp_inst,
s=[1.6, 1.7],
id="GBP",
pre_solvers=[solver1],
)
solver3 = Solver(
curves=[usd],
instruments=usd_inst,
s=[1.7, 1.9],
id="USD",
pre_solvers=[solver2],
)
pf = Portfolio(swaps)
pf.delta(solver=solver3, base="gbp", fx=fxr)
pf.gamma(solver=solver3, base="gbp", fx=fxr)
def test_pre_solver_single_fx_object() -> None:
# this test considers building up FXForwards using chined solvers.
uu = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="uu")
ee = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="ee")
gg = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="gg")
eu = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="eu")
gu = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="gu")
fxf1 = FXForwards(
fx_rates=FXRates({"eurusd": 1.0}, settlement=dt(2022, 1, 1)),
fx_curves={
"usdusd": uu,
"eureur": ee,
"eurusd": eu,
},
)
fxf2 = FXForwards(
fx_rates=FXRates({"eurusd": 1.0, "gbpusd": 1.5}, settlement=dt(2022, 1, 1)),
fx_curves={
"usdusd": uu,
"eureur": ee,
"gbpgbp": gg,
"eurusd": eu,
"gbpusd": gu,
},
)
s1 = Solver(
curves=[uu, ee, gg],
instruments=[
IRS(dt(2022, 1, 1), "1y", "A", curves="uu"),
IRS(dt(2022, 1, 1), "1y", "A", curves="ee"),
IRS(dt(2022, 1, 1), "1y", "A", curves="gg"),
],
s=[1.5, 1.5, 1.0],
id="local",
)
s2 = Solver(
curves=[eu],
instruments=[
XCS(
dt(2022, 1, 1),
"1Y",
"Q",
currency="eur",
pair="eurusd",
curves=["ee", "eu", "uu", "uu"],
),
],
s=[10.0],
id="x1",
fx=fxf1,
pre_solvers=[s1],
)
Solver(
curves=[gu],
instruments=[
XCS(
dt(2022, 1, 1),
"1Y",
"Q",
currency="gbp",
pair="gbpusd",
curves=["gg", "gu", "uu", "uu"],
),
],
s=[20.0],
id="x2",
fx=fxf2,
pre_solvers=[s2],
)
result = gu[dt(2023, 1, 1)]
expected = 0.988
assert (result - expected) < 1e-4
def test_pre_solver_set_ad_order() -> None:
curve1 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99})
curve2 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99})
curve3 = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99})
cc = CompositeCurve([curve2, curve3])
s1 = Solver(curves=[curve1], instruments=[Value(dt(2022, 5, 1), curves=curve1)], s=[0.99])
s2 = Solver(curves=[curve2], instruments=[Value(dt(2022, 5, 1), curves=curve1)], s=[0.99])
s3 = Solver(
pre_solvers=[s1, s2],
curves=[cc, curve3],
instruments=[Value(dt(2022, 5, 1), curves=curve1)],
s=[0.99],
)
s3._set_ad_order(2)
for c in [curve1, curve2, curve3, cc]:
assert c._ad == 2
assert s2._ad == 2
assert s1._ad == 2
s3._set_ad_order(1)
for c in [curve1, curve2, curve3, cc]:
assert c._ad == 1
assert s2._ad == 1
assert s1._ad == 1
def test_solver_jacobians_in_text() -> None:
par_curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
dt(2027, 1, 1): 1.0,
dt(2032, 1, 1): 1.0,
},
id="curve",
)
par_instruments = [
IRS(dt(2022, 1, 1), "1Y", "A", curves="curve"),
IRS(dt(2022, 1, 1), "2Y", "A", curves="curve"),
IRS(dt(2022, 1, 1), "5Y", "A", curves="curve"),
IRS(dt(2022, 1, 1), "10Y", "A", curves="curve"),
]
par_solver = Solver(
curves=[par_curve],
instruments=par_instruments,
s=[1.21, 1.635, 1.885, 1.93],
id="par_solver",
instrument_labels=["1Y", "2Y", "5Y", "10Y"],
)
fwd_curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
dt(2027, 1, 1): 1.0,
dt(2032, 1, 1): 1.0,
},
id="curve",
)
fwd_instruments = [
IRS(dt(2022, 1, 1), "1Y", "A", curves="curve"),
IRS(dt(2023, 1, 1), "1Y", "A", curves="curve"),
IRS(dt(2024, 1, 1), "3Y", "A", curves="curve"),
IRS(dt(2027, 1, 1), "5Y", "A", curves="curve"),
]
s_fwd = [float(_.rate(solver=par_solver)) for _ in fwd_instruments]
fwd_solver = Solver(
curves=[fwd_curve],
instruments=fwd_instruments,
s=s_fwd,
id="fwd_solver",
instrument_labels=["1Y", "1Y1Y", "2Y3Y", "5Y5Y"],
)
S_BA = par_solver.jacobian(fwd_solver).to_numpy()
S_AB = fwd_solver.jacobian(par_solver).to_numpy()
assert np.all(np.isclose(np.eye(4), np.matmul(S_AB, S_BA)))
def test_solver_jacobians_pre() -> None:
par_curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
},
id="curve",
)
par_instruments = [
IRS(dt(2022, 1, 1), "1Y", "A", curves="curve"),
IRS(dt(2022, 1, 1), "2Y", "A", curves="curve"),
]
par_solver = Solver(
curves=[par_curve],
instruments=par_instruments,
s=[1.21, 1.635],
id="par_solver",
instrument_labels=["1Y", "2Y"],
)
par_curve2 = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
},
id="curve2",
)
par_instruments2 = [
IRS(dt(2022, 1, 1), "1Y", "A", curves="curve2"),
IRS(dt(2022, 1, 1), "2Y", "A", curves="curve2"),
]
par_solver2 = Solver(
curves=[par_curve2],
instruments=par_instruments2,
s=[1.21, 1.635],
id="par_solver2",
instrument_labels=["1Y", "2Y"],
pre_solvers=[par_solver],
)
fwd_curve = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
},
id="curve",
)
fwd_instruments = [
IRS(dt(2022, 1, 1), "1Y", "A", curves="curve"),
IRS(dt(2023, 1, 1), "1Y", "A", curves="curve"),
]
s_fwd = [float(_.rate(solver=par_solver2)) for _ in fwd_instruments]
fwd_solver = Solver(
curves=[fwd_curve],
instruments=fwd_instruments,
s=s_fwd,
id="fwd_solver",
instrument_labels=["1Y", "1Y1Y"],
)
fwd_curve2 = Curve(
nodes={
dt(2022, 1, 1): 1.0,
dt(2023, 1, 1): 1.0,
dt(2024, 1, 1): 1.0,
},
id="curve2",
)
fwd_instruments2 = [
IRS(dt(2022, 1, 1), "1Y", "A", curves="curve2"),
IRS(dt(2023, 1, 1), "1Y", "A", curves="curve2"),
]
s_fwd2 = [float(_.rate(solver=par_solver2)) for _ in fwd_instruments2]
fwd_solver2 = Solver(
curves=[fwd_curve2],
instruments=fwd_instruments2,
s=s_fwd2,
id="fwd_solver2",
instrument_labels=["1Y", "1Y1Y"],
pre_solvers=[fwd_solver],
)
S_BA = par_solver2.jacobian(fwd_solver2)
S_AB = fwd_solver2.jacobian(par_solver2)
assert np.all(np.isclose(np.eye(4), np.matmul(S_AB.to_numpy(), S_BA.to_numpy())))
def test_newton_solver_1dim_dual() -> None:
def root(x, s):
return x**2 - s, 2 * x
x0 = Dual(1.0, ["x"], [])
s = Dual(2.0, ["s"], [])
result = newton_1dim(root, x0, args=(s,))
expected = 0.5 / 2.0**0.5
sensitivity = gradient(result["g"], ["s"])[0]
assert abs(expected - sensitivity) < 1e-9
def test_newton_solver_1dim_dual2() -> None:
def root(x, s):
return x**2 - s, 2 * x
x0 = Dual2(1.0, ["x"], [], [])
s = Dual2(2.0, ["s"], [], [])
result = newton_1dim(root, x0, args=(s,))
expected = 0.5 / 2.0**0.5
sensitivity = gradient(result["g"], ["s"])[0]
assert abs(expected - sensitivity) < 1e-9
expected = -0.25 * (1 / 2.0**1.5)
sensitivity = gradient(result["g"], ["s"], order=2)[0, 0]
assert abs(expected - sensitivity) < 1e-9
def test_newton_solver_2dim_dual() -> None:
def root(g, s):
f0 = g[0] ** 2 + g[1] ** 2 + s
f1 = g[0] ** 2 - 2 * g[1] ** 2 - s
f00 = 2 * g[0]
f01 = 2 * g[1]
f10 = 2 * g[0]
f11 = -4 * g[1]
return [f0, f1], [[f00, f01], [f10, f11]]
g0 = [Dual(1.0, ["x"], []), Dual(2.0, ["y"], [])]
s = Dual(-2.0, ["s"], [])
result = newton_ndim(root, g0, args=(s,))
expected_x = (2 / 3) ** 0.5
assert abs(result["g"][0] - expected_x) < 1e-9
expected_y = (4 / 3) ** 0.5
assert abs(result["g"][1] - expected_y) < 1e-9
expected_y = -0.5 * (2 / 3) ** 0.5 * (2.0) ** -0.5
expected_x = -0.5 * (1 / 3.0) ** 0.5 * (2.0) ** -0.5
sensitivity_x = gradient(result["g"][0], ["s"])[0]
sensitivity_y = gradient(result["g"][1], ["s"])[0]
assert abs(expected_x - sensitivity_x) < 1e-9
assert abs(expected_y - sensitivity_y) < 1e-9
def test_newton_solver_2dim_dual2() -> None:
def root(g, s):
f0 = g[0] ** 2 + g[1] ** 2 + s
f1 = g[0] ** 2 - 2 * g[1] ** 2 - s
f00 = 2 * g[0]
f01 = 2 * g[1]
f10 = 2 * g[0]
f11 = -4 * g[1]
return [f0, f1], [[f00, f01], [f10, f11]]
g0 = [Dual2(1.0, ["x"], [], []), Dual2(2.0, ["y"], [], [])]
s = Dual2(-2.0, ["s"], [], [])
result = newton_ndim(root, g0, args=(s,))
expected_x = (2 / 3) ** 0.5
assert abs(result["g"][0] - expected_x) < 1e-9
expected_y = (4 / 3) ** 0.5
assert abs(result["g"][1] - expected_y) < 1e-9
expected_y = -0.5 * (2 / 3) ** 0.5 * (2.0) ** -0.5
expected_x = -0.5 * (1 / 3.0) ** 0.5 * (2.0) ** -0.5
sensitivity_x = gradient(result["g"][0], ["s"])[0]
sensitivity_y = gradient(result["g"][1], ["s"])[0]
assert abs(expected_x - sensitivity_x) < 1e-9
assert abs(expected_y - sensitivity_y) < 1e-9
expected_y2 = -0.25 * (2 / 3) ** 0.5 * (2.0) ** -1.5
expected_x2 = -0.25 * (1 / 3) ** 0.5 * (2.0) ** -1.5
sensitivity_x2 = gradient(result["g"][0], ["s"], order=2)[0, 0]
sensitivity_y2 = gradient(result["g"][1], ["s"], order=2)[0, 0]
assert abs(expected_x2 - sensitivity_x2) < 1e-9
assert abs(expected_y2 - sensitivity_y2) < 1e-9
def test_newton_1d_failed_state() -> None:
def root(g):
f0 = g**2 + 10.0
f1 = 2 * g
return f0, f1
result = newton_1dim(root, 1.5, max_iter=5, raise_on_fail=False)
assert result["state"] == -1
def test_newton_ndim_raises() -> None:
def root(g):
f0_0 = g[0] ** 2 + 10.0
f0_1 = g[0] + g[1] ** 2 - 2.0
return [f0_0, f0_1], [[2 * g[0], 0.0], [1.0, 2 * g[1]]]
with pytest.raises(ValueError, match="`max_iter`: 5 exceeded in 'newton_ndim'"):
newton_ndim(root, [0.5, 1.0], max_iter=5)
def test_newton_solver_object_args():
def root(x, s):
return x**2 - s["some_obj"], 2 * x
x0 = Dual(1.0, ["x"], [])
s = {"some_obj": Dual(2.0, ["s"], [])}
result = newton_1dim(root, x0, args=(s,))
expected = 0.5 / 2.0**0.5
sensitivity = gradient(result["g"], ["s"])[0]
assert abs(expected - sensitivity) < 1e-9
def test_solver_with_vol_smile() -> None:
eureur = Curve(
{dt(2023, 3, 16): 1.0, dt(2023, 9, 16): 0.9851909811629752},
calendar="tgt",
id="eureur",
)
usdusd = Curve(
{dt(2023, 3, 16): 1.0, dt(2023, 9, 16): 0.976009366603271},
calendar="nyc",
id="usdusd",
)
# eurusd = Curve({dt(2023, 3, 16): 1.0, dt(2023, 9, 16): 0.987092591908283}, id="eurusd")
fxr = FXRates({"eurusd": 1.3088}, settlement=dt(2023, 3, 20))
fxf = FXForwards(fx_curves={"eureur": eureur, "eurusd": eureur, "usdusd": usdusd}, fx_rates=fxr)
fxf._set_ad_order(1)
solver = Solver(
curves=[eureur, usdusd],
instruments=[
IRS(dt(2023, 3, 20), "1m", curves=[eureur], spec="eur_irs"),
IRS(dt(2023, 3, 20), "1m", curves=[usdusd], spec="usd_irs"),
],
s=[2.0113, 0.3525],
fx=fxf,
)
eurusd_1m_smile = FXDeltaVolSmile(
nodes={
0.25: 10.0,
0.50: 10.0,
0.75: 10.0,
},
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 4, 18),
delta_type="spot",
id="smile",
)
args = {
"pair": "eurusd",
"expiry": dt(2023, 4, 18),
"curves": ["eureur", "usdusd"],
"delta_type": "spot",
"vol": "smile",
}
Solver(
pre_solvers=[solver],
curves=[eurusd_1m_smile],
instruments=[
FXStraddle(strike="atm_delta", **args),
FXRiskReversal(strike=["-25d", "25d"], **args),
FXStrangle(strike=["-25d", "25d"], **args),
],
s=[21.6215, -0.5, 22.359],
fx=fxf,
)
def test_solver_with_surface() -> None:
eureur = Curve({dt(2024, 5, 7): 1.0, dt(2025, 5, 30): 1.0}, calendar="tgt", id="eureur")
eurusd = Curve({dt(2024, 5, 7): 1.0, dt(2025, 5, 30): 1.0}, id="eurusd")
usdusd = Curve({dt(2024, 5, 7): 1.0, dt(2025, 5, 30): 1.0}, calendar="nyc", id="usdusd")
# Create an FX Forward market with spot FX rate data
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.0760}, settlement=dt(2024, 5, 9)),
fx_curves={"eureur": eureur, "usdusd": usdusd, "eurusd": eurusd},
)
solver = Solver(
curves=[eureur, eurusd, usdusd],
instruments=[
IRS(dt(2024, 5, 9), "3W", spec="eur_irs", curves="eureur"),
IRS(dt(2024, 5, 9), "3W", spec="usd_irs", curves="usdusd"),
FXSwap(
dt(2024, 5, 9),
"3W",
pair="eurusd",
curves=["eurusd", "usdusd"],
),
],
s=[3.90, 5.32, 8.85],
instrument_labels=["3w EU", "3w US", "3w FXSw"],
fx=fxf,
)
surface = FXDeltaVolSurface(
eval_date=dt(2024, 5, 7),
expiries=[dt(2024, 5, 28), dt(2024, 6, 7)],
delta_indexes=[0.1, 0.25, 0.5, 0.75, 0.9],
delta_type="forward",
node_values=np.ones(shape=(2, 5)) * 5.0,
id="eurusd_vol",
)
data = DataFrame(
data=[
[5.493, -0.157, 0.071, -0.289, 0.238],
[5.525, -0.213, 0.075, -0.400, 0.250],
],
columns=["ATM", "25dRR", "25dBF", "10dRR", "25dBF"],
index=[dt(2024, 5, 28), dt(2024, 6, 7)],
)
fx_args = dict(
pair="eurusd",
delta_type="spot",
calendar="tgt",
curves=["eurusd", "usdusd"],
vol="eurusd_vol",
)
instruments, s, labels = [], [], []
for e, row in enumerate(data.itertuples()):
instruments.extend(
[
FXStraddle(strike="atm_delta", expiry=row[0], **fx_args),
FXRiskReversal(strike=("-25d", "25d"), expiry=row[0], **fx_args),
FXBrokerFly(strike=(("-25d", "25d"), "atm_delta"), expiry=row[0], **fx_args),
FXRiskReversal(strike=("-10d", "10d"), expiry=row[0], **fx_args),
FXBrokerFly(strike=(("-10d", "10d"), "atm_delta"), expiry=row[0], **fx_args),
],
)
s.extend([row[1], row[2], row[3], row[4], row[5]])
labels.extend([f"atm{e}", f"25rr{e}", f"25bf{e}", f"10rr{e}", f"10bf{e}"])
surf_solver = Solver(
surfaces=[surface],
instruments=instruments,
s=s,
pre_solvers=[solver],
instrument_labels=labels,
fx=fxf,
)
fxc = FXCall(expiry=dt(2024, 6, 7), strike=1.08, **fx_args)
fxc.analytic_greeks(solver=surf_solver)
fxc.delta(solver=surf_solver)
fxc.gamma(solver=surf_solver)
class TestStateManagement:
def test_solver_state_storage(self):
# test the solver stores hashes of its objects: FXForwards, Curves and presolvers
uu = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="uu")
ee = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="ee")
eu = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="eu")
fxf1 = FXForwards(
fx_rates=FXRates({"eurusd": 1.0}, settlement=dt(2022, 1, 1)),
fx_curves={
"usdusd": uu,
"eureur": ee,
"eurusd": eu,
},
)
s1 = Solver(
curves=[uu, ee],
instruments=[
IRS(dt(2022, 1, 1), "1y", "A", curves="uu"),
IRS(dt(2022, 1, 1), "1y", "A", curves="ee"),
],
s=[1.5, 1.5],
id="local",
)
s2 = Solver(
curves=[eu],
instruments=[
XCS(
dt(2022, 1, 1),
"1Y",
"Q",
currency="eur",
pair="eurusd",
curves=["ee", "eu", "uu", "uu"],
),
],
s=[10.0],
id="x1",
fx=fxf1,
pre_solvers=[s1],
)
hashes = {"fx": s2.fx._state, **{k: curve._state for k, curve in s2.pre_curves.items()}}
assert s2._states == hashes
@pytest.mark.parametrize(
"method",
[
"delta",
"gamma",
"npv",
"rate",
],
)
@pytest.mark.parametrize(
("obj", "args"), [("fxr", ({"eurusd": 1.0},)), ("fxf", ([{"eurusd": 1.10}],))]
)
def test_warning_on_fx_mutation(self, method, obj, args):
# test the solver stores hashes of its objects: FXForwards, Curves and presolvers
uu = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="uu")
ee = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="ee")
eu = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="eu")
fxr = FXRates({"eurusd": 1.0}, settlement=dt(2022, 1, 1))
fxf = FXForwards(fx_rates=fxr, fx_curves={"usdusd": uu, "eureur": ee, "eurusd": eu})
s1 = Solver(
curves=[uu, ee],
instruments=[
IRS(dt(2022, 1, 1), "1y", "A", curves="uu"),
IRS(dt(2022, 1, 1), "1y", "A", curves="ee"),
],
s=[1.5, 1.5],
id="local",
)
s2 = Solver(
curves=[eu],
instruments=[
XCS(
dt(2022, 1, 1),
"1Y",
"Q",
currency="eur",
pair="eurusd",
curves=["ee", "eu", "uu", "uu"],
),
],
s=[10.0],
id="x1",
fx=fxf,
pre_solvers=[s1],
)
vars()[obj].update(*args)
irs = IRS(dt(2022, 1, 1), "3y", "A", curves="uu")
with pytest.warns(UserWarning, match="The `fx` object associated with `solver`"):
getattr(irs, method)(solver=s2)
@pytest.mark.parametrize(
"method",
[
"delta",
"gamma",
"npv",
"rate",
],
)
def test_raise_on_pre_curve_mutation(self, method):
# test the solver stores hashes of its objects: FXForwards, Curves and presolvers
uu = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="uu")
ee = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="ee")
eu = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="eu")
fxf1 = FXForwards(
fx_rates=FXRates({"eurusd": 1.0}, settlement=dt(2022, 1, 1)),
fx_curves={
"usdusd": uu,
"eureur": ee,
"eurusd": eu,
},
)
s1 = Solver(
curves=[uu, ee],
instruments=[
IRS(dt(2022, 1, 1), "1y", "A", curves="uu"),
IRS(dt(2022, 1, 1), "1y", "A", curves="ee"),
],
s=[1.5, 1.5],
id="local",
)
s2 = Solver(
curves=[eu],
instruments=[
XCS(
dt(2022, 1, 1),
"1Y",
"Q",
currency="eur",
pair="eurusd",
curves=["ee", "eu", "uu", "uu"],
),
],
s=[10.0],
id="x1",
fx=fxf1,
pre_solvers=[s1],
)
uu._set_node_vector([0.995], 1)
irs = IRS(dt(2022, 1, 1), "3y", "A", curves="uu")
with pytest.raises(ValueError, match="The `curves` associated with `solver` have been upd"):
getattr(irs, method)(solver=s2)
@pytest.mark.parametrize(
"method",
[
"delta",
"gamma",
"npv",
"rate",
],
)
def test_raise_on_curve_mutation(self, method):
# test the solver stores hashes of its objects: FXForwards, Curves and presolvers
uu = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="uu")
ee = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="ee")
eu = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="eu")
fxf1 = FXForwards(
fx_rates=FXRates({"eurusd": 1.0}, settlement=dt(2022, 1, 1)),
fx_curves={
"usdusd": uu,
"eureur": ee,
"eurusd": eu,
},
)
s1 = Solver(
curves=[uu, ee],
instruments=[
IRS(dt(2022, 1, 1), "1y", "A", curves="uu"),
IRS(dt(2022, 1, 1), "1y", "A", curves="ee"),
],
s=[1.5, 1.5],
id="local",
)
s2 = Solver(
curves=[eu],
instruments=[
XCS(
dt(2022, 1, 1),
"1Y",
"Q",
currency="eur",
pair="eurusd",
curves=["ee", "eu", "uu", "uu"],
),
],
s=[10.0],
id="x1",
fx=fxf1,
pre_solvers=[s1],
)
eu._set_node_vector([0.995], 1)
irs = IRS(dt(2022, 1, 1), "3y", "A", curves="uu")
with pytest.raises(ValueError, match="The `curves` associated with `solver` have been up"):
getattr(irs, method)(solver=s2)
@pytest.mark.parametrize(
"method",
[
"delta",
"gamma",
"npv",
"rate",
],
)
def test_raise_on_composite_curve_mutation(self, method):
# test the solver stores hashes of its objects: FXForwards, Curves and presolvers
uu = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="uu")
ee = Curve({dt(2022, 1, 1): 1.0, dt(2023, 1, 1): 0.99}, id="ee")
cc = CompositeCurve([uu, ee], id="cc")
s1 = Solver(
curves=[ee, cc],
instruments=[
IRS(dt(2022, 1, 1), "1y", "A", curves="cc"),
],
s=[1.5],
id="local",
)
uu.update_node(dt(2023, 1, 1), 0.98)
irs = IRS(dt(2022, 1, 1), "3y", "A", curves="cc")
with pytest.raises(ValueError, match="The `curves` associated with `solver` have been up"):
getattr(irs, method)(solver=s1)
def test_solver_auto_updates_fx_before_state_setting(self):
# added `self.fx._set_ad_order(1)` to Solver.__init__
with warnings.catch_warnings():
warnings.simplefilter(action="error", category=UserWarning)
smile = FXDeltaVolSmile(
nodes={
0.10: 10.0,
0.25: 10.0,
0.50: 10.0,
0.75: 10.0,
0.90: 10.0,
},
eval_date=dt(2024, 5, 7),
expiry=dt(2024, 5, 28),
delta_type="spot",
id="eurusd_3w_smile",
)
# Define the interest rate curves for EUR, USD and X-Ccy basis
eureur = Curve({dt(2024, 5, 7): 1.0, dt(2024, 5, 30): 1.0}, calendar="tgt", id="eureur")
eurusd = Curve({dt(2024, 5, 7): 1.0, dt(2024, 5, 30): 1.0}, id="eurusd")
usdusd = Curve({dt(2024, 5, 7): 1.0, dt(2024, 5, 30): 1.0}, calendar="nyc", id="usdusd")
# Create an FX Forward market with spot FX rate data
fxf = FXForwards(
fx_rates=FXRates({"eurusd": 1.0760}, settlement=dt(2024, 5, 9)),
fx_curves={"eureur": eureur, "usdusd": usdusd, "eurusd": eurusd},
)
# Setup the Solver instrument calibration for rates Curves and vol Smiles
option_args = dict(
pair="eurusd",
expiry=dt(2024, 5, 28),
calendar="tgt",
delta_type="spot",
curves=["eurusd", "usdusd"],
vol="eurusd_3w_smile",
)
Solver(
curves=[eureur, eurusd, usdusd, smile],
instruments=[
IRS(dt(2024, 5, 9), "3W", spec="eur_irs", curves="eureur"),
IRS(dt(2024, 5, 9), "3W", spec="usd_irs", curves="usdusd"),
FXSwap(
dt(2024, 5, 9), "3W", pair="eurusd", curves=[None, "eurusd", None, "usdusd"]
),
FXStraddle(strike="atm_delta", **option_args),
FXRiskReversal(strike=("-25d", "25d"), **option_args),
FXRiskReversal(strike=("-10d", "10d"), **option_args),
FXBrokerFly(strike=(("-25d", "25d"), "atm_delta"), **option_args),
FXBrokerFly(strike=(("-10d", "10d"), "atm_delta"), **option_args),
],
s=[3.90, 5.32, 8.85, 5.493, -0.157, -0.289, 0.071, 0.238],
fx=fxf,
)
def test_solver_dual2_auto_updates_fx_before_state_setting(self):
with warnings.catch_warnings():
warnings.simplefilter(action="error", category=UserWarning)
# tests the doc page j_gamma.rst
sofr = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2032, 1, 1): 1.0, dt(2042, 1, 1): 1.0}, id="sofr"
)
estr = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2032, 1, 1): 1.0, dt(2042, 1, 1): 1.0}, id="estr"
)
eurusd = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2032, 1, 1): 1.0, dt(2042, 1, 1): 1.0}, id="eurusd"
)
fxr = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxf = FXForwards(fxr, {"eureur": estr, "eurusd": eurusd, "usdusd": sofr})
instruments = [
IRS(dt(2022, 1, 1), "10y", "A", currency="usd", curves="sofr"),
IRS(dt(2032, 1, 1), "10y", "A", currency="usd", curves="sofr"),
IRS(dt(2022, 1, 1), "10y", "A", currency="eur", curves="estr"),
IRS(dt(2032, 1, 1), "10y", "A", currency="eur", curves="estr"),
XCS(
dt(2022, 1, 1),
"10y",
"A",
currency="usd",
pair="eurusd",
curves=["estr", "eurusd", "sofr", "sofr"],
),
XCS(
dt(2032, 1, 1),
"10y",
"A",
currency="usd",
pair="eurusd",
curves=["estr", "eurusd", "sofr", "sofr"],
),
]
sofr_solver = Solver(
curves=[sofr],
instruments=instruments[:2],
s=[3.45, 2.85],
instrument_labels=["10y", "10y10y"],
id="sofr",
fx=fxf,
)
estr_solver = Solver(
curves=[estr],
instruments=instruments[2:4],
s=[2.25, 0.90],
instrument_labels=["10y", "10y10y"],
id="estr",
fx=fxf,
)
solver = Solver(
curves=[eurusd],
instruments=instruments[4:],
s=[-10, -15],
instrument_labels=["10y", "10y10y"],
id="eurusd",
fx=fxf,
pre_solvers=[sofr_solver, estr_solver],
)
pf = Portfolio(
[
IRS(
dt(2022, 1, 1),
"20Y",
"A",
currency="eur",
fixed_rate=2.0,
notional=1e8,
curves="estr",
),
IRS(
dt(2022, 1, 1),
"20Y",
"A",
currency="usd",
fixed_rate=1.5,
notional=-1.1e8,
curves="sofr",
),
]
)
pf.gamma(solver=solver, base="eur")
def test_pre_solvers_fx_is_updated_and_does_not_cause_validation_issue(self):
with warnings.catch_warnings():
warnings.simplefilter(action="error", category=UserWarning)
# tests the doc page j_gamma.rst
sofr = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2032, 1, 1): 1.0, dt(2042, 1, 1): 1.0}, id="sofr"
)
estr = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2032, 1, 1): 1.0, dt(2042, 1, 1): 1.0}, id="estr"
)
eurusd = Curve(
nodes={dt(2022, 1, 1): 1.0, dt(2032, 1, 1): 1.0, dt(2042, 1, 1): 1.0}, id="eurusd"
)
fxr = FXRates({"eurusd": 1.05}, settlement=dt(2022, 1, 3))
fxf = FXForwards(fxr, {"eureur": estr, "eurusd": eurusd, "usdusd": sofr})
instruments = [
IRS(dt(2022, 1, 1), "10y", "A", currency="usd", curves="sofr"),
IRS(dt(2032, 1, 1), "10y", "A", currency="usd", curves="sofr"),
IRS(dt(2022, 1, 1), "10y", "A", currency="eur", curves="estr"),
IRS(dt(2032, 1, 1), "10y", "A", currency="eur", curves="estr"),
XCS(
dt(2022, 1, 1),
"10y",
"A",
currency="eur",
pair="eurusd",
curves=["estr", "eurusd", "sofr", "sofr"],
),
XCS(
dt(2032, 1, 1),
"10y",
"A",
currency="usd",
pair="eurusd",
curves=["estr", "eurusd", "sofr", "sofr"],
),
]
solver1 = Solver(
curves=[sofr, estr],
instruments=instruments[:4],
s=[3.45, 2.85, 2.4, 1.7],
instrument_labels=["10y", "10y10y", "10ye", "10y10ye"],
id="sofr/estr",
fx=fxf,
)
# solver 2 will solve the FX basis and update the FXForwards object which is also
# associated with solver1. If solver1 is state validated it will then fail.
# except when the _update_fx method of solver2 also nests calls to pre_solvers
_solver2 = Solver(
curves=[eurusd],
instruments=instruments[4:],
s=[-10, -15],
instrument_labels=["10y", "10y10y"],
id="eurusd",
fx=fxf,
pre_solvers=[solver1],
)
irs = IRS(
dt(2022, 1, 1),
"20Y",
"A",
currency="eur",
fixed_rate=2.0,
notional=1e8,
curves="estr",
)
irs.gamma(solver=solver1, base="eur")
@pytest.mark.parametrize(
"obj",
[
Curve({dt(2000, 1, 1): 1.0, dt(2000, 3, 2): 0.99}),
LineCurve({dt(2000, 1, 1): 1.0, dt(2000, 3, 2): 0.99}),
FXDeltaVolSmile(
nodes={0.5: 10.0},
expiry=dt(2000, 1, 1),
eval_date=dt(1999, 1, 1),
delta_type="forward",
),
FXRates({"eurusd": 1.0}),
FXForwards(
FXRates({"eurusd": 1.0}, settlement=dt(2000, 1, 3)),
{
"eurusd": Curve({dt(2000, 1, 1): 1.0, dt(2000, 3, 2): 0.99}),
"eureur": Curve({dt(2000, 1, 1): 1.0, dt(2000, 3, 2): 0.99}),
"usdusd": Curve({dt(2000, 1, 1): 1.0, dt(2000, 3, 2): 0.99}),
},
),
CompositeCurve(
[
Curve({dt(2000, 1, 1): 1.0, dt(2000, 3, 2): 0.99}),
Curve({dt(2000, 1, 1): 1.0, dt(2000, 3, 2): 0.99}),
]
),
MultiCsaCurve(
[
Curve({dt(2000, 1, 1): 1.0, dt(2000, 3, 2): 0.99}),
Curve({dt(2000, 1, 1): 1.0, dt(2000, 3, 2): 0.99}),
]
),
FXDeltaVolSurface(
delta_type="forward",
delta_indexes=[0.5],
expiries=[dt(2000, 1, 8), dt(2001, 1, 1)],
eval_date=dt(1999, 1, 1),
node_values=[[10], [11]],
),
Solver(
curves=[Curve({dt(2000, 1, 1): 1.0, dt(2000, 3, 2): 0.99}, id="abc")],
instruments=[IRS(dt(2000, 1, 1), "1m", spec="usd_irs", curves="abc")],
s=[2.0],
fx=FXForwards(
FXRates({"eurusd": 1.0}, settlement=dt(2000, 1, 3)),
{
"eurusd": Curve({dt(2000, 1, 1): 1.0, dt(2000, 3, 2): 0.99}),
"eureur": Curve({dt(2000, 1, 1): 1.0, dt(2000, 3, 2): 0.99}),
"usdusd": Curve({dt(2000, 1, 1): 1.0, dt(2000, 3, 2): 0.99}),
},
),
),
],
)
def test_set_ad_order_does_not_change_object_state(self, obj):
pre_state = obj._state
obj._set_ad_order(2)
post_state = obj._state
assert pre_state == post_state
def test_solver_validation_control(self):
curve = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0})
solver = Solver(
curves=[curve],
instruments=[IRS(dt(2000, 1, 1), "1m", spec="usd_irs", curves=curve)],
s=[2.0],
)
curve.update_node(dt(2001, 1, 1), 0.99)
irs = IRS(dt(2000, 1, 1), "2m", spec="usd_irs", curves=curve)
with pytest.raises(ValueError, match="The `curves` associated with `solver` have"):
irs.rate(solver=solver)
solver._do_not_validate = True
result = irs.rate(solver=solver)
assert abs(result - 0.989345) < 1e-5
@pytest.mark.parametrize(
"obj",
[
Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}),
LineCurve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 2.0}),
FXRates({"eurusd": 1.0}),
FXForwards(
fx_curves={
"eureur": Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}),
"eurusd": Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}),
"usdusd": Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}),
},
fx_rates=FXRates({"eurusd": 1.0}, settlement=dt(2000, 1, 3)),
),
FXSabrSmile(
nodes={
"alpha": 0.17431060,
"beta": 1.0,
"rho": -0.11268306,
"nu": 0.81694072,
},
eval_date=dt(2001, 1, 1),
expiry=dt(2002, 1, 1),
id="vol",
),
FXSabrSurface(
eval_date=dt(2024, 5, 28),
expiries=[dt(2025, 2, 2), dt(2025, 3, 3)],
node_values=[[0.05, 1.0, 0.01, 0.15]] * 2,
pair="eurusd",
delivery_lag=2,
calendar="tgt|fed",
id="eurusd_vol",
),
FXDeltaVolSurface(
delta_indexes=[0.25, 0.5, 0.75],
expiries=[dt(2024, 1, 1), dt(2025, 1, 1)],
node_values=[[11, 10, 12], [8, 7, 9]],
eval_date=dt(2023, 1, 1),
delta_type="forward",
id="vol",
),
FXDeltaVolSmile(
nodes={
0.25: 10.15,
0.5: 7.8,
0.75: 8.9,
},
delta_type="forward",
eval_date=dt(2023, 3, 16),
expiry=dt(2023, 6, 16),
id="vol",
ad=1,
),
MultiCsaCurve(
[
Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}),
Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}),
]
),
CompositeCurve(
[
Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}),
Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}),
]
),
],
)
def test_objects_ad_attribute(obj):
result = getattr(obj, "_ad", None)
assert result is not None
@pytest.mark.parametrize("label", ["shift", "rolled", "translated"])
def test_curves_without_their_own_params(label):
curve = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}, id="curve")
_map = {
"shift": curve.shift(5, id="shift"),
"rolled": curve.roll(5, id="rolled"),
"translated": curve.translate(dt(2000, 1, 1), id="translated"),
}
sv = Solver(
curves=[curve, _map[label]],
instruments=[IRS(dt(2000, 2, 1), dt(2000, 3, 1), spec="usd_irs", curves=["curve", label])],
s=[2.0],
)
assert sv.result["status"] == "SUCCESS"
def test_from_other() -> None:
pricing_curve = Curve(
nodes={dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0, dt(2002, 1, 10): 1.0},
interpolation="spline",
id="sofr",
)
pricing_solver = Solver(
curves=[pricing_curve],
instruments=[
IRS(dt(2000, 1, 1), "1y", spec="usd_irs", curves=["sofr"]),
IRS(dt(2000, 1, 1), "2y", spec="usd_irs", curves=["sofr"]),
],
s=[4.10, 4.25],
instrument_labels=["1y", "2y"],
id="price_sv",
)
risk_curve = Curve(
nodes={
dt(2000, 1, 1): 1.0,
dt(2000, 4, 1): 1.0,
dt(2000, 7, 1): 1.0,
dt(2000, 10, 1): 1.0,
dt(2001, 1, 1): 1.0,
dt(2001, 4, 1): 1.0,
dt(2001, 7, 1): 1.0,
dt(2001, 10, 1): 1.0,
dt(2002, 1, 10): 1.0,
},
interpolation="log_linear",
id="sofr",
)
risk_solver = Solver.from_other(
pricing_solver=pricing_solver,
curves=[risk_curve],
instruments=[
IRS(dt(2000, 1, 1), "3m", spec="usd_irs", curves=["sofr"]),
IRS(dt(2000, 4, 1), "3m", spec="usd_irs", curves=["sofr"]),
IRS(dt(2000, 7, 1), "3m", spec="usd_irs", curves=["sofr"]),
IRS(dt(2000, 10, 1), "3m", spec="usd_irs", curves=["sofr"]),
IRS(dt(2001, 1, 1), "3m", spec="usd_irs", curves=["sofr"]),
IRS(dt(2001, 4, 1), "3m", spec="usd_irs", curves=["sofr"]),
IRS(dt(2001, 7, 1), "3m", spec="usd_irs", curves=["sofr"]),
IRS(dt(2001, 10, 1), "3m", spec="usd_irs", curves=["sofr"]),
],
instrument_labels=["0m3m", "3m3m", "6m3m", "9m3m", "1y3m", "15m3m", "18m3m", "21m3m"],
id="risk_sv",
)
expected = [3.967, 3.995, 4.051, 4.134, 4.235, 4.318, 4.375, 4.406]
assert all(abs(r - e) < 1e-3 for r, e in zip(expected, risk_solver.s))
class TestContainerSolver:
# these tests involve a Solver that has no instruments of its own and is just a
# wrapper of 1 or multiple `pre_solvers`
def test_combine_separate_solvers_for_delta(self):
curve = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}, id="x")
curve2 = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}, id="y")
solver = Solver(
curves=[curve],
instruments=[Value(dt(2000, 9, 12), curves="x", metric="o/n_rate")],
s=[2.0],
instrument_labels=["X"],
id="A1",
)
solver2 = Solver(
curves=[curve2],
instruments=[Value(dt(2000, 9, 12), curves="y", metric="o/n_rate")],
s=[3.0],
instrument_labels=["Y"],
id="A2",
)
solver3 = Solver(pre_solvers=[solver, solver2])
v = IRS(dt(2000, 9, 12), "1d", "M", curves="x")
w = IRS(dt(2000, 9, 12), "1d", "M", curves="y")
result = Portfolio([v, w]).delta(solver=solver3)
m_idx = MultiIndex.from_tuples(
[("instruments", "A1", "X"), ("instruments", "A2", "Y")],
names=["type", "solver", "label"],
)
c_idx = MultiIndex.from_tuples([("usd", "usd")], names=["local_ccy", "display_ccy"])
expected = DataFrame([0.273825, 0.271870], index=m_idx, columns=c_idx)
assert_frame_equal(result, expected)
def test_combine_separate_solvers_for_exo_delta(self):
curve = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}, id="x")
curve2 = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}, id="y")
solver = Solver(
curves=[curve],
instruments=[Value(dt(2000, 9, 12), curves="x", metric="o/n_rate")],
s=[2.0],
instrument_labels=["X"],
id="A1",
)
solver2 = Solver(
curves=[curve2],
instruments=[Value(dt(2000, 9, 12), curves="y", metric="o/n_rate")],
s=[3.0],
instrument_labels=["Y"],
id="A2",
)
solver3 = Solver(pre_solvers=[solver, solver2])
v = IRS(
dt(2000, 9, 12), "1d", "M", curves="x", notional=Variable(1e8, ["exo"]), fixed_rate=5
)
w = IRS(
dt(2000, 9, 12), "1d", "M", curves="y", notional=Variable(1e8, ["exo"]), fixed_rate=4
)
result = (
Portfolio([v, w]).exo_delta(solver=solver3, vars=["exo"], vars_scalar=[1e8]).to_numpy()
)
pv = Portfolio([v, w]).npv(solver=solver3)
assert abs(result[0, 0] - pv) < 1e-7
def test_combine_separate_solvers_for_gamma(self):
curve = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}, id="x")
curve2 = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}, id="y")
solver = Solver(
curves=[curve],
instruments=[Value(dt(2000, 9, 12), curves="x", metric="o/n_rate")],
s=[2.0],
instrument_labels=["X"],
id="A1",
)
solver2 = Solver(
curves=[curve2],
instruments=[Value(dt(2000, 9, 12), curves="y", metric="o/n_rate")],
s=[3.0],
instrument_labels=["Y"],
id="A2",
)
solver3 = Solver(pre_solvers=[solver, solver2])
v = IRS(dt(2000, 9, 12), "1d", "M", curves="x")
w = IRS(dt(2000, 9, 12), "1d", "M", curves="y")
result = Portfolio([v, w]).gamma(solver=solver3).to_numpy()
partial_result1 = v.gamma(solver=solver).to_numpy()
partial_result2 = w.gamma(solver=solver2).to_numpy()
assert np.all(
result
== np.block(
[
[partial_result1, np.zeros(shape=(1, 1))],
[np.zeros(shape=(1, 1)), partial_result2],
]
)
)
def test_combine_separate_solvers_error(self):
curve = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}, id="x")
curve2 = Curve({dt(2000, 1, 1): 1.0, dt(2001, 1, 1): 1.0}, id="y")
solver = Solver(
curves=[curve],
instruments=[Value(dt(2000, 9, 12), curves="x", metric="o/n_rate")],
s=[2.0],
instrument_labels=["X"],
id="A1",
)
solver2 = Solver(
curves=[curve2],
instruments=[Value(dt(2000, 9, 12), curves="y", metric="o/n_rate")],
s=[3.0],
instrument_labels=["Y"],
id="A2",
)
solver3 = Solver(pre_solvers=[solver, solver2])
result = solver3.error
assert isinstance(result, Series)
def test_error_empty(self):
s1 = Solver()
s2 = Solver()
s3 = Solver(pre_solvers=[s1, s2])
assert s3.error.empty
================================================
FILE: python/tests/test_splines.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
import copy
import numpy as np
import pytest
from rateslib.dual import Dual, Dual2, Variable, gradient, set_order_convert
from rateslib.serialization import from_json
from rateslib.splines import PPSplineDual, PPSplineDual2, PPSplineF64, evaluate
@pytest.fixture
def t():
return np.array([1, 1, 1, 1, 2, 2, 2, 3, 4, 4, 4, 4])
@pytest.fixture
def x():
return np.linspace(1, 4, 7)
@pytest.mark.parametrize(
("i", "expected"),
[
(0, np.array([1.0, 0.125, 0.0, 0.0, 0.0, 0.0, 0.0])),
(1, np.array([0.0, 0.375, 0.0, 0.0, 0.0, 0.0, 0.0])),
(2, np.array([0.0, 0.375, 0.0, 0.0, 0.0, 0.0, 0.0])),
(3, np.array([0.0, 0.125, 1.0, 0.125, 0.0, 0.0, 0.0])),
(4, np.array([0.0, 0.0, 0.0, 0.59375, 0.25, 0.03125, 0.0])),
(5, np.array([0.0, 0.0, 0.0, 0.25, 0.5, 0.25, 0.0])),
(6, np.array([0.0, 0.0, 0.0, 0.03125, 0.25, 0.59375, 0.0])),
(7, np.array([0.0, 0.0, 0.0, 0.0, 0.0, 0.125, 1.0])),
],
)
def test_individual_bsplines(t, x, i, expected) -> None:
bs = PPSplineF64(k=4, t=t)
result = bs.bsplev(x, i=i)
assert (result == expected).all()
@pytest.mark.parametrize(
("i", "expected"),
[
(0, np.array([-3.0, -0.75, 0.0, 0.0, 0.0, 0.0, 0.0])),
(1, np.array([3.0, -0.75, 0.0, 0.0, 0.0, 0.0, 0.0])),
(2, np.array([0.0, 0.75, 0.0, 0.0, 0.0, 0.0, 0.0])),
(3, np.array([0.0, 0.75, -3.0, -0.75, 0.0, 0.0, 0.0])),
(4, np.array([0.0, 0.0, 3.0, -0.1875, -0.75, -0.1875, 0.0])),
(5, np.array([0.0, 0.0, 0.0, 0.75, 0.0, -0.75, 0.0])),
(6, np.array([0.0, 0.0, 0.0, 0.1875, 0.75, 0.1875, -3.0])),
(7, np.array([0.0, 0.0, 0.0, 0.0, 0.0, 0.75, 3.0])),
],
)
def test_first_derivative_endpoint_support(t, x, i, expected) -> None:
bs = PPSplineF64(k=4, t=t)
result = bs.bspldnev(x, i=i, m=1)
assert (result == expected).all()
@pytest.mark.parametrize(
("i", "expected"),
[
(0, np.array([6.0, 3.0, 0.0, 0.0, 0.0, 0.0, 0.0])),
(1, np.array([-12.0, -3.0, 0.0, 0.0, 0.0, 0.0, 0.0])),
(2, np.array([6.0, -3.0, 0.0, 0.0, 0.0, 0.0, 0.0])),
(3, np.array([0.0, 3.0, 6.0, 3.0, 0.0, 0.0, 0.0])),
(4, np.array([0.0, 0.0, -9.0, -3.75, 1.5, 0.75, 0.0])),
(5, np.array([0.0, 0.0, 3.0, 0.0, -3.0, 0.0, 3.0])),
(6, np.array([0.0, 0.0, 0.0, 0.75, 1.5, -3.75, -9.0])),
(7, np.array([0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 6.0])),
],
)
def test_second_derivative_endpoint_support(t, x, i, expected) -> None:
bs = PPSplineF64(k=4, t=t)
result = bs.bspldnev(x, i=i, m=2)
assert (result == expected).all()
@pytest.mark.parametrize(
("i", "expected"),
[
(0, np.array([-6.0, -6.0, 0.0, 0.0, 0.0, 0.0, 0.0])),
(1, np.array([18.0, 18.0, 0.0, 0.0, 0.0, 0.0, 0.0])),
(2, np.array([-18.0, -18.0, 0.0, 0.0, 0.0, 0.0, 0.0])),
(3, np.array([6.0, 6.0, -6.0, -6.0, 0.0, 0.0, 0.0])),
(4, np.array([0.0, 0.0, 10.5, 10.5, -1.5, -1.5, -1.5])),
(5, np.array([0.0, 0.0, -6.0, -6.0, 6.0, 6.0, 6.0])),
(6, np.array([0.0, 0.0, 1.5, 1.5, -10.5, -10.5, -10.5])),
(7, np.array([0.0, 0.0, 0.0, 0.0, 6.0, 6.0, 6.0])),
],
)
def test_third_derivative_endpoint_support(t, x, i, expected) -> None:
bs = PPSplineF64(k=4, t=t)
result = bs.bspldnev(x, i=i, m=3)
assert (result == expected).all()
def test_fourth_derivative_endpoint_support(t, x) -> None:
bs = PPSplineF64(k=4, t=t)
expected = np.array([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
for i in range(8):
test = bs.bspldnev(x, i=i, m=4) == expected
assert test.all()
def test_ppdnev(t) -> None:
bs = PPSplineF64(k=4, t=t, c=[1, 2, -1, 2, 1, 1, 2, 2.0])
r1 = bs.ppdnev_single(1.1, 2)
r2 = bs.ppdnev_single(1.8, 2)
r3 = bs.ppdnev_single(2.8, 2)
result = bs.ppdnev(np.array([1.1, 1.8, 2.8]), 2)
assert (result == np.array([r1, r2, r3])).all()
def test_ppev(t) -> None:
bs = PPSplineF64(k=4, t=t, c=[1, 2, -1, 2, 1, 1, 2, 2.0])
r1 = bs.ppev_single(1.1)
r2 = bs.ppev_single(1.8)
r3 = bs.ppev_single(2.8)
result = bs.ppev(np.array([1.1, 1.8, 2.8]))
assert (result == np.array([r1, r2, r3])).all()
def test_csolve() -> None:
t = [0, 0, 0, 0, 4, 4, 4, 4]
tau = np.array([0, 1, 3, 4])
val = np.array([0, 0, 2, 2])
bs = PPSplineF64(k=4, t=t, c=None)
bs.csolve(tau, val, 0, 0, False) # values solve spline
result = bs.c
expected = np.array([0.0, -1.11111111111111, 3.11111111111, 2.0], dtype=object)
for i, res in enumerate(result):
assert abs(expected[i] - res) < 1e-7
def test_csolve_lsq() -> None:
t = [0, 0, 0, 0, 4, 4, 4, 4]
tau = np.array([0, 1, 2, 3, 4])
val = np.array([0, 0, 1.5, 2, 2])
bs = PPSplineF64(k=4, t=t)
bs.csolve(tau, val, 0, 0, allow_lsq=True) # values solve spline
result = bs.c
expected = np.array([-0.042857, -0.7730158, 3.44920634, 1.9571428], dtype=object)
for i, res in enumerate(result):
assert abs(expected[i] - res) < 1e-5
@pytest.mark.parametrize(
("tau", "val", "allow"),
[
([0, 1, 2, 3], [0, 0, 2, 2, 5], False),
([0, 1, 2, 3, 5], [0, 0, 2, 2], False),
([0, 1, 2, 3], [0, 0, 2, 2, 5], True),
],
)
def test_csolve_raises(tau, val, allow) -> None:
t = [0, 0, 0, 0, 4, 4, 4, 4]
tau = np.array(tau)
val = np.array(val)
bs = PPSplineF64(k=4, t=t)
with pytest.raises(ValueError):
bs.csolve(tau, val, 0, 0, allow_lsq=allow)
def test_copy() -> None:
bs = PPSplineF64(k=2, t=[1, 1, 2, 3, 3], c=[1, 2, 3])
bsc = copy.copy(bs)
assert id(bs) != id(bsc)
def test_spline_equality_type() -> None:
spline = PPSplineF64(k=1, t=[1, 2])
assert spline != "bad"
spline2 = PPSplineF64(k=1, t=[1, 2, 3])
assert spline != spline2
spline3 = PPSplineF64(k=1, t=[1, 3, 5])
assert spline2 != spline3
spline4 = PPSplineF64(k=2, t=[1, 3, 5])
assert spline3 != spline4
spline5 = PPSplineF64(k=2, t=[1, 3, 5])
assert spline4 == spline5
spline6 = PPSplineF64(k=2, t=[1, 1, 3, 5, 5], c=[1, 2, 3])
spline7 = PPSplineF64(k=2, t=[1, 1, 3, 5, 5], c=[1, 2, 3])
assert spline6 == spline7
@pytest.mark.parametrize(
("klass", "order"),
[
(PPSplineF64, 0),
(PPSplineDual, 1),
],
)
def test_dual_AD(klass, order) -> None:
sp = klass(
t=[0, 0, 0, 0, 1, 3, 4, 4, 4, 4],
k=4,
)
y = [set_order_convert(_, order, []) for _ in [0, 0, 0, 2, 2, 0]]
sp.csolve([0, 0, 1, 3, 4, 4], y, 2, 2, False)
analytic_deriv = sp.ppdnev_single(3.5, 1)
dual_deriv = gradient(sp.ppev_single_dual(Dual(3.5, ["x"], [2.0])))[0]
assert abs(dual_deriv - 2.0 * analytic_deriv) < 1e-9
@pytest.mark.parametrize(
("klass", "order"),
[
(PPSplineF64, 0),
(PPSplineDual2, 2),
],
)
def test_dual2_AD(klass, order) -> None:
sp = klass(
t=[0, 0, 0, 0, 1, 3, 4, 4, 4, 4],
k=4,
)
y = [set_order_convert(_, order, []) for _ in [0, 0, 0, 2, 2, 0]]
sp.csolve([0, 0, 1, 3, 4, 4], y, 2, 2, False)
analytic_deriv = sp.ppdnev_single(3.5, 1)
dual_deriv = gradient(sp.ppev_single_dual2(Dual2(3.5, ["x"], [3.0], [])))[0]
assert abs(dual_deriv - 3.0 * analytic_deriv) < 1e-9
analytic_deriv2 = sp.ppdnev_single(3.5, 2)
dual_deriv2 = gradient(sp.ppev_single_dual2(Dual2(3.5, ["x"], [3.0], [])), order=2)[0, 0]
assert abs(dual_deriv2 - 9.0 * analytic_deriv2) < 1e-9
dual_deriv_x = gradient(
sp.ppev_single_dual2(Dual2(3.5, ["x1", "x2"], [3.0, 1.5], [1, 1, 1, 1])),
order=2,
)[0, 1]
analytic_deriv_x = analytic_deriv2 * 3.0 * 1.5 + analytic_deriv * 2.0
assert abs(dual_deriv_x - analytic_deriv_x) < 1e-9
def test_dual_AD_raises() -> None:
sp = PPSplineDual(
t=[0, 0, 0, 0, 1, 3, 4, 4, 4, 4],
k=4,
)
_0 = Dual(0.0, [], [])
y0, y1 = Dual(0.0, ["y0"], []), Dual(0.0, ["y1"], [])
y2, y3 = Dual(2.0, ["y2"], []), Dual(2.0, ["y3"], [])
sp.csolve([0, 0, 1, 3, 4, 4], [_0, y0, y1, y2, y3, _0], 2, 2, False)
with pytest.raises(TypeError, match="Cannot index with type `Dual2`"):
sp.ppev_single_dual2(Dual2(3.5, ["x"], [], []))
with pytest.raises(TypeError, match="Cannot mix `Dual2` and `Dual` types"):
sp.ppev_single_dual(Dual2(3.5, ["x"], [], []))
def test_dual2_AD_raises() -> None:
sp = PPSplineDual2(
t=[0, 0, 0, 0, 1, 3, 4, 4, 4, 4],
k=4,
)
_0 = Dual2(0.0, [], [], [])
y0, y1 = Dual2(0.0, ["y0"], [], []), Dual2(0.0, ["y1"], [], [])
y2, y3 = Dual2(2.0, ["y2"], [], []), Dual2(2.0, ["y3"], [], [])
sp.csolve([0, 0, 1, 3, 4, 4], [_0, y0, y1, y2, y3, _0], 2, 2, False)
with pytest.raises(TypeError, match="Cannot index with type `Dual`"):
sp.ppev_single_dual(Dual(3.5, ["x"], []))
with pytest.raises(TypeError, match="Cannot mix `Dual2` and `Dual` types"):
sp.ppev_single_dual2(Dual(3.5, ["x"], []))
def test_dual_float_raises() -> None:
sp = PPSplineDual(
t=[0, 0, 0, 0, 1, 3, 4, 4, 4, 4],
k=4,
)
_0 = Dual(0.0, [], [])
y0, y1 = Dual(0.0, ["y0"], []), Dual(0.0, ["y1"], [])
y2, y3 = Dual(2.0, ["y2"], []), Dual(2.0, ["y3"], [])
with pytest.raises(TypeError, match="argument 'y': 'float' object is not an instance of 'Dua"):
sp.csolve([0, 0, 1, 3, 4, 4], [0.0, y0, y1, y2, y3, _0], 2, 2, False)
def test_bsplmatrix() -> None:
t = [1, 1, 1, 1, 2, 2, 2, 3, 4, 4, 4, 4]
spline = PPSplineF64(k=4, t=t)
tau = np.array([1.1, 1.3, 1.9, 2.2, 2.5, 3.1, 3.5, 3.9])
matrix = spline.bsplmatrix(tau, 0, 0)
assert matrix.shape == (8, 8)
def test_json_round_trip() -> None:
t = [0, 0, 0, 0, 4, 4, 4, 4]
tau = np.array([0, 1, 3, 4])
val = np.array([0, 0, 2, 2])
bs = PPSplineF64(k=4, t=t, c=None)
bs.csolve(tau, val, 0, 0, False) # values solve spline
result = bs.to_json()
obj = from_json(result)
assert bs == obj
# test unsolved
t = [0, 0, 0, 0, 4, 4, 4, 4]
bs = PPSplineF64(k=4, t=t, c=None)
result = bs.to_json()
obj = from_json(result)
assert bs == obj
@pytest.mark.skip(reason="TODO: devise a post solve check for NaN.")
def test_should_raise_bad_solve() -> None:
pps = PPSplineF64(k=4, t=[1, 1, 1, 1, 4, 4, 4, 4], c=None)
with pytest.raises(ValueError):
pps.csolve(np.array([0, 1, 3, 4]), np.array([0, 0, 2, 2]), 0, 0, False)
@pytest.mark.parametrize(
("obj", "val", "exp"),
[
(PPSplineF64, [0, 0, 2, 2], Dual),
(PPSplineDual, [Dual(0, [], []), Dual(0, [], []), Dual(2, [], []), Dual(2, [], [])], Dual),
(
PPSplineDual2,
[
Dual2(0, [], [], []),
Dual2(0, [], [], []),
Dual2(2, [], [], []),
Dual2(2, [], [], []),
],
Dual2,
),
],
)
def test_evaluate_with_Variable_x(obj, val, exp):
t = [0, 0, 0, 0, 4, 4, 4, 4]
tau = np.array([0, 1, 3, 4])
bs = obj(k=4, t=t, c=None)
bs.csolve(tau, val, 0, 0, False) # values solve spline
x = Variable(1.5, ["x"])
result = evaluate(bs, x, 0)
assert abs(result - 0.437499999999999) < 1e-12
assert isinstance(result, exp)
================================================
FILE: python/tests/test_to_fix.py
================================================
# SPDX-License-Identifier: LicenseRef-Rateslib-Dual
#
# Copyright (c) 2026 Siffrorna Technology Limited
#
# Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
# Source-available, not open source.
#
# See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
# and/or contact info (at) rateslib (dot) com
####################################################################################################
from datetime import datetime as dt
import pytest
from rateslib.dual import Dual
from rateslib.volatility import FXDeltaVolSmile
def test_fxsmile_update_node():
# update node does not validate the AD order of the supplied value
# this should probably return a more helpful error message
fxs = FXDeltaVolSmile(
eval_date=dt(2000, 1, 1),
expiry=dt(2000, 12, 1),
nodes={0.1: 1, 0.2: 2},
delta_type="forward",
)
fxs._set_ad_order(2)
with pytest.raises(TypeError):
fxs.update_node(0.1, Dual(2.0, ["x"], []))
================================================
FILE: robots.txt
================================================
User-agent: GPTBot
Disallow: /
User-agent: CCBot
Disallow: /
User-agent: ClaudeBot
Disallow: /
================================================
FILE: rust/_README.txt
================================================
This 'src' directory contains the rust implementation of rateslib. The "rateslibrs" elements.
Some configuration is available from the "cargo.toml" file.
Rust tests are contained in the "tests" subfolder and are executed with >$ cargo test.
This package has a library --lib and a binary called "rateslibrs" defined by toml.
To call otehr files in the bin use --bin scratch, for example,
================================================
FILE: rust/curves/curve.rs
================================================
// SPDX-License-Identifier: LicenseRef-Rateslib-Dual
//
// Copyright (c) 2026 Siffrorna Technology Limited
// This code cannot be used or copied externally
//
// Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
// Source-available, not open source.
//
// See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
// and/or contact info (at) rateslib (dot) com
////////////////////////////////////////////////////////////////////////////////////////////////////
use crate::curves::interpolation::utils::index_left;
use crate::curves::nodes::{Nodes, NodesTimestamp};
use crate::dual::{get_variable_tags, ADOrder, Dual, Dual2, Number};
use crate::scheduling::{Convention, DateRoll};
use chrono::NaiveDateTime;
use indexmap::IndexMap;
use pyo3::exceptions::PyValueError;
use pyo3::{pyclass, PyErr};
use serde::{Deserialize, Serialize};
use std::cmp::PartialEq;
/// Default struct for storing datetime indexed discount factors (DFs).
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
pub struct CurveDF {
pub(crate) nodes: NodesTimestamp,
pub(crate) interpolator: T,
pub(crate) id: String,
pub(crate) convention: Convention,
pub(crate) modifier: Modifier,
pub(crate) index_base: Option,
pub(crate) calendar: U,
}
/// A rule to adjust a non-business day to a business day.
#[pyclass(module = "rateslib.rs", eq, eq_int, from_py_object)]
#[derive(Copy, Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Modifier {
/// Actual: date is unchanged, even if it is a non-business day.
Act,
/// Following: date is rolled to the next business day.
F,
/// Modified following: date is rolled to the next except if it changes month.
ModF,
/// Previous: date is rolled to the previous business day.
P,
/// Modified previous: date is rolled to the previous except if it changes month.
ModP,
}
/// Assigns methods for returning values from datetime indexed Curves.
pub trait CurveInterpolation {
/// Get a value from the curve's `Nodes` expressed in its input form, i.e. discount factor or value.
fn interpolated_value(&self, nodes: &NodesTimestamp, date: &NaiveDateTime) -> Number;
/// Get the left side node key index of the given datetime
fn node_index(&self, nodes: &NodesTimestamp, date_timestamp: i64) -> usize {
// let timestamp = date.and_utc().timestamp();
index_left(&nodes.keys(), &date_timestamp, None)
}
}
impl CurveDF {
pub fn try_new(
nodes: Nodes,
interpolator: T,
id: &str,
convention: Convention,
modifier: Modifier,
index_base: Option,
calendar: U,
) -> Result {
let mut nodes = NodesTimestamp::from(nodes);
nodes.sort_keys();
Ok(Self {
nodes,
interpolator,
id: id.to_string(),
convention,
modifier,
index_base,
calendar,
})
}
/// Get the `ADOrder` of the `Curve`.
pub fn ad(&self) -> ADOrder {
match self.nodes {
NodesTimestamp::F64(_) => ADOrder::Zero,
NodesTimestamp::Dual(_) => ADOrder::One,
NodesTimestamp::Dual2(_) => ADOrder::Two,
}
}
pub fn interpolated_value(&self, date: &NaiveDateTime) -> Number {
self.interpolator.interpolated_value(&self.nodes, date)
}
pub fn node_index(&self, date_timestamp: i64) -> usize {
self.interpolator.node_index(&self.nodes, date_timestamp)
}
pub fn set_ad_order(&mut self, ad: ADOrder) -> Result<(), PyErr> {
let vars: Vec = get_variable_tags(&self.id, self.nodes.keys().len());
match (ad, &self.nodes) {
(ADOrder::Zero, NodesTimestamp::F64(_))
| (ADOrder::One, NodesTimestamp::Dual(_))
| (ADOrder::Two, NodesTimestamp::Dual2(_)) => {
// leave unchanged.
Ok(())
}
(ADOrder::One, NodesTimestamp::F64(i)) => {
// rebuild the derivatives
self.nodes = NodesTimestamp::Dual(IndexMap::from_iter(
i.into_iter()
.enumerate()
.map(|(i, (k, v))| (*k, Dual::new(*v, vec![vars[i].clone()]))),
));
Ok(())
}
(ADOrder::Two, NodesTimestamp::F64(i)) => {
// rebuild the derivatives
self.nodes = NodesTimestamp::Dual2(IndexMap::from_iter(
i.into_iter()
.enumerate()
.map(|(i, (k, v))| (*k, Dual2::new(*v, vec![vars[i].clone()]))),
));
Ok(())
}
(ADOrder::One, NodesTimestamp::Dual2(i)) => {
self.nodes = NodesTimestamp::Dual(IndexMap::from_iter(
i.into_iter().map(|(k, v)| (*k, Dual::from(v))),
));
Ok(())
}
(ADOrder::Zero, NodesTimestamp::Dual(i)) => {
// covert dual into f64
self.nodes = NodesTimestamp::F64(IndexMap::from_iter(
i.into_iter().map(|(k, v)| (*k, f64::from(v))),
));
Ok(())
}
(ADOrder::Zero, NodesTimestamp::Dual2(i)) => {
// covert dual into f64
self.nodes = NodesTimestamp::F64(IndexMap::from_iter(
i.into_iter().map(|(k, v)| (*k, f64::from(v))),
));
Ok(())
}
(ADOrder::Two, NodesTimestamp::Dual(i)) => {
// rebuild derivatives
self.nodes = NodesTimestamp::Dual2(IndexMap::from_iter(
i.into_iter().map(|(k, v)| (*k, Dual2::from(v))),
));
Ok(())
}
}
}
pub fn index_value(&self, date: &NaiveDateTime) -> Result {
match self.index_base {
None => Err(PyValueError::new_err("Can only calculate `index_value` for a Curve which has been initialised with `index_base`.")),
Some(ib) => {
if date.and_utc().timestamp() < self.nodes.first_key() {
Ok(Number::F64(0.0))
} else {
Ok(Number::F64(ib) / self.interpolated_value(date))
}
}
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::curves::LogLinearInterpolator;
use crate::scheduling::{ndt, NamedCal};
use indexmap::IndexMap;
fn curve_fixture() -> CurveDF {
let nodes = Nodes::F64(IndexMap::from_iter(vec![
(ndt(2000, 1, 1), 1.0_f64),
(ndt(2001, 1, 1), 0.99_f64),
(ndt(2002, 1, 1), 0.98_f64),
]));
let interpolator = LogLinearInterpolator::new();
let convention = Convention::Act360;
let modifier = Modifier::ModF;
let cal = NamedCal::try_new("all").unwrap();
CurveDF::try_new(nodes, interpolator, "crv", convention, modifier, None, cal).unwrap()
}
fn index_curve_fixture() -> CurveDF {
let nodes = Nodes::F64(IndexMap::from_iter(vec![
(ndt(2000, 1, 1), 1.0_f64),
(ndt(2001, 1, 1), 0.99_f64),
(ndt(2002, 1, 1), 0.98_f64),
]));
let interpolator = LogLinearInterpolator::new();
let convention = Convention::Act360;
let modifier = Modifier::ModF;
let cal = NamedCal::try_new("all").unwrap();
CurveDF::try_new(
nodes,
interpolator,
"crv",
convention,
modifier,
Some(100.0),
cal,
)
.unwrap()
}
fn curve_dual_fixture() -> CurveDF {
let nodes = Nodes::Dual(IndexMap::from_iter(vec![
(ndt(2000, 1, 1), Dual::new(1.0, vec!["x".to_string()])),
(ndt(2001, 1, 1), Dual::new(0.99, vec!["y".to_string()])),
(ndt(2002, 1, 1), Dual::new(0.98, vec!["z".to_string()])),
]));
let interpolator = LogLinearInterpolator::new();
let convention = Convention::Act360;
let modifier = Modifier::ModF;
let cal = NamedCal::try_new("all").unwrap();
CurveDF::try_new(nodes, interpolator, "crv", convention, modifier, None, cal).unwrap()
}
#[test]
fn test_get_index() {
let c = curve_fixture();
let result = c.node_index(ndt(2001, 7, 30).and_utc().timestamp());
assert_eq!(result, 1_usize)
}
#[test]
fn test_get_value() {
let c = curve_fixture();
let result = c.interpolated_value(&ndt(2000, 7, 1));
assert_eq!(result, Number::F64(0.9950147597711371))
}
fn nodes_timestamp_fixture() -> NodesTimestamp {
let nodes = Nodes::F64(IndexMap::from_iter(vec![
(ndt(2000, 1, 1), 1.0_f64),
(ndt(2001, 1, 1), 0.99_f64),
(ndt(2002, 1, 1), 0.98_f64),
]));
NodesTimestamp::from(nodes)
}
#[test]
fn test_log_linear() {
let nts = nodes_timestamp_fixture();
let ll = LogLinearInterpolator::new();
let result = ll.interpolated_value(&nts, &ndt(2000, 7, 1));
// expected = exp(0 + (182 / 366) * (ln(0.99) - ln(1.0)) = 0.995015
assert_eq!(result, Number::F64(0.9950147597711371));
}
#[test]
fn test_set_order() {
// converts the input f64 nodes to dual with ordered variables tagged by id
let mut curve = curve_fixture();
let _ = curve.set_ad_order(ADOrder::One);
let result = curve.interpolated_value(&ndt(2001, 1, 1));
assert_eq!(
result,
Number::Dual(Dual::new(0.99, vec!["crv1".to_string()]))
);
}
#[test]
fn test_set_order_no_change() {
// asserts no change in values when AD order remains same
let mut curve = curve_dual_fixture();
let _ = curve.set_ad_order(ADOrder::One);
let result = curve.interpolated_value(&ndt(2001, 1, 1));
assert_eq!(result, Number::Dual(Dual::new(0.99, vec!["y".to_string()])));
}
#[test]
fn test_set_order_vars_remain() {
// asserts no change in variables transitioning ADone to ADtwo
let mut curve = curve_dual_fixture();
let _ = curve.set_ad_order(ADOrder::Two);
let result = curve.interpolated_value(&ndt(2001, 1, 1));
assert_eq!(
result,
Number::Dual2(Dual2::new(0.99, vec!["y".to_string()]))
);
}
#[test]
fn test_index_value() {
let index_curve = index_curve_fixture();
let result = index_curve.index_value(&ndt(2001, 1, 1)).unwrap();
assert_eq!(result, Number::F64(100.0 / 0.99))
}
#[test]
fn test_index_value_prior_to_first() {
let index_curve = index_curve_fixture();
let result = index_curve.index_value(&ndt(1980, 1, 1)).unwrap();
assert_eq!(result, Number::F64(0.0))
}
}
================================================
FILE: rust/curves/curve_py.rs
================================================
// SPDX-License-Identifier: LicenseRef-Rateslib-Dual
//
// Copyright (c) 2026 Siffrorna Technology Limited
// This code cannot be used or copied externally
//
// Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
// Source-available, not open source.
//
// See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
// and/or contact info (at) rateslib (dot) com
////////////////////////////////////////////////////////////////////////////////////////////////////
//! Wrapper module to export Rust curve data types to Python using pyo3 bindings.
use crate::curves::nodes::{Nodes, NodesTimestamp};
use crate::curves::{
CurveDF, CurveInterpolation, FlatBackwardInterpolator, FlatForwardInterpolator,
LinearInterpolator, LinearZeroRateInterpolator, LogLinearInterpolator, Modifier,
NullInterpolator,
};
use crate::dual::{get_variable_tags, set_order, ADOrder, Dual, Dual2, Number};
use crate::json::json_py::DeserializedObj;
use crate::json::JSON;
use crate::scheduling::{Calendar, Convention};
use bincode::config::legacy;
use bincode::serde::{decode_from_slice, encode_to_vec};
use chrono::NaiveDateTime;
use indexmap::IndexMap;
use pyo3::exceptions::PyValueError;
use pyo3::prelude::*;
use pyo3::types::PyBytes;
use serde::{Deserialize, Serialize};
/// Interpolation
#[derive(Debug, Clone, PartialEq, FromPyObject, Deserialize, Serialize, IntoPyObject)]
pub(crate) enum CurveInterpolator {
LogLinear(LogLinearInterpolator),
Linear(LinearInterpolator),
LinearZeroRate(LinearZeroRateInterpolator),
FlatForward(FlatForwardInterpolator),
FlatBackward(FlatBackwardInterpolator),
Null(NullInterpolator),
}
// // removed upgrading to pyo3 0.23, see https://pyo3.rs/v0.23.0/migration#intopyobject-and-intopyobjectref-derive-macros
// impl IntoPy for CurveInterpolator {
// fn into_py(self, py: Python<'_>) -> PyObject {
// macro_rules! into_py {
// ($obj: ident) => {
// Py::new(py, $obj).unwrap().to_object(py)
// };
// }
//
// match self {
// CurveInterpolator::LogLinear(i) => into_py!(i),
// CurveInterpolator::Linear(i) => into_py!(i),
// CurveInterpolator::LinearZeroRate(i) => into_py!(i),
// CurveInterpolator::FlatForward(i) => into_py!(i),
// CurveInterpolator::FlatBackward(i) => into_py!(i),
// CurveInterpolator::Null(i) => into_py!(i),
// }
// }
// }
impl CurveInterpolation for CurveInterpolator {
fn interpolated_value(&self, nodes: &NodesTimestamp, date: &NaiveDateTime) -> Number {
match self {
CurveInterpolator::LogLinear(i) => i.interpolated_value(nodes, date),
CurveInterpolator::Linear(i) => i.interpolated_value(nodes, date),
CurveInterpolator::LinearZeroRate(i) => i.interpolated_value(nodes, date),
CurveInterpolator::FlatBackward(i) => i.interpolated_value(nodes, date),
CurveInterpolator::FlatForward(i) => i.interpolated_value(nodes, date),
CurveInterpolator::Null(i) => i.interpolated_value(nodes, date),
}
}
}
#[pyclass(module = "rateslib.rs", from_py_object)]
#[derive(Clone, Deserialize, Serialize)]
pub(crate) struct Curve {
inner: CurveDF,
}
#[pymethods]
impl Curve {
#[new]
#[pyo3(signature = (nodes, interpolator, ad, id, convention, modifier, calendar, index_base=None))]
fn new_py(
nodes: IndexMap,
interpolator: CurveInterpolator,
ad: ADOrder,
id: String,
convention: Convention,
modifier: Modifier,
calendar: Calendar,
index_base: Option,
) -> PyResult {
let nodes_ = nodes_into_order(nodes, ad, &id);
let inner = CurveDF::try_new(
nodes_,
interpolator,
&id,
convention,
modifier,
index_base,
calendar,
)?;
Ok(Self { inner })
}
#[getter]
fn id(&self) -> String {
self.inner.id.clone()
}
#[getter]
fn nodes(&self) -> IndexMap {
let nodes = Nodes::from(self.inner.nodes.clone());
match nodes {
Nodes::F64(i) => IndexMap::from_iter(i.into_iter().map(|(k, v)| (k, Number::F64(v)))),
Nodes::Dual(i) => IndexMap::from_iter(i.into_iter().map(|(k, v)| (k, Number::Dual(v)))),
Nodes::Dual2(i) => {
IndexMap::from_iter(i.into_iter().map(|(k, v)| (k, Number::Dual2(v))))
}
}
}
#[getter]
fn ad(&self) -> ADOrder {
self.inner.ad()
}
#[getter]
fn interpolation(&self) -> String {
match self.inner.interpolator {
CurveInterpolator::Linear(_) => "linear".to_string(),
CurveInterpolator::LogLinear(_) => "log_linear".to_string(),
CurveInterpolator::LinearZeroRate(_) => "linear_zero_rate".to_string(),
CurveInterpolator::FlatForward(_) => "flat_forward".to_string(),
CurveInterpolator::FlatBackward(_) => "flat_backward".to_string(),
CurveInterpolator::Null(_) => "null".to_string(),
}
}
#[getter]
fn convention(&self) -> Convention {
self.inner.convention
}
#[getter]
fn modifier(&self) -> Modifier {
self.inner.modifier
}
#[pyo3(name = "index_value")]
fn index_value_py(&self, date: NaiveDateTime) -> PyResult {
self.inner.index_value(&date)
}
fn set_ad_order(&mut self, ad: ADOrder) -> PyResult<()> {
let _ = self.inner.set_ad_order(ad);
Ok(())
}
fn __getitem__(&self, date: NaiveDateTime) -> Number {
self.inner.interpolated_value(&date)
}
fn __eq__(&self, other: Curve) -> bool {
self.inner.eq(&other.inner)
}
// JSON
/// Create a JSON string representation of the object.
///
/// Returns
/// -------
/// str
#[pyo3(name = "to_json")]
fn to_json_py(&self) -> PyResult {
match DeserializedObj::Curve(self.clone()).to_json() {
Ok(v) => Ok(v),
Err(_) => Err(PyValueError::new_err(
"Failed to serialize `Curve` to JSON.",
)),
}
}
// Pickling
pub fn __setstate__(&mut self, state: Bound<'_, PyBytes>) -> PyResult<()> {
*self = decode_from_slice(state.as_bytes(), legacy()).unwrap().0;
Ok(())
}
pub fn __getstate__<'py>(&self, py: Python<'py>) -> PyResult> {
Ok(PyBytes::new(py, &encode_to_vec(&self, legacy()).unwrap()))
}
pub fn __getnewargs__(
&self,
) -> PyResult<(
IndexMap,
CurveInterpolator,
ADOrder,
String,
Convention,
Modifier,
Calendar,
Option,
)> {
Ok((
self.inner.nodes.index_map(),
self.inner.interpolator.clone(),
self.inner.ad(),
self.inner.id.clone(),
self.inner.convention,
self.inner.modifier,
self.inner.calendar.clone(),
self.inner.index_base,
))
}
}
// /// Convert the `nodes`of a `Curve` from a `HashMap` input form into the local data model.
// /// Will upcast f64 values to a new ADOrder adding curve variable tags by id.
// fn hashmap_into_nodes_timestamp(
// h: HashMap,
// ad: ADOrder,
// id: &str,
// ) -> NodesTimestamp {
// let vars: Vec = get_variable_tags(id, h.keys().len());
//
// /// First convert to IndexMap and sort key order.
// // let mut im: IndexMap = IndexMap::from_iter(h.into_iter());
// let mut im: IndexMap = IndexMap::from_iter(h.into_iter().map(|(k,v)| (k.and_utc().timestamp(), v)));
// im.sort_keys();
//
// match ad {
// ADOrder::Zero => { NodesTimestamp::F64(IndexMap::from_iter(im.into_iter().map(|(k,v)| (k, f64::from(v))))) }
// ADOrder::One => { NodesTimestamp::Dual(IndexMap::from_iter(im.into_iter().enumerate().map(|(i,(k,v))| (k, Dual::from(set_order_with_conversion(v, ad, vec![vars[i].clone()])))))) }
// ADOrder::Two => { NodesTimestamp::Dual2(IndexMap::from_iter(im.into_iter().enumerate().map(|(i,(k,v))| (k, Dual2::from(set_order_with_conversion(v, ad, vec![vars[i].clone()])))))) }
// }
// }
fn nodes_into_order(mut nodes: IndexMap, ad: ADOrder, id: &str) -> Nodes {
let vars: Vec = get_variable_tags(id, nodes.keys().len());
nodes.sort_keys();
match ad {
ADOrder::Zero => Nodes::F64(IndexMap::from_iter(
nodes.into_iter().map(|(k, v)| (k, f64::from(v))),
)),
ADOrder::One => {
Nodes::Dual(IndexMap::from_iter(nodes.into_iter().enumerate().map(
|(i, (k, v))| (k, Dual::from(set_order(v, ad, vec![vars[i].clone()]))),
)))
}
ADOrder::Two => {
Nodes::Dual2(IndexMap::from_iter(nodes.into_iter().enumerate().map(
|(i, (k, v))| (k, Dual2::from(set_order(v, ad, vec![vars[i].clone()]))),
)))
}
}
}
#[pymethods]
impl Modifier {
// Pickling
#[new]
fn new_py(ad: u8) -> PyResult {
match ad {
0_u8 => Ok(Modifier::Act),
1_u8 => Ok(Modifier::F),
2_u8 => Ok(Modifier::ModF),
3_u8 => Ok(Modifier::P),
4_u8 => Ok(Modifier::ModP),
_ => Err(PyValueError::new_err(
"unreachable code on Convention pickle.",
)),
}
}
pub fn __getnewargs__<'py>(&self) -> PyResult<(u8,)> {
match self {
Modifier::Act => Ok((0_u8,)),
Modifier::F => Ok((1_u8,)),
Modifier::ModF => Ok((2_u8,)),
Modifier::P => Ok((3_u8,)),
Modifier::ModP => Ok((4_u8,)),
}
}
}
#[pyfunction]
pub(crate) fn _get_modifier_str(modifier: Modifier) -> String {
match modifier {
Modifier::F => "F".to_string(),
Modifier::ModF => "MF".to_string(),
Modifier::P => "P".to_string(),
Modifier::ModP => "MP".to_string(),
Modifier::Act => "NONE".to_string(),
}
}
================================================
FILE: rust/curves/interpolation/interpolation_py.rs
================================================
// SPDX-License-Identifier: LicenseRef-Rateslib-Dual
//
// Copyright (c) 2026 Siffrorna Technology Limited
// This code cannot be used or copied externally
//
// Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
// Source-available, not open source.
//
// See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
// and/or contact info (at) rateslib (dot) com
////////////////////////////////////////////////////////////////////////////////////////////////////
use crate::curves::interpolation::utils::index_left;
use pyo3::pyfunction;
macro_rules! create_interface {
($name: ident, $type: ident) => {
#[pyfunction]
#[pyo3(signature = (list_input, value, left_count=None))]
pub fn $name(list_input: Vec<$type>, value: $type, left_count: Option) -> usize {
index_left(&list_input[..], &value, left_count)
}
};
}
create_interface!(index_left_f64, f64);
================================================
FILE: rust/curves/interpolation/intp_flat_backward.rs
================================================
// SPDX-License-Identifier: LicenseRef-Rateslib-Dual
//
// Copyright (c) 2026 Siffrorna Technology Limited
// This code cannot be used or copied externally
//
// Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
// Source-available, not open source.
//
// See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
// and/or contact info (at) rateslib (dot) com
////////////////////////////////////////////////////////////////////////////////////////////////////
use crate::curves::nodes::NodesTimestamp;
use crate::curves::CurveInterpolation;
use crate::dual::Number;
use bincode::config::legacy;
use bincode::serde::{decode_from_slice, encode_to_vec};
use chrono::NaiveDateTime;
use pyo3::prelude::*;
use pyo3::types::{PyBytes, PyTuple};
use pyo3::{pyclass, pymethods, Bound, PyResult, Python};
use serde::{Deserialize, Serialize};
use std::cmp::PartialEq;
/// Define flat backward interpolation of nodes.
#[pyclass(module = "rateslib.rs", from_py_object)]
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
pub struct FlatBackwardInterpolator {}
#[pymethods]
impl FlatBackwardInterpolator {
#[new]
pub fn new() -> Self {
FlatBackwardInterpolator {}
}
// Pickling
pub fn __setstate__(&mut self, state: Bound<'_, PyBytes>) -> PyResult<()> {
*self = decode_from_slice(state.as_bytes(), legacy()).unwrap().0;
Ok(())
}
pub fn __getstate__<'py>(&self, py: Python<'py>) -> PyResult> {
Ok(PyBytes::new(py, &encode_to_vec(&self, legacy()).unwrap()))
}
pub fn __getnewargs__<'py>(&self, py: Python<'py>) -> PyResult> {
Ok(PyTuple::empty(py))
}
}
impl CurveInterpolation for FlatBackwardInterpolator {
fn interpolated_value(&self, nodes: &NodesTimestamp, date: &NaiveDateTime) -> Number {
let x = date.and_utc().timestamp();
let index = self.node_index(nodes, x);
macro_rules! interp {
($Variant: ident, $indexmap: expr) => {{
let (x1, y1) = $indexmap.get_index(index).unwrap();
let (_x2, y2) = $indexmap.get_index(index + 1_usize).unwrap();
if x <= *x1 {
Number::$Variant(y1.clone())
} else {
Number::$Variant(y2.clone())
}
}};
}
match nodes {
NodesTimestamp::F64(m) => interp!(F64, m),
NodesTimestamp::Dual(m) => interp!(Dual, m),
NodesTimestamp::Dual2(m) => interp!(Dual2, m),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::curves::nodes::Nodes;
use crate::scheduling::ndt;
use indexmap::IndexMap;
fn nodes_timestamp_fixture() -> NodesTimestamp {
let nodes = Nodes::F64(IndexMap::from_iter(vec![
(ndt(2000, 1, 1), 1.0_f64),
(ndt(2001, 1, 1), 0.99_f64),
(ndt(2002, 1, 1), 0.98_f64),
]));
NodesTimestamp::from(nodes)
}
#[test]
fn test_flat_backward() {
let nts = nodes_timestamp_fixture();
let li = FlatBackwardInterpolator::new();
let result = li.interpolated_value(&nts, &ndt(2000, 7, 1));
assert_eq!(result, Number::F64(0.99));
}
#[test]
fn test_flat_backward_left_out_of_bounds() {
let nts = nodes_timestamp_fixture();
let li = FlatBackwardInterpolator::new();
let result = li.interpolated_value(&nts, &ndt(1999, 7, 1));
assert_eq!(result, Number::F64(1.0));
}
#[test]
fn test_flat_backward_right_out_of_bounds() {
let nts = nodes_timestamp_fixture();
let li = FlatBackwardInterpolator::new();
let result = li.interpolated_value(&nts, &ndt(2005, 7, 1));
assert_eq!(result, Number::F64(0.98));
}
#[test]
fn test_flat_backward_equals_interval_value() {
let nts = nodes_timestamp_fixture();
let li = FlatBackwardInterpolator::new();
let result = li.interpolated_value(&nts, &ndt(2001, 1, 1));
assert_eq!(result, Number::F64(0.99));
}
}
================================================
FILE: rust/curves/interpolation/intp_flat_forward.rs
================================================
// SPDX-License-Identifier: LicenseRef-Rateslib-Dual
//
// Copyright (c) 2026 Siffrorna Technology Limited
// This code cannot be used or copied externally
//
// Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
// Source-available, not open source.
//
// See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
// and/or contact info (at) rateslib (dot) com
////////////////////////////////////////////////////////////////////////////////////////////////////
use crate::curves::nodes::NodesTimestamp;
use crate::curves::CurveInterpolation;
use crate::dual::Number;
use bincode::config::legacy;
use bincode::serde::{decode_from_slice, encode_to_vec};
use chrono::NaiveDateTime;
use pyo3::prelude::*;
use pyo3::types::{PyBytes, PyTuple};
use pyo3::{pyclass, pymethods, Bound, PyResult, Python};
use serde::{Deserialize, Serialize};
use std::cmp::PartialEq;
/// Define flat forward interpolation of nodes.
#[pyclass(module = "rateslib.rs", from_py_object)]
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
pub struct FlatForwardInterpolator {}
#[pymethods]
impl FlatForwardInterpolator {
#[new]
pub fn new() -> Self {
FlatForwardInterpolator {}
}
// Pickling
pub fn __setstate__(&mut self, state: Bound<'_, PyBytes>) -> PyResult<()> {
*self = decode_from_slice(state.as_bytes(), legacy()).unwrap().0;
Ok(())
}
pub fn __getstate__<'py>(&self, py: Python<'py>) -> PyResult> {
Ok(PyBytes::new(py, &encode_to_vec(&self, legacy()).unwrap()))
}
pub fn __getnewargs__<'py>(&self, py: Python<'py>) -> PyResult> {
Ok(PyTuple::empty(py))
}
}
impl CurveInterpolation for FlatForwardInterpolator {
fn interpolated_value(&self, nodes: &NodesTimestamp, date: &NaiveDateTime) -> Number {
let x = date.and_utc().timestamp();
let index = self.node_index(nodes, x);
macro_rules! interp {
($Variant: ident, $indexmap: expr) => {{
let (_x1, y1) = $indexmap.get_index(index).unwrap();
let (x2, y2) = $indexmap.get_index(index + 1_usize).unwrap();
if x >= *x2 {
Number::$Variant(y2.clone())
} else {
Number::$Variant(y1.clone())
}
}};
}
match nodes {
NodesTimestamp::F64(m) => interp!(F64, m),
NodesTimestamp::Dual(m) => interp!(Dual, m),
NodesTimestamp::Dual2(m) => interp!(Dual2, m),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::curves::nodes::Nodes;
use crate::scheduling::ndt;
use indexmap::IndexMap;
fn nodes_timestamp_fixture() -> NodesTimestamp {
let nodes = Nodes::F64(IndexMap::from_iter(vec![
(ndt(2000, 1, 1), 1.0_f64),
(ndt(2001, 1, 1), 0.99_f64),
(ndt(2002, 1, 1), 0.98_f64),
]));
NodesTimestamp::from(nodes)
}
#[test]
fn test_flat_forward() {
let nts = nodes_timestamp_fixture();
let li = FlatForwardInterpolator::new();
let result = li.interpolated_value(&nts, &ndt(2000, 7, 1));
assert_eq!(result, Number::F64(1.0));
}
#[test]
fn test_flat_forward_left_out_of_bounds() {
let nts = nodes_timestamp_fixture();
let li = FlatForwardInterpolator::new();
let result = li.interpolated_value(&nts, &ndt(1999, 7, 1));
assert_eq!(result, Number::F64(1.0));
}
#[test]
fn test_flat_forward_right_out_of_bounds() {
let nts = nodes_timestamp_fixture();
let li = FlatForwardInterpolator::new();
let result = li.interpolated_value(&nts, &ndt(2005, 7, 1));
assert_eq!(result, Number::F64(0.98));
}
#[test]
fn test_flat_forward_equals_interval_value() {
let nts = nodes_timestamp_fixture();
let li = FlatForwardInterpolator::new();
let result = li.interpolated_value(&nts, &ndt(2001, 1, 1));
assert_eq!(result, Number::F64(0.99));
}
}
================================================
FILE: rust/curves/interpolation/intp_linear.rs
================================================
// SPDX-License-Identifier: LicenseRef-Rateslib-Dual
//
// Copyright (c) 2026 Siffrorna Technology Limited
// This code cannot be used or copied externally
//
// Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
// Source-available, not open source.
//
// See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
// and/or contact info (at) rateslib (dot) com
////////////////////////////////////////////////////////////////////////////////////////////////////
use crate::curves::interpolation::utils::linear_interp;
use crate::curves::nodes::NodesTimestamp;
use crate::curves::CurveInterpolation;
use crate::dual::Number;
use bincode::config::legacy;
use bincode::serde::{decode_from_slice, encode_to_vec};
use chrono::NaiveDateTime;
use pyo3::prelude::*;
use pyo3::types::{PyBytes, PyTuple};
use pyo3::{pyclass, pymethods, Bound, PyResult, Python};
use serde::{Deserialize, Serialize};
use std::cmp::PartialEq;
/// Define linear interpolation of nodes.
#[pyclass(module = "rateslib.rs", from_py_object)]
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
pub struct LinearInterpolator {}
#[pymethods]
impl LinearInterpolator {
#[new]
pub fn new() -> Self {
LinearInterpolator {}
}
// Pickling
pub fn __setstate__(&mut self, state: Bound<'_, PyBytes>) -> PyResult<()> {
*self = decode_from_slice(state.as_bytes(), legacy()).unwrap().0;
Ok(())
}
pub fn __getstate__<'py>(&self, py: Python<'py>) -> PyResult> {
Ok(PyBytes::new(py, &encode_to_vec(&self, legacy()).unwrap()))
}
pub fn __getnewargs__<'py>(&self, py: Python<'py>) -> PyResult> {
Ok(PyTuple::empty(py))
}
}
impl CurveInterpolation for LinearInterpolator {
fn interpolated_value(&self, nodes: &NodesTimestamp, date: &NaiveDateTime) -> Number {
let x = date.and_utc().timestamp();
let index = self.node_index(nodes, x);
macro_rules! interp {
($Variant: ident, $indexmap: expr) => {{
let (x1, y1) = $indexmap.get_index(index).unwrap();
let (x2, y2) = $indexmap.get_index(index + 1_usize).unwrap();
Number::$Variant(linear_interp(*x1 as f64, y1, *x2 as f64, y2, x as f64))
}};
}
match nodes {
NodesTimestamp::F64(m) => interp!(F64, m),
NodesTimestamp::Dual(m) => interp!(Dual, m),
NodesTimestamp::Dual2(m) => interp!(Dual2, m),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::curves::nodes::Nodes;
use crate::scheduling::ndt;
use indexmap::IndexMap;
fn nodes_timestamp_fixture() -> NodesTimestamp {
let nodes = Nodes::F64(IndexMap::from_iter(vec![
(ndt(2000, 1, 1), 1.0_f64),
(ndt(2001, 1, 1), 0.99_f64),
(ndt(2002, 1, 1), 0.98_f64),
]));
NodesTimestamp::from(nodes)
}
#[test]
fn test_linear() {
let nts = nodes_timestamp_fixture();
let li = LinearInterpolator::new();
let result = li.interpolated_value(&nts, &ndt(2000, 7, 1));
// expected = 1.0 + (182 / 366) * (0.99 - 1.0) = 0.995027
assert_eq!(result, Number::F64(0.9950273224043715));
}
}
================================================
FILE: rust/curves/interpolation/intp_linear_zero_rate.rs
================================================
// SPDX-License-Identifier: LicenseRef-Rateslib-Dual
//
// Copyright (c) 2026 Siffrorna Technology Limited
// This code cannot be used or copied externally
//
// Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
// Source-available, not open source.
//
// See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
// and/or contact info (at) rateslib (dot) com
////////////////////////////////////////////////////////////////////////////////////////////////////
use crate::curves::interpolation::utils::linear_zero_interp;
use crate::curves::nodes::NodesTimestamp;
use crate::curves::CurveInterpolation;
use crate::dual::Number;
use bincode::config::legacy;
use bincode::serde::{decode_from_slice, encode_to_vec};
use chrono::NaiveDateTime;
use pyo3::prelude::*;
use pyo3::types::{PyBytes, PyTuple};
use pyo3::{pyclass, pymethods, Bound, PyResult, Python};
use serde::{Deserialize, Serialize};
use std::cmp::PartialEq;
/// Define linear zero rate interpolation of nodes.
///
/// This interpolation can only be used with discount factors node values.
#[pyclass(module = "rateslib.rs", from_py_object)]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LinearZeroRateInterpolator {}
#[pymethods]
impl LinearZeroRateInterpolator {
#[new]
pub fn new() -> Self {
LinearZeroRateInterpolator {}
}
// Pickling
pub fn __setstate__(&mut self, state: Bound<'_, PyBytes>) -> PyResult<()> {
*self = decode_from_slice(state.as_bytes(), legacy()).unwrap().0;
Ok(())
}
pub fn __getstate__<'py>(&self, py: Python<'py>) -> PyResult> {
Ok(PyBytes::new(py, &encode_to_vec(&self, legacy()).unwrap()))
}
pub fn __getnewargs__<'py>(&self, py: Python<'py>) -> PyResult> {
Ok(PyTuple::empty(py))
}
}
impl CurveInterpolation for LinearZeroRateInterpolator {
fn interpolated_value(&self, nodes: &NodesTimestamp, date: &NaiveDateTime) -> Number {
let x = date.and_utc().timestamp();
let index = self.node_index(nodes, x);
macro_rules! interp {
($Variant: ident, $indexmap: expr) => {{
let (x0, _) = $indexmap.get_index(0_usize).unwrap();
let (x2, y2) = $indexmap.get_index(index + 1_usize).unwrap();
let (x1, y1) = $indexmap.get_index(index).unwrap();
Number::$Variant(linear_zero_interp(
*x0 as f64, *x1 as f64, y1, *x2 as f64, y2, x as f64,
))
}};
}
match nodes {
NodesTimestamp::F64(m) => interp!(F64, m),
NodesTimestamp::Dual(m) => interp!(Dual, m),
NodesTimestamp::Dual2(m) => interp!(Dual2, m),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::curves::nodes::Nodes;
use crate::scheduling::ndt;
use indexmap::IndexMap;
fn nodes_timestamp_fixture() -> NodesTimestamp {
let nodes = Nodes::F64(IndexMap::from_iter(vec![
(ndt(2000, 1, 1), 1.0_f64),
(ndt(2001, 1, 1), 0.99_f64),
(ndt(2002, 1, 1), 0.98_f64),
]));
NodesTimestamp::from(nodes)
}
#[test]
fn test_log_linear() {
let nts = nodes_timestamp_fixture();
let ll = LinearZeroRateInterpolator::new();
let result = ll.interpolated_value(&nts, &ndt(2001, 7, 1));
// r1 = -ln(0.99) / 366, r2 = -ln(0.98) / 731
// r = r1 + (181 / 365) * (r2 - r1)
// expected = exp(-r * 547) r1 = 0.985044328
assert_eq!(result, Number::F64(0.9850443279738612));
}
#[test]
fn test_log_linear_first_period() {
let nts = nodes_timestamp_fixture();
let ll = LinearZeroRateInterpolator::new();
let result = ll.interpolated_value(&nts, &ndt(2000, 7, 1));
// r1 = r2, r2 = -ln(0.99) / 366
// r = r1
// expected = exp(-r * 182) = 0.99501476
assert_eq!(result, Number::F64(0.9950147597711371));
}
}
================================================
FILE: rust/curves/interpolation/intp_log_cubic.rs
================================================
// SPDX-License-Identifier: LicenseRef-Rateslib-Dual
//
// Copyright (c) 2026 Siffrorna Technology Limited
// This code cannot be used or copied externally
//
// Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
// Source-available, not open source.
//
// See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
// and/or contact info (at) rateslib (dot) com
////////////////////////////////////////////////////////////////////////////////////////////////////
use crate::curves::interpolation::utils::log_linear_interp;
use crate::curves::nodes::NodesTimestamp;
use crate::curves::CurveInterpolation;
use crate::dual::DualsOrF64;
use bincode::serde::{decode_from_slice, encode_to_vec};
use bincode::config::legacy;
use chrono::NaiveDateTime;
use pyo3::prelude::*;
use pyo3::types::{PyBytes, PyTuple};
use pyo3::{pyclass, pymethods, Bound, PyResult, Python};
use serde::{Deserialize, Serialize};
use std::cmp::PartialEq;
/// Define log-linear interpolation of nodes.
#[pyclass(module = "rateslib.rs", from_py_object)]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LogCubicInterpolator {
spline: T
}
#[pymethods]
impl LogCubicInterpolator
where T: PartialOrd + Signed + Clone + Sum + Zero,
for<'a> &'a T: Sub<&'a T, Output = T>,
for<'a> &'a f64: Mul<&'a T, Output = T>,
{
#[new]
pub fn new(t: Vec, c: Option>) -> Self {
let spline: PPSpline = PPSpline.new(3_usize, t, c);
LogCubicInterpolator {
spline
}
}
// Pickling
pub fn __setstate__(&mut self, state: Bound<'_, PyBytes>) -> PyResult<()> {
*self = decode_from_slice(state.as_bytes(), legacy()).unwrap().0;
Ok(())
}
pub fn __getstate__<'py>(&self, py: Python<'py>) -> PyResult> {
Ok(PyBytes::new_bound(py, &encode_to_vec(&self, legacy()).unwrap()))
}
pub fn __getnewargs__<'py>(&self, py: Python<'py>) -> PyResult<(Vec, Option>)> {
Ok((self.t.clone(), ))
}
}
impl CurveInterpolation for LogLinearInterpolator {
fn interpolated_value(&self, nodes: &NodesTimestamp, date: &NaiveDateTime) -> DualsOrF64 {
let x = date.and_utc().timestamp();
let index = self.node_index(nodes, x);
macro_rules! interp {
($Variant: ident, $indexmap: expr) => {{
let (x1, y1) = $indexmap.get_index(index).unwrap();
let (x2, y2) = $indexmap.get_index(index + 1_usize).unwrap();
DualsOrF64::$Variant(log_linear_interp(*x1 as f64, y1, *x2 as f64, y2, x as f64))
}};
}
match nodes {
NodesTimestamp::F64(m) => interp!(F64, m),
NodesTimestamp::Dual(m) => interp!(Dual, m),
NodesTimestamp::Dual2(m) => interp!(Dual2, m),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::scheduling::ndt;
use crate::curves::nodes::Nodes;
use indexmap::IndexMap;
fn nodes_timestamp_fixture() -> NodesTimestamp {
let nodes = Nodes::F64(IndexMap::from_iter(vec![
(ndt(2000, 1, 1), 1.0_f64),
(ndt(2001, 1, 1), 0.99_f64),
(ndt(2002, 1, 1), 0.98_f64),
]));
NodesTimestamp::from(nodes)
}
#[test]
fn test_log_linear() {
let nts = nodes_timestamp_fixture();
let ll = LogLinearInterpolator::new();
let result = ll.interpolated_value(&nts, &ndt(2000, 7, 1));
// expected = exp(0 + (182 / 366) * (ln(0.99) - ln(1.0)) = 0.995015
assert_eq!(result, DualsOrF64::F64(0.9950147597711371));
}
}
================================================
FILE: rust/curves/interpolation/intp_log_linear.rs
================================================
// SPDX-License-Identifier: LicenseRef-Rateslib-Dual
//
// Copyright (c) 2026 Siffrorna Technology Limited
// This code cannot be used or copied externally
//
// Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
// Source-available, not open source.
//
// See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
// and/or contact info (at) rateslib (dot) com
////////////////////////////////////////////////////////////////////////////////////////////////////
use crate::curves::interpolation::utils::log_linear_interp;
use crate::curves::nodes::NodesTimestamp;
use crate::curves::CurveInterpolation;
use crate::dual::Number;
use bincode::config::legacy;
use bincode::serde::{decode_from_slice, encode_to_vec};
use chrono::NaiveDateTime;
use pyo3::prelude::*;
use pyo3::types::{PyBytes, PyTuple};
use pyo3::{pyclass, pymethods, Bound, PyResult, Python};
use serde::{Deserialize, Serialize};
use std::cmp::PartialEq;
/// Define log-linear interpolation of nodes.
#[pyclass(module = "rateslib.rs", from_py_object)]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LogLinearInterpolator {}
#[pymethods]
impl LogLinearInterpolator {
#[new]
pub fn new() -> Self {
LogLinearInterpolator {}
}
// Pickling
pub fn __setstate__(&mut self, state: Bound<'_, PyBytes>) -> PyResult<()> {
*self = decode_from_slice(state.as_bytes(), legacy()).unwrap().0;
Ok(())
}
pub fn __getstate__<'py>(&self, py: Python<'py>) -> PyResult> {
Ok(PyBytes::new(py, &encode_to_vec(&self, legacy()).unwrap()))
}
pub fn __getnewargs__<'py>(&self, py: Python<'py>) -> PyResult> {
Ok(PyTuple::empty(py))
}
}
impl CurveInterpolation for LogLinearInterpolator {
fn interpolated_value(&self, nodes: &NodesTimestamp, date: &NaiveDateTime) -> Number {
let x = date.and_utc().timestamp();
let index = self.node_index(nodes, x);
macro_rules! interp {
($Variant: ident, $indexmap: expr) => {{
let (x1, y1) = $indexmap.get_index(index).unwrap();
let (x2, y2) = $indexmap.get_index(index + 1_usize).unwrap();
Number::$Variant(log_linear_interp(*x1 as f64, y1, *x2 as f64, y2, x as f64))
}};
}
match nodes {
NodesTimestamp::F64(m) => interp!(F64, m),
NodesTimestamp::Dual(m) => interp!(Dual, m),
NodesTimestamp::Dual2(m) => interp!(Dual2, m),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::curves::nodes::Nodes;
use crate::scheduling::ndt;
use indexmap::IndexMap;
fn nodes_timestamp_fixture() -> NodesTimestamp {
let nodes = Nodes::F64(IndexMap::from_iter(vec![
(ndt(2000, 1, 1), 1.0_f64),
(ndt(2001, 1, 1), 0.99_f64),
(ndt(2002, 1, 1), 0.98_f64),
]));
NodesTimestamp::from(nodes)
}
#[test]
fn test_log_linear() {
let nts = nodes_timestamp_fixture();
let ll = LogLinearInterpolator::new();
let result = ll.interpolated_value(&nts, &ndt(2000, 7, 1));
// expected = exp(0 + (182 / 366) * (ln(0.99) - ln(1.0)) = 0.995015
assert_eq!(result, Number::F64(0.9950147597711371));
}
}
================================================
FILE: rust/curves/interpolation/intp_null.rs
================================================
// SPDX-License-Identifier: LicenseRef-Rateslib-Dual
//
// Copyright (c) 2026 Siffrorna Technology Limited
// This code cannot be used or copied externally
//
// Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
// Source-available, not open source.
//
// See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
// and/or contact info (at) rateslib (dot) com
////////////////////////////////////////////////////////////////////////////////////////////////////
use crate::curves::nodes::NodesTimestamp;
use crate::curves::CurveInterpolation;
use crate::dual::Number;
use bincode::config::legacy;
use bincode::serde::{decode_from_slice, encode_to_vec};
use chrono::NaiveDateTime;
use pyo3::prelude::*;
use pyo3::types::{PyBytes, PyTuple};
use pyo3::{pyclass, pymethods, Bound, PyResult, Python};
use serde::{Deserialize, Serialize};
use std::cmp::PartialEq;
/// Define a null interpolation object.
///
/// This is used by PyO3 binding to indicate interpolation occurs in Python.
#[pyclass(module = "rateslib.rs", from_py_object)]
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
pub struct NullInterpolator {}
#[pymethods]
impl NullInterpolator {
#[new]
pub fn new() -> Self {
NullInterpolator {}
}
// Pickling
pub fn __setstate__(&mut self, state: Bound<'_, PyBytes>) -> PyResult<()> {
*self = decode_from_slice(state.as_bytes(), legacy()).unwrap().0;
Ok(())
}
pub fn __getstate__<'py>(&self, py: Python<'py>) -> PyResult> {
Ok(PyBytes::new(py, &encode_to_vec(&self, legacy()).unwrap()))
}
pub fn __getnewargs__<'py>(&self, py: Python<'py>) -> PyResult> {
Ok(PyTuple::empty(py))
}
}
impl CurveInterpolation for NullInterpolator {
fn interpolated_value(&self, _nodes: &NodesTimestamp, _date: &NaiveDateTime) -> Number {
panic!("NullInterpolator cannot be used to obtain interpolated values.");
#[allow(unreachable_code)]
Number::F64(0.0)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::curves::nodes::Nodes;
use crate::scheduling::ndt;
use indexmap::IndexMap;
fn nodes_timestamp_fixture() -> NodesTimestamp {
let nodes = Nodes::F64(IndexMap::from_iter(vec![
(ndt(2000, 1, 1), 1.0_f64),
(ndt(2001, 1, 1), 0.99_f64),
(ndt(2002, 1, 1), 0.98_f64),
]));
NodesTimestamp::from(nodes)
}
#[test]
#[should_panic]
fn test_null_interpolation() {
let nts = nodes_timestamp_fixture();
let li = NullInterpolator::new();
li.interpolated_value(&nts, &ndt(2000, 7, 1));
}
}
================================================
FILE: rust/curves/interpolation/mod.rs
================================================
// SPDX-License-Identifier: LicenseRef-Rateslib-Dual
//
// Copyright (c) 2026 Siffrorna Technology Limited
// This code cannot be used or copied externally
//
// Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
// Source-available, not open source.
//
// See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
// and/or contact info (at) rateslib (dot) com
////////////////////////////////////////////////////////////////////////////////////////////////////
pub(crate) mod interpolation_py;
pub(crate) mod intp_flat_backward;
pub(crate) mod intp_flat_forward;
pub(crate) mod intp_linear;
pub(crate) mod intp_linear_zero_rate;
pub(crate) mod intp_log_linear;
pub(crate) mod intp_null;
pub(crate) mod utils;
================================================
FILE: rust/curves/interpolation/utils.rs
================================================
// SPDX-License-Identifier: LicenseRef-Rateslib-Dual
//
// Copyright (c) 2026 Siffrorna Technology Limited
// This code cannot be used or copied externally
//
// Dual-licensed: Free Educational Licence or Paid Commercial Licence (commercial/professional use)
// Source-available, not open source.
//
// See LICENSE and https://rateslib.com/py/en/latest/i_licence.html for details,
// and/or contact info (at) rateslib (dot) com
////////////////////////////////////////////////////////////////////////////////////////////////////
use crate::dual::{MathFuncs, NumberOps};
use std::{
cmp::{PartialEq, PartialOrd},
ops::{Mul, Sub},
};
// pub(crate) fn linear_interp(x1: &T, y1: &U, x2: &T, y2: &U, x: &T) -> U
// where
// for<'a> &'a T: NumberOps,
// for<'a> &'a U: NumberOps,
// U: Mul,
// {
// y1 + &((y2 - y1) * (&(x - x1) / &(x2 - x1)))
// }
/// Calculate the linear interpolation between two coordinates.
pub(crate) fn linear_interp(x1: f64, y1: &T, x2: f64, y2: &T, x: f64) -> T
where
for<'a> &'a T: NumberOps,
T: Mul,
{
y1 + &((y2 - y1) * ((x - x1) / (x2 - x1)))
}
/// Calculate the log-linear interpolation between two coordinates.
pub(crate) fn log_linear_interp(x1: f64, y1: &T, x2: f64, y2: &T, x: f64) -> T
where
for<'a> &'a T: NumberOps